Try AWS Native preview for resources not in the classic version.
aws.appflow.Flow
Explore with Pulumi AI
Try AWS Native preview for resources not in the classic version.
Provides an AppFlow flow resource.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const exampleSourceBucketV2 = new aws.s3.BucketV2("example_source", {bucket: "example-source"});
const exampleSource = aws.iam.getPolicyDocument({
statements: [{
sid: "AllowAppFlowSourceActions",
effect: "Allow",
principals: [{
type: "Service",
identifiers: ["appflow.amazonaws.com"],
}],
actions: [
"s3:ListBucket",
"s3:GetObject",
],
resources: [
"arn:aws:s3:::example-source",
"arn:aws:s3:::example-source/*",
],
}],
});
const exampleSourceBucketPolicy = new aws.s3.BucketPolicy("example_source", {
bucket: exampleSourceBucketV2.id,
policy: exampleSource.then(exampleSource => exampleSource.json),
});
const example = new aws.s3.BucketObjectv2("example", {
bucket: exampleSourceBucketV2.id,
key: "example_source.csv",
source: new pulumi.asset.FileAsset("example_source.csv"),
});
const exampleDestinationBucketV2 = new aws.s3.BucketV2("example_destination", {bucket: "example-destination"});
const exampleDestination = aws.iam.getPolicyDocument({
statements: [{
sid: "AllowAppFlowDestinationActions",
effect: "Allow",
principals: [{
type: "Service",
identifiers: ["appflow.amazonaws.com"],
}],
actions: [
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
],
resources: [
"arn:aws:s3:::example-destination",
"arn:aws:s3:::example-destination/*",
],
}],
});
const exampleDestinationBucketPolicy = new aws.s3.BucketPolicy("example_destination", {
bucket: exampleDestinationBucketV2.id,
policy: exampleDestination.then(exampleDestination => exampleDestination.json),
});
const exampleFlow = new aws.appflow.Flow("example", {
name: "example",
sourceFlowConfig: {
connectorType: "S3",
sourceConnectorProperties: {
s3: {
bucketName: exampleSourceBucketPolicy.bucket,
bucketPrefix: "example",
},
},
},
destinationFlowConfigs: [{
connectorType: "S3",
destinationConnectorProperties: {
s3: {
bucketName: exampleDestinationBucketPolicy.bucket,
s3OutputFormatConfig: {
prefixConfig: {
prefixType: "PATH",
},
},
},
},
}],
tasks: [{
sourceFields: ["exampleField"],
destinationField: "exampleField",
taskType: "Map",
connectorOperators: [{
s3: "NO_OP",
}],
}],
triggerConfig: {
triggerType: "OnDemand",
},
});
import pulumi
import pulumi_aws as aws
example_source_bucket_v2 = aws.s3.BucketV2("example_source", bucket="example-source")
example_source = aws.iam.get_policy_document(statements=[{
"sid": "AllowAppFlowSourceActions",
"effect": "Allow",
"principals": [{
"type": "Service",
"identifiers": ["appflow.amazonaws.com"],
}],
"actions": [
"s3:ListBucket",
"s3:GetObject",
],
"resources": [
"arn:aws:s3:::example-source",
"arn:aws:s3:::example-source/*",
],
}])
example_source_bucket_policy = aws.s3.BucketPolicy("example_source",
bucket=example_source_bucket_v2.id,
policy=example_source.json)
example = aws.s3.BucketObjectv2("example",
bucket=example_source_bucket_v2.id,
key="example_source.csv",
source=pulumi.FileAsset("example_source.csv"))
example_destination_bucket_v2 = aws.s3.BucketV2("example_destination", bucket="example-destination")
example_destination = aws.iam.get_policy_document(statements=[{
"sid": "AllowAppFlowDestinationActions",
"effect": "Allow",
"principals": [{
"type": "Service",
"identifiers": ["appflow.amazonaws.com"],
}],
"actions": [
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
],
"resources": [
"arn:aws:s3:::example-destination",
"arn:aws:s3:::example-destination/*",
],
}])
example_destination_bucket_policy = aws.s3.BucketPolicy("example_destination",
bucket=example_destination_bucket_v2.id,
policy=example_destination.json)
example_flow = aws.appflow.Flow("example",
name="example",
source_flow_config={
"connectorType": "S3",
"sourceConnectorProperties": {
"s3": {
"bucketName": example_source_bucket_policy.bucket,
"bucketPrefix": "example",
},
},
},
destination_flow_configs=[{
"connectorType": "S3",
"destinationConnectorProperties": {
"s3": {
"bucketName": example_destination_bucket_policy.bucket,
"s3OutputFormatConfig": {
"prefixConfig": {
"prefixType": "PATH",
},
},
},
},
}],
tasks=[{
"sourceFields": ["exampleField"],
"destinationField": "exampleField",
"taskType": "Map",
"connectorOperators": [{
"s3": "NO_OP",
}],
}],
trigger_config={
"triggerType": "OnDemand",
})
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/appflow"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/s3"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
exampleSourceBucketV2, err := s3.NewBucketV2(ctx, "example_source", &s3.BucketV2Args{
Bucket: pulumi.String("example-source"),
})
if err != nil {
return err
}
exampleSource, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
Statements: []iam.GetPolicyDocumentStatement{
{
Sid: pulumi.StringRef("AllowAppFlowSourceActions"),
Effect: pulumi.StringRef("Allow"),
Principals: []iam.GetPolicyDocumentStatementPrincipal{
{
Type: "Service",
Identifiers: []string{
"appflow.amazonaws.com",
},
},
},
Actions: []string{
"s3:ListBucket",
"s3:GetObject",
},
Resources: []string{
"arn:aws:s3:::example-source",
"arn:aws:s3:::example-source/*",
},
},
},
}, nil)
if err != nil {
return err
}
exampleSourceBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_source", &s3.BucketPolicyArgs{
Bucket: exampleSourceBucketV2.ID(),
Policy: pulumi.String(exampleSource.Json),
})
if err != nil {
return err
}
_, err = s3.NewBucketObjectv2(ctx, "example", &s3.BucketObjectv2Args{
Bucket: exampleSourceBucketV2.ID(),
Key: pulumi.String("example_source.csv"),
Source: pulumi.NewFileAsset("example_source.csv"),
})
if err != nil {
return err
}
exampleDestinationBucketV2, err := s3.NewBucketV2(ctx, "example_destination", &s3.BucketV2Args{
Bucket: pulumi.String("example-destination"),
})
if err != nil {
return err
}
exampleDestination, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
Statements: []iam.GetPolicyDocumentStatement{
{
Sid: pulumi.StringRef("AllowAppFlowDestinationActions"),
Effect: pulumi.StringRef("Allow"),
Principals: []iam.GetPolicyDocumentStatementPrincipal{
{
Type: "Service",
Identifiers: []string{
"appflow.amazonaws.com",
},
},
},
Actions: []string{
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
},
Resources: []string{
"arn:aws:s3:::example-destination",
"arn:aws:s3:::example-destination/*",
},
},
},
}, nil)
if err != nil {
return err
}
exampleDestinationBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_destination", &s3.BucketPolicyArgs{
Bucket: exampleDestinationBucketV2.ID(),
Policy: pulumi.String(exampleDestination.Json),
})
if err != nil {
return err
}
_, err = appflow.NewFlow(ctx, "example", &appflow.FlowArgs{
Name: pulumi.String("example"),
SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
ConnectorType: pulumi.String("S3"),
SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
BucketName: exampleSourceBucketPolicy.Bucket,
BucketPrefix: pulumi.String("example"),
},
},
},
DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
&appflow.FlowDestinationFlowConfigArgs{
ConnectorType: pulumi.String("S3"),
DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
BucketName: exampleDestinationBucketPolicy.Bucket,
S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
PrefixType: pulumi.String("PATH"),
},
},
},
},
},
},
Tasks: appflow.FlowTaskArray{
&appflow.FlowTaskArgs{
SourceFields: pulumi.StringArray{
pulumi.String("exampleField"),
},
DestinationField: pulumi.String("exampleField"),
TaskType: pulumi.String("Map"),
ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
&appflow.FlowTaskConnectorOperatorArgs{
S3: pulumi.String("NO_OP"),
},
},
},
},
TriggerConfig: &appflow.FlowTriggerConfigArgs{
TriggerType: pulumi.String("OnDemand"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var exampleSourceBucketV2 = new Aws.S3.BucketV2("example_source", new()
{
Bucket = "example-source",
});
var exampleSource = Aws.Iam.GetPolicyDocument.Invoke(new()
{
Statements = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
{
Sid = "AllowAppFlowSourceActions",
Effect = "Allow",
Principals = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
{
Type = "Service",
Identifiers = new[]
{
"appflow.amazonaws.com",
},
},
},
Actions = new[]
{
"s3:ListBucket",
"s3:GetObject",
},
Resources = new[]
{
"arn:aws:s3:::example-source",
"arn:aws:s3:::example-source/*",
},
},
},
});
var exampleSourceBucketPolicy = new Aws.S3.BucketPolicy("example_source", new()
{
Bucket = exampleSourceBucketV2.Id,
Policy = exampleSource.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
});
var example = new Aws.S3.BucketObjectv2("example", new()
{
Bucket = exampleSourceBucketV2.Id,
Key = "example_source.csv",
Source = new FileAsset("example_source.csv"),
});
var exampleDestinationBucketV2 = new Aws.S3.BucketV2("example_destination", new()
{
Bucket = "example-destination",
});
var exampleDestination = Aws.Iam.GetPolicyDocument.Invoke(new()
{
Statements = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
{
Sid = "AllowAppFlowDestinationActions",
Effect = "Allow",
Principals = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
{
Type = "Service",
Identifiers = new[]
{
"appflow.amazonaws.com",
},
},
},
Actions = new[]
{
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
},
Resources = new[]
{
"arn:aws:s3:::example-destination",
"arn:aws:s3:::example-destination/*",
},
},
},
});
var exampleDestinationBucketPolicy = new Aws.S3.BucketPolicy("example_destination", new()
{
Bucket = exampleDestinationBucketV2.Id,
Policy = exampleDestination.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
});
var exampleFlow = new Aws.AppFlow.Flow("example", new()
{
Name = "example",
SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
{
ConnectorType = "S3",
SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
{
S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
{
BucketName = exampleSourceBucketPolicy.Bucket,
BucketPrefix = "example",
},
},
},
DestinationFlowConfigs = new[]
{
new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
{
ConnectorType = "S3",
DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
{
S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
{
BucketName = exampleDestinationBucketPolicy.Bucket,
S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
{
PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
{
PrefixType = "PATH",
},
},
},
},
},
},
Tasks = new[]
{
new Aws.AppFlow.Inputs.FlowTaskArgs
{
SourceFields = new[]
{
"exampleField",
},
DestinationField = "exampleField",
TaskType = "Map",
ConnectorOperators = new[]
{
new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
{
S3 = "NO_OP",
},
},
},
},
TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
{
TriggerType = "OnDemand",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.s3.BucketV2;
import com.pulumi.aws.s3.BucketV2Args;
import com.pulumi.aws.iam.IamFunctions;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementPrincipalArgs;
import com.pulumi.aws.s3.BucketPolicy;
import com.pulumi.aws.s3.BucketPolicyArgs;
import com.pulumi.aws.s3.BucketObjectv2;
import com.pulumi.aws.s3.BucketObjectv2Args;
import com.pulumi.aws.appflow.Flow;
import com.pulumi.aws.appflow.FlowArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowTaskArgs;
import com.pulumi.aws.appflow.inputs.FlowTaskConnectorOperatorArgs;
import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var exampleSourceBucketV2 = new BucketV2("exampleSourceBucketV2", BucketV2Args.builder()
.bucket("example-source")
.build());
final var exampleSource = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
.statements(GetPolicyDocumentStatementArgs.builder()
.sid("AllowAppFlowSourceActions")
.effect("Allow")
.principals(GetPolicyDocumentStatementPrincipalArgs.builder()
.type("Service")
.identifiers("appflow.amazonaws.com")
.build())
.actions(
"s3:ListBucket",
"s3:GetObject")
.resources(
"arn:aws:s3:::example-source",
"arn:aws:s3:::example-source/*")
.build())
.build());
var exampleSourceBucketPolicy = new BucketPolicy("exampleSourceBucketPolicy", BucketPolicyArgs.builder()
.bucket(exampleSourceBucketV2.id())
.policy(exampleSource.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
.build());
var example = new BucketObjectv2("example", BucketObjectv2Args.builder()
.bucket(exampleSourceBucketV2.id())
.key("example_source.csv")
.source(new FileAsset("example_source.csv"))
.build());
var exampleDestinationBucketV2 = new BucketV2("exampleDestinationBucketV2", BucketV2Args.builder()
.bucket("example-destination")
.build());
final var exampleDestination = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
.statements(GetPolicyDocumentStatementArgs.builder()
.sid("AllowAppFlowDestinationActions")
.effect("Allow")
.principals(GetPolicyDocumentStatementPrincipalArgs.builder()
.type("Service")
.identifiers("appflow.amazonaws.com")
.build())
.actions(
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl")
.resources(
"arn:aws:s3:::example-destination",
"arn:aws:s3:::example-destination/*")
.build())
.build());
var exampleDestinationBucketPolicy = new BucketPolicy("exampleDestinationBucketPolicy", BucketPolicyArgs.builder()
.bucket(exampleDestinationBucketV2.id())
.policy(exampleDestination.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
.build());
var exampleFlow = new Flow("exampleFlow", FlowArgs.builder()
.name("example")
.sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
.connectorType("S3")
.sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
.s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
.bucketName(exampleSourceBucketPolicy.bucket())
.bucketPrefix("example")
.build())
.build())
.build())
.destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
.connectorType("S3")
.destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
.s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
.bucketName(exampleDestinationBucketPolicy.bucket())
.s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
.prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
.prefixType("PATH")
.build())
.build())
.build())
.build())
.build())
.tasks(FlowTaskArgs.builder()
.sourceFields("exampleField")
.destinationField("exampleField")
.taskType("Map")
.connectorOperators(FlowTaskConnectorOperatorArgs.builder()
.s3("NO_OP")
.build())
.build())
.triggerConfig(FlowTriggerConfigArgs.builder()
.triggerType("OnDemand")
.build())
.build());
}
}
resources:
exampleSourceBucketV2:
type: aws:s3:BucketV2
name: example_source
properties:
bucket: example-source
exampleSourceBucketPolicy:
type: aws:s3:BucketPolicy
name: example_source
properties:
bucket: ${exampleSourceBucketV2.id}
policy: ${exampleSource.json}
example:
type: aws:s3:BucketObjectv2
properties:
bucket: ${exampleSourceBucketV2.id}
key: example_source.csv
source:
fn::FileAsset: example_source.csv
exampleDestinationBucketV2:
type: aws:s3:BucketV2
name: example_destination
properties:
bucket: example-destination
exampleDestinationBucketPolicy:
type: aws:s3:BucketPolicy
name: example_destination
properties:
bucket: ${exampleDestinationBucketV2.id}
policy: ${exampleDestination.json}
exampleFlow:
type: aws:appflow:Flow
name: example
properties:
name: example
sourceFlowConfig:
connectorType: S3
sourceConnectorProperties:
s3:
bucketName: ${exampleSourceBucketPolicy.bucket}
bucketPrefix: example
destinationFlowConfigs:
- connectorType: S3
destinationConnectorProperties:
s3:
bucketName: ${exampleDestinationBucketPolicy.bucket}
s3OutputFormatConfig:
prefixConfig:
prefixType: PATH
tasks:
- sourceFields:
- exampleField
destinationField: exampleField
taskType: Map
connectorOperators:
- s3: NO_OP
triggerConfig:
triggerType: OnDemand
variables:
exampleSource:
fn::invoke:
Function: aws:iam:getPolicyDocument
Arguments:
statements:
- sid: AllowAppFlowSourceActions
effect: Allow
principals:
- type: Service
identifiers:
- appflow.amazonaws.com
actions:
- s3:ListBucket
- s3:GetObject
resources:
- arn:aws:s3:::example-source
- arn:aws:s3:::example-source/*
exampleDestination:
fn::invoke:
Function: aws:iam:getPolicyDocument
Arguments:
statements:
- sid: AllowAppFlowDestinationActions
effect: Allow
principals:
- type: Service
identifiers:
- appflow.amazonaws.com
actions:
- s3:PutObject
- s3:AbortMultipartUpload
- s3:ListMultipartUploadParts
- s3:ListBucketMultipartUploads
- s3:GetBucketAcl
- s3:PutObjectAcl
resources:
- arn:aws:s3:::example-destination
- arn:aws:s3:::example-destination/*
Create Flow Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Flow(name: string, args: FlowArgs, opts?: CustomResourceOptions);
@overload
def Flow(resource_name: str,
args: FlowArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Flow(resource_name: str,
opts: Optional[ResourceOptions] = None,
destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
tasks: Optional[Sequence[FlowTaskArgs]] = None,
trigger_config: Optional[FlowTriggerConfigArgs] = None,
description: Optional[str] = None,
kms_arn: Optional[str] = None,
name: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None)
func NewFlow(ctx *Context, name string, args FlowArgs, opts ...ResourceOption) (*Flow, error)
public Flow(string name, FlowArgs args, CustomResourceOptions? opts = null)
type: aws:appflow:Flow
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var flowResource = new Aws.AppFlow.Flow("flowResource", new()
{
DestinationFlowConfigs = new[]
{
new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
{
ConnectorType = "string",
DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
{
CustomConnector = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs
{
EntityName = "string",
CustomProperties =
{
{ "string", "string" },
},
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
IdFieldNames = new[]
{
"string",
},
WriteOperationType = "string",
},
CustomerProfiles = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs
{
DomainName = "string",
ObjectTypeName = "string",
},
EventBridge = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs
{
Object = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
},
Honeycode = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs
{
Object = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
},
LookoutMetrics = null,
Marketo = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs
{
Object = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
},
Redshift = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs
{
IntermediateBucketName = "string",
Object = "string",
BucketPrefix = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
},
S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
{
BucketName = "string",
BucketPrefix = "string",
S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
{
AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs
{
AggregationType = "string",
TargetFileSize = 0,
},
FileType = "string",
PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
{
PrefixFormat = "string",
PrefixType = "string",
},
PreserveSourceDataTyping = false,
},
},
Salesforce = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs
{
Object = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
IdFieldNames = new[]
{
"string",
},
WriteOperationType = "string",
},
SapoData = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs
{
ObjectPath = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
IdFieldNames = new[]
{
"string",
},
SuccessResponseHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
},
WriteOperationType = "string",
},
Snowflake = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs
{
IntermediateBucketName = "string",
Object = "string",
BucketPrefix = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
},
Upsolver = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs
{
BucketName = "string",
S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs
{
PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs
{
PrefixType = "string",
PrefixFormat = "string",
},
AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs
{
AggregationType = "string",
},
FileType = "string",
},
BucketPrefix = "string",
},
Zendesk = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs
{
Object = "string",
ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs
{
BucketName = "string",
BucketPrefix = "string",
FailOnFirstDestinationError = false,
},
IdFieldNames = new[]
{
"string",
},
WriteOperationType = "string",
},
},
ApiVersion = "string",
ConnectorProfileName = "string",
},
},
SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
{
ConnectorType = "string",
SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
{
Amplitude = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs
{
Object = "string",
},
CustomConnector = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs
{
EntityName = "string",
CustomProperties =
{
{ "string", "string" },
},
},
Datadog = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs
{
Object = "string",
},
Dynatrace = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs
{
Object = "string",
},
GoogleAnalytics = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs
{
Object = "string",
},
InforNexus = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs
{
Object = "string",
},
Marketo = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs
{
Object = "string",
},
S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
{
BucketName = "string",
BucketPrefix = "string",
S3InputFormatConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs
{
S3InputFileType = "string",
},
},
Salesforce = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs
{
Object = "string",
EnableDynamicFieldUpdate = false,
IncludeDeletedRecords = false,
},
SapoData = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs
{
ObjectPath = "string",
},
ServiceNow = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs
{
Object = "string",
},
Singular = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs
{
Object = "string",
},
Slack = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs
{
Object = "string",
},
Trendmicro = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs
{
Object = "string",
},
Veeva = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs
{
Object = "string",
DocumentType = "string",
IncludeAllVersions = false,
IncludeRenditions = false,
IncludeSourceFiles = false,
},
Zendesk = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs
{
Object = "string",
},
},
ApiVersion = "string",
ConnectorProfileName = "string",
IncrementalPullConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigIncrementalPullConfigArgs
{
DatetimeTypeFieldName = "string",
},
},
Tasks = new[]
{
new Aws.AppFlow.Inputs.FlowTaskArgs
{
TaskType = "string",
ConnectorOperators = new[]
{
new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
{
Amplitude = "string",
CustomConnector = "string",
Datadog = "string",
Dynatrace = "string",
GoogleAnalytics = "string",
InforNexus = "string",
Marketo = "string",
S3 = "string",
Salesforce = "string",
SapoData = "string",
ServiceNow = "string",
Singular = "string",
Slack = "string",
Trendmicro = "string",
Veeva = "string",
Zendesk = "string",
},
},
DestinationField = "string",
SourceFields = new[]
{
"string",
},
TaskProperties =
{
{ "string", "string" },
},
},
},
TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
{
TriggerType = "string",
TriggerProperties = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesArgs
{
Scheduled = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs
{
ScheduleExpression = "string",
DataPullMode = "string",
FirstExecutionFrom = "string",
ScheduleEndTime = "string",
ScheduleOffset = 0,
ScheduleStartTime = "string",
Timezone = "string",
},
},
},
Description = "string",
KmsArn = "string",
Name = "string",
Tags =
{
{ "string", "string" },
},
});
example, err := appflow.NewFlow(ctx, "flowResource", &appflow.FlowArgs{
DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
&appflow.FlowDestinationFlowConfigArgs{
ConnectorType: pulumi.String("string"),
DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
CustomConnector: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs{
EntityName: pulumi.String("string"),
CustomProperties: pulumi.StringMap{
"string": pulumi.String("string"),
},
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
IdFieldNames: pulumi.StringArray{
pulumi.String("string"),
},
WriteOperationType: pulumi.String("string"),
},
CustomerProfiles: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs{
DomainName: pulumi.String("string"),
ObjectTypeName: pulumi.String("string"),
},
EventBridge: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs{
Object: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
},
Honeycode: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs{
Object: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
},
LookoutMetrics: nil,
Marketo: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs{
Object: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
},
Redshift: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs{
IntermediateBucketName: pulumi.String("string"),
Object: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
},
S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs{
AggregationType: pulumi.String("string"),
TargetFileSize: pulumi.Int(0),
},
FileType: pulumi.String("string"),
PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
PrefixFormat: pulumi.String("string"),
PrefixType: pulumi.String("string"),
},
PreserveSourceDataTyping: pulumi.Bool(false),
},
},
Salesforce: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs{
Object: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
IdFieldNames: pulumi.StringArray{
pulumi.String("string"),
},
WriteOperationType: pulumi.String("string"),
},
SapoData: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs{
ObjectPath: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
IdFieldNames: pulumi.StringArray{
pulumi.String("string"),
},
SuccessResponseHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
},
WriteOperationType: pulumi.String("string"),
},
Snowflake: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs{
IntermediateBucketName: pulumi.String("string"),
Object: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
},
Upsolver: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs{
BucketName: pulumi.String("string"),
S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs{
PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs{
PrefixType: pulumi.String("string"),
PrefixFormat: pulumi.String("string"),
},
AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs{
AggregationType: pulumi.String("string"),
},
FileType: pulumi.String("string"),
},
BucketPrefix: pulumi.String("string"),
},
Zendesk: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs{
Object: pulumi.String("string"),
ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
FailOnFirstDestinationError: pulumi.Bool(false),
},
IdFieldNames: pulumi.StringArray{
pulumi.String("string"),
},
WriteOperationType: pulumi.String("string"),
},
},
ApiVersion: pulumi.String("string"),
ConnectorProfileName: pulumi.String("string"),
},
},
SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
ConnectorType: pulumi.String("string"),
SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
Amplitude: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs{
Object: pulumi.String("string"),
},
CustomConnector: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs{
EntityName: pulumi.String("string"),
CustomProperties: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
Datadog: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs{
Object: pulumi.String("string"),
},
Dynatrace: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs{
Object: pulumi.String("string"),
},
GoogleAnalytics: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs{
Object: pulumi.String("string"),
},
InforNexus: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs{
Object: pulumi.String("string"),
},
Marketo: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs{
Object: pulumi.String("string"),
},
S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
BucketName: pulumi.String("string"),
BucketPrefix: pulumi.String("string"),
S3InputFormatConfig: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs{
S3InputFileType: pulumi.String("string"),
},
},
Salesforce: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs{
Object: pulumi.String("string"),
EnableDynamicFieldUpdate: pulumi.Bool(false),
IncludeDeletedRecords: pulumi.Bool(false),
},
SapoData: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs{
ObjectPath: pulumi.String("string"),
},
ServiceNow: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs{
Object: pulumi.String("string"),
},
Singular: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs{
Object: pulumi.String("string"),
},
Slack: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs{
Object: pulumi.String("string"),
},
Trendmicro: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs{
Object: pulumi.String("string"),
},
Veeva: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs{
Object: pulumi.String("string"),
DocumentType: pulumi.String("string"),
IncludeAllVersions: pulumi.Bool(false),
IncludeRenditions: pulumi.Bool(false),
IncludeSourceFiles: pulumi.Bool(false),
},
Zendesk: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs{
Object: pulumi.String("string"),
},
},
ApiVersion: pulumi.String("string"),
ConnectorProfileName: pulumi.String("string"),
IncrementalPullConfig: &appflow.FlowSourceFlowConfigIncrementalPullConfigArgs{
DatetimeTypeFieldName: pulumi.String("string"),
},
},
Tasks: appflow.FlowTaskArray{
&appflow.FlowTaskArgs{
TaskType: pulumi.String("string"),
ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
&appflow.FlowTaskConnectorOperatorArgs{
Amplitude: pulumi.String("string"),
CustomConnector: pulumi.String("string"),
Datadog: pulumi.String("string"),
Dynatrace: pulumi.String("string"),
GoogleAnalytics: pulumi.String("string"),
InforNexus: pulumi.String("string"),
Marketo: pulumi.String("string"),
S3: pulumi.String("string"),
Salesforce: pulumi.String("string"),
SapoData: pulumi.String("string"),
ServiceNow: pulumi.String("string"),
Singular: pulumi.String("string"),
Slack: pulumi.String("string"),
Trendmicro: pulumi.String("string"),
Veeva: pulumi.String("string"),
Zendesk: pulumi.String("string"),
},
},
DestinationField: pulumi.String("string"),
SourceFields: pulumi.StringArray{
pulumi.String("string"),
},
TaskProperties: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
},
TriggerConfig: &appflow.FlowTriggerConfigArgs{
TriggerType: pulumi.String("string"),
TriggerProperties: &appflow.FlowTriggerConfigTriggerPropertiesArgs{
Scheduled: &appflow.FlowTriggerConfigTriggerPropertiesScheduledArgs{
ScheduleExpression: pulumi.String("string"),
DataPullMode: pulumi.String("string"),
FirstExecutionFrom: pulumi.String("string"),
ScheduleEndTime: pulumi.String("string"),
ScheduleOffset: pulumi.Int(0),
ScheduleStartTime: pulumi.String("string"),
Timezone: pulumi.String("string"),
},
},
},
Description: pulumi.String("string"),
KmsArn: pulumi.String("string"),
Name: pulumi.String("string"),
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
})
var flowResource = new Flow("flowResource", FlowArgs.builder()
.destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
.connectorType("string")
.destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
.customConnector(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs.builder()
.entityName("string")
.customProperties(Map.of("string", "string"))
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.idFieldNames("string")
.writeOperationType("string")
.build())
.customerProfiles(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs.builder()
.domainName("string")
.objectTypeName("string")
.build())
.eventBridge(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs.builder()
.object("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.build())
.honeycode(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs.builder()
.object("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.build())
.lookoutMetrics()
.marketo(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs.builder()
.object("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.build())
.redshift(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs.builder()
.intermediateBucketName("string")
.object("string")
.bucketPrefix("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.build())
.s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
.bucketName("string")
.bucketPrefix("string")
.s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
.aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs.builder()
.aggregationType("string")
.targetFileSize(0)
.build())
.fileType("string")
.prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
.prefixFormat("string")
.prefixType("string")
.build())
.preserveSourceDataTyping(false)
.build())
.build())
.salesforce(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs.builder()
.object("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.idFieldNames("string")
.writeOperationType("string")
.build())
.sapoData(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs.builder()
.objectPath("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.idFieldNames("string")
.successResponseHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.build())
.writeOperationType("string")
.build())
.snowflake(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs.builder()
.intermediateBucketName("string")
.object("string")
.bucketPrefix("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.build())
.upsolver(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs.builder()
.bucketName("string")
.s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs.builder()
.prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs.builder()
.prefixType("string")
.prefixFormat("string")
.build())
.aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs.builder()
.aggregationType("string")
.build())
.fileType("string")
.build())
.bucketPrefix("string")
.build())
.zendesk(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs.builder()
.object("string")
.errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs.builder()
.bucketName("string")
.bucketPrefix("string")
.failOnFirstDestinationError(false)
.build())
.idFieldNames("string")
.writeOperationType("string")
.build())
.build())
.apiVersion("string")
.connectorProfileName("string")
.build())
.sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
.connectorType("string")
.sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
.amplitude(FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs.builder()
.object("string")
.build())
.customConnector(FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs.builder()
.entityName("string")
.customProperties(Map.of("string", "string"))
.build())
.datadog(FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs.builder()
.object("string")
.build())
.dynatrace(FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs.builder()
.object("string")
.build())
.googleAnalytics(FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs.builder()
.object("string")
.build())
.inforNexus(FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs.builder()
.object("string")
.build())
.marketo(FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs.builder()
.object("string")
.build())
.s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
.bucketName("string")
.bucketPrefix("string")
.s3InputFormatConfig(FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs.builder()
.s3InputFileType("string")
.build())
.build())
.salesforce(FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs.builder()
.object("string")
.enableDynamicFieldUpdate(false)
.includeDeletedRecords(false)
.build())
.sapoData(FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs.builder()
.objectPath("string")
.build())
.serviceNow(FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs.builder()
.object("string")
.build())
.singular(FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs.builder()
.object("string")
.build())
.slack(FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs.builder()
.object("string")
.build())
.trendmicro(FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs.builder()
.object("string")
.build())
.veeva(FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs.builder()
.object("string")
.documentType("string")
.includeAllVersions(false)
.includeRenditions(false)
.includeSourceFiles(false)
.build())
.zendesk(FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs.builder()
.object("string")
.build())
.build())
.apiVersion("string")
.connectorProfileName("string")
.incrementalPullConfig(FlowSourceFlowConfigIncrementalPullConfigArgs.builder()
.datetimeTypeFieldName("string")
.build())
.build())
.tasks(FlowTaskArgs.builder()
.taskType("string")
.connectorOperators(FlowTaskConnectorOperatorArgs.builder()
.amplitude("string")
.customConnector("string")
.datadog("string")
.dynatrace("string")
.googleAnalytics("string")
.inforNexus("string")
.marketo("string")
.s3("string")
.salesforce("string")
.sapoData("string")
.serviceNow("string")
.singular("string")
.slack("string")
.trendmicro("string")
.veeva("string")
.zendesk("string")
.build())
.destinationField("string")
.sourceFields("string")
.taskProperties(Map.of("string", "string"))
.build())
.triggerConfig(FlowTriggerConfigArgs.builder()
.triggerType("string")
.triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
.scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
.scheduleExpression("string")
.dataPullMode("string")
.firstExecutionFrom("string")
.scheduleEndTime("string")
.scheduleOffset(0)
.scheduleStartTime("string")
.timezone("string")
.build())
.build())
.build())
.description("string")
.kmsArn("string")
.name("string")
.tags(Map.of("string", "string"))
.build());
flow_resource = aws.appflow.Flow("flowResource",
destination_flow_configs=[{
"connectorType": "string",
"destinationConnectorProperties": {
"customConnector": {
"entityName": "string",
"customProperties": {
"string": "string",
},
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
"idFieldNames": ["string"],
"writeOperationType": "string",
},
"customerProfiles": {
"domainName": "string",
"objectTypeName": "string",
},
"eventBridge": {
"object": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
},
"honeycode": {
"object": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
},
"lookoutMetrics": {},
"marketo": {
"object": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
},
"redshift": {
"intermediateBucketName": "string",
"object": "string",
"bucketPrefix": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
},
"s3": {
"bucketName": "string",
"bucketPrefix": "string",
"s3OutputFormatConfig": {
"aggregationConfig": {
"aggregationType": "string",
"targetFileSize": 0,
},
"fileType": "string",
"prefixConfig": {
"prefixFormat": "string",
"prefixType": "string",
},
"preserveSourceDataTyping": False,
},
},
"salesforce": {
"object": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
"idFieldNames": ["string"],
"writeOperationType": "string",
},
"sapoData": {
"objectPath": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
"idFieldNames": ["string"],
"successResponseHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
},
"writeOperationType": "string",
},
"snowflake": {
"intermediateBucketName": "string",
"object": "string",
"bucketPrefix": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
},
"upsolver": {
"bucketName": "string",
"s3OutputFormatConfig": {
"prefixConfig": {
"prefixType": "string",
"prefixFormat": "string",
},
"aggregationConfig": {
"aggregationType": "string",
},
"fileType": "string",
},
"bucketPrefix": "string",
},
"zendesk": {
"object": "string",
"errorHandlingConfig": {
"bucketName": "string",
"bucketPrefix": "string",
"failOnFirstDestinationError": False,
},
"idFieldNames": ["string"],
"writeOperationType": "string",
},
},
"apiVersion": "string",
"connectorProfileName": "string",
}],
source_flow_config={
"connectorType": "string",
"sourceConnectorProperties": {
"amplitude": {
"object": "string",
},
"customConnector": {
"entityName": "string",
"customProperties": {
"string": "string",
},
},
"datadog": {
"object": "string",
},
"dynatrace": {
"object": "string",
},
"googleAnalytics": {
"object": "string",
},
"inforNexus": {
"object": "string",
},
"marketo": {
"object": "string",
},
"s3": {
"bucketName": "string",
"bucketPrefix": "string",
"s3InputFormatConfig": {
"s3InputFileType": "string",
},
},
"salesforce": {
"object": "string",
"enableDynamicFieldUpdate": False,
"includeDeletedRecords": False,
},
"sapoData": {
"objectPath": "string",
},
"serviceNow": {
"object": "string",
},
"singular": {
"object": "string",
},
"slack": {
"object": "string",
},
"trendmicro": {
"object": "string",
},
"veeva": {
"object": "string",
"documentType": "string",
"includeAllVersions": False,
"includeRenditions": False,
"includeSourceFiles": False,
},
"zendesk": {
"object": "string",
},
},
"apiVersion": "string",
"connectorProfileName": "string",
"incrementalPullConfig": {
"datetimeTypeFieldName": "string",
},
},
tasks=[{
"taskType": "string",
"connectorOperators": [{
"amplitude": "string",
"customConnector": "string",
"datadog": "string",
"dynatrace": "string",
"googleAnalytics": "string",
"inforNexus": "string",
"marketo": "string",
"s3": "string",
"salesforce": "string",
"sapoData": "string",
"serviceNow": "string",
"singular": "string",
"slack": "string",
"trendmicro": "string",
"veeva": "string",
"zendesk": "string",
}],
"destinationField": "string",
"sourceFields": ["string"],
"taskProperties": {
"string": "string",
},
}],
trigger_config={
"triggerType": "string",
"triggerProperties": {
"scheduled": {
"scheduleExpression": "string",
"dataPullMode": "string",
"firstExecutionFrom": "string",
"scheduleEndTime": "string",
"scheduleOffset": 0,
"scheduleStartTime": "string",
"timezone": "string",
},
},
},
description="string",
kms_arn="string",
name="string",
tags={
"string": "string",
})
const flowResource = new aws.appflow.Flow("flowResource", {
destinationFlowConfigs: [{
connectorType: "string",
destinationConnectorProperties: {
customConnector: {
entityName: "string",
customProperties: {
string: "string",
},
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
idFieldNames: ["string"],
writeOperationType: "string",
},
customerProfiles: {
domainName: "string",
objectTypeName: "string",
},
eventBridge: {
object: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
},
honeycode: {
object: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
},
lookoutMetrics: {},
marketo: {
object: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
},
redshift: {
intermediateBucketName: "string",
object: "string",
bucketPrefix: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
},
s3: {
bucketName: "string",
bucketPrefix: "string",
s3OutputFormatConfig: {
aggregationConfig: {
aggregationType: "string",
targetFileSize: 0,
},
fileType: "string",
prefixConfig: {
prefixFormat: "string",
prefixType: "string",
},
preserveSourceDataTyping: false,
},
},
salesforce: {
object: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
idFieldNames: ["string"],
writeOperationType: "string",
},
sapoData: {
objectPath: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
idFieldNames: ["string"],
successResponseHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
},
writeOperationType: "string",
},
snowflake: {
intermediateBucketName: "string",
object: "string",
bucketPrefix: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
},
upsolver: {
bucketName: "string",
s3OutputFormatConfig: {
prefixConfig: {
prefixType: "string",
prefixFormat: "string",
},
aggregationConfig: {
aggregationType: "string",
},
fileType: "string",
},
bucketPrefix: "string",
},
zendesk: {
object: "string",
errorHandlingConfig: {
bucketName: "string",
bucketPrefix: "string",
failOnFirstDestinationError: false,
},
idFieldNames: ["string"],
writeOperationType: "string",
},
},
apiVersion: "string",
connectorProfileName: "string",
}],
sourceFlowConfig: {
connectorType: "string",
sourceConnectorProperties: {
amplitude: {
object: "string",
},
customConnector: {
entityName: "string",
customProperties: {
string: "string",
},
},
datadog: {
object: "string",
},
dynatrace: {
object: "string",
},
googleAnalytics: {
object: "string",
},
inforNexus: {
object: "string",
},
marketo: {
object: "string",
},
s3: {
bucketName: "string",
bucketPrefix: "string",
s3InputFormatConfig: {
s3InputFileType: "string",
},
},
salesforce: {
object: "string",
enableDynamicFieldUpdate: false,
includeDeletedRecords: false,
},
sapoData: {
objectPath: "string",
},
serviceNow: {
object: "string",
},
singular: {
object: "string",
},
slack: {
object: "string",
},
trendmicro: {
object: "string",
},
veeva: {
object: "string",
documentType: "string",
includeAllVersions: false,
includeRenditions: false,
includeSourceFiles: false,
},
zendesk: {
object: "string",
},
},
apiVersion: "string",
connectorProfileName: "string",
incrementalPullConfig: {
datetimeTypeFieldName: "string",
},
},
tasks: [{
taskType: "string",
connectorOperators: [{
amplitude: "string",
customConnector: "string",
datadog: "string",
dynatrace: "string",
googleAnalytics: "string",
inforNexus: "string",
marketo: "string",
s3: "string",
salesforce: "string",
sapoData: "string",
serviceNow: "string",
singular: "string",
slack: "string",
trendmicro: "string",
veeva: "string",
zendesk: "string",
}],
destinationField: "string",
sourceFields: ["string"],
taskProperties: {
string: "string",
},
}],
triggerConfig: {
triggerType: "string",
triggerProperties: {
scheduled: {
scheduleExpression: "string",
dataPullMode: "string",
firstExecutionFrom: "string",
scheduleEndTime: "string",
scheduleOffset: 0,
scheduleStartTime: "string",
timezone: "string",
},
},
},
description: "string",
kmsArn: "string",
name: "string",
tags: {
string: "string",
},
});
type: aws:appflow:Flow
properties:
description: string
destinationFlowConfigs:
- apiVersion: string
connectorProfileName: string
connectorType: string
destinationConnectorProperties:
customConnector:
customProperties:
string: string
entityName: string
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
idFieldNames:
- string
writeOperationType: string
customerProfiles:
domainName: string
objectTypeName: string
eventBridge:
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
object: string
honeycode:
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
object: string
lookoutMetrics: {}
marketo:
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
object: string
redshift:
bucketPrefix: string
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
intermediateBucketName: string
object: string
s3:
bucketName: string
bucketPrefix: string
s3OutputFormatConfig:
aggregationConfig:
aggregationType: string
targetFileSize: 0
fileType: string
prefixConfig:
prefixFormat: string
prefixType: string
preserveSourceDataTyping: false
salesforce:
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
idFieldNames:
- string
object: string
writeOperationType: string
sapoData:
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
idFieldNames:
- string
objectPath: string
successResponseHandlingConfig:
bucketName: string
bucketPrefix: string
writeOperationType: string
snowflake:
bucketPrefix: string
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
intermediateBucketName: string
object: string
upsolver:
bucketName: string
bucketPrefix: string
s3OutputFormatConfig:
aggregationConfig:
aggregationType: string
fileType: string
prefixConfig:
prefixFormat: string
prefixType: string
zendesk:
errorHandlingConfig:
bucketName: string
bucketPrefix: string
failOnFirstDestinationError: false
idFieldNames:
- string
object: string
writeOperationType: string
kmsArn: string
name: string
sourceFlowConfig:
apiVersion: string
connectorProfileName: string
connectorType: string
incrementalPullConfig:
datetimeTypeFieldName: string
sourceConnectorProperties:
amplitude:
object: string
customConnector:
customProperties:
string: string
entityName: string
datadog:
object: string
dynatrace:
object: string
googleAnalytics:
object: string
inforNexus:
object: string
marketo:
object: string
s3:
bucketName: string
bucketPrefix: string
s3InputFormatConfig:
s3InputFileType: string
salesforce:
enableDynamicFieldUpdate: false
includeDeletedRecords: false
object: string
sapoData:
objectPath: string
serviceNow:
object: string
singular:
object: string
slack:
object: string
trendmicro:
object: string
veeva:
documentType: string
includeAllVersions: false
includeRenditions: false
includeSourceFiles: false
object: string
zendesk:
object: string
tags:
string: string
tasks:
- connectorOperators:
- amplitude: string
customConnector: string
datadog: string
dynatrace: string
googleAnalytics: string
inforNexus: string
marketo: string
s3: string
salesforce: string
sapoData: string
serviceNow: string
singular: string
slack: string
trendmicro: string
veeva: string
zendesk: string
destinationField: string
sourceFields:
- string
taskProperties:
string: string
taskType: string
triggerConfig:
triggerProperties:
scheduled:
dataPullMode: string
firstExecutionFrom: string
scheduleEndTime: string
scheduleExpression: string
scheduleOffset: 0
scheduleStartTime: string
timezone: string
triggerType: string
Flow Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Flow resource accepts the following input properties:
- DestinationFlowConfigs List<FlowDestinationFlowConfig> - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- SourceFlowConfig FlowSourceFlowConfig - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tasks
List<Flow
Task> - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- Trigger
Config FlowTrigger Config - A Trigger that determines how and when the flow runs.
- Description string
- Description of the flow you want to create.
- Kms
Arn string - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- Name string
- Name of the flow.
- Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- Destination
Flow []FlowConfigs Destination Flow Config Args - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- Source
Flow FlowConfig Source Flow Config Args - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tasks
[]Flow
Task Args - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- Trigger
Config FlowTrigger Config Args - A Trigger that determines how and when the flow runs.
- Description string
- Description of the flow you want to create.
- Kms
Arn string - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- Name string
- Name of the flow.
- map[string]string
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destination
Flow List<FlowConfigs Destination Flow Config> - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- source
Flow FlowConfig Source Flow Config - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks
List<Flow
Task> - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger
Config FlowTrigger Config - A Trigger that determines how and when the flow runs.
- description String
- Description of the flow you want to create.
- kms
Arn String - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name String
- Name of the flow.
- Map<String,String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destination
Flow FlowConfigs Destination Flow Config[] - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- source
Flow FlowConfig Source Flow Config - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks
Flow
Task[] - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger
Config FlowTrigger Config - A Trigger that determines how and when the flow runs.
- description string
- Description of the flow you want to create.
- kms
Arn string - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name string
- Name of the flow.
- {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destination_
flow_ Sequence[Flowconfigs Destination Flow Config Args] - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- source_
flow_ Flowconfig Source Flow Config Args - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks
Sequence[Flow
Task Args] - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger_
config FlowTrigger Config Args - A Trigger that determines how and when the flow runs.
- description str
- Description of the flow you want to create.
- kms_
arn str - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name str
- Name of the flow.
- Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destination
Flow List<Property Map>Configs - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- source
Flow Property MapConfig - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks List<Property Map>
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger
Config Property Map - A Trigger that determines how and when the flow runs.
- description String
- Description of the flow you want to create.
- kms
Arn String - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name String
- Name of the flow.
- Map<String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Outputs
All input properties are implicitly available as output properties. Additionally, the Flow resource produces the following output properties:
- Arn string
- Flow's ARN.
- Flow
Status string - The current status of the flow.
- Id string
- The provider-assigned unique ID for this managed resource.
- Dictionary<string, string>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block.
- Arn string
- Flow's ARN.
- Flow
Status string - The current status of the flow.
- Id string
- The provider-assigned unique ID for this managed resource.
- map[string]string
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block.
- arn String
- Flow's ARN.
- flow
Status String - The current status of the flow.
- id String
- The provider-assigned unique ID for this managed resource.
- Map<String,String>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block.
- arn string
- Flow's ARN.
- flow
Status string - The current status of the flow.
- id string
- The provider-assigned unique ID for this managed resource.
- {[key: string]: string}
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block.
- arn str
- Flow's ARN.
- flow_
status str - The current status of the flow.
- id str
- The provider-assigned unique ID for this managed resource.
- Mapping[str, str]
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block.
- arn String
- Flow's ARN.
- flow
Status String - The current status of the flow.
- id String
- The provider-assigned unique ID for this managed resource.
- Map<String>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block.
Look up Existing Flow Resource
Get an existing Flow resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: FlowState, opts?: CustomResourceOptions): Flow
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
arn: Optional[str] = None,
description: Optional[str] = None,
destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
flow_status: Optional[str] = None,
kms_arn: Optional[str] = None,
name: Optional[str] = None,
source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
tags: Optional[Mapping[str, str]] = None,
tags_all: Optional[Mapping[str, str]] = None,
tasks: Optional[Sequence[FlowTaskArgs]] = None,
trigger_config: Optional[FlowTriggerConfigArgs] = None) -> Flow
func GetFlow(ctx *Context, name string, id IDInput, state *FlowState, opts ...ResourceOption) (*Flow, error)
public static Flow Get(string name, Input<string> id, FlowState? state, CustomResourceOptions? opts = null)
public static Flow get(String name, Output<String> id, FlowState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Arn string
- Flow's ARN.
- Description string
- Description of the flow you want to create.
- Destination
Flow List<FlowConfigs Destination Flow Config> - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- Flow
Status string - The current status of the flow.
- Kms
Arn string - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- Name string
- Name of the flow.
- Source
Flow FlowConfig Source Flow Config - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Dictionary<string, string>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - Tasks
List<Flow
Task> - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- Trigger
Config FlowTrigger Config - A Trigger that determines how and when the flow runs.
- Arn string
- Flow's ARN.
- Description string
- Description of the flow you want to create.
- Destination
Flow []FlowConfigs Destination Flow Config Args - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- Flow
Status string - The current status of the flow.
- Kms
Arn string - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- Name string
- Name of the flow.
- Source
Flow FlowConfig Source Flow Config Args - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- map[string]string
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - map[string]string
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - Tasks
[]Flow
Task Args - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- Trigger
Config FlowTrigger Config Args - A Trigger that determines how and when the flow runs.
- arn String
- Flow's ARN.
- description String
- Description of the flow you want to create.
- destination
Flow List<FlowConfigs Destination Flow Config> - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flow
Status String - The current status of the flow.
- kms
Arn String - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name String
- Name of the flow.
- source
Flow FlowConfig Source Flow Config - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Map<String,String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Map<String,String>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - tasks
List<Flow
Task> - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger
Config FlowTrigger Config - A Trigger that determines how and when the flow runs.
- arn string
- Flow's ARN.
- description string
- Description of the flow you want to create.
- destination
Flow FlowConfigs Destination Flow Config[] - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flow
Status string - The current status of the flow.
- kms
Arn string - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name string
- Name of the flow.
- source
Flow FlowConfig Source Flow Config - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - {[key: string]: string}
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - tasks
Flow
Task[] - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger
Config FlowTrigger Config - A Trigger that determines how and when the flow runs.
- arn str
- Flow's ARN.
- description str
- Description of the flow you want to create.
- destination_
flow_ Sequence[Flowconfigs Destination Flow Config Args] - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flow_
status str - The current status of the flow.
- kms_
arn str - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name str
- Name of the flow.
- source_
flow_ Flowconfig Source Flow Config Args - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Mapping[str, str]
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - tasks
Sequence[Flow
Task Args] - A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger_
config FlowTrigger Config Args - A Trigger that determines how and when the flow runs.
- arn String
- Flow's ARN.
- description String
- Description of the flow you want to create.
- destination
Flow List<Property Map>Configs - A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flow
Status String - The current status of the flow.
- kms
Arn String - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- name String
- Name of the flow.
- source
Flow Property MapConfig - The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Map<String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Map<String>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - tasks List<Property Map>
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger
Config Property Map - A Trigger that determines how and when the flow runs.
Supporting Types
FlowDestinationFlowConfig, FlowDestinationFlowConfigArgs
- Connector
Type string - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - Destination
Connector FlowProperties Destination Flow Config Destination Connector Properties - This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- Api
Version string - API version that the destination connector uses.
- ConnectorProfileName string - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- Connector
Type string - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - Destination
Connector FlowProperties Destination Flow Config Destination Connector Properties - This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- Api
Version string - API version that the destination connector uses.
- ConnectorProfileName string - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connector
Type String - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - destination
Connector FlowProperties Destination Flow Config Destination Connector Properties - This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- api
Version String - API version that the destination connector uses.
- connectorProfileName String - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connector
Type string - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - destination
Connector FlowProperties Destination Flow Config Destination Connector Properties - This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- api
Version string - API version that the destination connector uses.
- connectorProfileName string - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connector_
type str - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - destination_
connector_ Flowproperties Destination Flow Config Destination Connector Properties - This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- api_
version str - API version that the destination connector uses.
- connector_
profile_ strname - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connector
Type String - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - destination
Connector Property MapProperties - This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- api
Version String - API version that the destination connector uses.
- connector
Profile StringName - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
FlowDestinationFlowConfigDestinationConnectorProperties, FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
- Custom
Connector FlowDestination Flow Config Destination Connector Properties Custom Connector - Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- Customer
Profiles FlowDestination Flow Config Destination Connector Properties Customer Profiles - Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- Event
Bridge FlowDestination Flow Config Destination Connector Properties Event Bridge - Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- Honeycode
Flow
Destination Flow Config Destination Connector Properties Honeycode - Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- Lookout
Metrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics - Marketo
Flow
Destination Flow Config Destination Connector Properties Marketo - Properties that are required to query Marketo. See Generic Destination Properties for more details.
- Redshift
Flow
Destination Flow Config Destination Connector Properties Redshift - Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- S3
Flow
Destination Flow Config Destination Connector Properties S3 - Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- Salesforce
Flow
Destination Flow Config Destination Connector Properties Salesforce - Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- Sapo
Data FlowDestination Flow Config Destination Connector Properties Sapo Data - Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- Snowflake
Flow
Destination Flow Config Destination Connector Properties Snowflake - Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- Upsolver
Flow
Destination Flow Config Destination Connector Properties Upsolver - Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- Zendesk
Flow
Destination Flow Config Destination Connector Properties Zendesk - Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- Custom
Connector FlowDestination Flow Config Destination Connector Properties Custom Connector - Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- Customer
Profiles FlowDestination Flow Config Destination Connector Properties Customer Profiles - Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- Event
Bridge FlowDestination Flow Config Destination Connector Properties Event Bridge - Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- Honeycode
Flow
Destination Flow Config Destination Connector Properties Honeycode - Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- Lookout
Metrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics - Marketo
Flow
Destination Flow Config Destination Connector Properties Marketo - Properties that are required to query Marketo. See Generic Destination Properties for more details.
- Redshift
Flow
Destination Flow Config Destination Connector Properties Redshift - Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- S3
Flow
Destination Flow Config Destination Connector Properties S3 - Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- Salesforce
Flow
Destination Flow Config Destination Connector Properties Salesforce - Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- Sapo
Data FlowDestination Flow Config Destination Connector Properties Sapo Data - Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- Snowflake
Flow
Destination Flow Config Destination Connector Properties Snowflake - Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- Upsolver
Flow
Destination Flow Config Destination Connector Properties Upsolver - Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- Zendesk
Flow
Destination Flow Config Destination Connector Properties Zendesk - Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- custom
Connector FlowDestination Flow Config Destination Connector Properties Custom Connector - Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customer
Profiles FlowDestination Flow Config Destination Connector Properties Customer Profiles - Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- event
Bridge FlowDestination Flow Config Destination Connector Properties Event Bridge - Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode
Flow
Destination Flow Config Destination Connector Properties Honeycode - Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookout
Metrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics - marketo
Flow
Destination Flow Config Destination Connector Properties Marketo - Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift
Flow
Destination Flow Config Destination Connector Properties Redshift - Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3
Flow
Destination Flow Config Destination Connector Properties S3 - Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce
Flow
Destination Flow Config Destination Connector Properties Salesforce - Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapo
Data FlowDestination Flow Config Destination Connector Properties Sapo Data - Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake
Flow
Destination Flow Config Destination Connector Properties Snowflake - Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver
Flow
Destination Flow Config Destination Connector Properties Upsolver - Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk
Flow
Destination Flow Config Destination Connector Properties Zendesk - Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- custom
Connector FlowDestination Flow Config Destination Connector Properties Custom Connector - Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customer
Profiles FlowDestination Flow Config Destination Connector Properties Customer Profiles - Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- event
Bridge FlowDestination Flow Config Destination Connector Properties Event Bridge - Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode
Flow
Destination Flow Config Destination Connector Properties Honeycode - Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookout
Metrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics - marketo
Flow
Destination Flow Config Destination Connector Properties Marketo - Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift
Flow
Destination Flow Config Destination Connector Properties Redshift - Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3
Flow
Destination Flow Config Destination Connector Properties S3 - Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce
Flow
Destination Flow Config Destination Connector Properties Salesforce - Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapo
Data FlowDestination Flow Config Destination Connector Properties Sapo Data - Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake
Flow
Destination Flow Config Destination Connector Properties Snowflake - Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver
Flow
Destination Flow Config Destination Connector Properties Upsolver - Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk
Flow
Destination Flow Config Destination Connector Properties Zendesk - Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- custom_
connector FlowDestination Flow Config Destination Connector Properties Custom Connector - Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customer_
profiles FlowDestination Flow Config Destination Connector Properties Customer Profiles - Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- event_
bridge FlowDestination Flow Config Destination Connector Properties Event Bridge - Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode
Flow
Destination Flow Config Destination Connector Properties Honeycode - Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookout_
metrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics - marketo
Flow
Destination Flow Config Destination Connector Properties Marketo - Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift
Flow
Destination Flow Config Destination Connector Properties Redshift - Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3
Flow
Destination Flow Config Destination Connector Properties S3 - Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce
Flow
Destination Flow Config Destination Connector Properties Salesforce - Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapo_
data FlowDestination Flow Config Destination Connector Properties Sapo Data - Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake
Flow
Destination Flow Config Destination Connector Properties Snowflake - Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver
Flow
Destination Flow Config Destination Connector Properties Upsolver - Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk
Flow
Destination Flow Config Destination Connector Properties Zendesk - Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- custom
Connector Property Map - Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customer
Profiles Property Map - Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- event
Bridge Property Map - Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode Property Map
- Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookout
Metrics Property Map - marketo Property Map
- Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift Property Map
- Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3 Property Map
- Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce Property Map
- Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapo
Data Property Map - Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake Property Map
- Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver Property Map
- Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk Property Map
- Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs
- Entity
Name string - Custom
Properties Dictionary<string, string> - Error
Handling FlowConfig Destination Flow Config Destination Connector Properties Custom Connector Error Handling Config - Id
Field List<string>Names - Write
Operation stringType
- Entity
Name string - Custom
Properties map[string]string - Error
Handling FlowConfig Destination Flow Config Destination Connector Properties Custom Connector Error Handling Config - Id
Field []stringNames - Write
Operation stringType
- entity
Name String - custom
Properties Map<String,String> - error
Handling FlowConfig Destination Flow Config Destination Connector Properties Custom Connector Error Handling Config - id
Field List<String>Names - write
Operation StringType
- entity
Name string - custom
Properties {[key: string]: string} - error
Handling FlowConfig Destination Flow Config Destination Connector Properties Custom Connector Error Handling Config - id
Field string[]Names - write
Operation stringType
- entity
Name String - custom
Properties Map<String> - error
Handling Property MapConfig - id
Field List<String>Names - write
Operation StringType
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs
- Domain
Name string - Object
Type stringName
- Domain
Name string - Object
Type stringName
- domain
Name String - object
Type StringName
- domain
Name string - object
Type stringName
- domain_
name str - object_
type_ strname
- domain
Name String - object
Type StringName
FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs
FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs
FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs
FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs
- intermediate
Bucket StringName - object String
- bucket
Prefix String - error
Handling Property MapConfig
FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesS3, FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
- bucket
Name String - bucket
Prefix String - s3Output
Format Property MapConfig
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
- Aggregation
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- File
Type string - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, and PARQUET
. - Prefix
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- Preserve
Source boolData Typing - Whether the data types from the source system need to be preserved (Only valid for
Parquet
file type)
- Aggregation
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- File
Type string - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, and PARQUET
. - Prefix
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- Preserve
Source boolData Typing - Whether the data types from the source system need to be preserved (Only valid for
Parquet
file type)
- aggregation
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file
Type String - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, and PARQUET
. - prefix
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserve
Source BooleanData Typing - Whether the data types from the source system need to be preserved (Only valid for
Parquet
file type)
- aggregation
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file
Type string - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, and PARQUET
. - prefix
Config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserve
Source booleanData Typing - Whether the data types from the source system need to be preserved (Only valid for
Parquet
file type)
- aggregation_
config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file_
type str - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, and PARQUET
. - prefix_
config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserve_
source_ booldata_ typing - Whether the data types from the source system need to be preserved (Only valid for
Parquet
file type)
- aggregation
Config Property Map - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file
Type String - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, and PARQUET
. - prefix
Config Property Map - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserve
Source BooleanData Typing - Whether the data types from the source system need to be preserved (Only valid for
Parquet
file type)
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs
- Aggregation
Type string - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
and SingleFile
. - Target
File intSize - The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- Aggregation
Type string - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
and SingleFile
. - Target
File intSize - The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregation
Type String - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
and SingleFile
. - target
File IntegerSize - The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregation
Type string - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
and SingleFile
. - target
File numberSize - The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregation_type str - Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- target_file_size int - The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregation
Type String - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
andSingleFile
. - target
File NumberSize - The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
- PrefixFormat string - Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- PrefixType string - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- Prefix
Format string - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
. - Prefix
Type string - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
.
- prefix
Format String - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
. - prefix
Type String - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
.
- prefix
Format string - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
. - prefix
Type string - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
.
- prefix_
format str - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
. - prefix_
type str - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
.
- prefix
Format String - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
. - prefix
Type String - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
.
FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs
- object String
- errorHandlingConfig Property Map
- idFieldNames List&lt;String&gt;
- writeOperationType String
FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs
- objectPath String
- errorHandlingConfig Property Map
- idFieldNames List&lt;String&gt;
- successResponseHandlingConfig Property Map
- writeOperationType String
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs
- intermediateBucketName String
- object String
- bucketPrefix String
- errorHandlingConfig Property Map
FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs
- bucketName String
- s3OutputFormatConfig Property Map
- bucketPrefix String
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs
- PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- Aggregation
Config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- File
Type string - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, andPARQUET
.
- Prefix
Config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- Aggregation
Config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- File
Type string - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, andPARQUET
.
- prefix
Config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregation
Config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file
Type String - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, andPARQUET
.
- prefix
Config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregation
Config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file
Type string - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, andPARQUET
.
- prefix_
config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregation_
config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file_
type str - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, andPARQUET
.
- prefix
Config Property Map - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregation
Config Property Map - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file
Type String - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are
CSV
,JSON
, andPARQUET
.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs
- Aggregation
Type string - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
andSingleFile
.
- Aggregation
Type string - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
andSingleFile
.
- aggregation
Type String - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
andSingleFile
.
- aggregation
Type string - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
andSingleFile
.
- aggregation_
type str - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
andSingleFile
.
- aggregation
Type String - Whether Amazon AppFlow aggregates the flow records into a single file, or leave them unaggregated. Valid values are
None
andSingleFile
.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs
- Prefix
Type string - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
. - Prefix
Format string - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
.
- Prefix
Type string - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
. - Prefix
Format string - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
.
- prefix
Type String - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
. - prefix
Format String - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
.
- prefix
Type string - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
. - prefix
Format string - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
.
- prefix_
type str - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
. - prefix_
format str - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
.
- prefix
Type String - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are
FILENAME
,PATH
, andPATH_AND_FILENAME
. - prefix
Format String - Determines the level of granularity that's included in the prefix. Valid values are
YEAR
,MONTH
,DAY
,HOUR
, andMINUTE
.
FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs
- object String
- error
Handling Property MapConfig - id
Field List<String>Names - write
Operation StringType
FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- Bucket
Name string - Name of the Amazon S3 bucket.
- Bucket
Prefix string - Amazon S3 bucket prefix.
- Fail
On boolFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name string - Name of the Amazon S3 bucket.
- bucket
Prefix string - Amazon S3 bucket prefix.
- fail
On booleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_
name str - Name of the Amazon S3 bucket.
- bucket_
prefix str - Amazon S3 bucket prefix.
- fail_
on_ boolfirst_ destination_ error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket
Name String - Name of the Amazon S3 bucket.
- bucket
Prefix String - Amazon S3 bucket prefix.
- fail
On BooleanFirst Destination Error - If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowSourceFlowConfig, FlowSourceFlowConfigArgs
- ConnectorType string - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties - Information that is required to query a particular source connector. See Source Connector Properties for details.
- ApiVersion string - API version that the destination connector uses.
- ConnectorProfileName string - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig - Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- Connector
Type string - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, andCustomConnector
. - Source
Connector FlowProperties Source Flow Config Source Connector Properties - Information that is required to query a particular source connector. See Source Connector Properties for details.
- Api
Version string - API version that the destination connector uses.
- Connector
Profile stringName - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- Incremental
Pull FlowConfig Source Flow Config Incremental Pull Config - Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connector
Type String - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, andCustomConnector
. - source
Connector FlowProperties Source Flow Config Source Connector Properties - Information that is required to query a particular source connector. See Source Connector Properties for details.
- api
Version String - API version that the destination connector uses.
- connector
Profile StringName - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incremental
Pull FlowConfig Source Flow Config Incremental Pull Config - Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connector
Type string - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, andCustomConnector
. - source
Connector FlowProperties Source Flow Config Source Connector Properties - Information that is required to query a particular source connector. See Source Connector Properties for details.
- api
Version string - API version that the destination connector uses.
- connector
Profile stringName - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incremental
Pull FlowConfig Source Flow Config Incremental Pull Config - Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connector_
type str - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - source_
connector_ Flowproperties Source Flow Config Source Connector Properties - Information that is required to query a particular source connector. See Source Connector Properties for details.
- api_
version str - API version that the destination connector uses.
- connector_
profile_ name str - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incremental_
pull_ Flowconfig Source Flow Config Incremental Pull Config - Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connector
Type String - Type of connector, such as Salesforce, Amplitude, and so on. Valid values are
Salesforce
,Singular
,Slack
,Redshift
,S3
,Marketo
,Googleanalytics
,Zendesk
,Servicenow
,Datadog
,Trendmicro
,Snowflake
,Dynatrace
,Infornexus
,Amplitude
,Veeva
,EventBridge
,LookoutMetrics
,Upsolver
,Honeycode
,CustomerProfiles
,SAPOData
, and CustomConnector
. - source
Connector Property MapProperties - Information that is required to query a particular source connector. See Source Connector Properties for details.
- api
Version String - API version that the destination connector uses.
- connector
Profile Name String - Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incremental
Pull Property MapConfig - Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
FlowSourceFlowConfigIncrementalPullConfig, FlowSourceFlowConfigIncrementalPullConfigArgs
- Datetime
Type Field Name string - Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- Datetime
Type stringField Name - Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetime
Type Field Name String - Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetime
Type stringField Name - Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetime_
type_ strfield_ name - Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetime
Type StringField Name - Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
FlowSourceFlowConfigSourceConnectorProperties, FlowSourceFlowConfigSourceConnectorPropertiesArgs
- Amplitude
Flow
Source Flow Config Source Connector Properties Amplitude - Information that is required for querying Amplitude. See Generic Source Properties for more details.
- Custom
Connector FlowSource Flow Config Source Connector Properties Custom Connector - Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- Datadog
Flow
Source Flow Config Source Connector Properties Datadog - Information that is required for querying Datadog. See Generic Source Properties for more details.
- Dynatrace
Flow
Source Flow Config Source Connector Properties Dynatrace - Google
Analytics FlowSource Flow Config Source Connector Properties Google Analytics - Infor
Nexus FlowSource Flow Config Source Connector Properties Infor Nexus - Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- Marketo
Flow
Source Flow Config Source Connector Properties Marketo - Information that is required for querying Marketo. See Generic Source Properties for more details.
- S3
Flow
Source Flow Config Source Connector Properties S3 - Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- Salesforce
Flow
Source Flow Config Source Connector Properties Salesforce - Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- Sapo
Data FlowSource Flow Config Source Connector Properties Sapo Data - Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- Service
Now FlowSource Flow Config Source Connector Properties Service Now - Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- Singular
Flow
Source Flow Config Source Connector Properties Singular - Information that is required for querying Singular. See Generic Source Properties for more details.
- Slack
Flow
Source Flow Config Source Connector Properties Slack - Information that is required for querying Slack. See Generic Source Properties for more details.
- Trendmicro
Flow
Source Flow Config Source Connector Properties Trendmicro - Veeva
Flow
Source Flow Config Source Connector Properties Veeva - Information that is required for querying Veeva. See Veeva Source Properties for more details.
- Zendesk
Flow
Source Flow Config Source Connector Properties Zendesk - Information that is required for querying Zendesk. See Generic Source Properties for more details.
- Amplitude
Flow
Source Flow Config Source Connector Properties Amplitude - Information that is required for querying Amplitude. See Generic Source Properties for more details.
- Custom
Connector FlowSource Flow Config Source Connector Properties Custom Connector - Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- Datadog
Flow
Source Flow Config Source Connector Properties Datadog - Information that is required for querying Datadog. See Generic Source Properties for more details.
- Dynatrace
Flow
Source Flow Config Source Connector Properties Dynatrace - Google
Analytics FlowSource Flow Config Source Connector Properties Google Analytics - Infor
Nexus FlowSource Flow Config Source Connector Properties Infor Nexus - Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- Marketo
Flow
Source Flow Config Source Connector Properties Marketo - Information that is required for querying Marketo. See Generic Source Properties for more details.
- S3
Flow
Source Flow Config Source Connector Properties S3 - Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- Salesforce
Flow
Source Flow Config Source Connector Properties Salesforce - Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- Sapo
Data FlowSource Flow Config Source Connector Properties Sapo Data - Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- Service
Now FlowSource Flow Config Source Connector Properties Service Now - Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- Singular
Flow
Source Flow Config Source Connector Properties Singular - Information that is required for querying Singular. See Generic Source Properties for more details.
- Slack
Flow
Source Flow Config Source Connector Properties Slack - Information that is required for querying Slack. See Generic Source Properties for more details.
- Trendmicro
Flow
Source Flow Config Source Connector Properties Trendmicro - Veeva
Flow
Source Flow Config Source Connector Properties Veeva - Information that is required for querying Veeva. See Veeva Source Properties for more details.
- Zendesk
Flow
Source Flow Config Source Connector Properties Zendesk - Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude
Flow
Source Flow Config Source Connector Properties Amplitude - Information that is required for querying Amplitude. See Generic Source Properties for more details.
- custom
Connector FlowSource Flow Config Source Connector Properties Custom Connector - Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog
Flow
Source Flow Config Source Connector Properties Datadog - Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace
Flow
Source Flow Config Source Connector Properties Dynatrace - google
Analytics FlowSource Flow Config Source Connector Properties Google Analytics - infor
Nexus FlowSource Flow Config Source Connector Properties Infor Nexus - Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo
Flow
Source Flow Config Source Connector Properties Marketo - Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3
Flow
Source Flow Config Source Connector Properties S3 - Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce
Flow
Source Flow Config Source Connector Properties Salesforce - Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapo
Data FlowSource Flow Config Source Connector Properties Sapo Data - Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- service
Now FlowSource Flow Config Source Connector Properties Service Now - Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular
Flow
Source Flow Config Source Connector Properties Singular - Information that is required for querying Singular. See Generic Source Properties for more details.
- slack
Flow
Source Flow Config Source Connector Properties Slack - Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro
Flow
Source Flow Config Source Connector Properties Trendmicro - veeva
Flow
Source Flow Config Source Connector Properties Veeva - Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk
Flow
Source Flow Config Source Connector Properties Zendesk - Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude
Flow
Source Flow Config Source Connector Properties Amplitude - Information that is required for querying Amplitude. See Generic Source Properties for more details.
- custom
Connector FlowSource Flow Config Source Connector Properties Custom Connector - Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog
Flow
Source Flow Config Source Connector Properties Datadog - Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace
Flow
Source Flow Config Source Connector Properties Dynatrace - google
Analytics FlowSource Flow Config Source Connector Properties Google Analytics - infor
Nexus FlowSource Flow Config Source Connector Properties Infor Nexus - Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo
Flow
Source Flow Config Source Connector Properties Marketo - Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3
Flow
Source Flow Config Source Connector Properties S3 - Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce
Flow
Source Flow Config Source Connector Properties Salesforce - Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapo
Data FlowSource Flow Config Source Connector Properties Sapo Data - Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- service
Now FlowSource Flow Config Source Connector Properties Service Now - Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular
Flow
Source Flow Config Source Connector Properties Singular - Information that is required for querying Singular. See Generic Source Properties for more details.
- slack
Flow
Source Flow Config Source Connector Properties Slack - Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro
Flow
Source Flow Config Source Connector Properties Trendmicro - veeva
Flow
Source Flow Config Source Connector Properties Veeva - Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk
Flow
Source Flow Config Source Connector Properties Zendesk - Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude
Flow
Source Flow Config Source Connector Properties Amplitude - Information that is required for querying Amplitude. See Generic Source Properties for more details.
- custom_
connector FlowSource Flow Config Source Connector Properties Custom Connector - Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog
Flow
Source Flow Config Source Connector Properties Datadog - Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace
Flow
Source Flow Config Source Connector Properties Dynatrace - google_
analytics FlowSource Flow Config Source Connector Properties Google Analytics - infor_
nexus FlowSource Flow Config Source Connector Properties Infor Nexus - Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo
Flow
Source Flow Config Source Connector Properties Marketo - Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3
Flow
Source Flow Config Source Connector Properties S3 - Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce
Flow
Source Flow Config Source Connector Properties Salesforce - Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapo_
data FlowSource Flow Config Source Connector Properties Sapo Data - Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- service_
now FlowSource Flow Config Source Connector Properties Service Now - Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular
Flow
Source Flow Config Source Connector Properties Singular - Information that is required for querying Singular. See Generic Source Properties for more details.
- slack
Flow
Source Flow Config Source Connector Properties Slack - Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro
Flow
Source Flow Config Source Connector Properties Trendmicro - veeva
Flow
Source Flow Config Source Connector Properties Veeva - Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk
Flow
Source Flow Config Source Connector Properties Zendesk - Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude Property Map
- Information that is required for querying Amplitude. See Generic Source Properties for more details.
- custom
Connector Property Map - Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog Property Map
- Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace Property Map
- google
Analytics Property Map - infor
Nexus Property Map - Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo Property Map
- Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3 Property Map
- Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce Property Map
- Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapo
Data Property Map - Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- service
Now Property Map - Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular Property Map
- Information that is required for querying Singular. See Generic Source Properties for more details.
- slack Property Map
- Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro Property Map
- veeva Property Map
- Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk Property Map
- Information that is required for querying Zendesk. See Generic Source Properties for more details.
FlowSourceFlowConfigSourceConnectorPropertiesAmplitude, FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector, FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs
- Entity
Name string - Custom
Properties Dictionary<string, string>
- Entity
Name string - Custom
Properties map[string]string
- entity
Name String - custom
Properties Map<String,String>
- entity
Name string - custom
Properties {[key: string]: string}
- entity_
name str - custom_
properties Mapping[str, str]
- entity
Name String - custom
Properties Map<String>
FlowSourceFlowConfigSourceConnectorPropertiesDatadog, FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesDynatrace, FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics, FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesInforNexus, FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesMarketo, FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesS3, FlowSourceFlowConfigSourceConnectorPropertiesS3Args
- bucket
Name String - bucket
Prefix String - s3Input
Format Property MapConfig
FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig, FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs
- S3Input
File Type string - File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are
CSV
and JSON
.
- S3Input
File stringType - File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are
CSV
andJSON
.
- s3Input
File StringType - File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are
CSV
and JSON
.
- s3Input
File stringType - File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are
CSV
andJSON
.
- s3_
input_ strfile_ type - File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are
CSV
andJSON
.
- s3Input
File StringType - File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are
CSV
andJSON
.
FlowSourceFlowConfigSourceConnectorPropertiesSalesforce, FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs
- Object string
- Enable
Dynamic boolField Update - Include
Deleted boolRecords
- Object string
- Enable
Dynamic boolField Update - Include
Deleted boolRecords
- object String
- enable
Dynamic BooleanField Update - include
Deleted BooleanRecords
- object string
- enable
Dynamic booleanField Update - include
Deleted booleanRecords
- object str
- enable_
dynamic_ boolfield_ update - include_
deleted_ boolrecords
- object String
- enable
Dynamic BooleanField Update - include
Deleted BooleanRecords
FlowSourceFlowConfigSourceConnectorPropertiesSapoData, FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs
- Object
Path string
- Object
Path string
- object
Path String
- object
Path string
- object_
path str
- object
Path String
FlowSourceFlowConfigSourceConnectorPropertiesServiceNow, FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesSingular, FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesSlack, FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro, FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesVeeva, FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs
- Object string
- Document
Type string - Include
All boolVersions - Include
Renditions bool - Include
Source boolFiles
- Object string
- Document
Type string - Include
All boolVersions - Include
Renditions bool - Include
Source boolFiles
- object String
- document
Type String - include
All BooleanVersions - include
Renditions Boolean - include
Source BooleanFiles
- object string
- document
Type string - include
All booleanVersions - include
Renditions boolean - include
Source booleanFiles
- object str
- document_
type str - include_
all_ boolversions - include_
renditions bool - include_
source_ boolfiles
- object String
- document
Type String - include
All BooleanVersions - include
Renditions Boolean - include
Source BooleanFiles
FlowSourceFlowConfigSourceConnectorPropertiesZendesk, FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowTask, FlowTaskArgs
- Task
Type string - Particular task implementation that Amazon AppFlow performs. Valid values are
Arithmetic
,Filter
,Map
,Map_all
,Mask
,Merge
,Passthrough
,Truncate
, and Validate
. - Connector
Operators List<FlowTask Connector Operator> - Operation to be performed on the provided source fields. See Connector Operator for details.
- Destination
Field string - Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- Source
Fields List<string> - Source fields to which a particular task is applied.
- Task
Properties Dictionary<string, string> - Map used to store task-related information. The execution service looks for particular information based on the
TaskType
. Valid keys are VALUE
,VALUES
,DATA_TYPE
,UPPER_BOUND
,LOWER_BOUND
,SOURCE_DATA_TYPE
,DESTINATION_DATA_TYPE
,VALIDATION_ACTION
,MASK_VALUE
,MASK_LENGTH
,TRUNCATE_LENGTH
,MATH_OPERATION_FIELDS_ORDER
,CONCAT_FORMAT
,SUBFIELD_CATEGORY_MAP
, and EXCLUDE_SOURCE_FIELDS_LIST
.
- Task
Type string - Particular task implementation that Amazon AppFlow performs. Valid values are
Arithmetic
,Filter
,Map
,Map_all
,Mask
,Merge
,Passthrough
,Truncate
, andValidate
. - Connector
Operators []FlowTask Connector Operator - Operation to be performed on the provided source fields. See Connector Operator for details.
- Destination
Field string - Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- Source
Fields []string - Source fields to which a particular task is applied.
- Task
Properties map[string]string - Map used to store task-related information. The execution service looks for particular information based on the
TaskType
. Valid keys areVALUE
,VALUES
,DATA_TYPE
,UPPER_BOUND
,LOWER_BOUND
,SOURCE_DATA_TYPE
,DESTINATION_DATA_TYPE
,VALIDATION_ACTION
,MASK_VALUE
,MASK_LENGTH
,TRUNCATE_LENGTH
,MATH_OPERATION_FIELDS_ORDER
,CONCAT_FORMAT
,SUBFIELD_CATEGORY_MAP
, andEXCLUDE_SOURCE_FIELDS_LIST
.
- task
Type String - Particular task implementation that Amazon AppFlow performs. Valid values are
Arithmetic
,Filter
,Map
,Map_all
,Mask
,Merge
,Passthrough
,Truncate
, andValidate
. - connector
Operators List<FlowTask Connector Operator> - Operation to be performed on the provided source fields. See Connector Operator for details.
- destination
Field String - Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- source
Fields List<String> - Source fields to which a particular task is applied.
- task
Properties Map<String,String> - Map used to store task-related information. The execution service looks for particular information based on the
TaskType
. Valid keys areVALUE
,VALUES
,DATA_TYPE
,UPPER_BOUND
,LOWER_BOUND
,SOURCE_DATA_TYPE
,DESTINATION_DATA_TYPE
,VALIDATION_ACTION
,MASK_VALUE
,MASK_LENGTH
,TRUNCATE_LENGTH
,MATH_OPERATION_FIELDS_ORDER
,CONCAT_FORMAT
,SUBFIELD_CATEGORY_MAP
, andEXCLUDE_SOURCE_FIELDS_LIST
.
- task
Type string - Particular task implementation that Amazon AppFlow performs. Valid values are
Arithmetic
,Filter
,Map
,Map_all
,Mask
,Merge
,Passthrough
,Truncate
, andValidate
. - connector
Operators FlowTask Connector Operator[] - Operation to be performed on the provided source fields. See Connector Operator for details.
- destination
Field string - Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- source
Fields string[] - Source fields to which a particular task is applied.
- task
Properties {[key: string]: string} - Map used to store task-related information. The execution service looks for particular information based on the
TaskType
. Valid keys areVALUE
,VALUES
,DATA_TYPE
,UPPER_BOUND
,LOWER_BOUND
,SOURCE_DATA_TYPE
,DESTINATION_DATA_TYPE
,VALIDATION_ACTION
,MASK_VALUE
,MASK_LENGTH
,TRUNCATE_LENGTH
,MATH_OPERATION_FIELDS_ORDER
,CONCAT_FORMAT
,SUBFIELD_CATEGORY_MAP
, andEXCLUDE_SOURCE_FIELDS_LIST
.
- task_
type str - Particular task implementation that Amazon AppFlow performs. Valid values are
Arithmetic
,Filter
,Map
,Map_all
,Mask
,Merge
,Passthrough
,Truncate
, andValidate
. - connector_
operators Sequence[FlowTask Connector Operator] - Operation to be performed on the provided source fields. See Connector Operator for details.
- destination_
field str - Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- source_
fields Sequence[str] - Source fields to which a particular task is applied.
- task_
properties Mapping[str, str] - Map used to store task-related information. The execution service looks for particular information based on the
TaskType
. Valid keys areVALUE
,VALUES
,DATA_TYPE
,UPPER_BOUND
,LOWER_BOUND
,SOURCE_DATA_TYPE
,DESTINATION_DATA_TYPE
,VALIDATION_ACTION
,MASK_VALUE
,MASK_LENGTH
,TRUNCATE_LENGTH
,MATH_OPERATION_FIELDS_ORDER
,CONCAT_FORMAT
,SUBFIELD_CATEGORY_MAP
, andEXCLUDE_SOURCE_FIELDS_LIST
.
- task
Type String - Particular task implementation that Amazon AppFlow performs. Valid values are
Arithmetic
,Filter
,Map
,Map_all
,Mask
,Merge
,Passthrough
,Truncate
, andValidate
. - connector
Operators List<Property Map> - Operation to be performed on the provided source fields. See Connector Operator for details.
- destination
Field String - Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- source
Fields List<String> - Source fields to which a particular task is applied.
- task
Properties Map<String> - Map used to store task-related information. The execution service looks for particular information based on the
TaskType
. Valid keys areVALUE
,VALUES
,DATA_TYPE
,UPPER_BOUND
,LOWER_BOUND
,SOURCE_DATA_TYPE
,DESTINATION_DATA_TYPE
,VALIDATION_ACTION
,MASK_VALUE
,MASK_LENGTH
,TRUNCATE_LENGTH
,MATH_OPERATION_FIELDS_ORDER
,CONCAT_FORMAT
,SUBFIELD_CATEGORY_MAP
, andEXCLUDE_SOURCE_FIELDS_LIST
.
FlowTaskConnectorOperator, FlowTaskConnectorOperatorArgs
- Amplitude string
- Operation to be performed on the provided Amplitude source fields. The only valid value is
BETWEEN
. - Custom
Connector string - Operators supported by the custom connector. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Datadog string
- Operation to be performed on the provided Datadog source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, andNO_OP
. - Dynatrace string
- Operation to be performed on the provided Dynatrace source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, andNO_OP
. - Google
Analytics string - Operation to be performed on the provided Google Analytics source fields. Valid values are
PROJECTION
and BETWEEN
. - Infor
Nexus string - Operation to be performed on the provided Infor Nexus source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, andNO_OP
. - Marketo string
- Operation to be performed on the provided Marketo source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, andNO_OP
. - S3 string
- Operation to be performed on the provided Amazon S3 source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, andNO_OP
. - Salesforce string
- Operation to be performed on the provided Salesforce source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, andNO_OP
. - Sapo
Data string - Operation to be performed on the provided SAPOData source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Service
Now string - Operation to be performed on the provided ServiceNow source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Singular string
- Operation to be performed on the provided Singular source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Slack string
- Operation to be performed on the provided Slack source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Trendmicro string
- Operation to be performed on the provided Trend Micro source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Veeva string
- Operation to be performed on the provided Veeva source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Zendesk string
- Operation to be performed on the provided Zendesk source fields. Valid values are
PROJECTION
,GREATER_THAN
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
.
- Amplitude string
- Operation to be performed on the provided Amplitude source fields. The only valid value is
BETWEEN
. - Custom
Connector string - Operators supported by the custom connector. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Datadog string
- Operation to be performed on the provided Datadog source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Dynatrace string
- Operation to be performed on the provided Dynatrace source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Google
Analytics string - Operation to be performed on the provided Google Analytics source fields. Valid values are
PROJECTION
and BETWEEN
. - Infor
Nexus string - Operation to be performed on the provided Infor Nexus source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Marketo string
- Operation to be performed on the provided Marketo source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - S3 string
- Operation to be performed on the provided Amazon S3 source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Salesforce string
- Operation to be performed on the provided Salesforce source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Sapo
Data string - Operation to be performed on the provided SAPOData source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Service
Now string - Operation to be performed on the provided ServiceNow source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Singular string
- Operation to be performed on the provided Singular source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Slack string
- Operation to be performed on the provided Slack source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Trendmicro string
- Operation to be performed on the provided Trend Micro source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Veeva string
- Operation to be performed on the provided Veeva source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - Zendesk string
- Operation to be performed on the provided Zendesk source fields. Valid values are
PROJECTION
,GREATER_THAN
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
.
- amplitude String
- Operation to be performed on the provided Amplitude source fields. The only valid value is
BETWEEN
. - custom
Connector String - Operators supported by the custom connector. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - datadog String
- Operation to be performed on the provided Datadog source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - dynatrace String
- Operation to be performed on the provided Dynatrace source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - google
Analytics String - Operation to be performed on the provided Google Analytics source fields. Valid values are
PROJECTION
and BETWEEN
. - infor
Nexus String - Operation to be performed on the provided Infor Nexus source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - marketo String
- Operation to be performed on the provided Marketo source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - s3 String
- Operation to be performed on the provided Amazon S3 source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - salesforce String
- Operation to be performed on the provided Salesforce source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - sapo
Data String - Operation to be performed on the provided SAPOData source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - service
Now String - Operation to be performed on the provided ServiceNow source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - singular String
- Operation to be performed on the provided Singular source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - slack String
- Operation to be performed on the provided Slack source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - trendmicro String
- Operation to be performed on the provided Trend Micro source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - veeva String
- Operation to be performed on the provided Veeva source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - zendesk String
- Operation to be performed on the provided Zendesk source fields. Valid values are
PROJECTION
,GREATER_THAN
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
.
- amplitude string
- Operation to be performed on the provided Amplitude source fields. The only valid value is
BETWEEN
. - custom
Connector string - Operators supported by the custom connector. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - datadog string
- Operation to be performed on the provided Datadog source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - dynatrace string
- Operation to be performed on the provided Dynatrace source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - google
Analytics string - Operation to be performed on the provided Google Analytics source fields. Valid values are
PROJECTION
and BETWEEN
. - infor
Nexus string - Operation to be performed on the provided Infor Nexus source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - marketo string
- Operation to be performed on the provided Marketo source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - s3 string
- Operation to be performed on the provided Amazon S3 source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - salesforce string
- Operation to be performed on the provided Salesforce source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - sapo
Data string - Operation to be performed on the provided SAPOData source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - service
Now string - Operation to be performed on the provided ServiceNow source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - singular string
- Operation to be performed on the provided Singular source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - slack string
- Operation to be performed on the provided Slack source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - trendmicro string
- Operation to be performed on the provided Trend Micro source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - veeva string
- Operation to be performed on the provided Veeva source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - zendesk string
- Operation to be performed on the provided Zendesk source fields. Valid values are
PROJECTION
,GREATER_THAN
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
.
- amplitude str
- Operation to be performed on the provided Amplitude source fields. The only valid value is
BETWEEN
. - custom_
connector str - Operators supported by the custom connector. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - datadog str
- Operation to be performed on the provided Datadog source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - dynatrace str
- Operation to be performed on the provided Dynatrace source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - google_
analytics str - Operation to be performed on the provided Google Analytics source fields. Valid values are
PROJECTION
and BETWEEN
. - infor_
nexus str - Operation to be performed on the provided Infor Nexus source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - marketo str
- Operation to be performed on the provided Marketo source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - s3 str
- Operation to be performed on the provided Amazon S3 source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - salesforce str
- Operation to be performed on the provided Salesforce source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - sapo_
data str - Operation to be performed on the provided SAPOData source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - service_
now str - Operation to be performed on the provided ServiceNow source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - singular str
- Operation to be performed on the provided Singular source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - slack str
- Operation to be performed on the provided Slack source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - trendmicro str
- Operation to be performed on the provided Trend Micro source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - veeva str
- Operation to be performed on the provided Veeva source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - zendesk str
- Operation to be performed on the provided Zendesk source fields. Valid values are
PROJECTION
,GREATER_THAN
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
.
- amplitude String
- Operation to be performed on the provided Amplitude source fields. The only valid value is
BETWEEN
. - custom
Connector String - Operators supported by the custom connector. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - datadog String
- Operation to be performed on the provided Datadog source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - dynatrace String
- Operation to be performed on the provided Dynatrace source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - google
Analytics String - Operation to be performed on the provided Google Analytics source fields. Valid values are
PROJECTION
and BETWEEN
. - infor
Nexus String - Operation to be performed on the provided Infor Nexus source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - marketo String
- Operation to be performed on the provided Marketo source fields. Valid values are
PROJECTION
,BETWEEN
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - s3 String
- Operation to be performed on the provided Amazon S3 source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - salesforce String
- Operation to be performed on the provided Salesforce source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - sapo
Data String - Operation to be performed on the provided SAPOData source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - service
Now String - Operation to be performed on the provided ServiceNow source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - singular String
- Operation to be performed on the provided Singular source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - slack String
- Operation to be performed on the provided Slack source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - trendmicro String
- Operation to be performed on the provided Trend Micro source fields. Valid values are
PROJECTION
,EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - veeva String
- Operation to be performed on the provided Veeva source fields. Valid values are
PROJECTION
,LESS_THAN
,GREATER_THAN
,CONTAINS
,BETWEEN
,LESS_THAN_OR_EQUAL_TO
,GREATER_THAN_OR_EQUAL_TO
,EQUAL_TO
,NOT_EQUAL_TO
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
. - zendesk String
- Operation to be performed on the provided Zendesk source fields. Valid values are
PROJECTION
,GREATER_THAN
,ADDITION
,MULTIPLICATION
,DIVISION
,SUBTRACTION
,MASK_ALL
,MASK_FIRST_N
,MASK_LAST_N
,VALIDATE_NON_NULL
,VALIDATE_NON_ZERO
,VALIDATE_NON_NEGATIVE
,VALIDATE_NUMERIC
, and NO_OP
.
FlowTriggerConfig, FlowTriggerConfigArgs
- Trigger
Type string - Type of flow trigger. Valid values are
Scheduled
,Event
, and OnDemand
. - Trigger
Properties FlowTrigger Config Trigger Properties - Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the
Scheduled
trigger type. See Scheduled Trigger Properties for details.
- Trigger
Type string - Type of flow trigger. Valid values are
Scheduled
,Event
, and OnDemand
. - Trigger
Properties FlowTrigger Config Trigger Properties - Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the
Scheduled
trigger type. See Scheduled Trigger Properties for details.
- trigger
Type String - Type of flow trigger. Valid values are
Scheduled
,Event
, and OnDemand
. - trigger
Properties FlowTrigger Config Trigger Properties - Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the
Scheduled
trigger type. See Scheduled Trigger Properties for details.
- trigger
Type string - Type of flow trigger. Valid values are
Scheduled
,Event
, and OnDemand
. - trigger
Properties FlowTrigger Config Trigger Properties - Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the
Scheduled
trigger type. See Scheduled Trigger Properties for details.
- trigger_
type str - Type of flow trigger. Valid values are
Scheduled
,Event
, and OnDemand
. - trigger_
properties FlowTrigger Config Trigger Properties - Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the
Scheduled
trigger type. See Scheduled Trigger Properties for details.
- trigger
Type String - Type of flow trigger. Valid values are
Scheduled
,Event
, and OnDemand
. - trigger
Properties Property Map - Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the
Scheduled
trigger type. See Scheduled Trigger Properties for details.
FlowTriggerConfigTriggerProperties, FlowTriggerConfigTriggerPropertiesArgs
FlowTriggerConfigTriggerPropertiesScheduled, FlowTriggerConfigTriggerPropertiesScheduledArgs
- Schedule
Expression string - Scheduling expression that determines the rate at which the schedule will run, for example
rate(5minutes)
. - Data
Pull stringMode - Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are
Incremental
and Complete
. - First
Execution stringFrom - Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- Schedule
End stringTime - Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- Schedule
Offset int - Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- Schedule
Start stringTime - Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- Timezone string
- Time zone used when referring to the date and time of a scheduled-triggered flow, such as
America/New_York
.
package generated_program;
import com.pulumi.Context; import com.pulumi.Pulumi; import com.pulumi.core.Output; import com.pulumi.aws.appflow.Flow; import com.pulumi.aws.appflow.FlowArgs; import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs; import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.io.File; import java.nio.file.Files; import java.nio.file.Paths;
public class App { public static void main(String[] args) { Pulumi.run(App::stack); }
public static void stack(Context ctx) { var example = new Flow("example", FlowArgs.builder() .triggerConfig(FlowTriggerConfigArgs.builder() .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder() .scheduleExpression("rate(1minutes)") .build()) .build()) .build()); }
}
resources: example: type: aws:appflow:Flow properties: triggerConfig: scheduled: - scheduleExpression: rate(1minutes)
- ScheduleExpression string - Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- DataPullMode string - Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- FirstExecutionFrom string - Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- ScheduleEndTime string - Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- ScheduleOffset int - Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- ScheduleStartTime string - Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- Timezone string - Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

package generated_program;
import com.pulumi.Context; import com.pulumi.Pulumi; import com.pulumi.core.Output; import com.pulumi.aws.appflow.Flow; import com.pulumi.aws.appflow.FlowArgs; import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.io.File; import java.nio.file.Files; import java.nio.file.Paths;
public class App { public static void main(String[] args) { Pulumi.run(App::stack); }
public static void stack(Context ctx) { var example = new Flow("example", FlowArgs.builder() .triggerConfig(FlowTriggerConfigArgs.builder() .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder() .scheduleExpression("rate(1minutes)") .build()) .build()) .build()); }
}
resources: example: type: aws:appflow:Flow properties: triggerConfig: scheduled: - scheduleExpression: rate(1minutes)
- scheduleExpression String - Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode String - Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom String - Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime String - Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset Integer - Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime String - Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone String - Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

package generated_program;
import com.pulumi.Context; import com.pulumi.Pulumi; import com.pulumi.core.Output; import com.pulumi.aws.appflow.Flow; import com.pulumi.aws.appflow.FlowArgs; import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.io.File; import java.nio.file.Files; import java.nio.file.Paths;
public class App { public static void main(String[] args) { Pulumi.run(App::stack); }
public static void stack(Context ctx) { var example = new Flow("example", FlowArgs.builder() .triggerConfig(FlowTriggerConfigArgs.builder() .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder() .scheduleExpression("rate(1minutes)") .build()) .build()) .build()); }
}
resources: example: type: aws:appflow:Flow properties: triggerConfig: scheduled: - scheduleExpression: rate(1minutes)
- scheduleExpression string - Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode string - Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom string - Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime string - Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset number - Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime string - Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone string - Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

package generated_program;
import com.pulumi.Context; import com.pulumi.Pulumi; import com.pulumi.core.Output; import com.pulumi.aws.appflow.Flow; import com.pulumi.aws.appflow.FlowArgs; import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.io.File; import java.nio.file.Files; import java.nio.file.Paths;
public class App { public static void main(String[] args) { Pulumi.run(App::stack); }
public static void stack(Context ctx) { var example = new Flow("example", FlowArgs.builder() .triggerConfig(FlowTriggerConfigArgs.builder() .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder() .scheduleExpression("rate(1minutes)") .build()) .build()) .build()); }
}
resources: example: type: aws:appflow:Flow properties: triggerConfig: scheduled: - scheduleExpression: rate(1minutes)
- schedule_expression str - Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- data_pull_mode str - Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- first_execution_from str - Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- schedule_end_time str - Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- schedule_offset int - Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- schedule_start_time str - Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone str - Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

package generated_program;
import com.pulumi.Context; import com.pulumi.Pulumi; import com.pulumi.core.Output; import com.pulumi.aws.appflow.Flow; import com.pulumi.aws.appflow.FlowArgs; import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.io.File; import java.nio.file.Files; import java.nio.file.Paths;
public class App { public static void main(String[] args) { Pulumi.run(App::stack); }
public static void stack(Context ctx) { var example = new Flow("example", FlowArgs.builder() .triggerConfig(FlowTriggerConfigArgs.builder() .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder() .scheduleExpression("rate(1minutes)") .build()) .build()) .build()); }
}
resources: example: type: aws:appflow:Flow properties: triggerConfig: scheduled: - scheduleExpression: rate(1minutes)
- scheduleExpression String - Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode String - Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom String - Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime String - Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset Number - Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime String - Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone String - Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

package generated_program;
import com.pulumi.Context; import com.pulumi.Pulumi; import com.pulumi.core.Output; import com.pulumi.aws.appflow.Flow; import com.pulumi.aws.appflow.FlowArgs; import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.io.File; import java.nio.file.Files; import java.nio.file.Paths;
public class App { public static void main(String[] args) { Pulumi.run(App::stack); }
public static void stack(Context ctx) { var example = new Flow("example", FlowArgs.builder() .triggerConfig(FlowTriggerConfigArgs.builder() .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder() .scheduleExpression("rate(1minutes)") .build()) .build()) .build()); }
}
resources: example: type: aws:appflow:Flow properties: triggerConfig: scheduled: - scheduleExpression: rate(1minutes)
Import
Using pulumi import
, import AppFlow flows using the arn
. For example:
$ pulumi import aws:appflow/flow:Flow example arn:aws:appflow:us-west-2:123456789012:flow/example-flow
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
aws
Terraform Provider.
Try AWS Native preview for resources not in the classic version.