aws.pipes.Pipe
Resource for managing an AWS EventBridge Pipes Pipe.
You can find out more about EventBridge Pipes in the User Guide. EventBridge Pipes are highly configurable and may require additional IAM permissions to work correctly; the configuration options and required IAM permissions are also covered in the User Guide.
Note: EventBridge was formerly known as CloudWatch Events. The functionality is identical.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const main = aws.getCallerIdentity({});
const example = new aws.iam.Role("example", {assumeRolePolicy: pulumi.jsonStringify({
Version: "2012-10-17",
Statement: {
Effect: "Allow",
Action: "sts:AssumeRole",
Principal: {
Service: "pipes.amazonaws.com",
},
Condition: {
StringEquals: {
"aws:SourceAccount": main.then(main => main.accountId),
},
},
},
})});
const sourceQueue = new aws.sqs.Queue("source", {});
const source = new aws.iam.RolePolicy("source", {
role: example.id,
policy: pulumi.jsonStringify({
Version: "2012-10-17",
Statement: [{
Effect: "Allow",
Action: [
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
],
Resource: [sourceQueue.arn],
}],
}),
});
const targetQueue = new aws.sqs.Queue("target", {});
const target = new aws.iam.RolePolicy("target", {
role: example.id,
policy: pulumi.jsonStringify({
Version: "2012-10-17",
Statement: [{
Effect: "Allow",
Action: ["sqs:SendMessage"],
Resource: [targetQueue.arn],
}],
}),
});
const examplePipe = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: example.arn,
source: sourceQueue.arn,
target: targetQueue.arn,
}, {
dependsOn: [
source,
target,
],
});
import pulumi
import json
import pulumi_aws as aws
main = aws.get_caller_identity()
example = aws.iam.Role("example", assume_role_policy=json.dumps({
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Action": "sts:AssumeRole",
"Principal": {
"Service": "pipes.amazonaws.com",
},
"Condition": {
"StringEquals": {
"aws:SourceAccount": main.account_id,
},
},
},
}))
source_queue = aws.sqs.Queue("source")
source = aws.iam.RolePolicy("source",
role=example.id,
policy=pulumi.Output.json_dumps({
"Version": "2012-10-17",
"Statement": [{
"Effect": "Allow",
"Action": [
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
],
"Resource": [source_queue.arn],
}],
}))
target_queue = aws.sqs.Queue("target")
target = aws.iam.RolePolicy("target",
role=example.id,
policy=pulumi.Output.json_dumps({
"Version": "2012-10-17",
"Statement": [{
"Effect": "Allow",
"Action": ["sqs:SendMessage"],
"Resource": [target_queue.arn],
}],
}))
example_pipe = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example.arn,
source=source_queue.arn,
target=target_queue.arn,
opts = pulumi.ResourceOptions(depends_on=[
source,
target,
]))
package main
import (
"encoding/json"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/sqs"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
main, err := aws.GetCallerIdentity(ctx, nil, nil)
if err != nil {
return err
}
tmpJSON0, err := json.Marshal(map[string]interface{}{
"Version": "2012-10-17",
"Statement": map[string]interface{}{
"Effect": "Allow",
"Action": "sts:AssumeRole",
"Principal": map[string]interface{}{
"Service": "pipes.amazonaws.com",
},
"Condition": map[string]interface{}{
"StringEquals": map[string]interface{}{
"aws:SourceAccount": main.AccountId,
},
},
},
})
if err != nil {
return err
}
json0 := string(tmpJSON0)
example, err := iam.NewRole(ctx, "example", &iam.RoleArgs{
AssumeRolePolicy: pulumi.String(json0),
})
if err != nil {
return err
}
sourceQueue, err := sqs.NewQueue(ctx, "source", nil)
if err != nil {
return err
}
source, err := iam.NewRolePolicy(ctx, "source", &iam.RolePolicyArgs{
Role: example.ID(),
Policy: sourceQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
var _zero pulumi.String
tmpJSON1, err := json.Marshal(map[string]interface{}{
"Version": "2012-10-17",
"Statement": []map[string]interface{}{
map[string]interface{}{
"Effect": "Allow",
"Action": []string{
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
},
"Resource": []string{
arn,
},
},
},
})
if err != nil {
return _zero, err
}
json1 := string(tmpJSON1)
return pulumi.String(json1), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
targetQueue, err := sqs.NewQueue(ctx, "target", nil)
if err != nil {
return err
}
target, err := iam.NewRolePolicy(ctx, "target", &iam.RolePolicyArgs{
Role: example.ID(),
Policy: targetQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
var _zero pulumi.String
tmpJSON2, err := json.Marshal(map[string]interface{}{
"Version": "2012-10-17",
"Statement": []map[string]interface{}{
map[string]interface{}{
"Effect": "Allow",
"Action": []string{
"sqs:SendMessage",
},
"Resource": []string{
arn,
},
},
},
})
if err != nil {
return _zero, err
}
json2 := string(tmpJSON2)
return pulumi.String(json2), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: example.Arn,
Source: sourceQueue.Arn,
Target: targetQueue.Arn,
}, pulumi.DependsOn([]pulumi.Resource{
source,
target,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var main = Aws.GetCallerIdentity.Invoke();
var example = new Aws.Iam.Role("example", new()
{
AssumeRolePolicy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
{
["Version"] = "2012-10-17",
["Statement"] = new Dictionary<string, object?>
{
["Effect"] = "Allow",
["Action"] = "sts:AssumeRole",
["Principal"] = new Dictionary<string, object?>
{
["Service"] = "pipes.amazonaws.com",
},
["Condition"] = new Dictionary<string, object?>
{
["StringEquals"] = new Dictionary<string, object?>
{
["aws:SourceAccount"] = main.Apply(getCallerIdentityResult => getCallerIdentityResult.AccountId),
},
},
},
})),
});
var sourceQueue = new Aws.Sqs.Queue("source");
var source = new Aws.Iam.RolePolicy("source", new()
{
Role = example.Id,
Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
{
["Version"] = "2012-10-17",
["Statement"] = new[]
{
new Dictionary<string, object?>
{
["Effect"] = "Allow",
["Action"] = new[]
{
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
},
["Resource"] = new[]
{
sourceQueue.Arn,
},
},
},
})),
});
var targetQueue = new Aws.Sqs.Queue("target");
var target = new Aws.Iam.RolePolicy("target", new()
{
Role = example.Id,
Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
{
["Version"] = "2012-10-17",
["Statement"] = new[]
{
new Dictionary<string, object?>
{
["Effect"] = "Allow",
["Action"] = new[]
{
"sqs:SendMessage",
},
["Resource"] = new[]
{
targetQueue.Arn,
},
},
},
})),
});
var examplePipe = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = example.Arn,
Source = sourceQueue.Arn,
Target = targetQueue.Arn,
}, new CustomResourceOptions
{
DependsOn =
{
source,
target,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.AwsFunctions;
import com.pulumi.aws.inputs.GetCallerIdentityArgs;
import com.pulumi.aws.iam.Role;
import com.pulumi.aws.iam.RoleArgs;
import com.pulumi.aws.sqs.Queue;
import com.pulumi.aws.iam.RolePolicy;
import com.pulumi.aws.iam.RolePolicyArgs;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var main = AwsFunctions.getCallerIdentity();
var example = new Role("example", RoleArgs.builder()
.assumeRolePolicy(serializeJson(
jsonObject(
jsonProperty("Version", "2012-10-17"),
jsonProperty("Statement", jsonObject(
jsonProperty("Effect", "Allow"),
jsonProperty("Action", "sts:AssumeRole"),
jsonProperty("Principal", jsonObject(
jsonProperty("Service", "pipes.amazonaws.com")
)),
jsonProperty("Condition", jsonObject(
jsonProperty("StringEquals", jsonObject(
jsonProperty("aws:SourceAccount", main.applyValue(getCallerIdentityResult -> getCallerIdentityResult.accountId()))
))
))
))
)))
.build());
var sourceQueue = new Queue("sourceQueue");
var source = new RolePolicy("source", RolePolicyArgs.builder()
.role(example.id())
.policy(sourceQueue.arn().applyValue(arn -> serializeJson(
jsonObject(
jsonProperty("Version", "2012-10-17"),
jsonProperty("Statement", jsonArray(jsonObject(
jsonProperty("Effect", "Allow"),
jsonProperty("Action", jsonArray(
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage"
)),
jsonProperty("Resource", jsonArray(arn))
)))
))))
.build());
var targetQueue = new Queue("targetQueue");
var target = new RolePolicy("target", RolePolicyArgs.builder()
.role(example.id())
.policy(targetQueue.arn().applyValue(arn -> serializeJson(
jsonObject(
jsonProperty("Version", "2012-10-17"),
jsonProperty("Statement", jsonArray(jsonObject(
jsonProperty("Effect", "Allow"),
jsonProperty("Action", jsonArray("sqs:SendMessage")),
jsonProperty("Resource", jsonArray(arn))
)))
))))
.build());
var examplePipe = new Pipe("examplePipe", PipeArgs.builder()
.name("example-pipe")
.roleArn(example.arn())
.source(sourceQueue.arn())
.target(targetQueue.arn())
.build(), CustomResourceOptions.builder()
.dependsOn(
source,
target)
.build());
}
}
resources:
example:
type: aws:iam:Role
properties:
assumeRolePolicy:
fn::toJSON:
Version: 2012-10-17
Statement:
Effect: Allow
Action: sts:AssumeRole
Principal:
Service: pipes.amazonaws.com
Condition:
StringEquals:
aws:SourceAccount: ${main.accountId}
source:
type: aws:iam:RolePolicy
properties:
role: ${example.id}
policy:
fn::toJSON:
Version: 2012-10-17
Statement:
- Effect: Allow
Action:
- sqs:DeleteMessage
- sqs:GetQueueAttributes
- sqs:ReceiveMessage
Resource:
- ${sourceQueue.arn}
sourceQueue:
type: aws:sqs:Queue
name: source
target:
type: aws:iam:RolePolicy
properties:
role: ${example.id}
policy:
fn::toJSON:
Version: 2012-10-17
Statement:
- Effect: Allow
Action:
- sqs:SendMessage
Resource:
- ${targetQueue.arn}
targetQueue:
type: aws:sqs:Queue
name: target
examplePipe:
type: aws:pipes:Pipe
name: example
properties:
name: example-pipe
roleArn: ${example.arn}
source: ${sourceQueue.arn}
target: ${targetQueue.arn}
options:
dependsOn:
- ${source}
- ${target}
variables:
main:
fn::invoke:
Function: aws:getCallerIdentity
Arguments: {}
Enrichment Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: exampleAwsIamRole.arn,
source: source.arn,
target: target.arn,
enrichment: exampleAwsCloudwatchEventApiDestination.arn,
enrichmentParameters: {
httpParameters: {
pathParameterValues: "example-path-param",
headerParameters: {
"example-header": "example-value",
"second-example-header": "second-example-value",
},
queryStringParameters: {
"example-query-string": "example-value",
"second-example-query-string": "second-example-value",
},
},
},
});
import pulumi
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example_aws_iam_role["arn"],
source=source["arn"],
target=target["arn"],
enrichment=example_aws_cloudwatch_event_api_destination["arn"],
enrichment_parameters={
"httpParameters": {
"pathParameterValues": "example-path-param",
"headerParameters": {
"example-header": "example-value",
"second-example-header": "second-example-value",
},
"queryStringParameters": {
"example-query-string": "example-value",
"second-example-query-string": "second-example-value",
},
},
})
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
Source: pulumi.Any(source.Arn),
Target: pulumi.Any(target.Arn),
Enrichment: pulumi.Any(exampleAwsCloudwatchEventApiDestination.Arn),
EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
PathParameterValues: pulumi.String("example-path-param"),
HeaderParameters: pulumi.StringMap{
"example-header": pulumi.String("example-value"),
"second-example-header": pulumi.String("second-example-value"),
},
QueryStringParameters: pulumi.StringMap{
"example-query-string": pulumi.String("example-value"),
"second-example-query-string": pulumi.String("second-example-value"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = exampleAwsIamRole.Arn,
Source = source.Arn,
Target = target.Arn,
Enrichment = exampleAwsCloudwatchEventApiDestination.Arn,
EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
{
HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
{
PathParameterValues = "example-path-param",
HeaderParameters =
{
{ "example-header", "example-value" },
{ "second-example-header", "second-example-value" },
},
QueryStringParameters =
{
{ "example-query-string", "example-value" },
{ "second-example-query-string", "second-example-value" },
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersHttpParametersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Pipe("example", PipeArgs.builder()
.name("example-pipe")
.roleArn(exampleAwsIamRole.arn())
.source(source.arn())
.target(target.arn())
.enrichment(exampleAwsCloudwatchEventApiDestination.arn())
.enrichmentParameters(PipeEnrichmentParametersArgs.builder()
.httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
.pathParameterValues("example-path-param")
.headerParameters(Map.ofEntries(
Map.entry("example-header", "example-value"),
Map.entry("second-example-header", "second-example-value")
))
.queryStringParameters(Map.ofEntries(
Map.entry("example-query-string", "example-value"),
Map.entry("second-example-query-string", "second-example-value")
))
.build())
.build())
.build());
}
}
resources:
example:
type: aws:pipes:Pipe
properties:
name: example-pipe
roleArn: ${exampleAwsIamRole.arn}
source: ${source.arn}
target: ${target.arn}
enrichment: ${exampleAwsCloudwatchEventApiDestination.arn}
enrichmentParameters:
httpParameters:
pathParameterValues: example-path-param
headerParameters:
example-header: example-value
second-example-header: second-example-value
queryStringParameters:
example-query-string: example-value
second-example-query-string: second-example-value
Filter Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: exampleAwsIamRole.arn,
source: source.arn,
target: target.arn,
sourceParameters: {
filterCriteria: {
filters: [{
pattern: JSON.stringify({
source: ["event-source"],
}),
}],
},
},
});
import pulumi
import json
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example_aws_iam_role["arn"],
source=source["arn"],
target=target["arn"],
source_parameters={
"filterCriteria": {
"filters": [{
"pattern": json.dumps({
"source": ["event-source"],
}),
}],
},
})
package main
import (
"encoding/json"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
tmpJSON0, err := json.Marshal(map[string]interface{}{
"source": []string{
"event-source",
},
})
if err != nil {
return err
}
json0 := string(tmpJSON0)
_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
Source: pulumi.Any(source.Arn),
Target: pulumi.Any(target.Arn),
SourceParameters: &pipes.PipeSourceParametersArgs{
FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
Pattern: pulumi.String(json0),
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = exampleAwsIamRole.Arn,
Source = source.Arn,
Target = target.Arn,
SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
{
FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
{
Filters = new[]
{
new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
{
Pattern = JsonSerializer.Serialize(new Dictionary<string, object?>
{
["source"] = new[]
{
"event-source",
},
}),
},
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersFilterCriteriaArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersFilterCriteriaFilterArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Pipe("example", PipeArgs.builder()
.name("example-pipe")
.roleArn(exampleAwsIamRole.arn())
.source(source.arn())
.target(target.arn())
.sourceParameters(PipeSourceParametersArgs.builder()
.filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
.filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
.pattern(serializeJson(
jsonObject(
jsonProperty("source", jsonArray("event-source"))
)))
.build())
.build())
.build())
.build());
}
}
resources:
example:
type: aws:pipes:Pipe
properties:
name: example-pipe
roleArn: ${exampleAwsIamRole.arn}
source: ${source.arn}
target: ${target.arn}
sourceParameters:
filterCriteria:
filters:
- pattern:
fn::toJSON:
source:
- event-source
SQS Source and Target Configuration Usage
TypeScript, Python, Go, and C# examples for this configuration are coming soon; a minimal TypeScript sketch is included after the Java and YAML examples below.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersSqsQueueParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeTargetParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeTargetParametersSqsQueueParametersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Pipe("example", PipeArgs.builder()
.name("example-pipe")
.roleArn(exampleAwsIamRole.arn())
.source(source.arn())
.target(target.arn())
.sourceParameters(PipeSourceParametersArgs.builder()
.sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
.batchSize(1)
.maximumBatchingWindowInSeconds(2)
.build())
.build())
.targetParameters(PipeTargetParametersArgs.builder()
.sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
.messageDeduplicationId("example-dedupe")
.messageGroupId("example-group")
.build())
.build())
.build());
}
}
resources:
example:
type: aws:pipes:Pipe
properties:
name: example-pipe
roleArn: ${exampleAwsIamRole.arn}
source: ${source.arn}
target: ${target.arn}
sourceParameters:
sqsQueueParameters:
batchSize: 1
maximumBatchingWindowInSeconds: 2
targetParameters:
sqsQueueParameters:
  messageDeduplicationId: example-dedupe
  messageGroupId: example-group
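Until the remaining generated examples are available, here is a minimal TypeScript sketch of the same SQS source and target configuration shown in the YAML above. It assumes exampleAwsIamRole, source, and target are an IAM role and SQS queues defined elsewhere in your program, and that the target is a FIFO queue (message group and deduplication IDs only apply to FIFO queues).
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Sketch only: `exampleAwsIamRole`, `source`, and `target` are assumed to be defined elsewhere.
const example = new aws.pipes.Pipe("example", {
    name: "example-pipe",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    sourceParameters: {
        // Read from the source queue one message at a time, waiting up to 2 seconds per batch.
        sqsQueueParameters: {
            batchSize: 1,
            maximumBatchingWindowInSeconds: 2,
        },
    },
    targetParameters: {
        // FIFO target queue settings, matching the YAML example above.
        sqsQueueParameters: {
            messageDeduplicationId: "example-dedupe",
            messageGroupId: "example-group",
        },
    },
});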
Create Pipe Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Pipe(name: string, args: PipeArgs, opts?: CustomResourceOptions);
@overload
def Pipe(resource_name: str,
args: PipeArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Pipe(resource_name: str,
opts: Optional[ResourceOptions] = None,
role_arn: Optional[str] = None,
target: Optional[str] = None,
source: Optional[str] = None,
name_prefix: Optional[str] = None,
log_configuration: Optional[PipeLogConfigurationArgs] = None,
name: Optional[str] = None,
description: Optional[str] = None,
enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
enrichment: Optional[str] = None,
source_parameters: Optional[PipeSourceParametersArgs] = None,
tags: Optional[Mapping[str, str]] = None,
desired_state: Optional[str] = None,
target_parameters: Optional[PipeTargetParametersArgs] = None)
func NewPipe(ctx *Context, name string, args PipeArgs, opts ...ResourceOption) (*Pipe, error)
public Pipe(string name, PipeArgs args, CustomResourceOptions? opts = null)
type: aws:pipes:Pipe
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var pipeResource = new Aws.Pipes.Pipe("pipeResource", new()
{
RoleArn = "string",
Target = "string",
Source = "string",
NamePrefix = "string",
LogConfiguration = new Aws.Pipes.Inputs.PipeLogConfigurationArgs
{
Level = "string",
CloudwatchLogsLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs
{
LogGroupArn = "string",
},
FirehoseLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationFirehoseLogDestinationArgs
{
DeliveryStreamArn = "string",
},
S3LogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationS3LogDestinationArgs
{
BucketName = "string",
BucketOwner = "string",
OutputFormat = "string",
Prefix = "string",
},
},
Name = "string",
Description = "string",
EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
{
HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
{
HeaderParameters =
{
{ "string", "string" },
},
PathParameterValues = "string",
QueryStringParameters =
{
{ "string", "string" },
},
},
InputTemplate = "string",
},
Enrichment = "string",
SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
{
ActivemqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersArgs
{
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersCredentialsArgs
{
BasicAuth = "string",
},
QueueName = "string",
BatchSize = 0,
MaximumBatchingWindowInSeconds = 0,
},
DynamodbStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersArgs
{
StartingPosition = "string",
BatchSize = 0,
DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs
{
Arn = "string",
},
MaximumBatchingWindowInSeconds = 0,
MaximumRecordAgeInSeconds = 0,
MaximumRetryAttempts = 0,
OnPartialBatchItemFailure = "string",
ParallelizationFactor = 0,
},
FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
{
Filters = new[]
{
new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
{
Pattern = "string",
},
},
},
KinesisStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersArgs
{
StartingPosition = "string",
BatchSize = 0,
DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs
{
Arn = "string",
},
MaximumBatchingWindowInSeconds = 0,
MaximumRecordAgeInSeconds = 0,
MaximumRetryAttempts = 0,
OnPartialBatchItemFailure = "string",
ParallelizationFactor = 0,
StartingPositionTimestamp = "string",
},
ManagedStreamingKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersArgs
{
TopicName = "string",
BatchSize = 0,
ConsumerGroupId = "string",
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs
{
ClientCertificateTlsAuth = "string",
SaslScram512Auth = "string",
},
MaximumBatchingWindowInSeconds = 0,
StartingPosition = "string",
},
RabbitmqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersArgs
{
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs
{
BasicAuth = "string",
},
QueueName = "string",
BatchSize = 0,
MaximumBatchingWindowInSeconds = 0,
VirtualHost = "string",
},
SelfManagedKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersArgs
{
TopicName = "string",
AdditionalBootstrapServers = new[]
{
"string",
},
BatchSize = 0,
ConsumerGroupId = "string",
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs
{
BasicAuth = "string",
ClientCertificateTlsAuth = "string",
SaslScram256Auth = "string",
SaslScram512Auth = "string",
},
MaximumBatchingWindowInSeconds = 0,
ServerRootCaCertificate = "string",
StartingPosition = "string",
Vpc = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersVpcArgs
{
SecurityGroups = new[]
{
"string",
},
Subnets = new[]
{
"string",
},
},
},
SqsQueueParameters = new Aws.Pipes.Inputs.PipeSourceParametersSqsQueueParametersArgs
{
BatchSize = 0,
MaximumBatchingWindowInSeconds = 0,
},
},
Tags =
{
{ "string", "string" },
},
DesiredState = "string",
TargetParameters = new Aws.Pipes.Inputs.PipeTargetParametersArgs
{
BatchJobParameters = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArgs
{
JobDefinition = "string",
JobName = "string",
ArrayProperties = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArrayPropertiesArgs
{
Size = 0,
},
ContainerOverrides = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesArgs
{
Commands = new[]
{
"string",
},
Environments = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs
{
Name = "string",
Value = "string",
},
},
InstanceType = "string",
ResourceRequirements = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs
{
Type = "string",
Value = "string",
},
},
},
DependsOns = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersDependsOnArgs
{
JobId = "string",
Type = "string",
},
},
Parameters =
{
{ "string", "string" },
},
RetryStrategy = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersRetryStrategyArgs
{
Attempts = 0,
},
},
CloudwatchLogsParameters = new Aws.Pipes.Inputs.PipeTargetParametersCloudwatchLogsParametersArgs
{
LogStreamName = "string",
Timestamp = "string",
},
EcsTaskParameters = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersArgs
{
TaskDefinitionArn = "string",
Overrides = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesArgs
{
ContainerOverrides = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs
{
Commands = new[]
{
"string",
},
Cpu = 0,
EnvironmentFiles = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs
{
Type = "string",
Value = "string",
},
},
Environments = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs
{
Name = "string",
Value = "string",
},
},
Memory = 0,
MemoryReservation = 0,
Name = "string",
ResourceRequirements = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs
{
Type = "string",
Value = "string",
},
},
},
},
Cpu = "string",
EphemeralStorage = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs
{
SizeInGib = 0,
},
ExecutionRoleArn = "string",
InferenceAcceleratorOverrides = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs
{
DeviceName = "string",
DeviceType = "string",
},
},
Memory = "string",
TaskRoleArn = "string",
},
PlacementStrategies = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs
{
Field = "string",
Type = "string",
},
},
Group = "string",
LaunchType = "string",
NetworkConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs
{
AwsVpcConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs
{
AssignPublicIp = "string",
SecurityGroups = new[]
{
"string",
},
Subnets = new[]
{
"string",
},
},
},
CapacityProviderStrategies = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs
{
CapacityProvider = "string",
Base = 0,
Weight = 0,
},
},
PlacementConstraints = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs
{
Expression = "string",
Type = "string",
},
},
EnableExecuteCommand = false,
PlatformVersion = "string",
PropagateTags = "string",
ReferenceId = "string",
Tags =
{
{ "string", "string" },
},
TaskCount = 0,
EnableEcsManagedTags = false,
},
EventbridgeEventBusParameters = new Aws.Pipes.Inputs.PipeTargetParametersEventbridgeEventBusParametersArgs
{
DetailType = "string",
EndpointId = "string",
Resources = new[]
{
"string",
},
Source = "string",
Time = "string",
},
HttpParameters = new Aws.Pipes.Inputs.PipeTargetParametersHttpParametersArgs
{
HeaderParameters =
{
{ "string", "string" },
},
PathParameterValues = "string",
QueryStringParameters =
{
{ "string", "string" },
},
},
InputTemplate = "string",
KinesisStreamParameters = new Aws.Pipes.Inputs.PipeTargetParametersKinesisStreamParametersArgs
{
PartitionKey = "string",
},
LambdaFunctionParameters = new Aws.Pipes.Inputs.PipeTargetParametersLambdaFunctionParametersArgs
{
InvocationType = "string",
},
RedshiftDataParameters = new Aws.Pipes.Inputs.PipeTargetParametersRedshiftDataParametersArgs
{
Database = "string",
Sqls = new[]
{
"string",
},
DbUser = "string",
SecretManagerArn = "string",
StatementName = "string",
WithEvent = false,
},
SagemakerPipelineParameters = new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersArgs
{
PipelineParameters = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs
{
Name = "string",
Value = "string",
},
},
},
SqsQueueParameters = new Aws.Pipes.Inputs.PipeTargetParametersSqsQueueParametersArgs
{
MessageDeduplicationId = "string",
MessageGroupId = "string",
},
StepFunctionStateMachineParameters = new Aws.Pipes.Inputs.PipeTargetParametersStepFunctionStateMachineParametersArgs
{
InvocationType = "string",
},
},
});
example, err := pipes.NewPipe(ctx, "pipeResource", &pipes.PipeArgs{
RoleArn: pulumi.String("string"),
Target: pulumi.String("string"),
Source: pulumi.String("string"),
NamePrefix: pulumi.String("string"),
LogConfiguration: &pipes.PipeLogConfigurationArgs{
Level: pulumi.String("string"),
CloudwatchLogsLogDestination: &pipes.PipeLogConfigurationCloudwatchLogsLogDestinationArgs{
LogGroupArn: pulumi.String("string"),
},
FirehoseLogDestination: &pipes.PipeLogConfigurationFirehoseLogDestinationArgs{
DeliveryStreamArn: pulumi.String("string"),
},
S3LogDestination: &pipes.PipeLogConfigurationS3LogDestinationArgs{
BucketName: pulumi.String("string"),
BucketOwner: pulumi.String("string"),
OutputFormat: pulumi.String("string"),
Prefix: pulumi.String("string"),
},
},
Name: pulumi.String("string"),
Description: pulumi.String("string"),
EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
HeaderParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
PathParameterValues: pulumi.String("string"),
QueryStringParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
InputTemplate: pulumi.String("string"),
},
Enrichment: pulumi.String("string"),
SourceParameters: &pipes.PipeSourceParametersArgs{
ActivemqBrokerParameters: &pipes.PipeSourceParametersActivemqBrokerParametersArgs{
Credentials: &pipes.PipeSourceParametersActivemqBrokerParametersCredentialsArgs{
BasicAuth: pulumi.String("string"),
},
QueueName: pulumi.String("string"),
BatchSize: pulumi.Int(0),
MaximumBatchingWindowInSeconds: pulumi.Int(0),
},
DynamodbStreamParameters: &pipes.PipeSourceParametersDynamodbStreamParametersArgs{
StartingPosition: pulumi.String("string"),
BatchSize: pulumi.Int(0),
DeadLetterConfig: &pipes.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs{
Arn: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
MaximumRecordAgeInSeconds: pulumi.Int(0),
MaximumRetryAttempts: pulumi.Int(0),
OnPartialBatchItemFailure: pulumi.String("string"),
ParallelizationFactor: pulumi.Int(0),
},
FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
Pattern: pulumi.String("string"),
},
},
},
KinesisStreamParameters: &pipes.PipeSourceParametersKinesisStreamParametersArgs{
StartingPosition: pulumi.String("string"),
BatchSize: pulumi.Int(0),
DeadLetterConfig: &pipes.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs{
Arn: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
MaximumRecordAgeInSeconds: pulumi.Int(0),
MaximumRetryAttempts: pulumi.Int(0),
OnPartialBatchItemFailure: pulumi.String("string"),
ParallelizationFactor: pulumi.Int(0),
StartingPositionTimestamp: pulumi.String("string"),
},
ManagedStreamingKafkaParameters: &pipes.PipeSourceParametersManagedStreamingKafkaParametersArgs{
TopicName: pulumi.String("string"),
BatchSize: pulumi.Int(0),
ConsumerGroupId: pulumi.String("string"),
Credentials: &pipes.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs{
ClientCertificateTlsAuth: pulumi.String("string"),
SaslScram512Auth: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
StartingPosition: pulumi.String("string"),
},
RabbitmqBrokerParameters: &pipes.PipeSourceParametersRabbitmqBrokerParametersArgs{
Credentials: &pipes.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs{
BasicAuth: pulumi.String("string"),
},
QueueName: pulumi.String("string"),
BatchSize: pulumi.Int(0),
MaximumBatchingWindowInSeconds: pulumi.Int(0),
VirtualHost: pulumi.String("string"),
},
SelfManagedKafkaParameters: &pipes.PipeSourceParametersSelfManagedKafkaParametersArgs{
TopicName: pulumi.String("string"),
AdditionalBootstrapServers: pulumi.StringArray{
pulumi.String("string"),
},
BatchSize: pulumi.Int(0),
ConsumerGroupId: pulumi.String("string"),
Credentials: &pipes.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs{
BasicAuth: pulumi.String("string"),
ClientCertificateTlsAuth: pulumi.String("string"),
SaslScram256Auth: pulumi.String("string"),
SaslScram512Auth: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
ServerRootCaCertificate: pulumi.String("string"),
StartingPosition: pulumi.String("string"),
Vpc: &pipes.PipeSourceParametersSelfManagedKafkaParametersVpcArgs{
SecurityGroups: pulumi.StringArray{
pulumi.String("string"),
},
Subnets: pulumi.StringArray{
pulumi.String("string"),
},
},
},
SqsQueueParameters: &pipes.PipeSourceParametersSqsQueueParametersArgs{
BatchSize: pulumi.Int(0),
MaximumBatchingWindowInSeconds: pulumi.Int(0),
},
},
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
DesiredState: pulumi.String("string"),
TargetParameters: &pipes.PipeTargetParametersArgs{
BatchJobParameters: &pipes.PipeTargetParametersBatchJobParametersArgs{
JobDefinition: pulumi.String("string"),
JobName: pulumi.String("string"),
ArrayProperties: &pipes.PipeTargetParametersBatchJobParametersArrayPropertiesArgs{
Size: pulumi.Int(0),
},
ContainerOverrides: &pipes.PipeTargetParametersBatchJobParametersContainerOverridesArgs{
Commands: pulumi.StringArray{
pulumi.String("string"),
},
Environments: pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArray{
&pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs{
Name: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
InstanceType: pulumi.String("string"),
ResourceRequirements: pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArray{
&pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs{
Type: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
},
DependsOns: pipes.PipeTargetParametersBatchJobParametersDependsOnArray{
&pipes.PipeTargetParametersBatchJobParametersDependsOnArgs{
JobId: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
Parameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
RetryStrategy: &pipes.PipeTargetParametersBatchJobParametersRetryStrategyArgs{
Attempts: pulumi.Int(0),
},
},
CloudwatchLogsParameters: &pipes.PipeTargetParametersCloudwatchLogsParametersArgs{
LogStreamName: pulumi.String("string"),
Timestamp: pulumi.String("string"),
},
EcsTaskParameters: &pipes.PipeTargetParametersEcsTaskParametersArgs{
TaskDefinitionArn: pulumi.String("string"),
Overrides: &pipes.PipeTargetParametersEcsTaskParametersOverridesArgs{
ContainerOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs{
Commands: pulumi.StringArray{
pulumi.String("string"),
},
Cpu: pulumi.Int(0),
EnvironmentFiles: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs{
Type: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
Environments: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs{
Name: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
Memory: pulumi.Int(0),
MemoryReservation: pulumi.Int(0),
Name: pulumi.String("string"),
ResourceRequirements: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs{
Type: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
},
},
Cpu: pulumi.String("string"),
EphemeralStorage: &pipes.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs{
SizeInGib: pulumi.Int(0),
},
ExecutionRoleArn: pulumi.String("string"),
InferenceAcceleratorOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs{
DeviceName: pulumi.String("string"),
DeviceType: pulumi.String("string"),
},
},
Memory: pulumi.String("string"),
TaskRoleArn: pulumi.String("string"),
},
PlacementStrategies: pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArray{
&pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs{
Field: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
Group: pulumi.String("string"),
LaunchType: pulumi.String("string"),
NetworkConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs{
AwsVpcConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs{
AssignPublicIp: pulumi.String("string"),
SecurityGroups: pulumi.StringArray{
pulumi.String("string"),
},
Subnets: pulumi.StringArray{
pulumi.String("string"),
},
},
},
CapacityProviderStrategies: pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArray{
&pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs{
CapacityProvider: pulumi.String("string"),
Base: pulumi.Int(0),
Weight: pulumi.Int(0),
},
},
PlacementConstraints: pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArray{
&pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs{
Expression: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
EnableExecuteCommand: pulumi.Bool(false),
PlatformVersion: pulumi.String("string"),
PropagateTags: pulumi.String("string"),
ReferenceId: pulumi.String("string"),
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
TaskCount: pulumi.Int(0),
EnableEcsManagedTags: pulumi.Bool(false),
},
EventbridgeEventBusParameters: &pipes.PipeTargetParametersEventbridgeEventBusParametersArgs{
DetailType: pulumi.String("string"),
EndpointId: pulumi.String("string"),
Resources: pulumi.StringArray{
pulumi.String("string"),
},
Source: pulumi.String("string"),
Time: pulumi.String("string"),
},
HttpParameters: &pipes.PipeTargetParametersHttpParametersArgs{
HeaderParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
PathParameterValues: pulumi.String("string"),
QueryStringParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
InputTemplate: pulumi.String("string"),
KinesisStreamParameters: &pipes.PipeTargetParametersKinesisStreamParametersArgs{
PartitionKey: pulumi.String("string"),
},
LambdaFunctionParameters: &pipes.PipeTargetParametersLambdaFunctionParametersArgs{
InvocationType: pulumi.String("string"),
},
RedshiftDataParameters: &pipes.PipeTargetParametersRedshiftDataParametersArgs{
Database: pulumi.String("string"),
Sqls: pulumi.StringArray{
pulumi.String("string"),
},
DbUser: pulumi.String("string"),
SecretManagerArn: pulumi.String("string"),
StatementName: pulumi.String("string"),
WithEvent: pulumi.Bool(false),
},
SagemakerPipelineParameters: &pipes.PipeTargetParametersSagemakerPipelineParametersArgs{
PipelineParameters: pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArray{
&pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs{
Name: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
},
SqsQueueParameters: &pipes.PipeTargetParametersSqsQueueParametersArgs{
MessageDeduplicationId: pulumi.String("string"),
MessageGroupId: pulumi.String("string"),
},
StepFunctionStateMachineParameters: &pipes.PipeTargetParametersStepFunctionStateMachineParametersArgs{
InvocationType: pulumi.String("string"),
},
},
})
var pipeResource = new Pipe("pipeResource", PipeArgs.builder()
.roleArn("string")
.target("string")
.source("string")
.namePrefix("string")
.logConfiguration(PipeLogConfigurationArgs.builder()
.level("string")
.cloudwatchLogsLogDestination(PipeLogConfigurationCloudwatchLogsLogDestinationArgs.builder()
.logGroupArn("string")
.build())
.firehoseLogDestination(PipeLogConfigurationFirehoseLogDestinationArgs.builder()
.deliveryStreamArn("string")
.build())
.s3LogDestination(PipeLogConfigurationS3LogDestinationArgs.builder()
.bucketName("string")
.bucketOwner("string")
.outputFormat("string")
.prefix("string")
.build())
.build())
.name("string")
.description("string")
.enrichmentParameters(PipeEnrichmentParametersArgs.builder()
.httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
.headerParameters(Map.of("string", "string"))
.pathParameterValues("string")
.queryStringParameters(Map.of("string", "string"))
.build())
.inputTemplate("string")
.build())
.enrichment("string")
.sourceParameters(PipeSourceParametersArgs.builder()
.activemqBrokerParameters(PipeSourceParametersActivemqBrokerParametersArgs.builder()
.credentials(PipeSourceParametersActivemqBrokerParametersCredentialsArgs.builder()
.basicAuth("string")
.build())
.queueName("string")
.batchSize(0)
.maximumBatchingWindowInSeconds(0)
.build())
.dynamodbStreamParameters(PipeSourceParametersDynamodbStreamParametersArgs.builder()
.startingPosition("string")
.batchSize(0)
.deadLetterConfig(PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs.builder()
.arn("string")
.build())
.maximumBatchingWindowInSeconds(0)
.maximumRecordAgeInSeconds(0)
.maximumRetryAttempts(0)
.onPartialBatchItemFailure("string")
.parallelizationFactor(0)
.build())
.filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
.filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
.pattern("string")
.build())
.build())
.kinesisStreamParameters(PipeSourceParametersKinesisStreamParametersArgs.builder()
.startingPosition("string")
.batchSize(0)
.deadLetterConfig(PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs.builder()
.arn("string")
.build())
.maximumBatchingWindowInSeconds(0)
.maximumRecordAgeInSeconds(0)
.maximumRetryAttempts(0)
.onPartialBatchItemFailure("string")
.parallelizationFactor(0)
.startingPositionTimestamp("string")
.build())
.managedStreamingKafkaParameters(PipeSourceParametersManagedStreamingKafkaParametersArgs.builder()
.topicName("string")
.batchSize(0)
.consumerGroupId("string")
.credentials(PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs.builder()
.clientCertificateTlsAuth("string")
.saslScram512Auth("string")
.build())
.maximumBatchingWindowInSeconds(0)
.startingPosition("string")
.build())
.rabbitmqBrokerParameters(PipeSourceParametersRabbitmqBrokerParametersArgs.builder()
.credentials(PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs.builder()
.basicAuth("string")
.build())
.queueName("string")
.batchSize(0)
.maximumBatchingWindowInSeconds(0)
.virtualHost("string")
.build())
.selfManagedKafkaParameters(PipeSourceParametersSelfManagedKafkaParametersArgs.builder()
.topicName("string")
.additionalBootstrapServers("string")
.batchSize(0)
.consumerGroupId("string")
.credentials(PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs.builder()
.basicAuth("string")
.clientCertificateTlsAuth("string")
.saslScram256Auth("string")
.saslScram512Auth("string")
.build())
.maximumBatchingWindowInSeconds(0)
.serverRootCaCertificate("string")
.startingPosition("string")
.vpc(PipeSourceParametersSelfManagedKafkaParametersVpcArgs.builder()
.securityGroups("string")
.subnets("string")
.build())
.build())
.sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
.batchSize(0)
.maximumBatchingWindowInSeconds(0)
.build())
.build())
.tags(Map.of("string", "string"))
.desiredState("string")
.targetParameters(PipeTargetParametersArgs.builder()
.batchJobParameters(PipeTargetParametersBatchJobParametersArgs.builder()
.jobDefinition("string")
.jobName("string")
.arrayProperties(PipeTargetParametersBatchJobParametersArrayPropertiesArgs.builder()
.size(0)
.build())
.containerOverrides(PipeTargetParametersBatchJobParametersContainerOverridesArgs.builder()
.commands("string")
.environments(PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs.builder()
.name("string")
.value("string")
.build())
.instanceType("string")
.resourceRequirements(PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs.builder()
.type("string")
.value("string")
.build())
.build())
.dependsOns(PipeTargetParametersBatchJobParametersDependsOnArgs.builder()
.jobId("string")
.type("string")
.build())
.parameters(Map.of("string", "string"))
.retryStrategy(PipeTargetParametersBatchJobParametersRetryStrategyArgs.builder()
.attempts(0)
.build())
.build())
.cloudwatchLogsParameters(PipeTargetParametersCloudwatchLogsParametersArgs.builder()
.logStreamName("string")
.timestamp("string")
.build())
.ecsTaskParameters(PipeTargetParametersEcsTaskParametersArgs.builder()
.taskDefinitionArn("string")
.overrides(PipeTargetParametersEcsTaskParametersOverridesArgs.builder()
.containerOverrides(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs.builder()
.commands("string")
.cpu(0)
.environmentFiles(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs.builder()
.type("string")
.value("string")
.build())
.environments(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs.builder()
.name("string")
.value("string")
.build())
.memory(0)
.memoryReservation(0)
.name("string")
.resourceRequirements(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs.builder()
.type("string")
.value("string")
.build())
.build())
.cpu("string")
.ephemeralStorage(PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs.builder()
.sizeInGib(0)
.build())
.executionRoleArn("string")
.inferenceAcceleratorOverrides(PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs.builder()
.deviceName("string")
.deviceType("string")
.build())
.memory("string")
.taskRoleArn("string")
.build())
.placementStrategies(PipeTargetParametersEcsTaskParametersPlacementStrategyArgs.builder()
.field("string")
.type("string")
.build())
.group("string")
.launchType("string")
.networkConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs.builder()
.awsVpcConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs.builder()
.assignPublicIp("string")
.securityGroups("string")
.subnets("string")
.build())
.build())
.capacityProviderStrategies(PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs.builder()
.capacityProvider("string")
.base(0)
.weight(0)
.build())
.placementConstraints(PipeTargetParametersEcsTaskParametersPlacementConstraintArgs.builder()
.expression("string")
.type("string")
.build())
.enableExecuteCommand(false)
.platformVersion("string")
.propagateTags("string")
.referenceId("string")
.tags(Map.of("string", "string"))
.taskCount(0)
.enableEcsManagedTags(false)
.build())
.eventbridgeEventBusParameters(PipeTargetParametersEventbridgeEventBusParametersArgs.builder()
.detailType("string")
.endpointId("string")
.resources("string")
.source("string")
.time("string")
.build())
.httpParameters(PipeTargetParametersHttpParametersArgs.builder()
.headerParameters(Map.of("string", "string"))
.pathParameterValues("string")
.queryStringParameters(Map.of("string", "string"))
.build())
.inputTemplate("string")
.kinesisStreamParameters(PipeTargetParametersKinesisStreamParametersArgs.builder()
.partitionKey("string")
.build())
.lambdaFunctionParameters(PipeTargetParametersLambdaFunctionParametersArgs.builder()
.invocationType("string")
.build())
.redshiftDataParameters(PipeTargetParametersRedshiftDataParametersArgs.builder()
.database("string")
.sqls("string")
.dbUser("string")
.secretManagerArn("string")
.statementName("string")
.withEvent(false)
.build())
.sagemakerPipelineParameters(PipeTargetParametersSagemakerPipelineParametersArgs.builder()
.pipelineParameters(PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs.builder()
.name("string")
.value("string")
.build())
.build())
.sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
.messageDeduplicationId("string")
.messageGroupId("string")
.build())
.stepFunctionStateMachineParameters(PipeTargetParametersStepFunctionStateMachineParametersArgs.builder()
.invocationType("string")
.build())
.build())
.build());
pipe_resource = aws.pipes.Pipe("pipeResource",
role_arn="string",
target="string",
source="string",
name_prefix="string",
log_configuration={
"level": "string",
"cloudwatchLogsLogDestination": {
"logGroupArn": "string",
},
"firehoseLogDestination": {
"deliveryStreamArn": "string",
},
"s3LogDestination": {
"bucketName": "string",
"bucketOwner": "string",
"outputFormat": "string",
"prefix": "string",
},
},
name="string",
description="string",
enrichment_parameters={
"httpParameters": {
"headerParameters": {
"string": "string",
},
"pathParameterValues": "string",
"queryStringParameters": {
"string": "string",
},
},
"inputTemplate": "string",
},
enrichment="string",
source_parameters={
"activemqBrokerParameters": {
"credentials": {
"basicAuth": "string",
},
"queueName": "string",
"batchSize": 0,
"maximumBatchingWindowInSeconds": 0,
},
"dynamodbStreamParameters": {
"startingPosition": "string",
"batchSize": 0,
"deadLetterConfig": {
"arn": "string",
},
"maximumBatchingWindowInSeconds": 0,
"maximumRecordAgeInSeconds": 0,
"maximumRetryAttempts": 0,
"onPartialBatchItemFailure": "string",
"parallelizationFactor": 0,
},
"filterCriteria": {
"filters": [{
"pattern": "string",
}],
},
"kinesisStreamParameters": {
"startingPosition": "string",
"batchSize": 0,
"deadLetterConfig": {
"arn": "string",
},
"maximumBatchingWindowInSeconds": 0,
"maximumRecordAgeInSeconds": 0,
"maximumRetryAttempts": 0,
"onPartialBatchItemFailure": "string",
"parallelizationFactor": 0,
"startingPositionTimestamp": "string",
},
"managedStreamingKafkaParameters": {
"topicName": "string",
"batchSize": 0,
"consumerGroupId": "string",
"credentials": {
"clientCertificateTlsAuth": "string",
"saslScram512Auth": "string",
},
"maximumBatchingWindowInSeconds": 0,
"startingPosition": "string",
},
"rabbitmqBrokerParameters": {
"credentials": {
"basicAuth": "string",
},
"queueName": "string",
"batchSize": 0,
"maximumBatchingWindowInSeconds": 0,
"virtualHost": "string",
},
"selfManagedKafkaParameters": {
"topicName": "string",
"additionalBootstrapServers": ["string"],
"batchSize": 0,
"consumerGroupId": "string",
"credentials": {
"basicAuth": "string",
"clientCertificateTlsAuth": "string",
"saslScram256Auth": "string",
"saslScram512Auth": "string",
},
"maximumBatchingWindowInSeconds": 0,
"serverRootCaCertificate": "string",
"startingPosition": "string",
"vpc": {
"securityGroups": ["string"],
"subnets": ["string"],
},
},
"sqsQueueParameters": {
"batchSize": 0,
"maximumBatchingWindowInSeconds": 0,
},
},
tags={
"string": "string",
},
desired_state="string",
target_parameters={
"batchJobParameters": {
"jobDefinition": "string",
"jobName": "string",
"arrayProperties": {
"size": 0,
},
"containerOverrides": {
"commands": ["string"],
"environments": [{
"name": "string",
"value": "string",
}],
"instanceType": "string",
"resourceRequirements": [{
"type": "string",
"value": "string",
}],
},
"dependsOns": [{
"jobId": "string",
"type": "string",
}],
"parameters": {
"string": "string",
},
"retryStrategy": {
"attempts": 0,
},
},
"cloudwatchLogsParameters": {
"logStreamName": "string",
"timestamp": "string",
},
"ecsTaskParameters": {
"taskDefinitionArn": "string",
"overrides": {
"containerOverrides": [{
"commands": ["string"],
"cpu": 0,
"environmentFiles": [{
"type": "string",
"value": "string",
}],
"environments": [{
"name": "string",
"value": "string",
}],
"memory": 0,
"memoryReservation": 0,
"name": "string",
"resourceRequirements": [{
"type": "string",
"value": "string",
}],
}],
"cpu": "string",
"ephemeralStorage": {
"sizeInGib": 0,
},
"executionRoleArn": "string",
"inferenceAcceleratorOverrides": [{
"deviceName": "string",
"deviceType": "string",
}],
"memory": "string",
"taskRoleArn": "string",
},
"placementStrategies": [{
"field": "string",
"type": "string",
}],
"group": "string",
"launchType": "string",
"networkConfiguration": {
"awsVpcConfiguration": {
"assignPublicIp": "string",
"securityGroups": ["string"],
"subnets": ["string"],
},
},
"capacityProviderStrategies": [{
"capacityProvider": "string",
"base": 0,
"weight": 0,
}],
"placementConstraints": [{
"expression": "string",
"type": "string",
}],
"enableExecuteCommand": False,
"platformVersion": "string",
"propagateTags": "string",
"referenceId": "string",
"tags": {
"string": "string",
},
"taskCount": 0,
"enableEcsManagedTags": False,
},
"eventbridgeEventBusParameters": {
"detailType": "string",
"endpointId": "string",
"resources": ["string"],
"source": "string",
"time": "string",
},
"httpParameters": {
"headerParameters": {
"string": "string",
},
"pathParameterValues": "string",
"queryStringParameters": {
"string": "string",
},
},
"inputTemplate": "string",
"kinesisStreamParameters": {
"partitionKey": "string",
},
"lambdaFunctionParameters": {
"invocationType": "string",
},
"redshiftDataParameters": {
"database": "string",
"sqls": ["string"],
"dbUser": "string",
"secretManagerArn": "string",
"statementName": "string",
"withEvent": False,
},
"sagemakerPipelineParameters": {
"pipelineParameters": [{
"name": "string",
"value": "string",
}],
},
"sqsQueueParameters": {
"messageDeduplicationId": "string",
"messageGroupId": "string",
},
"stepFunctionStateMachineParameters": {
"invocationType": "string",
},
})
const pipeResource = new aws.pipes.Pipe("pipeResource", {
roleArn: "string",
target: "string",
source: "string",
namePrefix: "string",
logConfiguration: {
level: "string",
cloudwatchLogsLogDestination: {
logGroupArn: "string",
},
firehoseLogDestination: {
deliveryStreamArn: "string",
},
s3LogDestination: {
bucketName: "string",
bucketOwner: "string",
outputFormat: "string",
prefix: "string",
},
},
name: "string",
description: "string",
enrichmentParameters: {
httpParameters: {
headerParameters: {
string: "string",
},
pathParameterValues: "string",
queryStringParameters: {
string: "string",
},
},
inputTemplate: "string",
},
enrichment: "string",
sourceParameters: {
activemqBrokerParameters: {
credentials: {
basicAuth: "string",
},
queueName: "string",
batchSize: 0,
maximumBatchingWindowInSeconds: 0,
},
dynamodbStreamParameters: {
startingPosition: "string",
batchSize: 0,
deadLetterConfig: {
arn: "string",
},
maximumBatchingWindowInSeconds: 0,
maximumRecordAgeInSeconds: 0,
maximumRetryAttempts: 0,
onPartialBatchItemFailure: "string",
parallelizationFactor: 0,
},
filterCriteria: {
filters: [{
pattern: "string",
}],
},
kinesisStreamParameters: {
startingPosition: "string",
batchSize: 0,
deadLetterConfig: {
arn: "string",
},
maximumBatchingWindowInSeconds: 0,
maximumRecordAgeInSeconds: 0,
maximumRetryAttempts: 0,
onPartialBatchItemFailure: "string",
parallelizationFactor: 0,
startingPositionTimestamp: "string",
},
managedStreamingKafkaParameters: {
topicName: "string",
batchSize: 0,
consumerGroupId: "string",
credentials: {
clientCertificateTlsAuth: "string",
saslScram512Auth: "string",
},
maximumBatchingWindowInSeconds: 0,
startingPosition: "string",
},
rabbitmqBrokerParameters: {
credentials: {
basicAuth: "string",
},
queueName: "string",
batchSize: 0,
maximumBatchingWindowInSeconds: 0,
virtualHost: "string",
},
selfManagedKafkaParameters: {
topicName: "string",
additionalBootstrapServers: ["string"],
batchSize: 0,
consumerGroupId: "string",
credentials: {
basicAuth: "string",
clientCertificateTlsAuth: "string",
saslScram256Auth: "string",
saslScram512Auth: "string",
},
maximumBatchingWindowInSeconds: 0,
serverRootCaCertificate: "string",
startingPosition: "string",
vpc: {
securityGroups: ["string"],
subnets: ["string"],
},
},
sqsQueueParameters: {
batchSize: 0,
maximumBatchingWindowInSeconds: 0,
},
},
tags: {
string: "string",
},
desiredState: "string",
targetParameters: {
batchJobParameters: {
jobDefinition: "string",
jobName: "string",
arrayProperties: {
size: 0,
},
containerOverrides: {
commands: ["string"],
environments: [{
name: "string",
value: "string",
}],
instanceType: "string",
resourceRequirements: [{
type: "string",
value: "string",
}],
},
dependsOns: [{
jobId: "string",
type: "string",
}],
parameters: {
string: "string",
},
retryStrategy: {
attempts: 0,
},
},
cloudwatchLogsParameters: {
logStreamName: "string",
timestamp: "string",
},
ecsTaskParameters: {
taskDefinitionArn: "string",
overrides: {
containerOverrides: [{
commands: ["string"],
cpu: 0,
environmentFiles: [{
type: "string",
value: "string",
}],
environments: [{
name: "string",
value: "string",
}],
memory: 0,
memoryReservation: 0,
name: "string",
resourceRequirements: [{
type: "string",
value: "string",
}],
}],
cpu: "string",
ephemeralStorage: {
sizeInGib: 0,
},
executionRoleArn: "string",
inferenceAcceleratorOverrides: [{
deviceName: "string",
deviceType: "string",
}],
memory: "string",
taskRoleArn: "string",
},
placementStrategies: [{
field: "string",
type: "string",
}],
group: "string",
launchType: "string",
networkConfiguration: {
awsVpcConfiguration: {
assignPublicIp: "string",
securityGroups: ["string"],
subnets: ["string"],
},
},
capacityProviderStrategies: [{
capacityProvider: "string",
base: 0,
weight: 0,
}],
placementConstraints: [{
expression: "string",
type: "string",
}],
enableExecuteCommand: false,
platformVersion: "string",
propagateTags: "string",
referenceId: "string",
tags: {
string: "string",
},
taskCount: 0,
enableEcsManagedTags: false,
},
eventbridgeEventBusParameters: {
detailType: "string",
endpointId: "string",
resources: ["string"],
source: "string",
time: "string",
},
httpParameters: {
headerParameters: {
string: "string",
},
pathParameterValues: "string",
queryStringParameters: {
string: "string",
},
},
inputTemplate: "string",
kinesisStreamParameters: {
partitionKey: "string",
},
lambdaFunctionParameters: {
invocationType: "string",
},
redshiftDataParameters: {
database: "string",
sqls: ["string"],
dbUser: "string",
secretManagerArn: "string",
statementName: "string",
withEvent: false,
},
sagemakerPipelineParameters: {
pipelineParameters: [{
name: "string",
value: "string",
}],
},
sqsQueueParameters: {
messageDeduplicationId: "string",
messageGroupId: "string",
},
stepFunctionStateMachineParameters: {
invocationType: "string",
},
},
});
type: aws:pipes:Pipe
properties:
description: string
desiredState: string
enrichment: string
enrichmentParameters:
httpParameters:
headerParameters:
string: string
pathParameterValues: string
queryStringParameters:
string: string
inputTemplate: string
logConfiguration:
cloudwatchLogsLogDestination:
logGroupArn: string
firehoseLogDestination:
deliveryStreamArn: string
level: string
s3LogDestination:
bucketName: string
bucketOwner: string
outputFormat: string
prefix: string
name: string
namePrefix: string
roleArn: string
source: string
sourceParameters:
activemqBrokerParameters:
batchSize: 0
credentials:
basicAuth: string
maximumBatchingWindowInSeconds: 0
queueName: string
dynamodbStreamParameters:
batchSize: 0
deadLetterConfig:
arn: string
maximumBatchingWindowInSeconds: 0
maximumRecordAgeInSeconds: 0
maximumRetryAttempts: 0
onPartialBatchItemFailure: string
parallelizationFactor: 0
startingPosition: string
filterCriteria:
filters:
- pattern: string
kinesisStreamParameters:
batchSize: 0
deadLetterConfig:
arn: string
maximumBatchingWindowInSeconds: 0
maximumRecordAgeInSeconds: 0
maximumRetryAttempts: 0
onPartialBatchItemFailure: string
parallelizationFactor: 0
startingPosition: string
startingPositionTimestamp: string
managedStreamingKafkaParameters:
batchSize: 0
consumerGroupId: string
credentials:
clientCertificateTlsAuth: string
saslScram512Auth: string
maximumBatchingWindowInSeconds: 0
startingPosition: string
topicName: string
rabbitmqBrokerParameters:
batchSize: 0
credentials:
basicAuth: string
maximumBatchingWindowInSeconds: 0
queueName: string
virtualHost: string
selfManagedKafkaParameters:
additionalBootstrapServers:
- string
batchSize: 0
consumerGroupId: string
credentials:
basicAuth: string
clientCertificateTlsAuth: string
saslScram256Auth: string
saslScram512Auth: string
maximumBatchingWindowInSeconds: 0
serverRootCaCertificate: string
startingPosition: string
topicName: string
vpc:
securityGroups:
- string
subnets:
- string
sqsQueueParameters:
batchSize: 0
maximumBatchingWindowInSeconds: 0
tags:
string: string
target: string
targetParameters:
batchJobParameters:
arrayProperties:
size: 0
containerOverrides:
commands:
- string
environments:
- name: string
value: string
instanceType: string
resourceRequirements:
- type: string
value: string
dependsOns:
- jobId: string
type: string
jobDefinition: string
jobName: string
parameters:
string: string
retryStrategy:
attempts: 0
cloudwatchLogsParameters:
logStreamName: string
timestamp: string
ecsTaskParameters:
capacityProviderStrategies:
- base: 0
capacityProvider: string
weight: 0
enableEcsManagedTags: false
enableExecuteCommand: false
group: string
launchType: string
networkConfiguration:
awsVpcConfiguration:
assignPublicIp: string
securityGroups:
- string
subnets:
- string
overrides:
containerOverrides:
- commands:
- string
cpu: 0
environmentFiles:
- type: string
value: string
environments:
- name: string
value: string
memory: 0
memoryReservation: 0
name: string
resourceRequirements:
- type: string
value: string
cpu: string
ephemeralStorage:
sizeInGib: 0
executionRoleArn: string
inferenceAcceleratorOverrides:
- deviceName: string
deviceType: string
memory: string
taskRoleArn: string
placementConstraints:
- expression: string
type: string
placementStrategies:
- field: string
type: string
platformVersion: string
propagateTags: string
referenceId: string
tags:
string: string
taskCount: 0
taskDefinitionArn: string
eventbridgeEventBusParameters:
detailType: string
endpointId: string
resources:
- string
source: string
time: string
httpParameters:
headerParameters:
string: string
pathParameterValues: string
queryStringParameters:
string: string
inputTemplate: string
kinesisStreamParameters:
partitionKey: string
lambdaFunctionParameters:
invocationType: string
redshiftDataParameters:
database: string
dbUser: string
secretManagerArn: string
sqls:
- string
statementName: string
withEvent: false
sagemakerPipelineParameters:
pipelineParameters:
- name: string
value: string
sqsQueueParameters:
messageDeduplicationId: string
messageGroupId: string
stepFunctionStateMachineParameters:
invocationType: string
Pipe Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Pipe resource accepts the following input properties:
- Role
Arn string - ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- Target string
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- Description string
- A description of the pipe. At most 512 characters.
- Desired
State string - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- Enrichment
Parameters PipeEnrichment Parameters - Parameters to configure enrichment for your pipe. Detailed below.
- Log
Configuration PipeLog Configuration - Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - Name
Prefix string - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - Source
Parameters PipeSource Parameters - Parameters to configure a source for the pipe. Detailed below.
- Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Target
Parameters PipeTarget Parameters - Parameters to configure a target for your pipe. Detailed below.
- Role
Arn string - ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- Target string
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- Description string
- A description of the pipe. At most 512 characters.
- Desired
State string - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- Enrichment
Parameters PipeEnrichment Parameters Args - Parameters to configure enrichment for your pipe. Detailed below.
- Log
Configuration PipeLog Configuration Args - Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - Name
Prefix string - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - Source
Parameters PipeSource Parameters Args - Parameters to configure a source for the pipe. Detailed below.
- map[string]string
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Target
Parameters PipeTarget Parameters Args - Parameters to configure a target for your pipe. Detailed below.
- role
Arn String - ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- target String
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- description String
- A description of the pipe. At most 512 characters.
- desired
State String - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment
Parameters PipeEnrichment Parameters - Parameters to configure enrichment for your pipe. Detailed below.
- log
Configuration PipeLog Configuration - Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name
Prefix String - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - source
Parameters PipeSource Parameters - Parameters to configure a source for the pipe. Detailed below.
- Map<String,String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - target
Parameters PipeTarget Parameters - Parameters to configure a target for your pipe. Detailed below.
- role
Arn string - ARN of the role that allows the pipe to send data to the target.
- source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- target string
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- description string
- A description of the pipe. At most 512 characters.
- desired
State string - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment
Parameters PipeEnrichment Parameters - Parameters to configure enrichment for your pipe. Detailed below.
- log
Configuration PipeLog Configuration - Logging configuration settings for the pipe. Detailed below.
- name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name
Prefix string - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - source
Parameters PipeSource Parameters - Parameters to configure a source for the pipe. Detailed below.
- {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - target
Parameters PipeTarget Parameters - Parameters to configure a target for your pipe. Detailed below.
- role_
arn str - ARN of the role that allows the pipe to send data to the target.
- source str
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- target str
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- description str
- A description of the pipe. At most 512 characters.
- desired_
state str - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment str
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment_
parameters PipeEnrichment Parameters Args - Parameters to configure enrichment for your pipe. Detailed below.
- log_
configuration PipeLog Configuration Args - Logging configuration settings for the pipe. Detailed below.
- name str
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name_
prefix str - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - source_
parameters PipeSource Parameters Args - Parameters to configure a source for the pipe. Detailed below.
- Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - target_
parameters PipeTarget Parameters Args - Parameters to configure a target for your pipe. Detailed below.
- role
Arn String - ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- target String
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- description String
- A description of the pipe. At most 512 characters.
- desired
State String - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment
Parameters Property Map - Parameters to configure enrichment for your pipe. Detailed below.
- log
Configuration Property Map - Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name
Prefix String - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - source
Parameters Property Map - Parameters to configure a source for the pipe. Detailed below.
- Map<String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - target
Parameters Property Map - Parameters to configure a target for your pipe. Detailed below.
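For example, the optional arguments above can be combined to create a pipe with a generated name that starts out stopped. The following TypeScript sketch assumes the role and queue ARNs (shown as placeholders) already exist:
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs below are placeholders for pre-existing resources.
const stoppedPipe = new aws.pipes.Pipe("stoppedPipe", {
    namePrefix: "orders-",        // provider appends a unique suffix; conflicts with `name`
    description: "Pipe between two SQS queues, created in a stopped state",
    desiredState: "STOPPED",      // one of RUNNING or STOPPED
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    tags: {
        team: "integration",
    },
});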
Outputs
All input properties are implicitly available as output properties. Additionally, the Pipe resource produces the following output properties:
- Arn string - ARN of this pipe.
- Id string - The provider-assigned unique ID for this managed resource.
- TagsAll Dictionary<string, string> - Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
Look up Existing Pipe Resource
Get an existing Pipe resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: PipeState, opts?: CustomResourceOptions): Pipe
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
arn: Optional[str] = None,
description: Optional[str] = None,
desired_state: Optional[str] = None,
enrichment: Optional[str] = None,
enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
log_configuration: Optional[PipeLogConfigurationArgs] = None,
name: Optional[str] = None,
name_prefix: Optional[str] = None,
role_arn: Optional[str] = None,
source: Optional[str] = None,
source_parameters: Optional[PipeSourceParametersArgs] = None,
tags: Optional[Mapping[str, str]] = None,
tags_all: Optional[Mapping[str, str]] = None,
target: Optional[str] = None,
target_parameters: Optional[PipeTargetParametersArgs] = None) -> Pipe
func GetPipe(ctx *Context, name string, id IDInput, state *PipeState, opts ...ResourceOption) (*Pipe, error)
public static Pipe Get(string name, Input<string> id, PipeState? state, CustomResourceOptions? opts = null)
public static Pipe get(String name, Output<String> id, PipeState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
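For example, in TypeScript an existing pipe can be looked up by its ID (for this resource the ID is typically the pipe name); the name used below is a placeholder:
import * as aws from "@pulumi/aws";

// Sketch: attach to a pipe that already exists outside this program.
const existing = aws.pipes.Pipe.get("existing", "example-pipe");

// The looked-up resource exposes the same output properties as a managed one.
export const existingPipeArn = existing.arn;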
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Arn string
- ARN of this pipe.
- Description string
- A description of the pipe. At most 512 characters.
- Desired
State string - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- Enrichment
Parameters PipeEnrichment Parameters - Parameters to configure enrichment for your pipe. Detailed below.
- Log
Configuration PipeLog Configuration - Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - Name
Prefix string - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - Role
Arn string - ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- Source
Parameters PipeSource Parameters - Parameters to configure a source for the pipe. Detailed below.
- Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Dictionary<string, string>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - Target string
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- Target
Parameters PipeTarget Parameters - Parameters to configure a target for your pipe. Detailed below.
- Arn string
- ARN of this pipe.
- Description string
- A description of the pipe. At most 512 characters.
- Desired
State string - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- Enrichment
Parameters PipeEnrichment Parameters Args - Parameters to configure enrichment for your pipe. Detailed below.
- Log
Configuration PipeLog Configuration Args - Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - Name
Prefix string - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - Role
Arn string - ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- Source
Parameters PipeSource Parameters Args - Parameters to configure a source for the pipe. Detailed below.
- map[string]string
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - map[string]string
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - Target string
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- Target
Parameters PipeTarget Parameters Args - Parameters to configure a target for your pipe. Detailed below.
- arn String
- ARN of this pipe.
- description String
- A description of the pipe. At most 512 characters.
- desired
State String - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment
Parameters PipeEnrichment Parameters - Parameters to configure enrichment for your pipe. Detailed below.
- log
Configuration PipeLog Configuration - Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name
Prefix String - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - role
Arn String - ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- source
Parameters PipeSource Parameters - Parameters to configure a source for the pipe. Detailed below.
- Map<String,String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Map<String,String>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - target String
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- target
Parameters PipeTarget Parameters - Parameters to configure a target for your pipe. Detailed below.
- arn string
- ARN of this pipe.
- description string
- A description of the pipe. At most 512 characters.
- desired
State string - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment
Parameters PipeEnrichment Parameters - Parameters to configure enrichment for your pipe. Detailed below.
- log
Configuration PipeLog Configuration - Logging configuration settings for the pipe. Detailed below.
- name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name
Prefix string - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - role
Arn string - ARN of the role that allows the pipe to send data to the target.
- source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- source
Parameters PipeSource Parameters - Parameters to configure a source for the pipe. Detailed below.
- {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - {[key: string]: string}
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - target string
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- target
Parameters PipeTarget Parameters - Parameters to configure a target for your pipe. Detailed below.
- arn str
- ARN of this pipe.
- description str
- A description of the pipe. At most 512 characters.
- desired_
state str - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment str
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment_
parameters PipeEnrichment Parameters Args - Parameters to configure enrichment for your pipe. Detailed below.
- log_
configuration PipeLog Configuration Args - Logging configuration settings for the pipe. Detailed below.
- name str
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name_
prefix str - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - role_
arn str - ARN of the role that allows the pipe to send data to the target.
- source str
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- source_
parameters PipeSource Parameters Args - Parameters to configure a source for the pipe. Detailed below.
- Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Mapping[str, str]
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - target str
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- target_
parameters PipeTarget Parameters Args - Parameters to configure a target for your pipe. Detailed below.
- arn String
- ARN of this pipe.
- description String
- A description of the pipe. At most 512 characters.
- desired
State String - The state the pipe should be in. One of:
RUNNING
,STOPPED
. - enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment
Parameters Property Map - Parameters to configure enrichment for your pipe. Detailed below.
- log
Configuration Property Map - Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with
name_prefix
. - name
Prefix String - Creates a unique name beginning with the specified prefix. Conflicts with
name
. - role
Arn String - ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- source
Parameters Property Map - Parameters to configure a source for the pipe. Detailed below.
- Map<String>
- Key-value mapping of resource tags. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Map<String>
- Map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - target String
Target resource of the pipe (typically an ARN).
The following arguments are optional:
- target
Parameters Property Map - Parameters to configure a target for your pipe. Detailed below.
Supporting Types
PipeEnrichmentParameters, PipeEnrichmentParametersArgs
- Http
Parameters PipeEnrichment Parameters Http Parameters - Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- Input
Template string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- Http
Parameters PipeEnrichment Parameters Http Parameters - Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- Input
Template string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- http
Parameters PipeEnrichment Parameters Http Parameters - Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- input
Template String - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- http
Parameters PipeEnrichment Parameters Http Parameters - Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- input
Template string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- http_
parameters PipeEnrichment Parameters Http Parameters - Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- input_
template str - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- http
Parameters Property Map - Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- input
Template String - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
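As an illustration, the TypeScript sketch below (with placeholder ARNs) sends events through an assumed EventBridge API destination for enrichment, adding a header, a query string parameter, and a static input template:
import * as aws from "@pulumi/aws";

// Sketch only: role, queue, and API destination ARNs are placeholders.
const enrichedPipe = new aws.pipes.Pipe("enrichedPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    enrichment: "arn:aws:events:us-east-1:111111111111:api-destination/example/abcd1234",
    enrichmentParameters: {
        httpParameters: {
            headerParameters: {
                "X-Pipe-Name": "enrichedPipe",
            },
            queryStringParameters: {
                verbose: "true",
            },
        },
        // Static JSON passed to the enrichment instead of the raw event.
        inputTemplate: "{\"detail-type\": \"order-event\"}",
    },
});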
PipeEnrichmentParametersHttpParameters, PipeEnrichmentParametersHttpParametersArgs
- Header
Parameters Dictionary<string, string> - Path
Parameter stringValues - Query
String Dictionary<string, string>Parameters
- Header
Parameters map[string]string - Path
Parameter stringValues - Query
String map[string]stringParameters
- header
Parameters Map<String,String> - path
Parameter StringValues - query
String Map<String,String>Parameters
- header
Parameters {[key: string]: string} - path
Parameter stringValues - query
String {[key: string]: string}Parameters
- header_
parameters Mapping[str, str] - path_
parameter_ strvalues - query_
string_ Mapping[str, str]parameters
- header
Parameters Map<String> - path
Parameter StringValues - query
String Map<String>Parameters
PipeLogConfiguration, PipeLogConfigurationArgs
- Level string
- The level of logging detail to include. Valid values
OFF
,ERROR
,INFO
andTRACE
. - Cloudwatch
Logs PipeLog Destination Log Configuration Cloudwatch Logs Log Destination - Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- Firehose
Log PipeDestination Log Configuration Firehose Log Destination - Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- S3Log
Destination PipeLog Configuration S3Log Destination - Amazon S3 logging configuration settings for the pipe. Detailed below.
- Level string
- The level of logging detail to include. Valid values
OFF
,ERROR
,INFO
andTRACE
. - Cloudwatch
Logs PipeLog Destination Log Configuration Cloudwatch Logs Log Destination - Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- Firehose
Log PipeDestination Log Configuration Firehose Log Destination - Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- S3Log
Destination PipeLog Configuration S3Log Destination - Amazon S3 logging configuration settings for the pipe. Detailed below.
- level String
- The level of logging detail to include. Valid values
OFF
,ERROR
,INFO
andTRACE
. - cloudwatch
Logs PipeLog Destination Log Configuration Cloudwatch Logs Log Destination - Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehose
Log PipeDestination Log Configuration Firehose Log Destination - Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- s3Log
Destination PipeLog Configuration S3Log Destination - Amazon S3 logging configuration settings for the pipe. Detailed below.
- level string
- The level of logging detail to include. Valid values
OFF
,ERROR
,INFO
andTRACE
. - cloudwatch
Logs PipeLog Destination Log Configuration Cloudwatch Logs Log Destination - Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehose
Log PipeDestination Log Configuration Firehose Log Destination - Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- s3Log
Destination PipeLog Configuration S3Log Destination - Amazon S3 logging configuration settings for the pipe. Detailed below.
- level str
- The level of logging detail to include. Valid values
OFF
,ERROR
,INFO
andTRACE
. - cloudwatch_
logs_ Pipelog_ destination Log Configuration Cloudwatch Logs Log Destination - Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehose_
log_ Pipedestination Log Configuration Firehose Log Destination - Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- s3_
log_ Pipedestination Log Configuration S3Log Destination - Amazon S3 logging configuration settings for the pipe. Detailed below.
- level String
- The level of logging detail to include. Valid values
OFF
,ERROR
,INFO
andTRACE
. - cloudwatch
Logs Property MapLog Destination - Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehose
Log Property MapDestination - Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- s3Log
Destination Property Map - Amazon S3 logging configuration settings for the pipe. Detailed below.
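For instance, a pipe's execution logs can be delivered to a CloudWatch Logs group at INFO level; a TypeScript sketch (role and queue ARNs are placeholders):
import * as aws from "@pulumi/aws";

// Log group that will receive the pipe's execution records.
const pipeLogs = new aws.cloudwatch.LogGroup("pipeLogs", {retentionInDays: 14});

const loggedPipe = new aws.pipes.Pipe("loggedPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    logConfiguration: {
        level: "INFO",   // OFF, ERROR, INFO, or TRACE
        cloudwatchLogsLogDestination: {
            logGroupArn: pipeLogs.arn,
        },
    },
});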
PipeLogConfigurationCloudwatchLogsLogDestination, PipeLogConfigurationCloudwatchLogsLogDestinationArgs
- Log
Group stringArn
- Log
Group stringArn
- log
Group StringArn
- log
Group stringArn
- log_
group_ strarn
- log
Group StringArn
PipeLogConfigurationFirehoseLogDestination, PipeLogConfigurationFirehoseLogDestinationArgs
- Delivery
Stream stringArn
- Delivery
Stream stringArn
- delivery
Stream StringArn
- delivery
Stream stringArn
- delivery
Stream StringArn
PipeLogConfigurationS3LogDestination, PipeLogConfigurationS3LogDestinationArgs
- Bucket
Name string - Bucket
Owner string - Output
Format string - Prefix string
- Bucket
Name string - Bucket
Owner string - Output
Format string - Prefix string
- bucket
Name String - bucket
Owner String - output
Format String - prefix String
- bucket
Name string - bucket
Owner string - output
Format string - prefix string
- bucket_
name str - bucket_
owner str - output_
format str - prefix str
- bucket
Name String - bucket
Owner String - output
Format String - prefix String
PipeSourceParameters, PipeSourceParametersArgs
- Activemq
Broker PipeParameters Source Parameters Activemq Broker Parameters - The parameters for using an Active MQ broker as a source. Detailed below.
- Dynamodb
Stream PipeParameters Source Parameters Dynamodb Stream Parameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- Filter
Criteria PipeSource Parameters Filter Criteria - The collection of event patterns used to filter events. Detailed below.
- Kinesis
Stream PipeParameters Source Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- Managed
Streaming PipeKafka Parameters Source Parameters Managed Streaming Kafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- Rabbitmq
Broker PipeParameters Source Parameters Rabbitmq Broker Parameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- Self
Managed PipeKafka Parameters Source Parameters Self Managed Kafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- Sqs
Queue PipeParameters Source Parameters Sqs Queue Parameters - The parameters for using an Amazon SQS queue as a source. Detailed below.
- Activemq
Broker PipeParameters Source Parameters Activemq Broker Parameters - The parameters for using an Active MQ broker as a source. Detailed below.
- Dynamodb
Stream PipeParameters Source Parameters Dynamodb Stream Parameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- Filter
Criteria PipeSource Parameters Filter Criteria - The collection of event patterns used to filter events. Detailed below.
- Kinesis
Stream PipeParameters Source Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- Managed
Streaming PipeKafka Parameters Source Parameters Managed Streaming Kafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- Rabbitmq
Broker PipeParameters Source Parameters Rabbitmq Broker Parameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- Self
Managed PipeKafka Parameters Source Parameters Self Managed Kafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- Sqs
Queue PipeParameters Source Parameters Sqs Queue Parameters - The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemq
Broker PipeParameters Source Parameters Activemq Broker Parameters - The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodb
Stream PipeParameters Source Parameters Dynamodb Stream Parameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- filter
Criteria PipeSource Parameters Filter Criteria - The collection of event patterns used to filter events. Detailed below.
- kinesis
Stream PipeParameters Source Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- managed
Streaming PipeKafka Parameters Source Parameters Managed Streaming Kafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- rabbitmq
Broker PipeParameters Source Parameters Rabbitmq Broker Parameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- self
Managed PipeKafka Parameters Source Parameters Self Managed Kafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqs
Queue PipeParameters Source Parameters Sqs Queue Parameters - The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemq
Broker PipeParameters Source Parameters Activemq Broker Parameters - The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodb
Stream PipeParameters Source Parameters Dynamodb Stream Parameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- filter
Criteria PipeSource Parameters Filter Criteria - The collection of event patterns used to filter events. Detailed below.
- kinesis
Stream PipeParameters Source Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- managed
Streaming PipeKafka Parameters Source Parameters Managed Streaming Kafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- rabbitmq
Broker PipeParameters Source Parameters Rabbitmq Broker Parameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- self
Managed PipeKafka Parameters Source Parameters Self Managed Kafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqs
Queue PipeParameters Source Parameters Sqs Queue Parameters - The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemq_
broker_ Pipeparameters Source Parameters Activemq Broker Parameters - The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodb_
stream_ Pipeparameters Source Parameters Dynamodb Stream Parameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- filter_
criteria PipeSource Parameters Filter Criteria - The collection of event patterns used to filter events. Detailed below.
- kinesis_
stream_ Pipeparameters Source Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- managed_
streaming_ Pipekafka_ parameters Source Parameters Managed Streaming Kafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- rabbitmq_
broker_ Pipeparameters Source Parameters Rabbitmq Broker Parameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- self_
managed_ Pipekafka_ parameters Source Parameters Self Managed Kafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqs_
queue_ Pipeparameters Source Parameters Sqs Queue Parameters - The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemq
Broker Property MapParameters - The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodb
Stream Property MapParameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- filter
Criteria Property Map - The collection of event patterns used to filter events. Detailed below.
- kinesis
Stream Property MapParameters - The parameters for using a Kinesis stream as a source. Detailed below.
- managed
Streaming Property MapKafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- rabbitmq
Broker Property MapParameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- self
Managed Property MapKafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqs
Queue Property MapParameters - The parameters for using an Amazon SQS queue as a source. Detailed below.
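For example, source batching for an SQS queue can be tuned as in the TypeScript sketch below (role and queue ARNs are placeholders):
import * as aws from "@pulumi/aws";

// Sketch: read from SQS in batches of up to 10 messages, waiting at most 30 seconds per batch.
const batchedPipe = new aws.pipes.Pipe("batchedPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    sourceParameters: {
        sqsQueueParameters: {
            batchSize: 10,
            maximumBatchingWindowInSeconds: 30,
        },
    },
});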
PipeSourceParametersActivemqBrokerParameters, PipeSourceParametersActivemqBrokerParametersArgs
- credentials Property Map
- queue
Name String - batch
Size Number - maximum
Batching NumberWindow In Seconds
PipeSourceParametersActivemqBrokerParametersCredentials, PipeSourceParametersActivemqBrokerParametersCredentialsArgs
- Basic
Auth string
- Basic
Auth string
- basic
Auth String
- basic
Auth string
- basic_
auth str
- basic
Auth String
PipeSourceParametersDynamodbStreamParameters, PipeSourceParametersDynamodbStreamParametersArgs
- starting
Position String - batch
Size Number - dead
Letter Property MapConfig - maximum
Batching NumberWindow In Seconds - maximum
Record NumberAge In Seconds - maximum
Retry NumberAttempts - on
Partial StringBatch Item Failure - parallelization
Factor Number
PipeSourceParametersDynamodbStreamParametersDeadLetterConfig, PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs
- Arn string
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- Arn string
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn String
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn string
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn str
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn String
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
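Putting these together, a DynamoDB stream source with bounded retries and an SQS dead-letter queue might look like the following TypeScript sketch (role, stream, and target ARNs are placeholders):
import * as aws from "@pulumi/aws";

// Queue that receives records the pipe gives up on.
const pipeDlq = new aws.sqs.Queue("pipeDlq", {});

const streamPipe = new aws.pipes.Pipe("streamPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:dynamodb:us-east-1:111111111111:table/orders/stream/2024-01-01T00:00:00.000",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    sourceParameters: {
        dynamodbStreamParameters: {
            startingPosition: "LATEST",                    // or TRIM_HORIZON
            batchSize: 100,
            maximumRetryAttempts: 3,
            onPartialBatchItemFailure: "AUTOMATIC_BISECT", // split failing batches and retry
            deadLetterConfig: {
                arn: pipeDlq.arn,
            },
        },
    },
});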
PipeSourceParametersFilterCriteria, PipeSourceParametersFilterCriteriaArgs
PipeSourceParametersFilterCriteriaFilter, PipeSourceParametersFilterCriteriaFilterArgs
- Pattern string
- Pattern string
- pattern String
- pattern string
- pattern str
- pattern String
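Each filter holds an EventBridge event pattern as a JSON string. The TypeScript sketch below only forwards SQS messages whose JSON body contains status = "accepted"; field names and ARNs are illustrative:
import * as aws from "@pulumi/aws";

// Sketch: drop every message that does not match the pattern.
const filteredPipe = new aws.pipes.Pipe("filteredPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    sourceParameters: {
        filterCriteria: {
            filters: [{
                pattern: JSON.stringify({
                    body: {
                        status: ["accepted"],
                    },
                }),
            }],
        },
    },
});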
PipeSourceParametersKinesisStreamParameters, PipeSourceParametersKinesisStreamParametersArgs
- starting
Position String - batch
Size Integer - dead
Letter PipeConfig Source Parameters Kinesis Stream Parameters Dead Letter Config - maximum
Batching IntegerWindow In Seconds - maximum
Record IntegerAge In Seconds - maximum
Retry IntegerAttempts - on
Partial StringBatch Item Failure - parallelization
Factor Integer - starting
Position StringTimestamp
- starting
Position string - batch
Size number - dead
Letter PipeConfig Source Parameters Kinesis Stream Parameters Dead Letter Config - maximum
Batching numberWindow In Seconds - maximum
Record numberAge In Seconds - maximum
Retry numberAttempts - on
Partial stringBatch Item Failure - parallelization
Factor number - starting
Position stringTimestamp
- starting_
position str - batch_
size int - dead_
letter_ Pipeconfig Source Parameters Kinesis Stream Parameters Dead Letter Config - maximum_
batching_ intwindow_ in_ seconds - maximum_
record_ intage_ in_ seconds - maximum_
retry_ intattempts - on_
partial_ strbatch_ item_ failure - parallelization_
factor int - starting_
position_ strtimestamp
- starting
Position String - batch
Size Number - dead
Letter Property MapConfig - maximum
Batching NumberWindow In Seconds - maximum
Record NumberAge In Seconds - maximum
Retry NumberAttempts - on
Partial StringBatch Item Failure - parallelization
Factor Number - starting
Position StringTimestamp
PipeSourceParametersKinesisStreamParametersDeadLetterConfig, PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs
- Arn string
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- Arn string
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn String
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn string
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn str
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
- arn String
- ARN of the SQS queue or SNS topic to use as the dead-letter queue for this source.
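A Kinesis stream source looks similar; the TypeScript sketch below (placeholder ARNs) raises per-shard concurrency and caps record age:
import * as aws from "@pulumi/aws";

// Sketch: consume a Kinesis stream with four concurrent batches per shard.
const kinesisPipe = new aws.pipes.Pipe("kinesisPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:kinesis:us-east-1:111111111111:stream/example-stream",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    sourceParameters: {
        kinesisStreamParameters: {
            startingPosition: "TRIM_HORIZON",  // LATEST and AT_TIMESTAMP are also accepted;
                                               // AT_TIMESTAMP also requires startingPositionTimestamp
            batchSize: 100,
            parallelizationFactor: 4,
            maximumRecordAgeInSeconds: 3600,   // skip records older than one hour
        },
    },
});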
PipeSourceParametersManagedStreamingKafkaParameters, PipeSourceParametersManagedStreamingKafkaParametersArgs
- topic
Name String - batch
Size Integer - consumer
Group StringId - credentials
Pipe
Source Parameters Managed Streaming Kafka Parameters Credentials - maximum
Batching IntegerWindow In Seconds - starting
Position String
- topic
Name String - batch
Size Number - consumer
Group StringId - credentials Property Map
- maximum
Batching NumberWindow In Seconds - starting
Position String
PipeSourceParametersManagedStreamingKafkaParametersCredentials, PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs
- Client
Certificate stringTls Auth - Sasl
Scram512Auth string
- Client
Certificate stringTls Auth - Sasl
Scram512Auth string
- client
Certificate StringTls Auth - sasl
Scram512Auth String
- client
Certificate stringTls Auth - sasl
Scram512Auth string
- client
Certificate StringTls Auth - sasl
Scram512Auth String
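For example, an MSK topic can be consumed as in the TypeScript sketch below; the cluster and Secrets Manager ARNs are placeholders, and the credentials block is assumed to be needed only when the cluster enforces SASL/SCRAM authentication:
import * as aws from "@pulumi/aws";

// Sketch: MSK topic as the pipe source.
const mskPipe = new aws.pipes.Pipe("mskPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:kafka:us-east-1:111111111111:cluster/example/abcd1234-ef56-7890",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    sourceParameters: {
        managedStreamingKafkaParameters: {
            topicName: "orders",
            consumerGroupId: "pipes-orders",
            startingPosition: "TRIM_HORIZON",  // or LATEST
            credentials: {
                // Secrets Manager secret assumed to hold the SASL/SCRAM-512 credentials.
                saslScram512Auth: "arn:aws:secretsmanager:us-east-1:111111111111:secret:msk-scram-abc123",
            },
        },
    },
});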
PipeSourceParametersRabbitmqBrokerParameters, PipeSourceParametersRabbitmqBrokerParametersArgs
- credentials Property Map
- queue
Name String - batch
Size Number - maximum
Batching NumberWindow In Seconds - virtual
Host String
PipeSourceParametersRabbitmqBrokerParametersCredentials, PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs
- Basic
Auth string
- Basic
Auth string
- basic
Auth String
- basic
Auth string
- basic_
auth str
- basic
Auth String
PipeSourceParametersSelfManagedKafkaParameters, PipeSourceParametersSelfManagedKafkaParametersArgs
- topic
Name String - additional
Bootstrap List<String>Servers - batch
Size Integer - consumer
Group StringId - credentials
Pipe
Source Parameters Self Managed Kafka Parameters Credentials - maximum
Batching IntegerWindow In Seconds - server
Root StringCa Certificate - starting
Position String - vpc
Pipe
Source Parameters Self Managed Kafka Parameters Vpc
- topic
Name string - additional
Bootstrap string[]Servers - batch
Size number - consumer
Group stringId - credentials
Pipe
Source Parameters Self Managed Kafka Parameters Credentials - maximum
Batching numberWindow In Seconds - server
Root stringCa Certificate - starting
Position string - vpc
Pipe
Source Parameters Self Managed Kafka Parameters Vpc
- topic_
name str - additional_
bootstrap_ Sequence[str]servers - batch_
size int - consumer_
group_ strid - credentials
Pipe
Source Parameters Self Managed Kafka Parameters Credentials - maximum_
batching_ intwindow_ in_ seconds - server_
root_ strca_ certificate - starting_
position str - vpc
Pipe
Source Parameters Self Managed Kafka Parameters Vpc
- topic
Name String - additional
Bootstrap List<String>Servers - batch
Size Number - consumer
Group StringId - credentials Property Map
- maximum
Batching NumberWindow In Seconds - server
Root StringCa Certificate - starting
Position String - vpc Property Map
PipeSourceParametersSelfManagedKafkaParametersCredentials, PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs
- Basic
Auth string - Client
Certificate stringTls Auth - Sasl
Scram256Auth string - Sasl
Scram512Auth string
- Basic
Auth string - Client
Certificate stringTls Auth - Sasl
Scram256Auth string - Sasl
Scram512Auth string
- basic
Auth String - client
Certificate StringTls Auth - sasl
Scram256Auth String - sasl
Scram512Auth String
- basic
Auth string - client
Certificate stringTls Auth - sasl
Scram256Auth string - sasl
Scram512Auth string
- basic
Auth String - client
Certificate StringTls Auth - sasl
Scram256Auth String - sasl
Scram512Auth String
PipeSourceParametersSelfManagedKafkaParametersVpc, PipeSourceParametersSelfManagedKafkaParametersVpcArgs
- securityGroups List<String>
- subnets List<String>
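Taken together, the self-managed Kafka source settings cover the topic and consumer group, batching behaviour, optional SASL/TLS credentials stored in Secrets Manager, and an optional VPC block for brokers that are only reachable privately. The following TypeScript sketch shows how these pieces might fit together; the broker address, secret ARN, subnet and security group IDs, and the role and queue ARNs are placeholder values, not part of this reference.
import * as aws from "@pulumi/aws";

// Sketch only: every ARN, broker address, and ID below is a placeholder.
const kafkaSourcePipe = new aws.pipes.Pipe("kafka-source", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    // Self-managed Kafka sources use an smk:// bootstrap address instead of an ARN.
    source: "smk://broker-1.internal.example:9092",
    target: "arn:aws:sqs:us-east-1:111122223333:example-target-queue",
    sourceParameters: {
        selfManagedKafkaParameters: {
            topicName: "orders",
            consumerGroupId: "example-pipe-consumer",
            startingPosition: "LATEST",
            batchSize: 100,
            maximumBatchingWindowInSeconds: 5,
            credentials: {
                // ARN of a Secrets Manager secret holding SASL/SCRAM credentials.
                saslScram512Auth: "arn:aws:secretsmanager:us-east-1:111122223333:secret:kafka-sasl",
            },
            vpc: {
                subnets: ["subnet-0123456789abcdef0"],
                securityGroups: ["sg-0123456789abcdef0"],
            },
        },
    },
});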
PipeSourceParametersSqsQueueParameters, PipeSourceParametersSqsQueueParametersArgs
- batchSize Number
- maximumBatchingWindowInSeconds Number
PipeTargetParameters, PipeTargetParametersArgs
- batchJobParameters PipeTargetParametersBatchJobParameters - The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecsTaskParameters PipeTargetParametersEcsTaskParameters - The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters - The parameters for using an EventBridge event bus as a target. Detailed below.
- httpParameters PipeTargetParametersHttpParameters - Custom parameters to use when the target is an API Gateway REST API or an EventBridge API destination. Detailed below.
- inputTemplate String - Valid JSON text passed to the target. When set, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesisStreamParameters PipeTargetParametersKinesisStreamParameters - The parameters for using a Kinesis stream as a target. Detailed below.
- lambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters - The parameters for using a Lambda function as a target. Detailed below.
- redshiftDataParameters PipeTargetParametersRedshiftDataParameters - Custom parameters to use when the target is an Amazon Redshift cluster, invoked through the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters - The parameters for using a SageMaker pipeline as a target. Detailed below.
- sqsQueueParameters PipeTargetParametersSqsQueueParameters - The parameters for using an Amazon SQS queue as a target. Detailed below.
- stepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters - The parameters for using a Step Functions state machine as a target. Detailed below.
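At most one of the target-specific blocks above is set to match the pipe's target, and inputTemplate can be combined with any of them to replace the event payload with static JSON. As a hedged illustration, the sketch below sends a fixed JSON body to an SQS FIFO queue target; the role and queue ARNs are placeholders.
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs below are placeholders.
const sqsTargetPipe = new aws.pipes.Pipe("sqs-target", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:sqs:us-east-1:111122223333:example-target-queue.fifo",
    targetParameters: {
        // Static JSON passed to the target instead of the event payload.
        inputTemplate: JSON.stringify({ status: "processed" }),
        sqsQueueParameters: {
            // FIFO-specific settings; omit both for standard queues.
            messageGroupId: "example-group",
            messageDeduplicationId: "example-dedup-id",
        },
    },
});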
PipeTargetParametersBatchJobParameters, PipeTargetParametersBatchJobParametersArgs
- jobDefinition String
- jobName String
- arrayProperties PipeTargetParametersBatchJobParametersArrayProperties
- containerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
- dependsOns List<PipeTargetParametersBatchJobParametersDependsOn>
- parameters Map<String,String>
- retryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
PipeTargetParametersBatchJobParametersArrayProperties, PipeTargetParametersBatchJobParametersArrayPropertiesArgs
- size Number
PipeTargetParametersBatchJobParametersContainerOverrides, PipeTargetParametersBatchJobParametersContainerOverridesArgs
- commands List<String>
- environments List<PipeTargetParametersBatchJobParametersContainerOverridesEnvironment>
- instanceType String
- resourceRequirements List<PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement>
PipeTargetParametersBatchJobParametersContainerOverridesEnvironment, PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs
PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement, PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs
PipeTargetParametersBatchJobParametersDependsOn, PipeTargetParametersBatchJobParametersDependsOnArgs
PipeTargetParametersBatchJobParametersRetryStrategy, PipeTargetParametersBatchJobParametersRetryStrategyArgs
- attempts Number
PipeTargetParametersCloudwatchLogsParameters, PipeTargetParametersCloudwatchLogsParametersArgs
- logStreamName String
- timestamp String
PipeTargetParametersEcsTaskParameters, PipeTargetParametersEcsTaskParametersArgs
- taskDefinitionArn String
- capacityProviderStrategies List<PipeTargetParametersEcsTaskParametersCapacityProviderStrategy>
- enableEcsManagedTags Boolean
- enableExecuteCommand Boolean
- group String
- launchType String
- networkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
- overrides PipeTargetParametersEcsTaskParametersOverrides
- placementConstraints List<PipeTargetParametersEcsTaskParametersPlacementConstraint>
- placementStrategies List<PipeTargetParametersEcsTaskParametersPlacementStrategy>
- platformVersion String
- propagateTags String
- referenceId String
- tags Map<String,String> - Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- taskCount Number
PipeTargetParametersEcsTaskParametersCapacityProviderStrategy, PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs
- capacityProvider String
- base Number
- weight Number
PipeTargetParametersEcsTaskParametersNetworkConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs
PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs
- assignPublicIp String
- securityGroups List<String>
- subnets List<String>
PipeTargetParametersEcsTaskParametersOverrides, PipeTargetParametersEcsTaskParametersOverridesArgs
- containerOverrides List<PipeTargetParametersEcsTaskParametersOverridesContainerOverride>
- cpu String
- ephemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
- executionRoleArn String
- inferenceAcceleratorOverrides List<PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride>
- memory String
- taskRoleArn String
PipeTargetParametersEcsTaskParametersOverridesContainerOverride, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs
- commands List<String>
- cpu Number
- environmentFiles List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile>
- environments List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment>
- memory Number
- memoryReservation Number
- name String - Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- resourceRequirements List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement>
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs
PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage, PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs
- sizeInGib Number
PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride, PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs
- deviceName String
- deviceType String
PipeTargetParametersEcsTaskParametersPlacementConstraint, PipeTargetParametersEcsTaskParametersPlacementConstraintArgs
- expression String
- type String
PipeTargetParametersEcsTaskParametersPlacementStrategy, PipeTargetParametersEcsTaskParametersPlacementStrategyArgs
PipeTargetParametersEventbridgeEventBusParameters, PipeTargetParametersEventbridgeEventBusParametersArgs
- detailType String
- endpointId String
- resources List<String>
- source String - Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead of an ARN.
- time String
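When the target is an EventBridge event bus, these fields populate the envelope of the event the pipe emits (its source, detail-type, related resources, and time). The sketch below is a hedged illustration; the role, queue, and event bus ARNs are placeholders.
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs below are placeholders.
const busTargetPipe = new aws.pipes.Pipe("bus-target", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:events:us-east-1:111122223333:event-bus/example-bus",
    targetParameters: {
        eventbridgeEventBusParameters: {
            // Envelope fields for the events written to the bus.
            source: "com.example.pipes",
            detailType: "order.received",
            resources: ["arn:aws:sqs:us-east-1:111122223333:example-source-queue"],
        },
    },
});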
PipeTargetParametersHttpParameters, PipeTargetParametersHttpParametersArgs
- headerParameters Map<String,String>
- pathParameterValues String
- queryStringParameters Map<String,String>
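For API Gateway REST API or EventBridge API destination targets, these fields fill in path variables, headers, and query strings on the outgoing request. A hedged sketch against an API destination follows; the ARNs, header, query value, and path value are placeholders.
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs and values below are placeholders.
const httpTargetPipe = new aws.pipes.Pipe("http-target", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:events:us-east-1:111122223333:api-destination/example-dest/abcd1234",
    targetParameters: {
        httpParameters: {
            headerParameters: { "X-Example-Header": "example" },
            queryStringParameters: { mode: "async" },
            pathParameterValues: "example-path-value",
        },
    },
});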
PipeTargetParametersKinesisStreamParameters, PipeTargetParametersKinesisStreamParametersArgs
- partitionKey String
PipeTargetParametersLambdaFunctionParameters, PipeTargetParametersLambdaFunctionParametersArgs
- invocationType String
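For a Lambda function target, invocationType controls whether the pipe waits for the function's response (REQUEST_RESPONSE) or invokes it asynchronously (FIRE_AND_FORGET). A hedged sketch with placeholder ARNs:
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs below are placeholders.
const lambdaTargetPipe = new aws.pipes.Pipe("lambda-target", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:lambda:us-east-1:111122223333:function:example-handler",
    targetParameters: {
        lambdaFunctionParameters: {
            // REQUEST_RESPONSE waits for the function result; FIRE_AND_FORGET does not.
            invocationType: "REQUEST_RESPONSE",
        },
    },
});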
PipeTargetParametersRedshiftDataParameters, PipeTargetParametersRedshiftDataParametersArgs
- database String
- sqls List<String>
- dbUser String
- secretManagerArn String
- statementName String
- withEvent Boolean
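For an Amazon Redshift cluster target, the pipe runs the listed SQL statements through the Redshift Data API BatchExecuteStatement call, authenticating either with dbUser or with a Secrets Manager secret (secretManagerArn). A hedged sketch with placeholder identifiers:
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs, names, and SQL below are placeholders.
const redshiftTargetPipe = new aws.pipes.Pipe("redshift-target", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:redshift:us-east-1:111122223333:cluster:example-cluster",
    targetParameters: {
        redshiftDataParameters: {
            database: "dev",
            dbUser: "pipes_user",
            statementName: "example-statement",
            withEvent: true,
            sqls: ["INSERT INTO events_raw (payload) VALUES (default)"],
        },
    },
});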
PipeTargetParametersSagemakerPipelineParameters, PipeTargetParametersSagemakerPipelineParametersArgs
PipeTargetParametersSagemakerPipelineParametersPipelineParameter, PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs
PipeTargetParametersSqsQueueParameters, PipeTargetParametersSqsQueueParametersArgs
- messageDeduplicationId String
- messageGroupId String
PipeTargetParametersStepFunctionStateMachineParameters, PipeTargetParametersStepFunctionStateMachineParametersArgs
- invocationType String
Import
Using pulumi import, import pipes using the name. For example:
$ pulumi import aws:pipes/pipe:Pipe example my-pipe
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository: AWS Classic pulumi/pulumi-aws
- License: Apache-2.0
- Notes: This Pulumi package is based on the aws Terraform Provider.