
We recommend new projects start with resources from the AWS provider.

AWS Cloud Control v1.27.0 published on Monday, Apr 14, 2025 by Pulumi

aws-native.pipes.Pipe


Definition of AWS::Pipes::Pipe Resource Type

Example Usage

Example

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AwsNative = Pulumi.AwsNative;

return await Deployment.RunAsync(() => 
{
    var testPipe = new AwsNative.Pipes.Pipe("testPipe", new()
    {
        Name = "PipeCfnExample",
        RoleArn = "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
        Source = "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
        Enrichment = "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
        Target = "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
    });

});

package main

import (
	"github.com/pulumi/pulumi-aws-native/sdk/go/aws/pipes"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := pipes.NewPipe(ctx, "testPipe", &pipes.PipeArgs{
			Name:       pulumi.String("PipeCfnExample"),
			RoleArn:    pulumi.String("arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role"),
			Source:     pulumi.String("arn:aws:sqs:us-east-1:123456789123:pipeDemoSource"),
			Enrichment: pulumi.String("arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets"),
			Target:     pulumi.String("arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

import * as pulumi from "@pulumi/pulumi";
import * as aws_native from "@pulumi/aws-native";

const testPipe = new aws_native.pipes.Pipe("testPipe", {
    name: "PipeCfnExample",
    roleArn: "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    enrichment: "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
    target: "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
});

import pulumi
import pulumi_aws_native as aws_native

test_pipe = aws_native.pipes.Pipe("testPipe",
    name="PipeCfnExample",
    role_arn="arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source="arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    enrichment="arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
    target="arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine")

Create Pipe Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new Pipe(name: string, args: PipeArgs, opts?: CustomResourceOptions);
@overload
def Pipe(resource_name: str,
         args: PipeArgs,
         opts: Optional[ResourceOptions] = None)

@overload
def Pipe(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         role_arn: Optional[str] = None,
         target: Optional[str] = None,
         source: Optional[str] = None,
         name: Optional[str] = None,
         kms_key_identifier: Optional[str] = None,
         log_configuration: Optional[PipeLogConfigurationArgs] = None,
         description: Optional[str] = None,
         enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
         enrichment: Optional[str] = None,
         source_parameters: Optional[PipeSourceParametersArgs] = None,
         tags: Optional[Mapping[str, str]] = None,
         desired_state: Optional[PipeRequestedPipeState] = None,
         target_parameters: Optional[PipeTargetParametersArgs] = None)
func NewPipe(ctx *Context, name string, args PipeArgs, opts ...ResourceOption) (*Pipe, error)
public Pipe(string name, PipeArgs args, CustomResourceOptions? opts = null)
public Pipe(String name, PipeArgs args)
public Pipe(String name, PipeArgs args, CustomResourceOptions options)
type: aws-native:pipes:Pipe
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. PipeArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. PipeArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. PipeArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. PipeArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. PipeArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Pipe Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
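For example, the enrichment_parameters input can be supplied either as the generated PipeEnrichmentParametersArgs class or as an equivalent dictionary literal with snake_case keys. The sketch below reuses the ARNs from the example above; the input_template field is assumed to mirror the CloudFormation InputTemplate property, and the template value is illustrative only.

import pulumi
import pulumi_aws_native as aws_native

# ARNs reused from the example above, for illustration only.
role_arn = "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role"
source_arn = "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource"
enrichment_arn = "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets"
target_arn = "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine"

# Nested input passed as a generated args class ...
pipe_with_args_class = aws_native.pipes.Pipe("pipeWithArgsClass",
    role_arn=role_arn,
    source=source_arn,
    enrichment=enrichment_arn,
    target=target_arn,
    enrichment_parameters=aws_native.pipes.PipeEnrichmentParametersArgs(
        input_template='{"detail": <$.body>}'))

# ... or as an equivalent dictionary literal with snake_case keys.
pipe_with_dict = aws_native.pipes.Pipe("pipeWithDict",
    role_arn=role_arn,
    source=source_arn,
    enrichment=enrichment_arn,
    target=target_arn,
    enrichment_parameters={
        "input_template": '{"detail": <$.body>}',
    })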

The Pipe resource accepts the following input properties:

RoleArn This property is required. string
The ARN of the role that allows the pipe to send data to the target.
Source This property is required. string
The ARN of the source resource.
Target This property is required. string
The ARN of the target resource.
Description string
A description of the pipe.
DesiredState Pulumi.AwsNative.Pipes.PipeRequestedPipeState
The state the pipe should be in.
Enrichment string
The ARN of the enrichment resource.
EnrichmentParameters Pulumi.AwsNative.Pipes.Inputs.PipeEnrichmentParameters
The parameters required to set up enrichment on your pipe.
KmsKeyIdentifier string

The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN.

To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier.

To update a pipe that is using a customer managed key to use the default AWS owned key , specify an empty string.

For more information, see Managing keys in the AWS Key Management Service Developer Guide .

LogConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeLogConfiguration
The logging configuration settings for the pipe.
Name string
The name of the pipe.
SourceParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceParameters
The parameters required to set up a source for your pipe.
Tags Dictionary<string, string>
The list of key-value pairs to associate with the pipe.
TargetParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetParameters

The parameters required to set up a target for your pipe.

For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide .

RoleArn This property is required. string
The ARN of the role that allows the pipe to send data to the target.
Source This property is required. string
The ARN of the source resource.
Target This property is required. string
The ARN of the target resource.
Description string
A description of the pipe.
DesiredState PipeRequestedPipeState
The state the pipe should be in.
Enrichment string
The ARN of the enrichment resource.
EnrichmentParameters PipeEnrichmentParametersArgs
The parameters required to set up enrichment on your pipe.
KmsKeyIdentifier string

The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN.

To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier.

To update a pipe that is using a customer managed key to use the default AWS owned key , specify an empty string.

For more information, see Managing keys in the AWS Key Management Service Developer Guide .

LogConfiguration PipeLogConfigurationArgs
The logging configuration settings for the pipe.
Name string
The name of the pipe.
SourceParameters PipeSourceParametersArgs
The parameters required to set up a source for your pipe.
Tags map[string]string
The list of key-value pairs to associate with the pipe.
TargetParameters PipeTargetParametersArgs

The parameters required to set up a target for your pipe.

For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide .

roleArn This property is required. String
The ARN of the role that allows the pipe to send data to the target.
source This property is required. String
The ARN of the source resource.
target This property is required. String
The ARN of the target resource.
description String
A description of the pipe.
desiredState PipeRequestedPipeState
The state the pipe should be in.
enrichment String
The ARN of the enrichment resource.
enrichmentParameters PipeEnrichmentParameters
The parameters required to set up enrichment on your pipe.
kmsKeyIdentifier String

The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN.

To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier.

To update a pipe that is using a customer managed key to use the default AWS owned key , specify an empty string.

For more information, see Managing keys in the AWS Key Management Service Developer Guide .

logConfiguration PipeLogConfiguration
The logging configuration settings for the pipe.
name String
The name of the pipe.
sourceParameters PipeSourceParameters
The parameters required to set up a source for your pipe.
tags Map<String,String>
The list of key-value pairs to associate with the pipe.
targetParameters PipeTargetParameters

The parameters required to set up a target for your pipe.

For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide .

roleArn This property is required. string
The ARN of the role that allows the pipe to send data to the target.
source This property is required. string
The ARN of the source resource.
target This property is required. string
The ARN of the target resource.
description string
A description of the pipe.
desiredState PipeRequestedPipeState
The state the pipe should be in.
enrichment string
The ARN of the enrichment resource.
enrichmentParameters PipeEnrichmentParameters
The parameters required to set up enrichment on your pipe.
kmsKeyIdentifier string

The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN.

To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier.

To update a pipe that is using a customer managed key to use the default AWS owned key , specify an empty string.

For more information, see Managing keys in the AWS Key Management Service Developer Guide .

logConfiguration PipeLogConfiguration
The logging configuration settings for the pipe.
name string
The name of the pipe.
sourceParameters PipeSourceParameters
The parameters required to set up a source for your pipe.
tags {[key: string]: string}
The list of key-value pairs to associate with the pipe.
targetParameters PipeTargetParameters

The parameters required to set up a target for your pipe.

For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide .

role_arn This property is required. str
The ARN of the role that allows the pipe to send data to the target.
source This property is required. str
The ARN of the source resource.
target This property is required. str
The ARN of the target resource.
description str
A description of the pipe.
desired_state PipeRequestedPipeState
The state the pipe should be in.
enrichment str
The ARN of the enrichment resource.
enrichment_parameters PipeEnrichmentParametersArgs
The parameters required to set up enrichment on your pipe.
kms_key_identifier str

The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN.

To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier.

To update a pipe that is using a customer managed key to use the default AWS owned key , specify an empty string.

For more information, see Managing keys in the AWS Key Management Service Developer Guide .

log_configuration PipeLogConfigurationArgs
The logging configuration settings for the pipe.
name str
The name of the pipe.
source_parameters PipeSourceParametersArgs
The parameters required to set up a source for your pipe.
tags Mapping[str, str]
The list of key-value pairs to associate with the pipe.
target_parameters PipeTargetParametersArgs

The parameters required to set up a target for your pipe.

For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide .

roleArn This property is required. String
The ARN of the role that allows the pipe to send data to the target.
source This property is required. String
The ARN of the source resource.
target This property is required. String
The ARN of the target resource.
description String
A description of the pipe.
desiredState "RUNNING" | "STOPPED"
The state the pipe should be in.
enrichment String
The ARN of the enrichment resource.
enrichmentParameters Property Map
The parameters required to set up enrichment on your pipe.
kmsKeyIdentifier String

The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN.

To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier.

To update a pipe that is using a customer managed key to use the default AWS owned key , specify an empty string.

For more information, see Managing keys in the AWS Key Management Service Developer Guide .

logConfiguration Property Map
The logging configuration settings for the pipe.
name String
The name of the pipe.
sourceParameters Property Map
The parameters required to set up a source for your pipe.
tags Map<String>
The list of key-value pairs to associate with the pipe.
targetParameters Property Map

The parameters required to set up a target for your pipe.

For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide .

Outputs

All input properties are implicitly available as output properties. Additionally, the Pipe resource produces the following output properties:

Arn string
The ARN of the pipe.
CreationTime string
The time the pipe was created.
CurrentState Pulumi.AwsNative.Pipes.PipeState
The state the pipe is in.
Id string
The provider-assigned unique ID for this managed resource.
LastModifiedTime string
When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
StateReason string
The reason the pipe is in its current state.
Arn string
The ARN of the pipe.
CreationTime string
The time the pipe was created.
CurrentState PipeStateEnum
The state the pipe is in.
Id string
The provider-assigned unique ID for this managed resource.
LastModifiedTime string
When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
StateReason string
The reason the pipe is in its current state.
arn String
The ARN of the pipe.
creationTime String
The time the pipe was created.
currentState PipeState
The state the pipe is in.
id String
The provider-assigned unique ID for this managed resource.
lastModifiedTime String
When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
stateReason String
The reason the pipe is in its current state.
arn string
The ARN of the pipe.
creationTime string
The time the pipe was created.
currentState PipeState
The state the pipe is in.
id string
The provider-assigned unique ID for this managed resource.
lastModifiedTime string
When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
stateReason string
The reason the pipe is in its current state.
arn str
The ARN of the pipe.
creation_time str
The time the pipe was created.
current_state PipeState
The state the pipe is in.
id str
The provider-assigned unique ID for this managed resource.
last_modified_time str
When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
state_reason str
The reason the pipe is in its current state.
arn String
The ARN of the pipe.
creationTime String
The time the pipe was created.
currentState "RUNNING" | "STOPPED" | "CREATING" | "UPDATING" | "DELETING" | "STARTING" | "STOPPING" | "CREATE_FAILED" | "UPDATE_FAILED" | "START_FAILED" | "STOP_FAILED" | "DELETE_FAILED" | "CREATE_ROLLBACK_FAILED" | "DELETE_ROLLBACK_FAILED" | "UPDATE_ROLLBACK_FAILED"
The state the pipe is in.
id String
The provider-assigned unique ID for this managed resource.
lastModifiedTime String
When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
stateReason String
The reason the pipe is in its current state.
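
For instance, a pipe's ARN and current state can be exported as stack outputs once the resource is declared. This is a minimal Python sketch that passes only the required inputs:

import pulumi
import pulumi_aws_native as aws_native

test_pipe = aws_native.pipes.Pipe("testPipe",
    role_arn="arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source="arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target="arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine")

# Output properties resolve once the resource has been created.
pulumi.export("pipeArn", test_pipe.arn)
pulumi.export("pipeCurrentState", test_pipe.current_state)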

Supporting Types

PipeAssignPublicIp
, PipeAssignPublicIpArgs

Enabled
ENABLED
Disabled
DISABLED
PipeAssignPublicIpEnabled
ENABLED
PipeAssignPublicIpDisabled
DISABLED
Enabled
ENABLED
Disabled
DISABLED
Enabled
ENABLED
Disabled
DISABLED
ENABLED
ENABLED
DISABLED
DISABLED
"ENABLED"
ENABLED
"DISABLED"
DISABLED

PipeAwsVpcConfiguration
, PipeAwsVpcConfigurationArgs

Subnets This property is required. List<string>
Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
AssignPublicIp Pulumi.AwsNative.Pipes.PipeAssignPublicIp
Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE .
SecurityGroups List<string>
Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
Subnets This property is required. []string
Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
AssignPublicIp PipeAssignPublicIp
Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE .
SecurityGroups []string
Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
subnets This property is required. List<String>
Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
assignPublicIp PipeAssignPublicIp
Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE .
securityGroups List<String>
Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
subnets This property is required. string[]
Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
assignPublicIp PipeAssignPublicIp
Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE .
securityGroups string[]
Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
subnets This property is required. Sequence[str]
Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
assign_public_ip PipeAssignPublicIp
Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE .
security_groups Sequence[str]
Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
subnets This property is required. List<String>
Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
assignPublicIp "ENABLED" | "DISABLED"
Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE .
securityGroups List<String>
Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
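
As a sketch, this configuration can be built in Python as shown below. The subnet and security group IDs are placeholders, and the enum member name PipeAssignPublicIp.ENABLED is assumed from the values listed above; the resulting object is then referenced from an ECS target's network configuration.

import pulumi_aws_native as aws_native

# Placeholder network IDs for illustration only.
vpc_configuration = aws_native.pipes.PipeAwsVpcConfigurationArgs(
    subnets=["subnet-0123456789abcdef0"],
    security_groups=["sg-0123456789abcdef0"],
    # ENABLED is only valid when the ECS launch type is FARGATE.
    assign_public_ip=aws_native.pipes.PipeAssignPublicIp.ENABLED)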

PipeBatchArrayProperties
, PipeBatchArrayPropertiesArgs

Size int
The size of the array, if this is an array batch job.
Size int
The size of the array, if this is an array batch job.
size Integer
The size of the array, if this is an array batch job.
size number
The size of the array, if this is an array batch job.
size int
The size of the array, if this is an array batch job.
size Number
The size of the array, if this is an array batch job.

PipeBatchContainerOverrides
, PipeBatchContainerOverridesArgs

Command List<string>
The command to send to the container that overrides the default command from the Docker image or the task definition.
Environment List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchEnvironmentVariable>

The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition.

Environment variables cannot start with " AWS Batch ". This naming convention is reserved for variables that AWS Batch sets.

InstanceType string

The instance type to use for a multi-node parallel job.

This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.

ResourceRequirements List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchResourceRequirement>
The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU , MEMORY , and VCPU .
Command []string
The command to send to the container that overrides the default command from the Docker image or the task definition.
Environment []PipeBatchEnvironmentVariable

The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition.

Environment variables cannot start with " AWS Batch ". This naming convention is reserved for variables that AWS Batch sets.

InstanceType string

The instance type to use for a multi-node parallel job.

This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.

ResourceRequirements []PipeBatchResourceRequirement
The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU , MEMORY , and VCPU .
command List<String>
The command to send to the container that overrides the default command from the Docker image or the task definition.
environment List<PipeBatchEnvironmentVariable>

The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition.

Environment variables cannot start with " AWS Batch ". This naming convention is reserved for variables that AWS Batch sets.

instanceType String

The instance type to use for a multi-node parallel job.

This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.

resourceRequirements List<PipeBatchResourceRequirement>
The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU , MEMORY , and VCPU .
command string[]
The command to send to the container that overrides the default command from the Docker image or the task definition.
environment PipeBatchEnvironmentVariable[]

The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition.

Environment variables cannot start with " AWS Batch ". This naming convention is reserved for variables that AWS Batch sets.

instanceType string

The instance type to use for a multi-node parallel job.

This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.

resourceRequirements PipeBatchResourceRequirement[]
The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU , MEMORY , and VCPU .
command Sequence[str]
The command to send to the container that overrides the default command from the Docker image or the task definition.
environment Sequence[PipeBatchEnvironmentVariable]

The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition.

Environment variables cannot start with " AWS Batch ". This naming convention is reserved for variables that AWS Batch sets.

instance_type str

The instance type to use for a multi-node parallel job.

This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.

resource_requirements Sequence[PipeBatchResourceRequirement]
The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU , MEMORY , and VCPU .
command List<String>
The command to send to the container that overrides the default command from the Docker image or the task definition.
environment List<Property Map>

The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition.

Environment variables cannot start with " AWS Batch ". This naming convention is reserved for variables that AWS Batch sets.

instanceType String

The instance type to use for a multi-node parallel job.

This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.

resourceRequirements List<Property Map>
The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU , MEMORY , and VCPU .

PipeBatchEnvironmentVariable
, PipeBatchEnvironmentVariableArgs

Name string
The name of the key-value pair. For environment variables, this is the name of the environment variable.
Value string
The value of the key-value pair. For environment variables, this is the value of the environment variable.
Name string
The name of the key-value pair. For environment variables, this is the name of the environment variable.
Value string
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name String
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value String
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name string
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value string
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name str
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value str
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name String
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value String
The value of the key-value pair. For environment variables, this is the value of the environment variable.

PipeBatchJobDependency
, PipeBatchJobDependencyArgs

JobId string
The job ID of the AWS Batch job that's associated with this dependency.
Type Pulumi.AwsNative.Pipes.PipeBatchJobDependencyType
The type of the job dependency.
JobId string
The job ID of the AWS Batch job that's associated with this dependency.
Type PipeBatchJobDependencyType
The type of the job dependency.
jobId String
The job ID of the AWS Batch job that's associated with this dependency.
type PipeBatchJobDependencyType
The type of the job dependency.
jobId string
The job ID of the AWS Batch job that's associated with this dependency.
type PipeBatchJobDependencyType
The type of the job dependency.
job_id str
The job ID of the AWS Batch job that's associated with this dependency.
type PipeBatchJobDependencyType
The type of the job dependency.
jobId String
The job ID of the AWS Batch job that's associated with this dependency.
type "N_TO_N" | "SEQUENTIAL"
The type of the job dependency.

PipeBatchJobDependencyType
, PipeBatchJobDependencyTypeArgs

NToN
N_TO_N
Sequential
SEQUENTIAL
PipeBatchJobDependencyTypeNToN
N_TO_N
PipeBatchJobDependencyTypeSequential
SEQUENTIAL
NToN
N_TO_N
Sequential
SEQUENTIAL
NToN
N_TO_N
Sequential
SEQUENTIAL
N_TO_N
N_TO_N
SEQUENTIAL
SEQUENTIAL
"N_TO_N"
N_TO_N
"SEQUENTIAL"
SEQUENTIAL

PipeBatchResourceRequirement
, PipeBatchResourceRequirementArgs

Type This property is required. Pulumi.AwsNative.Pipes.PipeBatchResourceRequirementType
The type of resource to assign to a container. The supported resources include GPU , MEMORY , and VCPU .
Value This property is required. string

The quantity of the specified resource to reserve for the container. The values vary based on the type specified.

  • type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on.

GPUs aren't available for jobs that are running on Fargate resources.

  • type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run . You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs. It must be specified for each node at least once. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run .

If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide .

For jobs that are running on Fargate resources, then value is the hard limit (in MiB), and must match one of the supported values and the VCPU values must be one of the values supported for that memory value.

  • value = 512 - VCPU = 0.25
  • value = 1024 - VCPU = 0.25 or 0.5
  • value = 2048 - VCPU = 0.25, 0.5, or 1
  • value = 3072 - VCPU = 0.5, or 1
  • value = 4096 - VCPU = 0.5, 1, or 2
  • value = 5120, 6144, or 7168 - VCPU = 1 or 2
  • value = 8192 - VCPU = 1, 2, 4, or 8
  • value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
  • value = 16384 - VCPU = 2, 4, or 8
  • value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
  • value = 20480, 24576, or 28672 - VCPU = 4 or 8
  • value = 36864, 45056, 53248, or 61440 - VCPU = 8
  • value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
  • value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  • type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run . Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once.

The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference .

For jobs that are running on Fargate resources, then value must match one of the supported values and the MEMORY values must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16

  • value = 0.25 - MEMORY = 512, 1024, or 2048
  • value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
  • value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
  • value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
  • value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
  • value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
  • value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
Type This property is required. PipeBatchResourceRequirementType
The type of resource to assign to a container. The supported resources include GPU , MEMORY , and VCPU .
Value This property is required. string

The quantity of the specified resource to reserve for the container. The values vary based on the type specified.

  • type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on.

GPUs aren't available for jobs that are running on Fargate resources.

  • type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run . You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs. It must be specified for each node at least once. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run .

If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide .

For jobs that are running on Fargate resources, then value is the hard limit (in MiB), and must match one of the supported values and the VCPU values must be one of the values supported for that memory value.

  • value = 512 - VCPU = 0.25
  • value = 1024 - VCPU = 0.25 or 0.5
  • value = 2048 - VCPU = 0.25, 0.5, or 1
  • value = 3072 - VCPU = 0.5, or 1
  • value = 4096 - VCPU = 0.5, 1, or 2
  • value = 5120, 6144, or 7168 - VCPU = 1 or 2
  • value = 8192 - VCPU = 1, 2, 4, or 8
  • value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
  • value = 16384 - VCPU = 2, 4, or 8
  • value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
  • value = 20480, 24576, or 28672 - VCPU = 4 or 8
  • value = 36864, 45056, 53248, or 61440 - VCPU = 8
  • value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
  • value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  • type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run . Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once.

The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference .

For jobs that are running on Fargate resources, then value must match one of the supported values and the MEMORY values must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16

  • value = 0.25 - MEMORY = 512, 1024, or 2048
  • value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
  • value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
  • value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
  • value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
  • value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
  • value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
type This property is required. PipeBatchResourceRequirementType
The type of resource to assign to a container. The supported resources include GPU , MEMORY , and VCPU .
value This property is required. String

The quantity of the specified resource to reserve for the container. The values vary based on the type specified.

  • type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on.

GPUs aren't available for jobs that are running on Fargate resources.

  • type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run . You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs. It must be specified for each node at least once. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run .

If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide .

For jobs that are running on Fargate resources, then value is the hard limit (in MiB), and must match one of the supported values and the VCPU values must be one of the values supported for that memory value.

  • value = 512 - VCPU = 0.25
  • value = 1024 - VCPU = 0.25 or 0.5
  • value = 2048 - VCPU = 0.25, 0.5, or 1
  • value = 3072 - VCPU = 0.5, or 1
  • value = 4096 - VCPU = 0.5, 1, or 2
  • value = 5120, 6144, or 7168 - VCPU = 1 or 2
  • value = 8192 - VCPU = 1, 2, 4, or 8
  • value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
  • value = 16384 - VCPU = 2, 4, or 8
  • value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
  • value = 20480, 24576, or 28672 - VCPU = 4 or 8
  • value = 36864, 45056, 53248, or 61440 - VCPU = 8
  • value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
  • value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  • type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run . Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once.

The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference .

For jobs that are running on Fargate resources, then value must match one of the supported values and the MEMORY values must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16

  • value = 0.25 - MEMORY = 512, 1024, or 2048
  • value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
  • value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
  • value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
  • value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
  • value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
  • value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
type This property is required. PipeBatchResourceRequirementType
The type of resource to assign to a container. The supported resources include GPU , MEMORY , and VCPU .
value This property is required. string

The quantity of the specified resource to reserve for the container. The values vary based on the type specified.

  • type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on.

GPUs aren't available for jobs that are running on Fargate resources.

  • type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run . You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs. It must be specified for each node at least once. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run .

If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide .

For jobs that are running on Fargate resources, then value is the hard limit (in MiB), and must match one of the supported values and the VCPU values must be one of the values supported for that memory value.

  • value = 512 - VCPU = 0.25
  • value = 1024 - VCPU = 0.25 or 0.5
  • value = 2048 - VCPU = 0.25, 0.5, or 1
  • value = 3072 - VCPU = 0.5, or 1
  • value = 4096 - VCPU = 0.5, 1, or 2
  • value = 5120, 6144, or 7168 - VCPU = 1 or 2
  • value = 8192 - VCPU = 1, 2, 4, or 8
  • value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
  • value = 16384 - VCPU = 2, 4, or 8
  • value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
  • value = 20480, 24576, or 28672 - VCPU = 4 or 8
  • value = 36864, 45056, 53248, or 61440 - VCPU = 8
  • value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
  • value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  • type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run . Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once.

The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference .

For jobs that are running on Fargate resources, then value must match one of the supported values and the MEMORY values must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16

  • value = 0.25 - MEMORY = 512, 1024, or 2048
  • value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
  • value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
  • value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
  • value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
  • value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
  • value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
type This property is required. PipeBatchResourceRequirementType
The type of resource to assign to a container. The supported resources include GPU , MEMORY , and VCPU .
value This property is required. str

The quantity of the specified resource to reserve for the container. The values vary based on the type specified.

  • type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on.

GPUs aren't available for jobs that are running on Fargate resources.

  • type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run . You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs. It must be specified for each node at least once. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run .

If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide .

For jobs that are running on Fargate resources, then value is the hard limit (in MiB), and must match one of the supported values and the VCPU values must be one of the values supported for that memory value.

  • value = 512 - VCPU = 0.25
  • value = 1024 - VCPU = 0.25 or 0.5
  • value = 2048 - VCPU = 0.25, 0.5, or 1
  • value = 3072 - VCPU = 0.5, or 1
  • value = 4096 - VCPU = 0.5, 1, or 2
  • value = 5120, 6144, or 7168 - VCPU = 1 or 2
  • value = 8192 - VCPU = 1, 2, 4, or 8
  • value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
  • value = 16384 - VCPU = 2, 4, or 8
  • value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
  • value = 20480, 24576, or 28672 - VCPU = 4 or 8
  • value = 36864, 45056, 53248, or 61440 - VCPU = 8
  • value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
  • value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  • type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run . Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once.

The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference .

For jobs that are running on Fargate resources, then value must match one of the supported values and the MEMORY values must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16

  • value = 0.25 - MEMORY = 512, 1024, or 2048
  • value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
  • value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
  • value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
  • value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
  • value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
  • value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
type This property is required. "GPU" | "MEMORY" | "VCPU"
The type of resource to assign to a container. The supported resources include GPU , MEMORY , and VCPU .
value This property is required. String

The quantity of the specified resource to reserve for the container. The values vary based on the type specified.

  • type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on.

GPUs aren't available for jobs that are running on Fargate resources.

  • type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run . You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs. It must be specified for each node at least once. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run .

If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide .

For jobs that are running on Fargate resources, then value is the hard limit (in MiB), and must match one of the supported values and the VCPU values must be one of the values supported for that memory value.

  • value = 512 - VCPU = 0.25
  • value = 1024 - VCPU = 0.25 or 0.5
  • value = 2048 - VCPU = 0.25, 0.5, or 1
  • value = 3072 - VCPU = 0.5 or 1
  • value = 4096 - VCPU = 0.5, 1, or 2
  • value = 5120, 6144, or 7168 - VCPU = 1 or 2
  • value = 8192 - VCPU = 1, 2, 4, or 8
  • value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
  • value = 16384 - VCPU = 2, 4, or 8
  • value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
  • value = 20480, 24576, or 28672 - VCPU = 4 or 8
  • value = 36864, 45056, 53248, or 61440 - VCPU = 8
  • value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
  • value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  • type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run . Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once.

The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference .

For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16. A paired VCPU/MEMORY example is sketched after this list.

  • value = 0.25 - MEMORY = 512, 1024, or 2048
  • value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
  • value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
  • value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
  • value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
  • value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
  • value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
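
As a concrete illustration of the table above, the following TypeScript sketch pairs a Fargate-compatible VCPU value with one of the MEMORY values listed for it. Where such an array is attached (for example, under a Batch job target's resource requirements) is an assumption made only for illustration.

// Minimal sketch: a valid Fargate VCPU/MEMORY pairing taken from the table above.
// The surrounding property path is an assumption, not defined by this reference.
const batchResourceRequirements = [
    { type: "VCPU", value: "1" },      // vCPU count, expressed as a string
    { type: "MEMORY", value: "2048" }, // 2048 MiB is one of the values supported for 1 vCPU
];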

PipeBatchResourceRequirementType
, PipeBatchResourceRequirementTypeArgs

Gpu
GPU
Memory
MEMORY
Vcpu
VCPU
PipeBatchResourceRequirementTypeGpu
GPU
PipeBatchResourceRequirementTypeMemory
MEMORY
PipeBatchResourceRequirementTypeVcpu
VCPU
Gpu
GPU
Memory
MEMORY
Vcpu
VCPU
Gpu
GPU
Memory
MEMORY
Vcpu
VCPU
GPU
GPU
MEMORY
MEMORY
VCPU
VCPU
"GPU"
GPU
"MEMORY"
MEMORY
"VCPU"
VCPU

PipeBatchRetryStrategy
, PipeBatchRetryStrategyArgs

Attempts int
The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
Attempts int
The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
attempts Integer
The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
attempts number
The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
attempts int
The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
attempts Number
The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
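
A retry strategy carries only the attempts count. A minimal TypeScript sketch, assuming the object is later attached to a Batch job target's parameters:

// Hypothetical sketch of a PipeBatchRetryStrategy-shaped object.
const retryStrategy = {
    attempts: 2, // move a failed job back to RUNNABLE up to two times
};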

PipeCapacityProviderStrategyItem
, PipeCapacityProviderStrategyItemArgs

CapacityProvider This property is required. string
The short name of the capacity provider.
Base int
The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
Weight int
The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
CapacityProvider This property is required. string
The short name of the capacity provider.
Base int
The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
Weight int
The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
capacityProvider This property is required. String
The short name of the capacity provider.
base Integer
The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
weight Integer
The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
capacityProvider This property is required. string
The short name of the capacity provider.
base number
The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
weight number
The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
capacity_provider This property is required. str
The short name of the capacity provider.
base int
The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
weight int
The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
capacityProvider This property is required. String
The short name of the capacity provider.
base Number
The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
weight Number
The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
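
Because only one item in a strategy may define a base, a sketch with two providers could look as follows (TypeScript; the provider short names are hypothetical examples):

// Sketch of a capacity provider strategy list; provider names are placeholders.
const capacityProviderStrategy = [
    { capacityProvider: "FARGATE", base: 1, weight: 1 },  // only this item defines a base
    { capacityProvider: "FARGATE_SPOT", weight: 4 },      // weighted 4:1 after the base is satisfied
];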

PipeCloudwatchLogsLogDestination
, PipeCloudwatchLogsLogDestinationArgs

LogGroupArn string
The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
LogGroupArn string
The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
logGroupArn String
The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
logGroupArn string
The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
log_group_arn str
The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
logGroupArn String
The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.

PipeDeadLetterConfig
, PipeDeadLetterConfigArgs

Arn string

The ARN of the specified target for the dead-letter queue.

For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.

Arn string

The ARN of the specified target for the dead-letter queue.

For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.

arn String

The ARN of the specified target for the dead-letter queue.

For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.

arn string

The ARN of the specified target for the dead-letter queue.

For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.

arn str

The ARN of the specified target for the dead-letter queue.

For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.

arn String

The ARN of the specified target for the dead-letter queue.

For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.
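
A dead-letter configuration is just the ARN of the queue or topic. A TypeScript sketch with a placeholder SQS queue ARN:

// Sketch: dead-letter target. The queue ARN is a placeholder, not a real resource.
const deadLetterConfig = {
    arn: "arn:aws:sqs:us-east-1:111122223333:pipe-dlq",
};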

PipeDimensionMapping
, PipeDimensionMappingArgs

DimensionName This property is required. string
The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
DimensionValue This property is required. string
Dynamic path to the dimension value in the source event.
DimensionValueType This property is required. Pulumi.AwsNative.Pipes.PipeDimensionValueType
The data type of the dimension for the time-series data.
DimensionName This property is required. string
The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
DimensionValue This property is required. string
Dynamic path to the dimension value in the source event.
DimensionValueType This property is required. PipeDimensionValueType
The data type of the dimension for the time-series data.
dimensionName This property is required. String
The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
dimensionValue This property is required. String
Dynamic path to the dimension value in the source event.
dimensionValueType This property is required. PipeDimensionValueType
The data type of the dimension for the time-series data.
dimensionName This property is required. string
The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
dimensionValue This property is required. string
Dynamic path to the dimension value in the source event.
dimensionValueType This property is required. PipeDimensionValueType
The data type of the dimension for the time-series data.
dimension_name This property is required. str
The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
dimension_value This property is required. str
Dynamic path to the dimension value in the source event.
dimension_value_type This property is required. PipeDimensionValueType
The data type of the dimension for the time-series data.
dimensionName This property is required. String
The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
dimensionValue This property is required. String
Dynamic path to the dimension value in the source event.
dimensionValueType This property is required. "VARCHAR"
The data type of the dimension for the time-series data.
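
For example, a dimension mapping ties a dimension name to a dynamic path in the source event (TypeScript sketch; the name and path are hypothetical):

// Sketch of a PipeDimensionMapping-shaped object.
const dimensionMapping = {
    dimensionName: "InstanceId",             // dimension name for the time-series record
    dimensionValue: "$.detail.instance-id",  // dynamic path into the source event (hypothetical)
    dimensionValueType: "VARCHAR",           // the only value type listed under PipeDimensionValueType below
};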

PipeDimensionValueType
, PipeDimensionValueTypeArgs

Varchar
VARCHAR
PipeDimensionValueTypeVarchar
VARCHAR
Varchar
VARCHAR
Varchar
VARCHAR
VARCHAR
VARCHAR
"VARCHAR"
VARCHAR

PipeDynamoDbStreamStartPosition
, PipeDynamoDbStreamStartPositionArgs

TrimHorizon
TRIM_HORIZON
Latest
LATEST
PipeDynamoDbStreamStartPositionTrimHorizon
TRIM_HORIZON
PipeDynamoDbStreamStartPositionLatest
LATEST
TrimHorizon
TRIM_HORIZON
Latest
LATEST
TrimHorizon
TRIM_HORIZON
Latest
LATEST
TRIM_HORIZON
TRIM_HORIZON
LATEST
LATEST
"TRIM_HORIZON"
TRIM_HORIZON
"LATEST"
LATEST

PipeEcsContainerOverride
, PipeEcsContainerOverrideArgs

Command List<string>
The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
Cpu int
The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
Environment List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsEnvironmentVariable>
The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
EnvironmentFiles List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsEnvironmentFile>
A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
Memory int
The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
MemoryReservation int
The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
Name string
The name of the container that receives the override. This parameter is required if any override is specified.
ResourceRequirements List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsResourceRequirement>
The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
Command []string
The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
Cpu int
The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
Environment []PipeEcsEnvironmentVariable
The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
EnvironmentFiles []PipeEcsEnvironmentFile
A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
Memory int
The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
MemoryReservation int
The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
Name string
The name of the container that receives the override. This parameter is required if any override is specified.
ResourceRequirements []PipeEcsResourceRequirement
The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
command List<String>
The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
cpu Integer
The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
environment List<PipeEcsEnvironmentVariable>
The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
environmentFiles List<PipeEcsEnvironmentFile>
A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
memory Integer
The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
memoryReservation Integer
The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
name String
The name of the container that receives the override. This parameter is required if any override is specified.
resourceRequirements List<PipeEcsResourceRequirement>
The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
command string[]
The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
cpu number
The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
environment PipeEcsEnvironmentVariable[]
The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
environmentFiles PipeEcsEnvironmentFile[]
A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
memory number
The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
memoryReservation number
The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
name string
The name of the container that receives the override. This parameter is required if any override is specified.
resourceRequirements PipeEcsResourceRequirement[]
The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
command Sequence[str]
The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
cpu int
The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
environment Sequence[PipeEcsEnvironmentVariable]
The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
environment_files Sequence[PipeEcsEnvironmentFile]
A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
memory int
The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
memory_reservation int
The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
name str
The name of the container that receives the override. This parameter is required if any override is specified.
resource_requirements Sequence[PipeEcsResourceRequirement]
The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
command List<String>
The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
cpu Number
The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
environment List<Property Map>
The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
environmentFiles List<Property Map>
A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
memory Number
The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
memoryReservation Number
The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
name String
The name of the container that receives the override. This parameter is required if any override is specified.
resourceRequirements List<Property Map>
The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
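
A container override names the container and replaces selected settings from the task definition. A TypeScript sketch, with the container name and values chosen purely for illustration:

// Sketch of a PipeEcsContainerOverride-shaped object; all values are illustrative.
const containerOverride = {
    name: "app",                                     // required whenever any other override is set
    command: ["python", "handler.py"],               // replaces the default command
    cpu: 256,                                        // CPU units reserved for the container
    memory: 512,                                     // hard memory limit in MiB
    environment: [{ name: "STAGE", value: "dev" }],  // PipeEcsEnvironmentVariable entries
};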

PipeEcsEnvironmentFile
, PipeEcsEnvironmentFileArgs

Type This property is required. Pulumi.AwsNative.Pipes.PipeEcsEnvironmentFileType
The file type to use. The only supported value is s3 .
Value This property is required. string
The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
Type This property is required. PipeEcsEnvironmentFileType
The file type to use. The only supported value is s3 .
Value This property is required. string
The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
type This property is required. PipeEcsEnvironmentFileType
The file type to use. The only supported value is s3 .
value This property is required. String
The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
type This property is required. PipeEcsEnvironmentFileType
The file type to use. The only supported value is s3 .
value This property is required. string
The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
type This property is required. PipeEcsEnvironmentFileType
The file type to use. The only supported value is s3 .
value This property is required. str
The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
type This property is required. "s3"
The file type to use. The only supported value is s3 .
value This property is required. String
The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.

PipeEcsEnvironmentFileType
, PipeEcsEnvironmentFileTypeArgs

S3
s3
PipeEcsEnvironmentFileTypeS3
s3
S3
s3
S3
s3
S3
s3
"s3"
s3

PipeEcsEnvironmentVariable
, PipeEcsEnvironmentVariableArgs

Name string
The name of the key-value pair. For environment variables, this is the name of the environment variable.
Value string
The value of the key-value pair. For environment variables, this is the value of the environment variable.
Name string
The name of the key-value pair. For environment variables, this is the name of the environment variable.
Value string
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name String
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value String
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name string
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value string
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name str
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value str
The value of the key-value pair. For environment variables, this is the value of the environment variable.
name String
The name of the key-value pair. For environment variables, this is the name of the environment variable.
value String
The value of the key-value pair. For environment variables, this is the value of the environment variable.

PipeEcsEphemeralStorage
, PipeEcsEphemeralStorageArgs

SizeInGiB This property is required. int
The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
SizeInGiB This property is required. int
The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
sizeInGiB This property is required. Integer
The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
sizeInGiB This property is required. number
The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
size_in_gi_b This property is required. int
The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
sizeInGiB This property is required. Number
The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.

PipeEcsInferenceAcceleratorOverride
, PipeEcsInferenceAcceleratorOverrideArgs

DeviceName string
The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
DeviceType string
The Elastic Inference accelerator type to use.
DeviceName string
The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
DeviceType string
The Elastic Inference accelerator type to use.
deviceName String
The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
deviceType String
The Elastic Inference accelerator type to use.
deviceName string
The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
deviceType string
The Elastic Inference accelerator type to use.
device_name str
The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
device_type str
The Elastic Inference accelerator type to use.
deviceName String
The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
deviceType String
The Elastic Inference accelerator type to use.

PipeEcsResourceRequirement
, PipeEcsResourceRequirementArgs

Type This property is required. Pulumi.AwsNative.Pipes.PipeEcsResourceRequirementType
The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator .
Value This property is required. string

The value for the specified resource type.

If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.

If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.

Type This property is required. PipeEcsResourceRequirementType
The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator .
Value This property is required. string

The value for the specified resource type.

If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.

If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.

type This property is required. PipeEcsResourceRequirementType
The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator .
value This property is required. String

The value for the specified resource type.

If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.

If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.

type This property is required. PipeEcsResourceRequirementType
The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator .
value This property is required. string

The value for the specified resource type.

If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.

If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.

type This property is required. PipeEcsResourceRequirementType
The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator .
value This property is required. str

The value for the specified resource type.

If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.

If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.

type This property is required. "GPU" | "InferenceAccelerator"
The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator .
value This property is required. String

The value for the specified resource type.

If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.

If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
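
For instance, reserving a single GPU for the container could be expressed as follows (TypeScript sketch):

// Sketch: reserve one physical GPU for the container.
const ecsResourceRequirement = {
    type: "GPU",
    value: "1", // number of GPUs, expressed as a string
};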

PipeEcsResourceRequirementType
, PipeEcsResourceRequirementTypeArgs

Gpu
GPU
InferenceAccelerator
InferenceAccelerator
PipeEcsResourceRequirementTypeGpu
GPU
PipeEcsResourceRequirementTypeInferenceAccelerator
InferenceAccelerator
Gpu
GPU
InferenceAccelerator
InferenceAccelerator
Gpu
GPU
InferenceAccelerator
InferenceAccelerator
GPU
GPU
INFERENCE_ACCELERATOR
InferenceAccelerator
"GPU"
GPU
"InferenceAccelerator"
InferenceAccelerator

PipeEcsTaskOverride
, PipeEcsTaskOverrideArgs

ContainerOverrides List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsContainerOverride>
One or more container overrides that are sent to a task.
Cpu string
The cpu override for the task.
EphemeralStorage Pulumi.AwsNative.Pipes.Inputs.PipeEcsEphemeralStorage

The ephemeral storage setting override for the task.

This parameter is only supported for tasks hosted on Fargate that use the following platform versions:

  • Linux platform version 1.4.0 or later.
  • Windows platform version 1.0.0 or later.
ExecutionRoleArn string
The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide .
InferenceAcceleratorOverrides List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsInferenceAcceleratorOverride>
The Elastic Inference accelerator override for the task.
Memory string
The memory override for the task.
TaskRoleArn string
The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide .
ContainerOverrides []PipeEcsContainerOverride
One or more container overrides that are sent to a task.
Cpu string
The cpu override for the task.
EphemeralStorage PipeEcsEphemeralStorage

The ephemeral storage setting override for the task.

This parameter is only supported for tasks hosted on Fargate that use the following platform versions:

  • Linux platform version 1.4.0 or later.
  • Windows platform version 1.0.0 or later.
ExecutionRoleArn string
The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide .
InferenceAcceleratorOverrides []PipeEcsInferenceAcceleratorOverride
The Elastic Inference accelerator override for the task.
Memory string
The memory override for the task.
TaskRoleArn string
The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide .
containerOverrides List<PipeEcsContainerOverride>
One or more container overrides that are sent to a task.
cpu String
The cpu override for the task.
ephemeralStorage PipeEcsEphemeralStorage

The ephemeral storage setting override for the task.

This parameter is only supported for tasks hosted on Fargate that use the following platform versions:

  • Linux platform version 1.4.0 or later.
  • Windows platform version 1.0.0 or later.
executionRoleArn String
The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide .
inferenceAcceleratorOverrides List<PipeEcsInferenceAcceleratorOverride>
The Elastic Inference accelerator override for the task.
memory String
The memory override for the task.
taskRoleArn String
The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide .
containerOverrides PipeEcsContainerOverride[]
One or more container overrides that are sent to a task.
cpu string
The cpu override for the task.
ephemeralStorage PipeEcsEphemeralStorage

The ephemeral storage setting override for the task.

This parameter is only supported for tasks hosted on Fargate that use the following platform versions:

  • Linux platform version 1.4.0 or later.
  • Windows platform version 1.0.0 or later.
executionRoleArn string
The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide .
inferenceAcceleratorOverrides PipeEcsInferenceAcceleratorOverride[]
The Elastic Inference accelerator override for the task.
memory string
The memory override for the task.
taskRoleArn string
The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide .
container_overrides Sequence[PipeEcsContainerOverride]
One or more container overrides that are sent to a task.
cpu str
The cpu override for the task.
ephemeral_storage PipeEcsEphemeralStorage

The ephemeral storage setting override for the task.

This parameter is only supported for tasks hosted on Fargate that use the following platform versions:

  • Linux platform version 1.4.0 or later.
  • Windows platform version 1.0.0 or later.
execution_role_arn str
The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide .
inference_accelerator_overrides Sequence[PipeEcsInferenceAcceleratorOverride]
The Elastic Inference accelerator override for the task.
memory str
The memory override for the task.
task_role_arn str
The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide .
containerOverrides List<Property Map>
One or more container overrides that are sent to a task.
cpu String
The cpu override for the task.
ephemeralStorage Property Map

The ephemeral storage setting override for the task.

This parameter is only supported for tasks hosted on Fargate that use the following platform versions:

  • Linux platform version 1.4.0 or later.
  • Windows platform version 1.0.0 or later.
executionRoleArn String
The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide .
inferenceAcceleratorOverrides List<Property Map>
The Elastic Inference accelerator override for the task.
memory String
The memory override for the task.
taskRoleArn String
The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide .
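
Putting the pieces above together, a task override might combine task-level CPU and memory, ephemeral storage, and container overrides. A TypeScript sketch with placeholder values:

// Sketch of a PipeEcsTaskOverride-shaped object; names and sizes are placeholders.
const ecsTaskOverride = {
    cpu: "1024",                          // task-level CPU override (a string, unlike the container-level number)
    memory: "2048",                       // task-level memory override in MiB
    ephemeralStorage: { sizeInGiB: 21 },  // minimum supported ephemeral storage size
    containerOverrides: [
        { name: "app", command: ["python", "handler.py"] }, // hypothetical container override
    ],
};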

PipeEnrichmentHttpParameters
, PipeEnrichmentHttpParametersArgs

HeaderParameters Dictionary<string, string>
The headers that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
PathParameterValues List<string>
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
QueryStringParameters Dictionary<string, string>
The query string keys/values that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
HeaderParameters map[string]string
The headers that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
PathParameterValues []string
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
QueryStringParameters map[string]string
The query string keys/values that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
headerParameters Map<String,String>
The headers that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
pathParameterValues List<String>
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
queryStringParameters Map<String,String>
The query string keys/values that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
headerParameters {[key: string]: string}
The headers that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
pathParameterValues string[]
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
queryStringParameters {[key: string]: string}
The query string keys/values that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
header_parameters Mapping[str, str]
The headers that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
path_parameter_values Sequence[str]
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
query_string_parameters Mapping[str, str]
The query string keys/values that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
headerParameters Map<String>
The headers that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
pathParameterValues List<String>
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
queryStringParameters Map<String>
The query string keys/values that need to be sent as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.

PipeEnrichmentParameters
, PipeEnrichmentParametersArgs

HttpParameters Pulumi.AwsNative.Pipes.Inputs.PipeEnrichmentHttpParameters

Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.

If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.

InputTemplate string

Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

HttpParameters PipeEnrichmentHttpParameters

Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.

If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.

InputTemplate string

Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

httpParameters PipeEnrichmentHttpParameters

Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.

If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.

inputTemplate String

Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

httpParameters PipeEnrichmentHttpParameters

Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.

If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.

inputTemplate string

Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

http_parameters PipeEnrichmentHttpParameters

Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.

If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.

input_template str

Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

httpParameters Property Map

Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.

If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request that invokes your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.

inputTemplate String

Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.
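
As an illustration, enrichment parameters can combine HTTP parameters with a static input template. In the TypeScript sketch below, the header, path, query, and template contents are hypothetical:

// Sketch of PipeEnrichmentParameters-shaped input; all values are illustrative.
const enrichmentParameters = {
    httpParameters: {
        headerParameters: { "x-example-header": "demo" },
        pathParameterValues: ["pets"],
        queryStringParameters: { limit: "10" },
    },
    // Static JSON passed to the enrichment instead of the event itself.
    inputTemplate: JSON.stringify({ note: "static payload" }),
};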

PipeEpochTimeUnit
, PipeEpochTimeUnitArgs

Milliseconds
MILLISECONDS
Seconds
SECONDS
Microseconds
MICROSECONDS
Nanoseconds
NANOSECONDS
PipeEpochTimeUnitMilliseconds
MILLISECONDS
PipeEpochTimeUnitSeconds
SECONDS
PipeEpochTimeUnitMicroseconds
MICROSECONDS
PipeEpochTimeUnitNanoseconds
NANOSECONDS
Milliseconds
MILLISECONDS
Seconds
SECONDS
Microseconds
MICROSECONDS
Nanoseconds
NANOSECONDS
Milliseconds
MILLISECONDS
Seconds
SECONDS
Microseconds
MICROSECONDS
Nanoseconds
NANOSECONDS
MILLISECONDS
MILLISECONDS
SECONDS
SECONDS
MICROSECONDS
MICROSECONDS
NANOSECONDS
NANOSECONDS
"MILLISECONDS"
MILLISECONDS
"SECONDS"
SECONDS
"MICROSECONDS"
MICROSECONDS
"NANOSECONDS"
NANOSECONDS

PipeFilter
, PipeFilterArgs

Pattern string
The event pattern.
Pattern string
The event pattern.
pattern String
The event pattern.
pattern string
The event pattern.
pattern str
The event pattern.
pattern String
The event pattern.

PipeFilterCriteria
, PipeFilterCriteriaArgs

Filters []PipeFilter
The event patterns.
filters List<PipeFilter>
The event patterns.
filters PipeFilter[]
The event patterns.
filters Sequence[PipeFilter]
The event patterns.
filters List<Property Map>
The event patterns.
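
Filter criteria wrap one or more event patterns, each supplied as a JSON string. A TypeScript sketch with a hypothetical pattern:

// Sketch of PipeFilterCriteria-shaped input; the pattern content is hypothetical.
const filterCriteria = {
    filters: [
        { pattern: JSON.stringify({ body: { status: ["COMPLETED"] } }) },
    ],
};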

PipeFirehoseLogDestination
, PipeFirehoseLogDestinationArgs

DeliveryStreamArn string
The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
DeliveryStreamArn string
The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
deliveryStreamArn String
The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
deliveryStreamArn string
The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
delivery_stream_arn str
The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
deliveryStreamArn String
The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.

PipeIncludeExecutionDataOption
, PipeIncludeExecutionDataOptionArgs

All
ALL
PipeIncludeExecutionDataOptionAll
ALL
All
ALL
All
ALL
ALL
ALL
"ALL"
ALL

PipeKinesisStreamStartPosition
, PipeKinesisStreamStartPositionArgs

TrimHorizon
TRIM_HORIZON
Latest
LATEST
AtTimestamp
AT_TIMESTAMP
PipeKinesisStreamStartPositionTrimHorizon
TRIM_HORIZON
PipeKinesisStreamStartPositionLatest
LATEST
PipeKinesisStreamStartPositionAtTimestamp
AT_TIMESTAMP
TrimHorizon
TRIM_HORIZON
Latest
LATEST
AtTimestamp
AT_TIMESTAMP
TrimHorizon
TRIM_HORIZON
Latest
LATEST
AtTimestamp
AT_TIMESTAMP
TRIM_HORIZON
TRIM_HORIZON
LATEST
LATEST
AT_TIMESTAMP
AT_TIMESTAMP
"TRIM_HORIZON"
TRIM_HORIZON
"LATEST"
LATEST
"AT_TIMESTAMP"
AT_TIMESTAMP

PipeLaunchType
, PipeLaunchTypeArgs

Ec2
EC2
Fargate
FARGATE
External
EXTERNAL
PipeLaunchTypeEc2
EC2
PipeLaunchTypeFargate
FARGATE
PipeLaunchTypeExternal
EXTERNAL
Ec2
EC2
Fargate
FARGATE
External
EXTERNAL
Ec2
EC2
Fargate
FARGATE
External
EXTERNAL
EC2
EC2
FARGATE
FARGATE
EXTERNAL
EXTERNAL
"EC2"
EC2
"FARGATE"
FARGATE
"EXTERNAL"
EXTERNAL

PipeLogConfiguration
, PipeLogConfigurationArgs

CloudwatchLogsLogDestination Pulumi.AwsNative.Pipes.Inputs.PipeCloudwatchLogsLogDestination
The logging configuration settings for the pipe.
FirehoseLogDestination Pulumi.AwsNative.Pipes.Inputs.PipeFirehoseLogDestination
The Amazon Data Firehose logging configuration settings for the pipe.
IncludeExecutionData List<Pulumi.AwsNative.Pipes.PipeIncludeExecutionDataOption>

Whether the execution data (specifically, the payload , awsRequest , and awsResponse fields) is included in the log messages for this pipe.

This applies to all log destinations for the pipe.

For more information, see Including execution data in logs in the Amazon EventBridge User Guide .

Allowed values: ALL

Level Pulumi.AwsNative.Pipes.PipeLogLevel
The level of logging detail to include. This applies to all log destinations for the pipe.
S3LogDestination Pulumi.AwsNative.Pipes.Inputs.PipeS3LogDestination
The Amazon S3 logging configuration settings for the pipe.
CloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
The logging configuration settings for the pipe.
FirehoseLogDestination PipeFirehoseLogDestination
The Amazon Data Firehose logging configuration settings for the pipe.
IncludeExecutionData []PipeIncludeExecutionDataOption

Whether the execution data (specifically, the payload , awsRequest , and awsResponse fields) is included in the log messages for this pipe.

This applies to all log destinations for the pipe.

For more information, see Including execution data in logs in the Amazon EventBridge User Guide .

Allowed values: ALL

Level PipeLogLevel
The level of logging detail to include. This applies to all log destinations for the pipe.
S3LogDestination PipeS3LogDestination
The Amazon S3 logging configuration settings for the pipe.
cloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
The logging configuration settings for the pipe.
firehoseLogDestination PipeFirehoseLogDestination
The Amazon Data Firehose logging configuration settings for the pipe.
includeExecutionData List<PipeIncludeExecutionDataOption>

Whether the execution data (specifically, the payload , awsRequest , and awsResponse fields) is included in the log messages for this pipe.

This applies to all log destinations for the pipe.

For more information, see Including execution data in logs in the Amazon EventBridge User Guide .

Allowed values: ALL

level PipeLogLevel
The level of logging detail to include. This applies to all log destinations for the pipe.
s3LogDestination PipeS3LogDestination
The Amazon S3 logging configuration settings for the pipe.
cloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
The logging configuration settings for the pipe.
firehoseLogDestination PipeFirehoseLogDestination
The Amazon Data Firehose logging configuration settings for the pipe.
includeExecutionData PipeIncludeExecutionDataOption[]

Whether the execution data (specifically, the payload , awsRequest , and awsResponse fields) is included in the log messages for this pipe.

This applies to all log destinations for the pipe.

For more information, see Including execution data in logs in the Amazon EventBridge User Guide .

Allowed values: ALL

level PipeLogLevel
The level of logging detail to include. This applies to all log destinations for the pipe.
s3LogDestination PipeS3LogDestination
The Amazon S3 logging configuration settings for the pipe.
cloudwatch_logs_log_destination PipeCloudwatchLogsLogDestination
The logging configuration settings for the pipe.
firehose_log_destination PipeFirehoseLogDestination
The Amazon Data Firehose logging configuration settings for the pipe.
include_execution_data Sequence[PipeIncludeExecutionDataOption]

Whether the execution data (specifically, the payload , awsRequest , and awsResponse fields) is included in the log messages for this pipe.

This applies to all log destinations for the pipe.

For more information, see Including execution data in logs in the Amazon EventBridge User Guide .

Allowed values: ALL

level PipeLogLevel
The level of logging detail to include. This applies to all log destinations for the pipe.
s3_log_destination PipeS3LogDestination
The Amazon S3 logging configuration settings for the pipe.
cloudwatchLogsLogDestination Property Map
The logging configuration settings for the pipe.
firehoseLogDestination Property Map
The Amazon Data Firehose logging configuration settings for the pipe.
includeExecutionData List<"ALL">

Whether the execution data (specifically, the payload , awsRequest , and awsResponse fields) is included in the log messages for this pipe.

This applies to all log destinations for the pipe.

For more information, see Including execution data in logs in the Amazon EventBridge User Guide .

Allowed values: ALL

level "OFF" | "ERROR" | "INFO" | "TRACE"
The level of logging detail to include. This applies to all log destinations for the pipe.
s3LogDestination Property Map
The Amazon S3 logging configuration settings for the pipe.
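
As an example, a log configuration that sends INFO-level records, including execution data, to a CloudWatch Logs group could be sketched as follows (TypeScript; the log group ARN is a placeholder):

// Sketch of a PipeLogConfiguration-shaped object; the ARN is a placeholder.
const logConfiguration = {
    cloudwatchLogsLogDestination: {
        logGroupArn: "arn:aws:logs:us-east-1:111122223333:log-group:example-pipe-logs",
    },
    level: "INFO",                  // OFF | ERROR | INFO | TRACE (see PipeLogLevel below)
    includeExecutionData: ["ALL"],  // include payload, awsRequest, and awsResponse fields
};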

PipeLogLevel
, PipeLogLevelArgs

Off
OFF
Error
ERROR
Info
INFO
Trace
TRACE
PipeLogLevelOff
OFF
PipeLogLevelError
ERROR
PipeLogLevelInfo
INFO
PipeLogLevelTrace
TRACE
Off
OFF
Error
ERROR
Info
INFO
Trace
TRACE
Off
OFF
Error
ERROR
Info
INFO
Trace
TRACE
OFF
OFF
ERROR
ERROR
INFO
INFO
TRACE
TRACE
"OFF"
OFF
"ERROR"
ERROR
"INFO"
INFO
"TRACE"
TRACE

PipeMeasureValueType
, PipeMeasureValueTypeArgs

Double
DOUBLE
Bigint
BIGINT
Varchar
VARCHAR
Boolean
BOOLEAN
Timestamp
TIMESTAMP
PipeMeasureValueTypeDouble
DOUBLE
PipeMeasureValueTypeBigint
BIGINT
PipeMeasureValueTypeVarchar
VARCHAR
PipeMeasureValueTypeBoolean
BOOLEAN
PipeMeasureValueTypeTimestamp
TIMESTAMP
Double
DOUBLE
Bigint
BIGINT
Varchar
VARCHAR
Boolean
BOOLEAN
Timestamp
TIMESTAMP
Double
DOUBLE
Bigint
BIGINT
Varchar
VARCHAR
Boolean
BOOLEAN
Timestamp
TIMESTAMP
DOUBLE
DOUBLE
BIGINT
BIGINT
VARCHAR
VARCHAR
BOOLEAN
BOOLEAN
TIMESTAMP
TIMESTAMP
"DOUBLE"
DOUBLE
"BIGINT"
BIGINT
"VARCHAR"
VARCHAR
"BOOLEAN"
BOOLEAN
"TIMESTAMP"
TIMESTAMP

PipeMqBrokerAccessCredentialsProperties
, PipeMqBrokerAccessCredentialsPropertiesArgs

BasicAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
BasicAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
basicAuth This property is required. String
Optional SecretManager ARN which stores the database credentials
basicAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
basic_auth This property is required. str
Optional SecretManager ARN which stores the database credentials
basicAuth This property is required. String
Optional SecretManager ARN which stores the database credentials

PipeMskAccessCredentials0Properties
, PipeMskAccessCredentials0PropertiesArgs

SaslScram512Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
SaslScram512Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
saslScram512Auth This property is required. String
Optional SecretManager ARN which stores the database credentials
saslScram512Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
sasl_scram512_auth This property is required. str
Optional SecretManager ARN which stores the database credentials
saslScram512Auth This property is required. String
Optional SecretManager ARN which stores the database credentials

PipeMskAccessCredentials1Properties
, PipeMskAccessCredentials1PropertiesArgs

ClientCertificateTlsAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
ClientCertificateTlsAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
clientCertificateTlsAuth This property is required. String
Optional SecretManager ARN which stores the database credentials
clientCertificateTlsAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
client_certificate_tls_auth This property is required. str
Optional SecretManager ARN which stores the database credentials
clientCertificateTlsAuth This property is required. String
Optional SecretManager ARN which stores the database credentials

PipeMskStartPosition
, PipeMskStartPositionArgs

TrimHorizon
TRIM_HORIZON
Latest
LATEST
PipeMskStartPositionTrimHorizon
TRIM_HORIZON
PipeMskStartPositionLatest
LATEST
TrimHorizon
TRIM_HORIZON
Latest
LATEST
TrimHorizon
TRIM_HORIZON
Latest
LATEST
TRIM_HORIZON
TRIM_HORIZON
LATEST
LATEST
"TRIM_HORIZON"
TRIM_HORIZON
"LATEST"
LATEST

PipeMultiMeasureAttributeMapping
, PipeMultiMeasureAttributeMappingArgs

MeasureValue This property is required. string
Dynamic path to the measurement attribute in the source event.
MeasureValueType This property is required. Pulumi.AwsNative.Pipes.PipeMeasureValueType
Data type of the measurement attribute in the source event.
MultiMeasureAttributeName This property is required. string
Target measure name to be used.
MeasureValue This property is required. string
Dynamic path to the measurement attribute in the source event.
MeasureValueType This property is required. PipeMeasureValueType
Data type of the measurement attribute in the source event.
MultiMeasureAttributeName This property is required. string
Target measure name to be used.
measureValue This property is required. String
Dynamic path to the measurement attribute in the source event.
measureValueType This property is required. PipeMeasureValueType
Data type of the measurement attribute in the source event.
multiMeasureAttributeName This property is required. String
Target measure name to be used.
measureValue This property is required. string
Dynamic path to the measurement attribute in the source event.
measureValueType This property is required. PipeMeasureValueType
Data type of the measurement attribute in the source event.
multiMeasureAttributeName This property is required. string
Target measure name to be used.
measure_value This property is required. str
Dynamic path to the measurement attribute in the source event.
measure_value_type This property is required. PipeMeasureValueType
Data type of the measurement attribute in the source event.
multi_measure_attribute_name This property is required. str
Target measure name to be used.
measureValue This property is required. String
Dynamic path to the measurement attribute in the source event.
measureValueType This property is required. "DOUBLE" | "BIGINT" | "VARCHAR" | "BOOLEAN" | "TIMESTAMP"
Data type of the measurement attribute in the source event.
multiMeasureAttributeName This property is required. String
Target measure name to be used.

PipeMultiMeasureMapping
, PipeMultiMeasureMappingArgs

MultiMeasureAttributeMappings This property is required. List<Pulumi.AwsNative.Pipes.Inputs.PipeMultiMeasureAttributeMapping>
Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
MultiMeasureName This property is required. string
The name of the multiple measurements per record (multi-measure).
MultiMeasureAttributeMappings This property is required. []PipeMultiMeasureAttributeMapping
Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
MultiMeasureName This property is required. string
The name of the multiple measurements per record (multi-measure).
multiMeasureAttributeMappings This property is required. List<PipeMultiMeasureAttributeMapping>
Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
multiMeasureName This property is required. String
The name of the multiple measurements per record (multi-measure).
multiMeasureAttributeMappings This property is required. PipeMultiMeasureAttributeMapping[]
Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
multiMeasureName This property is required. string
The name of the multiple measurements per record (multi-measure).
multi_measure_attribute_mappings This property is required. Sequence[PipeMultiMeasureAttributeMapping]
Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
multi_measure_name This property is required. str
The name of the multiple measurements per record (multi-measure).
multiMeasureAttributeMappings This property is required. List<Property Map>
Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
multiMeasureName This property is required. String
The name of the multiple measurements per record (multi-measure).
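
For illustration, a TypeScript sketch of one PipeMultiMeasureMapping value built from the two types above; the paths and measure names are placeholders, and the Timestream target parameters that would carry this mapping are documented elsewhere on this page.

// One multi-measure record mapping two source-event fields to measures named "cpu_user" and "cpu_system".
const cpuMapping = {
    multiMeasureName: "cpu",
    multiMeasureAttributeMappings: [
        {
            measureValue: "$.data.cpu_user",       // dynamic path into the source event (placeholder)
            measureValueType: "DOUBLE",            // DOUBLE | BIGINT | VARCHAR | BOOLEAN | TIMESTAMP
            multiMeasureAttributeName: "cpu_user", // target measure name
        },
        {
            measureValue: "$.data.cpu_system",
            measureValueType: "DOUBLE",
            multiMeasureAttributeName: "cpu_system",
        },
    ],
};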

PipeNetworkConfiguration
, PipeNetworkConfigurationArgs

AwsvpcConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeAwsVpcConfiguration
Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
AwsvpcConfiguration PipeAwsVpcConfiguration
Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
awsvpcConfiguration PipeAwsVpcConfiguration
Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
awsvpcConfiguration PipeAwsVpcConfiguration
Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
awsvpc_configuration PipeAwsVpcConfiguration
Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
awsvpcConfiguration Property Map
Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
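
For illustration, a TypeScript sketch of a PipeNetworkConfiguration value for an ECS task target. The subnet and security group IDs are placeholders, and the nested subnets, securityGroups, and assignPublicIp fields of PipeAwsVpcConfiguration are assumptions based on the description above.

const networkConfiguration = {
    awsvpcConfiguration: {
        subnets: ["subnet-0123456789abcdef0"],    // placeholder subnet IDs
        securityGroups: ["sg-0123456789abcdef0"], // placeholder security group IDs
        assignPublicIp: "DISABLED",               // whether the task gets a public IP address
    },
};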

PipeOnPartialBatchItemFailureStreams
, PipeOnPartialBatchItemFailureStreamsArgs

AutomaticBisect
AUTOMATIC_BISECT
PipeOnPartialBatchItemFailureStreamsAutomaticBisect
AUTOMATIC_BISECT
AutomaticBisect
AUTOMATIC_BISECT
AutomaticBisect
AUTOMATIC_BISECT
AUTOMATIC_BISECT
AUTOMATIC_BISECT
"AUTOMATIC_BISECT"
AUTOMATIC_BISECT

PipePlacementConstraint
, PipePlacementConstraintArgs

Expression string
A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance . To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
Type Pulumi.AwsNative.Pipes.PipePlacementConstraintType
The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
Expression string
A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance . To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
Type PipePlacementConstraintType
The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
expression String
A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance . To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
type PipePlacementConstraintType
The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
expression string
A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance . To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
type PipePlacementConstraintType
The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
expression str
A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance . To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
type PipePlacementConstraintType
The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
expression String
A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance . To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
type "distinctInstance" | "memberOf"
The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.

PipePlacementConstraintType
, PipePlacementConstraintTypeArgs

DistinctInstance
distinctInstance
MemberOf
memberOf
PipePlacementConstraintTypeDistinctInstance
distinctInstance
PipePlacementConstraintTypeMemberOf
memberOf
DistinctInstance
distinctInstance
MemberOf
memberOf
DistinctInstance
distinctInstance
MemberOf
memberOf
DISTINCT_INSTANCE
distinctInstance
MEMBER_OF
memberOf
"distinctInstance"
distinctInstance
"memberOf"
memberOf

PipePlacementStrategy
, PipePlacementStrategyArgs

Field string
The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
Type Pulumi.AwsNative.Pipes.PipePlacementStrategyType
The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
Field string
The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
Type PipePlacementStrategyType
The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
field String
The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
type PipePlacementStrategyType
The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
field string
The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
type PipePlacementStrategyType
The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
field str
The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
type PipePlacementStrategyType
The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
field String
The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
type "random" | "spread" | "binpack"
The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).

PipePlacementStrategyType
, PipePlacementStrategyTypeArgs

Random
random
Spread
spread
Binpack
binpack
PipePlacementStrategyTypeRandom
random
PipePlacementStrategyTypeSpread
spread
PipePlacementStrategyTypeBinpack
binpack
Random
random
Spread
spread
Binpack
binpack
Random
random
Spread
spread
Binpack
binpack
RANDOM
random
SPREAD
spread
BINPACK
binpack
"random"
random
"spread"
spread
"binpack"
binpack
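
For illustration, a TypeScript sketch of placement settings built from the constraint and strategy types above; the values are placeholders, and these objects would be supplied through the pipe's ECS task target parameters documented elsewhere on this page.

// Keep tasks on t3 instances, spread them across Availability Zones, then bin-pack on memory.
const placementConstraints = [
    {
        type: "memberOf",                                  // distinctInstance | memberOf
        expression: "attribute:ecs.instance-type =~ t3.*", // cluster query language expression (placeholder)
    },
];
const placementStrategies = [
    { type: "spread", field: "attribute:ecs.availability-zone" },
    { type: "binpack", field: "memory" },
];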

PipePropagateTags
, PipePropagateTagsArgs

TaskDefinition
TASK_DEFINITION
PipePropagateTagsTaskDefinition
TASK_DEFINITION
TaskDefinition
TASK_DEFINITION
TaskDefinition
TASK_DEFINITION
TASK_DEFINITION
TASK_DEFINITION
"TASK_DEFINITION"
TASK_DEFINITION

PipeRequestedPipeState
, PipeRequestedPipeStateArgs

Running
RUNNING
Stopped
STOPPED
PipeRequestedPipeStateRunning
RUNNING
PipeRequestedPipeStateStopped
STOPPED
Running
RUNNING
Stopped
STOPPED
Running
RUNNING
Stopped
STOPPED
RUNNING
RUNNING
STOPPED
STOPPED
"RUNNING"
RUNNING
"STOPPED"
STOPPED

PipeS3LogDestination
, PipeS3LogDestinationArgs

BucketName string
The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
BucketOwner string
The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
OutputFormat Pulumi.AwsNative.Pipes.PipeS3OutputFormat

The format EventBridge uses for the log records.

EventBridge currently only supports json formatting.

Prefix string

The prefix text with which to begin Amazon S3 log object names.

For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide .

BucketName string
The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
BucketOwner string
The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
OutputFormat PipeS3OutputFormat

The format EventBridge uses for the log records.

EventBridge currently only supports json formatting.

Prefix string

The prefix text with which to begin Amazon S3 log object names.

For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide .

bucketName String
The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
bucketOwner String
The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
outputFormat PipeS3OutputFormat

The format EventBridge uses for the log records.

EventBridge currently only supports json formatting.

prefix String

The prefix text with which to begin Amazon S3 log object names.

For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide .

bucketName string
The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
bucketOwner string
The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
outputFormat PipeS3OutputFormat

The format EventBridge uses for the log records.

EventBridge currently only supports json formatting.

prefix string

The prefix text with which to begin Amazon S3 log object names.

For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide .

bucket_name str
The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
bucket_owner str
The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
output_format PipeS3OutputFormat

The format EventBridge uses for the log records.

EventBridge currently only supports json formatting.

prefix str

The prefix text with which to begin Amazon S3 log object names.

For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide .

bucketName String
The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
bucketOwner String
The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
outputFormat "json" | "plain" | "w3c"

The format EventBridge uses for the log records.

EventBridge currently only supports json formatting.

prefix String

The prefix text with which to begin Amazon S3 log object names.

For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide .
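
For illustration, a minimal TypeScript sketch that sends pipe logs to Amazon S3 using the fields above; the ARNs, bucket name, owner account, and prefix are placeholders, and logConfiguration is the pipe-level input these fields belong to.

import * as aws_native from "@pulumi/aws-native";

const s3LoggedPipe = new aws_native.pipes.Pipe("s3LoggedPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
    logConfiguration: {
        level: "ERROR",
        s3LogDestination: {
            bucketName: "my-pipe-logs",  // placeholder bucket name
            bucketOwner: "123456789123", // account that owns the bucket
            outputFormat: "json",        // json | plain | w3c (only json is currently supported)
            prefix: "pipes/demo/",       // prefix for log object names
        },
    },
});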

PipeS3OutputFormat
, PipeS3OutputFormatArgs

Json
json
Plain
plain
W3c
w3c
PipeS3OutputFormatJson
json
PipeS3OutputFormatPlain
plain
PipeS3OutputFormatW3c
w3c
Json
json
Plain
plain
W3c
w3c
Json
json
Plain
plain
W3c
w3c
JSON
json
PLAIN
plain
W3C
w3c
"json"
json
"plain"
plain
"w3c"
w3c

PipeSageMakerPipelineParameter
, PipeSageMakerPipelineParameterArgs

Name This property is required. string
Name of parameter to start execution of a SageMaker AI Model Building Pipeline.
Value This property is required. string
Value of parameter to start execution of a SageMaker AI Model Building Pipeline.
Name This property is required. string
Name of parameter to start execution of a SageMaker AI Model Building Pipeline.
Value This property is required. string
Value of parameter to start execution of a SageMaker AI Model Building Pipeline.
name This property is required. String
Name of parameter to start execution of a SageMaker AI Model Building Pipeline.
value This property is required. String
Value of parameter to start execution of a SageMaker AI Model Building Pipeline.
name This property is required. string
Name of parameter to start execution of a SageMaker AI Model Building Pipeline.
value This property is required. string
Value of parameter to start execution of a SageMaker AI Model Building Pipeline.
name This property is required. str
Name of parameter to start execution of a SageMaker AI Model Building Pipeline.
value This property is required. str
Value of parameter to start execution of a SageMaker AI Model Building Pipeline.
name This property is required. String
Name of parameter to start execution of a SageMaker AI Model Building Pipeline.
value This property is required. String
Value of parameter to start execution of a SageMaker AI Model Building Pipeline.
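
For illustration, a TypeScript sketch of the name/value list that PipeSageMakerPipelineParameter describes; the parameter names and values are placeholders, and the SageMaker Pipeline target parameters that would carry this list are documented elsewhere on this page.

const pipelineParameters = [
    { name: "InstanceType", value: "ml.m5.xlarge" }, // placeholder pipeline parameter
    { name: "MaxDepth", value: "6" },
];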

PipeSelfManagedKafkaAccessConfigurationCredentials0Properties
, PipeSelfManagedKafkaAccessConfigurationCredentials0PropertiesArgs

BasicAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
BasicAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
basicAuth This property is required. String
Optional SecretManager ARN which stores the database credentials
basicAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
basic_auth This property is required. str
Optional SecretManager ARN which stores the database credentials
basicAuth This property is required. String
Optional SecretManager ARN which stores the database credentials

PipeSelfManagedKafkaAccessConfigurationCredentials1Properties
, PipeSelfManagedKafkaAccessConfigurationCredentials1PropertiesArgs

SaslScram512Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
SaslScram512Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
saslScram512Auth This property is required. String
Optional SecretManager ARN which stores the database credentials
saslScram512Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
sasl_scram512_auth This property is required. str
Optional SecretManager ARN which stores the database credentials
saslScram512Auth This property is required. String
Optional SecretManager ARN which stores the database credentials

PipeSelfManagedKafkaAccessConfigurationCredentials2Properties
, PipeSelfManagedKafkaAccessConfigurationCredentials2PropertiesArgs

SaslScram256Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
SaslScram256Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
saslScram256Auth This property is required. String
Optional SecretManager ARN which stores the database credentials
saslScram256Auth This property is required. string
Optional SecretManager ARN which stores the database credentials
sasl_scram256_auth This property is required. str
Optional SecretManager ARN which stores the database credentials
saslScram256Auth This property is required. String
Optional SecretManager ARN which stores the database credentials

PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
, PipeSelfManagedKafkaAccessConfigurationCredentials3PropertiesArgs

ClientCertificateTlsAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
ClientCertificateTlsAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
clientCertificateTlsAuth This property is required. String
Optional SecretManager ARN which stores the database credentials
clientCertificateTlsAuth This property is required. string
Optional SecretManager ARN which stores the database credentials
client_certificate_tls_auth This property is required. str
Optional SecretManager ARN which stores the database credentials
clientCertificateTlsAuth This property is required. String
Optional SecretManager ARN which stores the database credentials

PipeSelfManagedKafkaAccessConfigurationVpc
, PipeSelfManagedKafkaAccessConfigurationVpcArgs

SecurityGroup List<string>
List of SecurityGroupId.
Subnets List<string>
List of SubnetId.
SecurityGroup []string
List of SecurityGroupId.
Subnets []string
List of SubnetId.
securityGroup List<String>
List of SecurityGroupId.
subnets List<String>
List of SubnetId.
securityGroup string[]
List of SecurityGroupId.
subnets string[]
List of SubnetId.
security_group Sequence[str]
List of SecurityGroupId.
subnets Sequence[str]
List of SubnetId.
securityGroup List<String>
List of SecurityGroupId.
subnets List<String>
List of SubnetId.
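
For illustration, a TypeScript sketch of a PipeSelfManagedKafkaAccessConfigurationVpc value; the IDs are placeholders. Note that the security group field is named securityGroup (singular) even though it takes a list, as shown above.

const kafkaVpc = {
    securityGroup: ["sg-0123456789abcdef0"],                           // list of security group IDs
    subnets: ["subnet-0123456789abcdef0", "subnet-0123456789abcdef1"], // list of subnet IDs
};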

PipeSelfManagedKafkaStartPosition
, PipeSelfManagedKafkaStartPositionArgs

TrimHorizon
TRIM_HORIZON
Latest
LATEST
PipeSelfManagedKafkaStartPositionTrimHorizon
TRIM_HORIZON
PipeSelfManagedKafkaStartPositionLatest
LATEST
TrimHorizon
TRIM_HORIZON
Latest
LATEST
TrimHorizon
TRIM_HORIZON
Latest
LATEST
TRIM_HORIZON
TRIM_HORIZON
LATEST
LATEST
"TRIM_HORIZON"
TRIM_HORIZON
"LATEST"
LATEST

PipeSingleMeasureMapping
, PipeSingleMeasureMappingArgs

MeasureName This property is required. string
Target measure name for the measurement attribute in the Timestream table.
MeasureValue This property is required. string
Dynamic path of the source field to map to the measure in the record.
MeasureValueType This property is required. Pulumi.AwsNative.Pipes.PipeMeasureValueType
Data type of the source field.
MeasureName This property is required. string
Target measure name for the measurement attribute in the Timestream table.
MeasureValue This property is required. string
Dynamic path of the source field to map to the measure in the record.
MeasureValueType This property is required. PipeMeasureValueType
Data type of the source field.
measureName This property is required. String
Target measure name for the measurement attribute in the Timestream table.
measureValue This property is required. String
Dynamic path of the source field to map to the measure in the record.
measureValueType This property is required. PipeMeasureValueType
Data type of the source field.
measureName This property is required. string
Target measure name for the measurement attribute in the Timestream table.
measureValue This property is required. string
Dynamic path of the source field to map to the measure in the record.
measureValueType This property is required. PipeMeasureValueType
Data type of the source field.
measure_name This property is required. str
Target measure name for the measurement attribute in the Timestream table.
measure_value This property is required. str
Dynamic path of the source field to map to the measure in the record.
measure_value_type This property is required. PipeMeasureValueType
Data type of the source field.
measureName This property is required. String
Target measure name for the measurement attribute in the Timestream table.
measureValue This property is required. String
Dynamic path of the source field to map to the measure in the record.
measureValueType This property is required. "DOUBLE" | "BIGINT" | "VARCHAR" | "BOOLEAN" | "TIMESTAMP"
Data type of the source field.

PipeSourceActiveMqBrokerParameters
, PipeSourceActiveMqBrokerParametersArgs

Credentials This property is required. Pulumi.AwsNative.Pipes.Inputs.PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
QueueName
This property is required.
Changes to this property will trigger replacement.
string
The name of the destination queue to consume.
BatchSize int
The maximum number of records to include in each batch.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
Credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
QueueName
This property is required.
Changes to this property will trigger replacement.
string
The name of the destination queue to consume.
BatchSize int
The maximum number of records to include in each batch.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
queueName
This property is required.
Changes to this property will trigger replacement.
String
The name of the destination queue to consume.
batchSize Integer
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds Integer
The maximum length of time to wait for events.
credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
queueName
This property is required.
Changes to this property will trigger replacement.
string
The name of the destination queue to consume.
batchSize number
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds number
The maximum length of time to wait for events.
credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
queue_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the destination queue to consume.
batch_size int
The maximum number of records to include in each batch.
maximum_batching_window_in_seconds int
The maximum length of time to wait for events.
credentials This property is required. Property Map
The credentials needed to access the resource.
queueName
This property is required.
Changes to this property will trigger replacement.
String
The name of the destination queue to consume.
batchSize Number
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds Number
The maximum length of time to wait for events.
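
For illustration, a minimal TypeScript sketch of an Active MQ broker source using the fields above; the ARNs, queue name, and secret are placeholders, and sourceParameters is the pipe-level input that carries activeMqBrokerParameters.

import * as aws_native from "@pulumi/aws-native";

const mqPipe = new aws_native.pipes.Pipe("mqPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:mq:us-east-1:123456789123:broker:demo-broker:b-0000", // placeholder broker ARN
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
    sourceParameters: {
        activeMqBrokerParameters: {
            queueName: "orders",                // queue to consume; changing it replaces the pipe
            batchSize: 10,                      // maximum records per batch
            maximumBatchingWindowInSeconds: 30, // maximum time to wait for events
            credentials: {
                basicAuth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:mq-creds", // placeholder Secrets Manager ARN
            },
        },
    },
});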

PipeSourceDynamoDbStreamParameters
, PipeSourceDynamoDbStreamParametersArgs

StartingPosition
This property is required.
Changes to this property will trigger replacement.
Pulumi.AwsNative.Pipes.PipeDynamoDbStreamStartPosition

(Streams only) The position in a stream from which to start reading.

Valid values: TRIM_HORIZON | LATEST

BatchSize int
The maximum number of records to include in each batch.
DeadLetterConfig Pulumi.AwsNative.Pipes.Inputs.PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
MaximumRecordAgeInSeconds int
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
MaximumRetryAttempts int
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
OnPartialBatchItemFailure Pulumi.AwsNative.Pipes.PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
ParallelizationFactor int
The number of batches to process concurrently from each shard. The default value is 1.
StartingPosition
This property is required.
Changes to this property will trigger replacement.
PipeDynamoDbStreamStartPosition

(Streams only) The position in a stream from which to start reading.

Valid values: TRIM_HORIZON | LATEST

BatchSize int
The maximum number of records to include in each batch.
DeadLetterConfig PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
MaximumRecordAgeInSeconds int
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
MaximumRetryAttempts int
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
OnPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
ParallelizationFactor int
The number of batches to process concurrently from each shard. The default value is 1.
startingPosition
This property is required.
Changes to this property will trigger replacement.
PipeDynamoDbStreamStartPosition

(Streams only) The position in a stream from which to start reading.

Valid values: TRIM_HORIZON | LATEST

batchSize Integer
The maximum number of records to include in each batch.
deadLetterConfig PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
maximumBatchingWindowInSeconds Integer
The maximum length of time to wait for events.
maximumRecordAgeInSeconds Integer
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximumRetryAttempts Integer
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelizationFactor Integer
The number of batches to process concurrently from each shard. The default value is 1.
startingPosition
This property is required.
Changes to this property will trigger replacement.
PipeDynamoDbStreamStartPosition

(Streams only) The position in a stream from which to start reading.

Valid values: TRIM_HORIZON | LATEST

batchSize number
The maximum number of records to include in each batch.
deadLetterConfig PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
maximumBatchingWindowInSeconds number
The maximum length of time to wait for events.
maximumRecordAgeInSeconds number
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximumRetryAttempts number
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelizationFactor number
The number of batches to process concurrently from each shard. The default value is 1.
starting_position
This property is required.
Changes to this property will trigger replacement.
PipeDynamoDbStreamStartPosition

(Streams only) The position in a stream from which to start reading.

Valid values: TRIM_HORIZON | LATEST

batch_size int
The maximum number of records to include in each batch.
dead_letter_config PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
maximum_batching_window_in_seconds int
The maximum length of time to wait for events.
maximum_record_age_in_seconds int
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximum_retry_attempts int
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
on_partial_batch_item_failure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelization_factor int
The number of batches to process concurrently from each shard. The default value is 1.
startingPosition
This property is required.
Changes to this property will trigger replacement.
"TRIM_HORIZON" | "LATEST"

(Streams only) The position in a stream from which to start reading.

Valid values: TRIM_HORIZON | LATEST

batchSize Number
The maximum number of records to include in each batch.
deadLetterConfig Property Map
Define the target queue to send dead-letter queue events to.
maximumBatchingWindowInSeconds Number
The maximum length of time to wait for events.
maximumRecordAgeInSeconds Number
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximumRetryAttempts Number
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
onPartialBatchItemFailure "AUTOMATIC_BISECT"
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelizationFactor Number
The number of batches to process concurrently from each shard. The default value is 1.
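
For illustration, a minimal TypeScript sketch of a DynamoDB stream source with retry and dead-letter settings from the fields above; the ARNs are placeholders, and the arn field of the dead-letter config is an assumption not listed in this part of the page.

import * as aws_native from "@pulumi/aws-native";

const ddbPipe = new aws_native.pipes.Pipe("ddbPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:dynamodb:us-east-1:123456789123:table/pipeDemoTable/stream/2025-01-01T00:00:00.000", // placeholder stream ARN
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
    sourceParameters: {
        dynamoDbStreamParameters: {
            startingPosition: "TRIM_HORIZON",              // TRIM_HORIZON | LATEST; changing it replaces the pipe
            batchSize: 100,
            maximumRetryAttempts: 3,
            maximumRecordAgeInSeconds: 3600,
            onPartialBatchItemFailure: "AUTOMATIC_BISECT", // bisect and retry on partial batch failures
            parallelizationFactor: 1,
            deadLetterConfig: {
                arn: "arn:aws:sqs:us-east-1:123456789123:pipeDemoDlq", // assumed field; placeholder DLQ ARN
            },
        },
    },
});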

PipeSourceKinesisStreamParameters
, PipeSourceKinesisStreamParametersArgs

StartingPosition
This property is required.
Changes to this property will trigger replacement.
Pulumi.AwsNative.Pipes.PipeKinesisStreamStartPosition
The position in a stream from which to start reading.
BatchSize int
The maximum number of records to include in each batch.
DeadLetterConfig Pulumi.AwsNative.Pipes.Inputs.PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
MaximumRecordAgeInSeconds int
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
MaximumRetryAttempts int
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
OnPartialBatchItemFailure Pulumi.AwsNative.Pipes.PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
ParallelizationFactor int
The number of batches to process concurrently from each shard. The default value is 1.
StartingPositionTimestamp Changes to this property will trigger replacement. string
With StartingPosition set to AT_TIMESTAMP , the time from which to start reading, in Unix time seconds.
StartingPosition
This property is required.
Changes to this property will trigger replacement.
PipeKinesisStreamStartPosition
The position in a stream from which to start reading.
BatchSize int
The maximum number of records to include in each batch.
DeadLetterConfig PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
MaximumRecordAgeInSeconds int
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
MaximumRetryAttempts int
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
OnPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
ParallelizationFactor int
The number of batches to process concurrently from each shard. The default value is 1.
StartingPositionTimestamp Changes to this property will trigger replacement. string
With StartingPosition set to AT_TIMESTAMP , the time from which to start reading, in Unix time seconds.
startingPosition
This property is required.
Changes to this property will trigger replacement.
PipeKinesisStreamStartPosition
The position in a stream from which to start reading.
batchSize Integer
The maximum number of records to include in each batch.
deadLetterConfig PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
maximumBatchingWindowInSeconds Integer
The maximum length of time to wait for events.
maximumRecordAgeInSeconds Integer
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximumRetryAttempts Integer
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelizationFactor Integer
The number of batches to process concurrently from each shard. The default value is 1.
startingPositionTimestamp Changes to this property will trigger replacement. String
With StartingPosition set to AT_TIMESTAMP , the time from which to start reading, in Unix time seconds.
startingPosition
This property is required.
Changes to this property will trigger replacement.
PipeKinesisStreamStartPosition
The position in a stream from which to start reading.
batchSize number
The maximum number of records to include in each batch.
deadLetterConfig PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
maximumBatchingWindowInSeconds number
The maximum length of time to wait for events.
maximumRecordAgeInSeconds number
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximumRetryAttempts number
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelizationFactor number
The number of batches to process concurrently from each shard. The default value is 1.
startingPositionTimestamp Changes to this property will trigger replacement. string
With StartingPosition set to AT_TIMESTAMP , the time from which to start reading, in Unix time seconds.
starting_position
This property is required.
Changes to this property will trigger replacement.
PipeKinesisStreamStartPosition
The position in a stream from which to start reading.
batch_size int
The maximum number of records to include in each batch.
dead_letter_config PipeDeadLetterConfig
Define the target queue to send dead-letter queue events to.
maximum_batching_window_in_seconds int
The maximum length of time to wait for events.
maximum_record_age_in_seconds int
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximum_retry_attempts int
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
on_partial_batch_item_failure PipeOnPartialBatchItemFailureStreams
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelization_factor int
The number of batches to process concurrently from each shard. The default value is 1.
starting_position_timestamp Changes to this property will trigger replacement. str
With StartingPosition set to AT_TIMESTAMP , the time from which to start reading, in Unix time seconds.
startingPosition
This property is required.
Changes to this property will trigger replacement.
"TRIM_HORIZON" | "LATEST" | "AT_TIMESTAMP"
The position in a stream from which to start reading.
batchSize Number
The maximum number of records to include in each batch.
deadLetterConfig Property Map
Define the target queue to send dead-letter queue events to.
maximumBatchingWindowInSeconds Number
The maximum length of time to wait for events.
maximumRecordAgeInSeconds Number
Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
maximumRetryAttempts Number
Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
onPartialBatchItemFailure "AUTOMATIC_BISECT"
Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
parallelizationFactor Number
The number of batches to process concurrently from each shard. The default value is 1.
startingPositionTimestamp Changes to this property will trigger replacement. String
With StartingPosition set to AT_TIMESTAMP , the time from which to start reading, in Unix time seconds.
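
For illustration, a minimal TypeScript sketch of a Kinesis stream source that starts reading at a fixed point in time; the ARNs are placeholders, and the timestamp value is an assumption written as Unix time seconds per the description above.

import * as aws_native from "@pulumi/aws-native";

const kinesisPipe = new aws_native.pipes.Pipe("kinesisPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:kinesis:us-east-1:123456789123:stream/pipeDemoStream",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
    sourceParameters: {
        kinesisStreamParameters: {
            startingPosition: "AT_TIMESTAMP",        // TRIM_HORIZON | LATEST | AT_TIMESTAMP
            startingPositionTimestamp: "1735689600", // Unix time seconds (placeholder); used with AT_TIMESTAMP
            batchSize: 50,
            maximumBatchingWindowInSeconds: 10,
            onPartialBatchItemFailure: "AUTOMATIC_BISECT",
        },
    },
});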

PipeSourceManagedStreamingKafkaParameters
, PipeSourceManagedStreamingKafkaParametersArgs

TopicName
This property is required.
Changes to this property will trigger replacement.
string
The name of the topic that the pipe will read from.
BatchSize int
The maximum number of records to include in each batch.
ConsumerGroupId Changes to this property will trigger replacement. string
The ID of the consumer group that the pipe uses when reading from the topic.
Credentials Pulumi.AwsNative.Pipes.Inputs.PipeMskAccessCredentials0Properties | Pulumi.AwsNative.Pipes.Inputs.PipeMskAccessCredentials1Properties
The credentials needed to access the resource.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
StartingPosition Changes to this property will trigger replacement. Pulumi.AwsNative.Pipes.PipeMskStartPosition
The position in a stream from which to start reading.
TopicName
This property is required.
Changes to this property will trigger replacement.
string
The name of the topic that the pipe will read from.
BatchSize int
The maximum number of records to include in each batch.
ConsumerGroupId Changes to this property will trigger replacement. string
The ID of the consumer group that the pipe uses when reading from the topic.
Credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
The credentials needed to access the resource.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
StartingPosition Changes to this property will trigger replacement. PipeMskStartPosition
The position in a stream from which to start reading.
topicName
This property is required.
Changes to this property will trigger replacement.
String
The name of the topic that the pipe will read from.
batchSize Integer
The maximum number of records to include in each batch.
consumerGroupId Changes to this property will trigger replacement. String
The ID of the consumer group that the pipe uses when reading from the topic.
credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
The credentials needed to access the resource.
maximumBatchingWindowInSeconds Integer
The maximum length of time to wait for events.
startingPosition Changes to this property will trigger replacement. PipeMskStartPosition
The position in a stream from which to start reading.
topicName
This property is required.
Changes to this property will trigger replacement.
string
The name of the topic that the pipe will read from.
batchSize number
The maximum number of records to include in each batch.
consumerGroupId Changes to this property will trigger replacement. string
The ID of the consumer group that the pipe uses when reading from the topic.
credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
The credentials needed to access the resource.
maximumBatchingWindowInSeconds number
The maximum length of time to wait for events.
startingPosition Changes to this property will trigger replacement. PipeMskStartPosition
The position in a stream from which to start reading.
topic_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the topic that the pipe will read from.
batch_size int
The maximum number of records to include in each batch.
consumer_group_id Changes to this property will trigger replacement. str
The ID of the consumer group that the pipe uses when reading from the topic.
credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
The credentials needed to access the resource.
maximum_batching_window_in_seconds int
The maximum length of time to wait for events.
starting_position Changes to this property will trigger replacement. PipeMskStartPosition
The position in a stream from which to start reading.
topicName
This property is required.
Changes to this property will trigger replacement.
String
The name of the topic that the pipe will read from.
batchSize Number
The maximum number of records to include in each batch.
consumerGroupId Changes to this property will trigger replacement. String
The ID of the consumer group that the pipe uses when reading from the topic.
credentials Property Map | Property Map
The credentials needed to access the resource.
maximumBatchingWindowInSeconds Number
The maximum length of time to wait for events.
startingPosition Changes to this property will trigger replacement. "TRIM_HORIZON" | "LATEST"
The position in a stream from which to start reading.
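
For illustration, a minimal TypeScript sketch of an Amazon MSK topic source with SASL/SCRAM-512 credentials using the fields above; the ARNs, topic, consumer group, and secret are placeholders.

import * as aws_native from "@pulumi/aws-native";

const mskPipe = new aws_native.pipes.Pipe("mskPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:kafka:us-east-1:123456789123:cluster/pipeDemoCluster/00000000-0000-0000-0000-000000000000-1", // placeholder cluster ARN
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
    sourceParameters: {
        managedStreamingKafkaParameters: {
            topicName: "orders",                  // topic to read from; changing it replaces the pipe
            consumerGroupId: "pipe-orders-group", // consumer group ID; changing it replaces the pipe
            startingPosition: "LATEST",           // TRIM_HORIZON | LATEST
            batchSize: 100,
            maximumBatchingWindowInSeconds: 5,
            credentials: {
                saslScram512Auth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:msk-creds", // placeholder Secrets Manager ARN
            },
        },
    },
});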

PipeSourceParameters
, PipeSourceParametersArgs

ActiveMqBrokerParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceActiveMqBrokerParameters
The parameters for using an Active MQ broker as a source.
DynamoDbStreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceDynamoDbStreamParameters
The parameters for using a DynamoDB stream as a source.
FilterCriteria Pulumi.AwsNative.Pipes.Inputs.PipeFilterCriteria

The collection of event patterns used to filter events.

To remove a filter, specify a FilterCriteria object with an empty array of Filter objects.

For more information, see Events and Event Patterns in the Amazon EventBridge User Guide .

KinesisStreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceKinesisStreamParameters
The parameters for using a Kinesis stream as a source.
ManagedStreamingKafkaParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceManagedStreamingKafkaParameters
The parameters for using an MSK stream as a source.
RabbitMqBrokerParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceRabbitMqBrokerParameters
The parameters for using a Rabbit MQ broker as a source.
SelfManagedKafkaParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceSelfManagedKafkaParameters

The parameters for using a self-managed Apache Kafka stream as a source.

A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself and those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.

SqsQueueParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceSqsQueueParameters
The parameters for using an Amazon SQS queue as a source.
ActiveMqBrokerParameters PipeSourceActiveMqBrokerParameters
The parameters for using an Active MQ broker as a source.
DynamoDbStreamParameters PipeSourceDynamoDbStreamParameters
The parameters for using a DynamoDB stream as a source.
FilterCriteria PipeFilterCriteria

The collection of event patterns used to filter events.

To remove a filter, specify a FilterCriteria object with an empty array of Filter objects.

For more information, see Events and Event Patterns in the Amazon EventBridge User Guide .

KinesisStreamParameters PipeSourceKinesisStreamParameters
The parameters for using a Kinesis stream as a source.
ManagedStreamingKafkaParameters PipeSourceManagedStreamingKafkaParameters
The parameters for using an MSK stream as a source.
RabbitMqBrokerParameters PipeSourceRabbitMqBrokerParameters
The parameters for using a Rabbit MQ broker as a source.
SelfManagedKafkaParameters PipeSourceSelfManagedKafkaParameters

The parameters for using a self-managed Apache Kafka stream as a source.

A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes clusters you manage yourself as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.

SqsQueueParameters PipeSourceSqsQueueParameters
The parameters for using an Amazon SQS queue as a source.
activeMqBrokerParameters PipeSourceActiveMqBrokerParameters
The parameters for using an Active MQ broker as a source.
dynamoDbStreamParameters PipeSourceDynamoDbStreamParameters
The parameters for using a DynamoDB stream as a source.
filterCriteria PipeFilterCriteria

The collection of event patterns used to filter events.

To remove a filter, specify a FilterCriteria object with an empty array of Filter objects.

For more information, see Events and Event Patterns in the Amazon EventBridge User Guide .

kinesisStreamParameters PipeSourceKinesisStreamParameters
The parameters for using a Kinesis stream as a source.
managedStreamingKafkaParameters PipeSourceManagedStreamingKafkaParameters
The parameters for using an MSK stream as a source.
rabbitMqBrokerParameters PipeSourceRabbitMqBrokerParameters
The parameters for using a Rabbit MQ broker as a source.
selfManagedKafkaParameters PipeSourceSelfManagedKafkaParameters

The parameters for using a self-managed Apache Kafka stream as a source.

A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes clusters you manage yourself as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.

sqsQueueParameters PipeSourceSqsQueueParameters
The parameters for using an Amazon SQS queue as a source.
activeMqBrokerParameters PipeSourceActiveMqBrokerParameters
The parameters for using an Active MQ broker as a source.
dynamoDbStreamParameters PipeSourceDynamoDbStreamParameters
The parameters for using a DynamoDB stream as a source.
filterCriteria PipeFilterCriteria

The collection of event patterns used to filter events.

To remove a filter, specify a FilterCriteria object with an empty array of Filter objects.

For more information, see Events and Event Patterns in the Amazon EventBridge User Guide .

kinesisStreamParameters PipeSourceKinesisStreamParameters
The parameters for using a Kinesis stream as a source.
managedStreamingKafkaParameters PipeSourceManagedStreamingKafkaParameters
The parameters for using an MSK stream as a source.
rabbitMqBrokerParameters PipeSourceRabbitMqBrokerParameters
The parameters for using a Rabbit MQ broker as a source.
selfManagedKafkaParameters PipeSourceSelfManagedKafkaParameters

The parameters for using a self-managed Apache Kafka stream as a source.

A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes clusters you manage yourself as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.

sqsQueueParameters PipeSourceSqsQueueParameters
The parameters for using an Amazon SQS queue as a source.
active_mq_broker_parameters PipeSourceActiveMqBrokerParameters
The parameters for using an Active MQ broker as a source.
dynamo_db_stream_parameters PipeSourceDynamoDbStreamParameters
The parameters for using a DynamoDB stream as a source.
filter_criteria PipeFilterCriteria

The collection of event patterns used to filter events.

To remove a filter, specify a FilterCriteria object with an empty array of Filter objects.

For more information, see Events and Event Patterns in the Amazon EventBridge User Guide .

kinesis_stream_parameters PipeSourceKinesisStreamParameters
The parameters for using a Kinesis stream as a source.
managed_streaming_kafka_parameters PipeSourceManagedStreamingKafkaParameters
The parameters for using an MSK stream as a source.
rabbit_mq_broker_parameters PipeSourceRabbitMqBrokerParameters
The parameters for using a Rabbit MQ broker as a source.
self_managed_kafka_parameters PipeSourceSelfManagedKafkaParameters

The parameters for using a self-managed Apache Kafka stream as a source.

A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes clusters you manage yourself as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.

sqs_queue_parameters PipeSourceSqsQueueParameters
The parameters for using an Amazon SQS queue as a source.
activeMqBrokerParameters Property Map
The parameters for using an Active MQ broker as a source.
dynamoDbStreamParameters Property Map
The parameters for using a DynamoDB stream as a source.
filterCriteria Property Map

The collection of event patterns used to filter events.

To remove a filter, specify a FilterCriteria object with an empty array of Filter objects.

For more information, see Events and Event Patterns in the Amazon EventBridge User Guide .

kinesisStreamParameters Property Map
The parameters for using a Kinesis stream as a source.
managedStreamingKafkaParameters Property Map
The parameters for using an MSK stream as a source.
rabbitMqBrokerParameters Property Map
The parameters for using a Rabbit MQ broker as a source.
selfManagedKafkaParameters Property Map

The parameters for using a self-managed Apache Kafka stream as a source.

A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes clusters you manage yourself as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.

sqsQueueParameters Property Map
The parameters for using an Amazon SQS queue as a source.
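
Only one of these source-parameter blocks applies to a given pipe, and it must match the type of resource named in source. The following is a minimal sketch assuming an SQS queue source and a hypothetical filter pattern; the filters/pattern shape of PipeFilterCriteria is not reproduced in the table above.

import * as aws_native from "@pulumi/aws-native";

// Sketch only: ARNs are placeholders and the filter pattern is a hypothetical
// example of matching on an "orderStatus" field in the message body.
const filteredPipe = new aws_native.pipes.Pipe("filteredPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
    sourceParameters: {
        sqsQueueParameters: {
            batchSize: 10,
            maximumBatchingWindowInSeconds: 30,
        },
        filterCriteria: {
            filters: [{
                pattern: JSON.stringify({ body: { orderStatus: ["SHIPPED"] } }),
            }],
        },
    },
});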

PipeSourceRabbitMqBrokerParameters
, PipeSourceRabbitMqBrokerParametersArgs

Credentials This property is required. Pulumi.AwsNative.Pipes.Inputs.PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
QueueName
This property is required.
Changes to this property will trigger replacement.
string
The name of the destination queue to consume.
BatchSize int
The maximum number of records to include in each batch.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
VirtualHost Changes to this property will trigger replacement. string
The name of the virtual host associated with the source broker.
Credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
QueueName
This property is required.
Changes to this property will trigger replacement.
string
The name of the destination queue to consume.
BatchSize int
The maximum number of records to include in each batch.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
VirtualHost Changes to this property will trigger replacement. string
The name of the virtual host associated with the source broker.
credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
queueName
This property is required.
Changes to this property will trigger replacement.
String
The name of the destination queue to consume.
batchSize Integer
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds Integer
The maximum length of time to wait for events.
virtualHost Changes to this property will trigger replacement. String
The name of the virtual host associated with the source broker.
credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
queueName
This property is required.
Changes to this property will trigger replacement.
string
The name of the destination queue to consume.
batchSize number
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds number
The maximum length of time to wait for events.
virtualHost Changes to this property will trigger replacement. string
The name of the virtual host associated with the source broker.
credentials This property is required. PipeMqBrokerAccessCredentialsProperties
The credentials needed to access the resource.
queue_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the destination queue to consume.
batch_size int
The maximum number of records to include in each batch.
maximum_batching_window_in_seconds int
The maximum length of time to wait for events.
virtual_host Changes to this property will trigger replacement. str
The name of the virtual host associated with the source broker.
credentials This property is required. Property Map
The credentials needed to access the resource.
queueName
This property is required.
Changes to this property will trigger replacement.
String
The name of the destination queue to consume.
batchSize Number
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds Number
The maximum length of time to wait for events.
virtualHost Changes to this property will trigger replacement. String
The name of the virtual host associated with the source broker.
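
A sketch of wiring these parameters into sourceParameters. The broker, role, target, and secret ARNs are placeholders, and the basicAuth field on credentials is an assumption, since the PipeMqBrokerAccessCredentialsProperties fields are not reproduced above.

import * as aws_native from "@pulumi/aws-native";

// Sketch only: all ARNs are placeholders; the basicAuth field is an assumption.
const rabbitPipe = new aws_native.pipes.Pipe("rabbitPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:mq:us-east-1:123456789123:broker:demoBroker:b-abc12345",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
    sourceParameters: {
        rabbitMqBrokerParameters: {
            queueName: "orders",   // required; changing it replaces the pipe
            virtualHost: "/",      // optional; also replacement-on-change
            batchSize: 5,
            credentials: {
                basicAuth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:rabbit-credentials-AbCdEf",
            },
        },
    },
});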

PipeSourceSelfManagedKafkaParameters
, PipeSourceSelfManagedKafkaParametersArgs

TopicName
This property is required.
Changes to this property will trigger replacement.
string
The name of the topic that the pipe will read from.
AdditionalBootstrapServers Changes to this property will trigger replacement. List<string>
An array of server URLs.
BatchSize int
The maximum number of records to include in each batch.
ConsumerGroupId Changes to this property will trigger replacement. string
The ID of the consumer group to use when reading from the topic.
Credentials Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
The credentials needed to access the resource.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
ServerRootCaCertificate string
Optional ARN of the Secrets Manager secret that stores the server root CA certificate.
StartingPosition Changes to this property will trigger replacement. Pulumi.AwsNative.Pipes.PipeSelfManagedKafkaStartPosition
The position in a stream from which to start reading.
Vpc Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationVpc
This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
TopicName
This property is required.
Changes to this property will trigger replacement.
string
The name of the topic that the pipe will read from.
AdditionalBootstrapServers Changes to this property will trigger replacement. []string
An array of server URLs.
BatchSize int
The maximum number of records to include in each batch.
ConsumerGroupId Changes to this property will trigger replacement. string
The ID of the consumer group to use when reading from the topic.
Credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
The credentials needed to access the resource.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
ServerRootCaCertificate string
Optional ARN of the Secrets Manager secret that stores the server root CA certificate.
StartingPosition Changes to this property will trigger replacement. PipeSelfManagedKafkaStartPosition
The position in a stream from which to start reading.
Vpc PipeSelfManagedKafkaAccessConfigurationVpc
This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
topicName
This property is required.
Changes to this property will trigger replacement.
String
The name of the topic that the pipe will read from.
additionalBootstrapServers Changes to this property will trigger replacement. List<String>
An array of server URLs.
batchSize Integer
The maximum number of records to include in each batch.
consumerGroupId Changes to this property will trigger replacement. String
The ID of the consumer group to use when reading from the topic.
credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
The credentials needed to access the resource.
maximumBatchingWindowInSeconds Integer
The maximum length of time to wait for events.
serverRootCaCertificate String
Optional ARN of the Secrets Manager secret that stores the server root CA certificate.
startingPosition Changes to this property will trigger replacement. PipeSelfManagedKafkaStartPosition
The position in a stream from which to start reading.
vpc PipeSelfManagedKafkaAccessConfigurationVpc
This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
topicName
This property is required.
Changes to this property will trigger replacement.
string
The name of the topic that the pipe will read from.
additionalBootstrapServers Changes to this property will trigger replacement. string[]
An array of server URLs.
batchSize number
The maximum number of records to include in each batch.
consumerGroupId Changes to this property will trigger replacement. string
The ID of the consumer group to use when reading from the topic.
credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
The credentials needed to access the resource.
maximumBatchingWindowInSeconds number
The maximum length of time to wait for events.
serverRootCaCertificate string
Optional ARN of the Secrets Manager secret that stores the server root CA certificate.
startingPosition Changes to this property will trigger replacement. PipeSelfManagedKafkaStartPosition
The position in a stream from which to start reading.
vpc PipeSelfManagedKafkaAccessConfigurationVpc
This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
topic_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the topic that the pipe will read from.
additional_bootstrap_servers Changes to this property will trigger replacement. Sequence[str]
An array of server URLs.
batch_size int
The maximum number of records to include in each batch.
consumer_group_id Changes to this property will trigger replacement. str
The ID of the consumer group to use when reading from the topic.
credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
The credentials needed to access the resource.
maximum_batching_window_in_seconds int
The maximum length of time to wait for events.
server_root_ca_certificate str
Optional ARN of the Secrets Manager secret that stores the server root CA certificate.
starting_position Changes to this property will trigger replacement. PipeSelfManagedKafkaStartPosition
The position in a stream from which to start reading.
vpc PipeSelfManagedKafkaAccessConfigurationVpc
This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
topicName
This property is required.
Changes to this property will trigger replacement.
String
The name of the topic that the pipe will read from.
additionalBootstrapServers Changes to this property will trigger replacement. List<String>
An array of server URLs.
batchSize Number
The maximum number of records to include in each batch.
consumerGroupId Changes to this property will trigger replacement. String
The ID of the consumer group to use when reading from the topic.
credentials Property Map | Property Map | Property Map | Property Map
The credentials needed to access the resource.
maximumBatchingWindowInSeconds Number
The maximum length of time to wait for events.
serverRootCaCertificate String
Optional ARN of the Secrets Manager secret that stores the server root CA certificate.
startingPosition Changes to this property will trigger replacement. "TRIM_HORIZON" | "LATEST"
The position in a stream from which to start reading.
vpc Property Map
This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
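
A sketch under stated assumptions: the smk:// bootstrap address used as source, the saslScram512Auth credential field, and all ARNs are placeholders or assumptions not documented in the table above; adjust the credential variant to match your cluster.

import * as aws_native from "@pulumi/aws-native";

// Sketch only: the bootstrap address, secret ARN, and saslScram512Auth field
// are assumptions, not values taken from this reference.
const kafkaPipe = new aws_native.pipes.Pipe("kafkaPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "smk://kafka-bootstrap.example.com:9092",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
    sourceParameters: {
        selfManagedKafkaParameters: {
            topicName: "orders",   // required; changing it replaces the pipe
            additionalBootstrapServers: ["kafka-bootstrap.example.com:9092"],
            startingPosition: "LATEST",
            batchSize: 100,
            credentials: {
                saslScram512Auth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:kafka-sasl-AbCdEf",
            },
        },
    },
});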

PipeSourceSqsQueueParameters
, PipeSourceSqsQueueParametersArgs

BatchSize int
The maximum number of records to include in each batch.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
BatchSize int
The maximum number of records to include in each batch.
MaximumBatchingWindowInSeconds int
The maximum length of time to wait for events.
batchSize Integer
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds Integer
The maximum length of time to wait for events.
batchSize number
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds number
The maximum length of time to wait for events.
batch_size int
The maximum number of records to include in each batch.
maximum_batching_window_in_seconds int
The maximum length of time to wait for events.
batchSize Number
The maximum number of records to include in each batch.
maximumBatchingWindowInSeconds Number
The maximum length of time to wait for events.

PipeState
, PipeStateArgs

Running
RUNNING
Stopped
STOPPED
Creating
CREATING
Updating
UPDATING
Deleting
DELETING
Starting
STARTING
Stopping
STOPPING
CreateFailed
CREATE_FAILED
UpdateFailed
UPDATE_FAILED
StartFailed
START_FAILED
StopFailed
STOP_FAILED
DeleteFailed
DELETE_FAILED
CreateRollbackFailed
CREATE_ROLLBACK_FAILED
DeleteRollbackFailed
DELETE_ROLLBACK_FAILED
UpdateRollbackFailed
UPDATE_ROLLBACK_FAILED
PipeStateRunning
RUNNING
PipeStateStopped
STOPPED
PipeStateCreating
CREATING
PipeStateUpdating
UPDATING
PipeStateDeleting
DELETING
PipeStateStarting
STARTING
PipeStateStopping
STOPPING
PipeStateCreateFailed
CREATE_FAILED
PipeStateUpdateFailed
UPDATE_FAILED
PipeStateStartFailed
START_FAILED
PipeStateStopFailed
STOP_FAILED
PipeStateDeleteFailed
DELETE_FAILED
PipeStateCreateRollbackFailed
CREATE_ROLLBACK_FAILED
PipeStateDeleteRollbackFailed
DELETE_ROLLBACK_FAILED
PipeStateUpdateRollbackFailed
UPDATE_ROLLBACK_FAILED
Running
RUNNING
Stopped
STOPPED
Creating
CREATING
Updating
UPDATING
Deleting
DELETING
Starting
STARTING
Stopping
STOPPING
CreateFailed
CREATE_FAILED
UpdateFailed
UPDATE_FAILED
StartFailed
START_FAILED
StopFailed
STOP_FAILED
DeleteFailed
DELETE_FAILED
CreateRollbackFailed
CREATE_ROLLBACK_FAILED
DeleteRollbackFailed
DELETE_ROLLBACK_FAILED
UpdateRollbackFailed
UPDATE_ROLLBACK_FAILED
Running
RUNNING
Stopped
STOPPED
Creating
CREATING
Updating
UPDATING
Deleting
DELETING
Starting
STARTING
Stopping
STOPPING
CreateFailed
CREATE_FAILED
UpdateFailed
UPDATE_FAILED
StartFailed
START_FAILED
StopFailed
STOP_FAILED
DeleteFailed
DELETE_FAILED
CreateRollbackFailed
CREATE_ROLLBACK_FAILED
DeleteRollbackFailed
DELETE_ROLLBACK_FAILED
UpdateRollbackFailed
UPDATE_ROLLBACK_FAILED
RUNNING
RUNNING
STOPPED
STOPPED
CREATING
CREATING
UPDATING
UPDATING
DELETING
DELETING
STARTING
STARTING
STOPPING
STOPPING
CREATE_FAILED
CREATE_FAILED
UPDATE_FAILED
UPDATE_FAILED
START_FAILED
START_FAILED
STOP_FAILED
STOP_FAILED
DELETE_FAILED
DELETE_FAILED
CREATE_ROLLBACK_FAILED
CREATE_ROLLBACK_FAILED
DELETE_ROLLBACK_FAILED
DELETE_ROLLBACK_FAILED
UPDATE_ROLLBACK_FAILED
UPDATE_ROLLBACK_FAILED
"RUNNING"
RUNNING
"STOPPED"
STOPPED
"CREATING"
CREATING
"UPDATING"
UPDATING
"DELETING"
DELETING
"STARTING"
STARTING
"STOPPING"
STOPPING
"CREATE_FAILED"
CREATE_FAILED
"UPDATE_FAILED"
UPDATE_FAILED
"START_FAILED"
START_FAILED
"STOP_FAILED"
STOP_FAILED
"DELETE_FAILED"
DELETE_FAILED
"CREATE_ROLLBACK_FAILED"
CREATE_ROLLBACK_FAILED
"DELETE_ROLLBACK_FAILED"
DELETE_ROLLBACK_FAILED
"UPDATE_ROLLBACK_FAILED"
UPDATE_ROLLBACK_FAILED
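
These values describe the state the service reports for a pipe rather than anything you set directly. Assuming the resource surfaces this as a currentState output (that read-only attribute is not reproduced in this table), it can be exported for inspection, as in this sketch:

import * as aws_native from "@pulumi/aws-native";

// Sketch only: assumes a currentState output on the Pipe resource; ARNs are placeholders.
const demoPipe = new aws_native.pipes.Pipe("demoPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget",
});
export const pipeState = demoPipe.currentState; // e.g. "RUNNING" or "CREATE_FAILED"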

PipeTag
, PipeTagArgs

Key This property is required. string
The key of the key-value pair.
Value This property is required. string
The value of the key-value pair.
Key This property is required. string
The key of the key-value pair.
Value This property is required. string
The value of the key-value pair.
key This property is required. String
The key of the key-value pair.
value This property is required. String
The value of the key-value pair.
key This property is required. string
The key of the key-value pair.
value This property is required. string
The value of the key-value pair.
key This property is required. str
The key of the key-value pair.
value This property is required. str
The value of the key-value pair.
key This property is required. String
The key of the key-value pair.
value This property is required. String
The value of the key-value pair.

PipeTargetBatchJobParameters
, PipeTargetBatchJobParametersArgs

JobDefinition This property is required. string
The job definition used by this job. This value can be one of name , name:revision , or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
JobName This property is required. string
The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
ArrayProperties Pulumi.AwsNative.Pipes.Inputs.PipeBatchArrayProperties
The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
ContainerOverrides Pulumi.AwsNative.Pipes.Inputs.PipeBatchContainerOverrides
The overrides that are sent to a container.
DependsOn List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchJobDependency>
A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
Parameters Dictionary<string, string>
Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
RetryStrategy Pulumi.AwsNative.Pipes.Inputs.PipeBatchRetryStrategy
The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
JobDefinition This property is required. string
The job definition used by this job. This value can be one of name , name:revision , or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
JobName This property is required. string
The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
ArrayProperties PipeBatchArrayProperties
The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
ContainerOverrides PipeBatchContainerOverrides
The overrides that are sent to a container.
DependsOn []PipeBatchJobDependency
A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
Parameters map[string]string
Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
RetryStrategy PipeBatchRetryStrategy
The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
jobDefinition This property is required. String
The job definition used by this job. This value can be one of name , name:revision , or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
jobName This property is required. String
The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
arrayProperties PipeBatchArrayProperties
The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
containerOverrides PipeBatchContainerOverrides
The overrides that are sent to a container.
dependsOn List<PipeBatchJobDependency>
A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
parameters Map<String,String>
Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
retryStrategy PipeBatchRetryStrategy
The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
jobDefinition This property is required. string
The job definition used by this job. This value can be one of name , name:revision , or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
jobName This property is required. string
The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
arrayProperties PipeBatchArrayProperties
The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
containerOverrides PipeBatchContainerOverrides
The overrides that are sent to a container.
dependsOn PipeBatchJobDependency[]
A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
parameters {[key: string]: string}
Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
retryStrategy PipeBatchRetryStrategy
The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
job_definition This property is required. str
The job definition used by this job. This value can be one of name , name:revision , or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
job_name This property is required. str
The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
array_properties PipeBatchArrayProperties
The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
container_overrides PipeBatchContainerOverrides
The overrides that are sent to a container.
depends_on Sequence[PipeBatchJobDependency]
A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
parameters Mapping[str, str]
Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
retry_strategy PipeBatchRetryStrategy
The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
jobDefinition This property is required. String
The job definition used by this job. This value can be one of name , name:revision , or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
jobName This property is required. String
The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
arrayProperties Property Map
The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
containerOverrides Property Map
The overrides that are sent to a container.
dependsOn List<Property Map>
A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
parameters Map<String>
Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
retryStrategy Property Map
The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
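
A sketch of submitting to AWS Batch as the target, assuming the targetParameters field for this block is named batchJobParameters (the PipeTargetParameters table is not reproduced here) and using placeholder ARNs and names.

import * as aws_native from "@pulumi/aws-native";

// Sketch only: the job queue ARN, job definition, and parameter values are placeholders.
const batchPipe = new aws_native.pipes.Pipe("batchPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:batch:us-east-1:123456789123:job-queue/pipe-job-queue",
    targetParameters: {
        batchJobParameters: {
            jobDefinition: "pipe-job-definition", // name, name:revision, or full ARN
            jobName: "pipe-submitted-job",
            parameters: {
                environment: "dev",               // overrides a placeholder in the job definition
            },
        },
    },
});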

PipeTargetCloudWatchLogsParameters
, PipeTargetCloudWatchLogsParametersArgs

LogStreamName string
The name of the log stream.
Timestamp string

A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.

The value cannot be a static timestamp as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered.

If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.

LogStreamName string
The name of the log stream.
Timestamp string

A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.

The value cannot be a static timestamp as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered.

If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.

logStreamName String
The name of the log stream.
timestamp String

A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.

The value cannot be a static timestamp as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered.

If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.

logStreamName string
The name of the log stream.
timestamp string

A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.

The value cannot be a static timestamp as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered.

If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.

log_stream_name str
The name of the log stream.
timestamp str

A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.

The value cannot be a static timestamp as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered.

If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.

logStreamName String
The name of the log stream.
timestamp String

A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.

The value cannot be a static timestamp as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered.

If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.
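
A sketch of delivering events to a CloudWatch Logs target, assuming the targetParameters field is named cloudWatchLogsParameters; the log group ARN, stream name, and the "$.body.timestamp" dynamic path are placeholders.

import * as aws_native from "@pulumi/aws-native";

// Sketch only: the log group ARN, log stream name, and timestamp path are placeholders.
const logsPipe = new aws_native.pipes.Pipe("logsPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:logs:us-east-1:123456789123:log-group:/pipes/demo",
    targetParameters: {
        cloudWatchLogsParameters: {
            logStreamName: "pipe-events",
            // dynamic path into the payload; must resolve to epoch milliseconds
            timestamp: "$.body.timestamp",
        },
    },
});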

PipeTargetEcsTaskParameters
, PipeTargetEcsTaskParametersArgs

TaskDefinitionArn This property is required. string
The ARN of the task definition to use if the event target is an Amazon ECS task.
CapacityProviderStrategy List<Pulumi.AwsNative.Pipes.Inputs.PipeCapacityProviderStrategyItem>

The capacity provider strategy to use for the task.

If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.

EnableEcsManagedTags bool
Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
EnableExecuteCommand bool
Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
Group string
Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
LaunchType Pulumi.AwsNative.Pipes.PipeLaunchType
Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide .
NetworkConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeNetworkConfiguration

Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.

If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.

Overrides Pulumi.AwsNative.Pipes.Inputs.PipeEcsTaskOverride
The overrides that are associated with a task.
PlacementConstraints List<Pulumi.AwsNative.Pipes.Inputs.PipePlacementConstraint>
An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
PlacementStrategy List<Pulumi.AwsNative.Pipes.Inputs.PipePlacementStrategy>
The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
PlatformVersion string

Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0 .

This structure is used only if LaunchType is FARGATE . For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide .

PropagateTags Pulumi.AwsNative.Pipes.PipePropagateTags
Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
ReferenceId string
The reference ID to use for the task.
Tags List<Pulumi.AwsNative.Pipes.Inputs.PipeTag>
The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
TaskCount int
The number of tasks to create based on TaskDefinition . The default is 1.
TaskDefinitionArn This property is required. string
The ARN of the task definition to use if the event target is an Amazon ECS task.
CapacityProviderStrategy []PipeCapacityProviderStrategyItem

The capacity provider strategy to use for the task.

If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.

EnableEcsManagedTags bool
Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
EnableExecuteCommand bool
Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
Group string
Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
LaunchType PipeLaunchType
Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide .
NetworkConfiguration PipeNetworkConfiguration

Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.

If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.

Overrides PipeEcsTaskOverride
The overrides that are associated with a task.
PlacementConstraints []PipePlacementConstraint
An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
PlacementStrategy []PipePlacementStrategy
The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
PlatformVersion string

Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0 .

This structure is used only if LaunchType is FARGATE . For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide .

PropagateTags PipePropagateTags
Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
ReferenceId string
The reference ID to use for the task.
Tags []PipeTag
The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
TaskCount int
The number of tasks to create based on TaskDefinition . The default is 1.
taskDefinitionArn This property is required. String
The ARN of the task definition to use if the event target is an Amazon ECS task.
capacityProviderStrategy List<PipeCapacityProviderStrategyItem>

The capacity provider strategy to use for the task.

If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.

enableEcsManagedTags Boolean
Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
enableExecuteCommand Boolean
Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
group String
Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
launchType PipeLaunchType
Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide .
networkConfiguration PipeNetworkConfiguration

Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.

If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.

overrides PipeEcsTaskOverride
The overrides that are associated with a task.
placementConstraints List<PipePlacementConstraint>
An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
placementStrategy List<PipePlacementStrategy>
The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
platformVersion String

Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0 .

This structure is used only if LaunchType is FARGATE . For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide .

propagateTags PipePropagateTags
Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
referenceId String
The reference ID to use for the task.
tags List<PipeTag>
The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
taskCount Integer
The number of tasks to create based on TaskDefinition . The default is 1.
taskDefinitionArn This property is required. string
The ARN of the task definition to use if the event target is an Amazon ECS task.
capacityProviderStrategy PipeCapacityProviderStrategyItem[]

The capacity provider strategy to use for the task.

If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.

enableEcsManagedTags boolean
Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
enableExecuteCommand boolean
Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
group string
Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
launchType PipeLaunchType
Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide .
networkConfiguration PipeNetworkConfiguration

Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.

If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.

overrides PipeEcsTaskOverride
The overrides that are associated with a task.
placementConstraints PipePlacementConstraint[]
An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
placementStrategy PipePlacementStrategy[]
The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
platformVersion string

Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0 .

This structure is used only if LaunchType is FARGATE . For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide .

propagateTags PipePropagateTags
Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
referenceId string
The reference ID to use for the task.
tags PipeTag[]
The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
taskCount number
The number of tasks to create based on TaskDefinition . The default is 1.
task_definition_arn This property is required. str
The ARN of the task definition to use if the event target is an Amazon ECS task.
capacity_provider_strategy Sequence[PipeCapacityProviderStrategyItem]

The capacity provider strategy to use for the task.

If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.

enable_ecs_managed_tags bool
Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
enable_execute_command bool
Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
group str
Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
launch_type PipeLaunchType
Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide .
network_configuration PipeNetworkConfiguration

Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.

If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.

overrides PipeEcsTaskOverride
The overrides that are associated with a task.
placement_constraints Sequence[PipePlacementConstraint]
An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
placement_strategy Sequence[PipePlacementStrategy]
The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
platform_version str

Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0 .

This structure is used only if LaunchType is FARGATE . For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide .

propagate_tags PipePropagateTags
Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
reference_id str
The reference ID to use for the task.
tags Sequence[PipeTag]
The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
task_count int
The number of tasks to create based on TaskDefinition . The default is 1.
taskDefinitionArn This property is required. String
The ARN of the task definition to use if the event target is an Amazon ECS task.
capacityProviderStrategy List<Property Map>

The capacity provider strategy to use for the task.

If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.

enableEcsManagedTags Boolean
Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
enableExecuteCommand Boolean
Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
group String
Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
launchType "EC2" | "FARGATE" | "EXTERNAL"
Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide .
networkConfiguration Property Map

Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.

If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.

overrides Property Map
The overrides that are associated with a task.
placementConstraints List<Property Map>
An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
placementStrategy List<Property Map>
The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
platformVersion String

Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0 .

This structure is used only if LaunchType is FARGATE . For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide .

propagateTags "TASK_DEFINITION"
Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
referenceId String
The reference ID to use for the task.
tags List<Property Map>
The metadata that you apply to the task to help you categorize and organize it. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
taskCount Number
The number of tasks to create based on TaskDefinition . The default is 1.
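
To make the shape of these properties concrete, here is a minimal TypeScript sketch of an ECS task target. It assumes an SQS source; all ARNs, subnet IDs, and names are hypothetical placeholders, and the nested awsvpcConfiguration shape follows the PipeNetworkConfiguration type documented elsewhere on this page.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch: run one Fargate task per batch of source events.
// Every ARN and subnet ID below is a hypothetical placeholder.
const ecsPipe = new aws_native.pipes.Pipe("ecsPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:ecs:us-east-1:123456789123:cluster/demo-cluster",
    targetParameters: {
        ecsTaskParameters: {
            taskDefinitionArn: "arn:aws:ecs:us-east-1:123456789123:task-definition/demo-task:1",
            launchType: "FARGATE",
            taskCount: 1,
            networkConfiguration: {
                // Required because launchType is FARGATE (awsvpc network mode).
                awsvpcConfiguration: {
                    subnets: ["subnet-0123456789abcdef0"],
                    assignPublicIp: "ENABLED",
                },
            },
        },
    },
});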

PipeTargetEventBridgeEventBusParameters
, PipeTargetEventBridgeEventBusParametersArgs

DetailType string
A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
EndpointId string
The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo .
Resources List<string>
AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
Source string
The source of the event.
Time string
The time stamp of the event, per RFC3339 . If no time stamp is provided, the time stamp of the PutEvents call is used.
DetailType string
A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
EndpointId string
The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo .
Resources []string
AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
Source string
The source of the event.
Time string
The time stamp of the event, per RFC3339 . If no time stamp is provided, the time stamp of the PutEvents call is used.
detailType String
A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
endpointId String
The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo .
resources List<String>
AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
source String
The source of the event.
time String
The time stamp of the event, per RFC3339 . If no time stamp is provided, the time stamp of the PutEvents call is used.
detailType string
A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
endpointId string
The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo .
resources string[]
AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
source string
The source of the event.
time string
The time stamp of the event, per RFC3339 . If no time stamp is provided, the time stamp of the PutEvents call is used.
detail_type str
A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
endpoint_id str
The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo .
resources Sequence[str]
AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
source str
The source of the event.
time str
The time stamp of the event, per RFC3339 . If no time stamp is provided, the time stamp of the PutEvents call is used.
detailType String
A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
endpointId String
The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo .
resources List<String>
AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
source String
The source of the event.
time String
The time stamp of the event, per RFC3339 . If no time stamp is provided, the time stamp of the PutEvents call is used.
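
As a quick illustration, the TypeScript sketch below routes events onto an EventBridge event bus target; the bus and queue ARNs are placeholders, and detailType and source are arbitrary example strings.

import * as aws_native from "@pulumi/aws-native";

// Hypothetical ARNs; detailType and source are free-form example values.
const busPipe = new aws_native.pipes.Pipe("busPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:events:us-east-1:123456789123:event-bus/demo-bus",
    targetParameters: {
        eventBridgeEventBusParameters: {
            detailType: "OrderReceived",
            source: "com.example.orders",
        },
    },
});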

PipeTargetHttpParameters
, PipeTargetHttpParametersArgs

HeaderParameters Dictionary<string, string>
The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
PathParameterValues List<string>
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
QueryStringParameters Dictionary<string, string>
The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
HeaderParameters map[string]string
The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
PathParameterValues []string
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
QueryStringParameters map[string]string
The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
headerParameters Map<String,String>
The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
pathParameterValues List<String>
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
queryStringParameters Map<String,String>
The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
headerParameters {[key: string]: string}
The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
pathParameterValues string[]
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
queryStringParameters {[key: string]: string}
The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
header_parameters Mapping[str, str]
The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
path_parameter_values Sequence[str]
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
query_string_parameters Mapping[str, str]
The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
headerParameters Map<String>
The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
pathParameterValues List<String>
The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
queryStringParameters Map<String>
The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
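
For example, a pipe whose target is an EventBridge API destination might populate these parameters as in the TypeScript sketch below; the ARNs, header, path, and query values are all placeholders.

import * as aws_native from "@pulumi/aws-native";

// Placeholder ARNs and values; headers, path parameters, and query
// strings are merged into the request sent to the API destination.
const httpPipe = new aws_native.pipes.Pipe("httpPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:events:us-east-1:123456789123:api-destination/demo-destination/11111111-2222-3333-4444-555555555555",
    targetParameters: {
        httpParameters: {
            headerParameters: { "x-api-key": "example-key" },
            pathParameterValues: ["pets"],
            queryStringParameters: { limit: "10" },
        },
    },
});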

PipeTargetInvocationType
, PipeTargetInvocationTypeArgs

RequestResponse
REQUEST_RESPONSE
FireAndForget
FIRE_AND_FORGET
PipeTargetInvocationTypeRequestResponse
REQUEST_RESPONSE
PipeTargetInvocationTypeFireAndForget
FIRE_AND_FORGET
RequestResponse
REQUEST_RESPONSE
FireAndForget
FIRE_AND_FORGET
RequestResponse
REQUEST_RESPONSE
FireAndForget
FIRE_AND_FORGET
REQUEST_RESPONSE
REQUEST_RESPONSE
FIRE_AND_FORGET
FIRE_AND_FORGET
"REQUEST_RESPONSE"
REQUEST_RESPONSE
"FIRE_AND_FORGET"
FIRE_AND_FORGET

PipeTargetKinesisStreamParameters
, PipeTargetKinesisStreamParametersArgs

PartitionKey This property is required. string
Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
PartitionKey This property is required. string
Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
partitionKey This property is required. String
Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
partitionKey This property is required. string
Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
partition_key This property is required. str
Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
partitionKey This property is required. String
Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
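
A minimal TypeScript sketch of a Kinesis stream target follows; the stream and queue ARNs are placeholders, and the partition key shown is a static example string.

import * as aws_native from "@pulumi/aws-native";

// The partition key decides which shard each record lands on; a static
// placeholder value is used here, and all ARNs are hypothetical.
const kinesisPipe = new aws_native.pipes.Pipe("kinesisPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:kinesis:us-east-1:123456789123:stream/demoStream",
    targetParameters: {
        kinesisStreamParameters: {
            partitionKey: "demo-partition-key",
        },
    },
});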

PipeTargetLambdaFunctionParameters
, PipeTargetLambdaFunctionParametersArgs

InvocationType Pulumi.AwsNative.Pipes.PipeTargetInvocationType

Specify whether to invoke the function synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  • FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.

For more information, see Invocation types in the Amazon EventBridge User Guide .

InvocationType PipeTargetInvocationType

Specify whether to invoke the function synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  • FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocationType PipeTargetInvocationType

Specify whether to invoke the function synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  • FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocationType PipeTargetInvocationType

Specify whether to invoke the function synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  • FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocation_type PipeTargetInvocationType

Specify whether to invoke the function synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  • FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocationType "REQUEST_RESPONSE" | "FIRE_AND_FORGET"

Specify whether to invoke the function synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  • FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.

For more information, see Invocation types in the Amazon EventBridge User Guide .
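
The TypeScript sketch below invokes a Lambda function target asynchronously; the function and queue ARNs are placeholders.

import * as aws_native from "@pulumi/aws-native";

// FIRE_AND_FORGET corresponds to the "Event" (asynchronous) Lambda
// invocation type; omit invocationType to keep the REQUEST_RESPONSE default.
const lambdaPipe = new aws_native.pipes.Pipe("lambdaPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:lambda:us-east-1:123456789123:function:demoFunction",
    targetParameters: {
        lambdaFunctionParameters: {
            invocationType: "FIRE_AND_FORGET",
        },
    },
});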

PipeTargetParameters
, PipeTargetParametersArgs

BatchJobParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetBatchJobParameters
The parameters for using an AWS Batch job as a target.
CloudWatchLogsParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetCloudWatchLogsParameters
The parameters for using a CloudWatch Logs log stream as a target.
EcsTaskParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetEcsTaskParameters
The parameters for using an Amazon ECS task as a target.
EventBridgeEventBusParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetEventBridgeEventBusParameters
The parameters for using an EventBridge event bus as a target.
HttpParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetHttpParameters
These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge ApiDestination.
InputTemplate string

Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

KinesisStreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetKinesisStreamParameters
The parameters for using a Kinesis stream as a target.
LambdaFunctionParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetLambdaFunctionParameters
The parameters for using a Lambda function as a target.
RedshiftDataParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetRedshiftDataParameters
These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
SageMakerPipelineParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetSageMakerPipelineParameters
The parameters for using a SageMaker AI pipeline as a target.
SqsQueueParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetSqsQueueParameters
The parameters for using an Amazon SQS queue as a target.
StepFunctionStateMachineParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetStateMachineParameters
The parameters for using a Step Functions state machine as a target.
TimestreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetTimestreamParameters
The parameters for using a Timestream for LiveAnalytics table as a target.
BatchJobParameters PipeTargetBatchJobParameters
The parameters for using an AWS Batch job as a target.
CloudWatchLogsParameters PipeTargetCloudWatchLogsParameters
The parameters for using a CloudWatch Logs log stream as a target.
EcsTaskParameters PipeTargetEcsTaskParameters
The parameters for using an Amazon ECS task as a target.
EventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters
The parameters for using an EventBridge event bus as a target.
HttpParameters PipeTargetHttpParameters
These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge ApiDestination.
InputTemplate string

Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

KinesisStreamParameters PipeTargetKinesisStreamParameters
The parameters for using a Kinesis stream as a target.
LambdaFunctionParameters PipeTargetLambdaFunctionParameters
The parameters for using a Lambda function as a target.
RedshiftDataParameters PipeTargetRedshiftDataParameters
These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
SageMakerPipelineParameters PipeTargetSageMakerPipelineParameters
The parameters for using a SageMaker AI pipeline as a target.
SqsQueueParameters PipeTargetSqsQueueParameters
The parameters for using an Amazon SQS queue as a target.
StepFunctionStateMachineParameters PipeTargetStateMachineParameters
The parameters for using a Step Functions state machine as a target.
TimestreamParameters PipeTargetTimestreamParameters
The parameters for using a Timestream for LiveAnalytics table as a target.
batchJobParameters PipeTargetBatchJobParameters
The parameters for using an AWS Batch job as a target.
cloudWatchLogsParameters PipeTargetCloudWatchLogsParameters
The parameters for using a CloudWatch Logs log stream as a target.
ecsTaskParameters PipeTargetEcsTaskParameters
The parameters for using an Amazon ECS task as a target.
eventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters
The parameters for using an EventBridge event bus as a target.
httpParameters PipeTargetHttpParameters
These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge ApiDestination.
inputTemplate String

Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

kinesisStreamParameters PipeTargetKinesisStreamParameters
The parameters for using a Kinesis stream as a target.
lambdaFunctionParameters PipeTargetLambdaFunctionParameters
The parameters for using a Lambda function as a target.
redshiftDataParameters PipeTargetRedshiftDataParameters
These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
sageMakerPipelineParameters PipeTargetSageMakerPipelineParameters
The parameters for using a SageMaker AI pipeline as a target.
sqsQueueParameters PipeTargetSqsQueueParameters
The parameters for using an Amazon SQS queue as a target.
stepFunctionStateMachineParameters PipeTargetStateMachineParameters
The parameters for using a Step Functions state machine as a target.
timestreamParameters PipeTargetTimestreamParameters
The parameters for using a Timestream for LiveAnalytics table as a target.
batchJobParameters PipeTargetBatchJobParameters
The parameters for using an AWS Batch job as a target.
cloudWatchLogsParameters PipeTargetCloudWatchLogsParameters
The parameters for using a CloudWatch Logs log stream as a target.
ecsTaskParameters PipeTargetEcsTaskParameters
The parameters for using an Amazon ECS task as a target.
eventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters
The parameters for using an EventBridge event bus as a target.
httpParameters PipeTargetHttpParameters
These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge ApiDestination.
inputTemplate string

Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

kinesisStreamParameters PipeTargetKinesisStreamParameters
The parameters for using a Kinesis stream as a target.
lambdaFunctionParameters PipeTargetLambdaFunctionParameters
The parameters for using a Lambda function as a target.
redshiftDataParameters PipeTargetRedshiftDataParameters
These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
sageMakerPipelineParameters PipeTargetSageMakerPipelineParameters
The parameters for using a SageMaker AI pipeline as a target.
sqsQueueParameters PipeTargetSqsQueueParameters
The parameters for using an Amazon SQS queue as a target.
stepFunctionStateMachineParameters PipeTargetStateMachineParameters
The parameters for using a Step Functions state machine as a target.
timestreamParameters PipeTargetTimestreamParameters
The parameters for using a Timestream for LiveAnalytics table as a target.
batch_job_parameters PipeTargetBatchJobParameters
The parameters for using an AWS Batch job as a target.
cloud_watch_logs_parameters PipeTargetCloudWatchLogsParameters
The parameters for using a CloudWatch Logs log stream as a target.
ecs_task_parameters PipeTargetEcsTaskParameters
The parameters for using an Amazon ECS task as a target.
event_bridge_event_bus_parameters PipeTargetEventBridgeEventBusParameters
The parameters for using an EventBridge event bus as a target.
http_parameters PipeTargetHttpParameters
These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge ApiDestination.
input_template str

Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

kinesis_stream_parameters PipeTargetKinesisStreamParameters
The parameters for using a Kinesis stream as a target.
lambda_function_parameters PipeTargetLambdaFunctionParameters
The parameters for using a Lambda function as a target.
redshift_data_parameters PipeTargetRedshiftDataParameters
These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
sage_maker_pipeline_parameters PipeTargetSageMakerPipelineParameters
The parameters for using a SageMaker AI pipeline as a target.
sqs_queue_parameters PipeTargetSqsQueueParameters
The parameters for using an Amazon SQS queue as a target.
step_function_state_machine_parameters PipeTargetStateMachineParameters
The parameters for using a Step Functions state machine as a target.
timestream_parameters PipeTargetTimestreamParameters
The parameters for using a Timestream for LiveAnalytics table as a target.
batchJobParameters Property Map
The parameters for using an AWS Batch job as a target.
cloudWatchLogsParameters Property Map
The parameters for using a CloudWatch Logs log stream as a target.
ecsTaskParameters Property Map
The parameters for using an Amazon ECS task as a target.
eventBridgeEventBusParameters Property Map
The parameters for using an EventBridge event bus as a target.
httpParameters Property Map
These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge ApiDestination.
inputTemplate String

Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format .

To remove an input template, specify an empty string.

kinesisStreamParameters Property Map
The parameters for using a Kinesis stream as a target.
lambdaFunctionParameters Property Map
The parameters for using a Lambda function as a target.
redshiftDataParameters Property Map
These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
sageMakerPipelineParameters Property Map
The parameters for using a SageMaker AI pipeline as a target.
sqsQueueParameters Property Map
The parameters for using an Amazon SQS queue as a target.
stepFunctionStateMachineParameters Property Map
The parameters for using a Step Functions state machine as a target.
timestreamParameters Property Map
The parameters for using a Timestream for LiveAnalytics table as a target.
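
Only the parameter block that matches the configured target is used. The TypeScript sketch below combines a static inputTemplate with SQS queue parameters purely as an illustration; all ARNs and values are placeholders.

import * as aws_native from "@pulumi/aws-native";

// The static JSON inputTemplate replaces the event payload delivered to
// the target; the FIFO group ID applies only if the target queue is FIFO.
const templatedPipe = new aws_native.pipes.Pipe("templatedPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget.fifo",
    targetParameters: {
        inputTemplate: JSON.stringify({ origin: "demo-pipe", note: "static payload" }),
        sqsQueueParameters: {
            messageGroupId: "demo-group",
        },
    },
});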

PipeTargetRedshiftDataParameters
, PipeTargetRedshiftDataParametersArgs

Database This property is required. string
The name of the Redshift database.
Sqls This property is required. List<string>
A list of SQL statements to run.
DbUser string
The database user name.
SecretManagerArn string
The optional Secrets Manager ARN that stores the database credentials.
StatementName string
A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
WithEvent bool
Indicates whether to send an event back to EventBridge after the SQL statement runs.
Database This property is required. string
The name of the Redshift database.
Sqls This property is required. []string
A list of SQL statements to run.
DbUser string
The database user name.
SecretManagerArn string
The optional Secrets Manager ARN that stores the database credentials.
StatementName string
A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
WithEvent bool
Indicates whether to send an event back to EventBridge after the SQL statement runs.
database This property is required. String
The name of the Redshift database.
sqls This property is required. List<String>
A list of SQL statements to run.
dbUser String
The database user name.
secretManagerArn String
The optional Secrets Manager ARN that stores the database credentials.
statementName String
A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
withEvent Boolean
Indicates whether to send an event back to EventBridge after the SQL statement runs.
database This property is required. string
The name of the Redshift database.
sqls This property is required. string[]
A list of SQL statements to run.
dbUser string
The database user name.
secretManagerArn string
The optional Secrets Manager ARN that stores the database credentials.
statementName string
A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
withEvent boolean
Indicates whether to send an event back to EventBridge after the SQL statement runs.
database This property is required. str
The name of the Redshift database.
sqls This property is required. Sequence[str]
A list of SQL statements to run.
db_user str
The database user name.
secret_manager_arn str
The optional Secrets Manager ARN that stores the database credentials.
statement_name str
A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
with_event bool
Indicates whether to send an event back to EventBridge after the SQL statement runs.
database This property is required. String
The name of the Redshift database.
sqls This property is required. List<String>
A list of SQL statements to run.
dbUser String
The database user name.
secretManagerArn String
The optional Secrets Manager ARN that stores the database credentials.
statementName String
A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
withEvent Boolean
Indicates whether to send an event back to EventBridge after the SQL statement runs.
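
A minimal TypeScript sketch of a Redshift Data API target follows; the cluster, secret, and queue ARNs, the database name, and the SQL text are all placeholders.

import * as aws_native from "@pulumi/aws-native";

// Runs one SQL statement per event batch via BatchExecuteStatement;
// credentials come from the (placeholder) Secrets Manager ARN.
const redshiftPipe = new aws_native.pipes.Pipe("redshiftPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:redshift:us-east-1:123456789123:cluster:demo-cluster",
    targetParameters: {
        redshiftDataParameters: {
            database: "dev",
            sqls: ["INSERT INTO pipe_events (payload) VALUES ('placeholder')"],
            secretManagerArn: "arn:aws:secretsmanager:us-east-1:123456789123:secret:redshift-creds",
            statementName: "pipe-demo-statement",
            withEvent: true,
        },
    },
});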

PipeTargetSageMakerPipelineParameters
, PipeTargetSageMakerPipelineParametersArgs

PipelineParameterList List<Pulumi.AwsNative.Pipes.Inputs.PipeSageMakerPipelineParameter>
List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
PipelineParameterList []PipeSageMakerPipelineParameter
List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
pipelineParameterList List<PipeSageMakerPipelineParameter>
List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
pipelineParameterList PipeSageMakerPipelineParameter[]
List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
pipeline_parameter_list Sequence[PipeSageMakerPipelineParameter]
List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
pipelineParameterList List<Property Map>
List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
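
For illustration, the TypeScript sketch below starts a SageMaker AI Model Building Pipeline execution with a single named parameter; the pipeline and queue ARNs, the parameter name, and the value are placeholders, and each list entry follows the PipeSageMakerPipelineParameter (name/value) type documented elsewhere on this page.

import * as aws_native from "@pulumi/aws-native";

// One pipeline execution parameter; the name and value are placeholders.
const sageMakerPipe = new aws_native.pipes.Pipe("sageMakerPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:sagemaker:us-east-1:123456789123:pipeline/demo-pipeline",
    targetParameters: {
        sageMakerPipelineParameters: {
            pipelineParameterList: [{
                name: "InputDataUrl",
                value: "s3://demo-bucket/input",
            }],
        },
    },
});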

PipeTargetSqsQueueParameters
, PipeTargetSqsQueueParametersArgs

MessageDeduplicationId string

This parameter applies only to FIFO (first-in-first-out) queues.

The token used for deduplication of sent messages.

MessageGroupId string
The FIFO message group ID to use as the target.
MessageDeduplicationId string

This parameter applies only to FIFO (first-in-first-out) queues.

The token used for deduplication of sent messages.

MessageGroupId string
The FIFO message group ID to use as the target.
messageDeduplicationId String

This parameter applies only to FIFO (first-in-first-out) queues.

The token used for deduplication of sent messages.

messageGroupId String
The FIFO message group ID to use as the target.
messageDeduplicationId string

This parameter applies only to FIFO (first-in-first-out) queues.

The token used for deduplication of sent messages.

messageGroupId string
The FIFO message group ID to use as the target.
message_deduplication_id str

This parameter applies only to FIFO (first-in-first-out) queues.

The token used for deduplication of sent messages.

message_group_id str
The FIFO message group ID to use as the target.
messageDeduplicationId String

This parameter applies only to FIFO (first-in-first-out) queues.

The token used for deduplication of sent messages.

messageGroupId String
The FIFO message group ID to use as the target.
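
Both fields matter only when the target queue is FIFO, as the TypeScript sketch below shows; the queue ARNs and the group and deduplication IDs are placeholders.

import * as aws_native from "@pulumi/aws-native";

// FIFO-only settings: messages share a group for ordering, and the
// deduplication ID suppresses duplicates within the deduplication window.
const fifoPipe = new aws_native.pipes.Pipe("fifoPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeDemoTarget.fifo",
    targetParameters: {
        sqsQueueParameters: {
            messageGroupId: "demo-group",
            messageDeduplicationId: "demo-dedup-id",
        },
    },
});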

PipeTargetStateMachineParameters
, PipeTargetStateMachineParametersArgs

InvocationType Pulumi.AwsNative.Pipes.PipeTargetInvocationType

Specify whether to invoke the Step Functions state machine synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference .

REQUEST_RESPONSE is not supported for STANDARD state machine workflows.

  • FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference .

For more information, see Invocation types in the Amazon EventBridge User Guide .

InvocationType PipeTargetInvocationType

Specify whether to invoke the Step Functions state machine synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference .

REQUEST_RESPONSE is not supported for STANDARD state machine workflows.

  • FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference .

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocationType PipeTargetInvocationType

Specify whether to invoke the Step Functions state machine synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference .

REQUEST_RESPONSE is not supported for STANDARD state machine workflows.

  • FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference .

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocationType PipeTargetInvocationType

Specify whether to invoke the Step Functions state machine synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference .

REQUEST_RESPONSE is not supported for STANDARD state machine workflows.

  • FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference .

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocation_type PipeTargetInvocationType

Specify whether to invoke the Step Functions state machine synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference .

REQUEST_RESPONSE is not supported for STANDARD state machine workflows.

  • FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference .

For more information, see Invocation types in the Amazon EventBridge User Guide .

invocationType "REQUEST_RESPONSE" | "FIRE_AND_FORGET"

Specify whether to invoke the Step Functions state machine synchronously or asynchronously.

  • REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference .

REQUEST_RESPONSE is not supported for STANDARD state machine workflows.

  • FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference .

For more information, see Invocation types in the Amazon EventBridge User Guide .
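
The TypeScript sketch below starts a STANDARD state machine asynchronously, since REQUEST_RESPONSE is not supported for STANDARD workflows; the state machine and queue ARNs are placeholders.

import * as aws_native from "@pulumi/aws-native";

// FIRE_AND_FORGET maps to StartExecution; REQUEST_RESPONSE
// (StartSyncExecution) is not supported for STANDARD workflows.
const sfnPipe = new aws_native.pipes.Pipe("sfnPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
    targetParameters: {
        stepFunctionStateMachineParameters: {
            invocationType: "FIRE_AND_FORGET",
        },
    },
});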

PipeTargetTimestreamParameters
, PipeTargetTimestreamParametersArgs

DimensionMappings This property is required. List<Pulumi.AwsNative.Pipes.Inputs.PipeDimensionMapping>

Map source data to dimensions in the target Timestream for LiveAnalytics table.

For more information, see Amazon Timestream for LiveAnalytics concepts

TimeValue This property is required. string
Dynamic path to the source data field that represents the time value for your data.
VersionValue This property is required. string

A 64-bit version value, or the source data field that represents the version value for your data.

Write requests with a higher version number update the existing measure value and version of the record; when the measure value is the same, the version is still updated. The default value is 1.

Timestream for LiveAnalytics does not support updating partial measure values in a record.

Version must be 1 or greater, or you will receive a ValidationException error.

EpochTimeUnit Pulumi.AwsNative.Pipes.PipeEpochTimeUnit

The granularity of the time units used. Default is MILLISECONDS .

Required if TimeFieldType is specified as EPOCH .

MultiMeasureMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeMultiMeasureMapping>
Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
SingleMeasureMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeSingleMeasureMapping>
Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
TimeFieldType Pulumi.AwsNative.Pipes.PipeTimeFieldType

The type of time value used.

The default is EPOCH .

TimestampFormat string

How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z' .

Required if TimeFieldType is specified as TIMESTAMP_FORMAT .

DimensionMappings This property is required. []PipeDimensionMapping

Map source data to dimensions in the target Timestream for LiveAnalytics table.

For more information, see Amazon Timestream for LiveAnalytics concepts

TimeValue This property is required. string
Dynamic path to the source data field that represents the time value for your data.
VersionValue This property is required. string

A 64-bit version value, or the source data field that represents the version value for your data.

Write requests with a higher version number update the existing measure value and version of the record; when the measure value is the same, the version is still updated. The default value is 1.

Timestream for LiveAnalytics does not support updating partial measure values in a record.

Version must be 1 or greater, or you will receive a ValidationException error.

EpochTimeUnit PipeEpochTimeUnit

The granularity of the time units used. Default is MILLISECONDS .

Required if TimeFieldType is specified as EPOCH .

MultiMeasureMappings []PipeMultiMeasureMapping
Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
SingleMeasureMappings []PipeSingleMeasureMapping
Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
TimeFieldType PipeTimeFieldType

The type of time value used.

The default is EPOCH .

TimestampFormat string

How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z' .

Required if TimeFieldType is specified as TIMESTAMP_FORMAT .

dimensionMappings This property is required. List<PipeDimensionMapping>

Map source data to dimensions in the target Timestream for LiveAnalytics table.

For more information, see Amazon Timestream for LiveAnalytics concepts

timeValue This property is required. String
Dynamic path to the source data field that represents the time value for your data.
versionValue This property is required. String

A 64-bit version value, or the source data field that represents the version value for your data.

Write requests with a higher version number update the existing measure value and version of the record; when the measure value is the same, the version is still updated. The default value is 1.

Timestream for LiveAnalytics does not support updating partial measure values in a record.

Version must be 1 or greater, or you will receive a ValidationException error.

epochTimeUnit PipeEpochTimeUnit

The granularity of the time units used. Default is MILLISECONDS .

Required if TimeFieldType is specified as EPOCH .

multiMeasureMappings List<PipeMultiMeasureMapping>
Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
singleMeasureMappings List<PipeSingleMeasureMapping>
Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
timeFieldType PipeTimeFieldType

The type of time value used.

The default is EPOCH .

timestampFormat String

How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z' .

Required if TimeFieldType is specified as TIMESTAMP_FORMAT .

dimensionMappings This property is required. PipeDimensionMapping[]

Map source data to dimensions in the target Timestream for LiveAnalytics table.

For more information, see Amazon Timestream for LiveAnalytics concepts

timeValue This property is required. string
Dynamic path to the source data field that represents the time value for your data.
versionValue This property is required. string

A 64-bit version value, or the source data field that represents the version value for your data.

Write requests with a higher version number update the existing measure value and version of the record; when the measure value is the same, the version is still updated. The default value is 1.

Timestream for LiveAnalytics does not support updating partial measure values in a record.

Version must be 1 or greater, or you will receive a ValidationException error.

epochTimeUnit PipeEpochTimeUnit

The granularity of the time units used. Default is MILLISECONDS .

Required if TimeFieldType is specified as EPOCH .

multiMeasureMappings PipeMultiMeasureMapping[]
Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
singleMeasureMappings PipeSingleMeasureMapping[]
Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
timeFieldType PipeTimeFieldType

The type of time value used.

The default is EPOCH .

timestampFormat string

How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z' .

Required if TimeFieldType is specified as TIMESTAMP_FORMAT .

dimension_mappings This property is required. Sequence[PipeDimensionMapping]

Map source data to dimensions in the target Timestream for LiveAnalytics table.

For more information, see Amazon Timestream for LiveAnalytics concepts

time_value This property is required. str
Dynamic path to the source data field that represents the time value for your data.
version_value This property is required. str

A 64-bit version value, or the source data field that represents the version value for your data.

Write requests with a higher version number update the existing measure value and version of the record; when the measure value is the same, the version is still updated. The default value is 1.

Timestream for LiveAnalytics does not support updating partial measure values in a record.

Version must be 1 or greater, or you will receive a ValidationException error.

epoch_time_unit PipeEpochTimeUnit

The granularity of the time units used. Default is MILLISECONDS .

Required if TimeFieldType is specified as EPOCH .

multi_measure_mappings Sequence[PipeMultiMeasureMapping]
Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
single_measure_mappings Sequence[PipeSingleMeasureMapping]
Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
time_field_type PipeTimeFieldType

The type of time value used.

The default is EPOCH .

timestamp_format str

How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z' .

Required if TimeFieldType is specified as TIMESTAMP_FORMAT .

dimensionMappings This property is required. List<Property Map>

Map source data to dimensions in the target Timestream for LiveAnalytics table.

For more information, see Amazon Timestream for LiveAnalytics concepts

timeValue This property is required. String
Dynamic path to the source data field that represents the time value for your data.
versionValue This property is required. String

A 64-bit version value, or the source data field that represents the version value for your data.

Write requests with a higher version number update the existing measure value and version of the record; when the measure value is the same, the version is still updated. The default value is 1.

Timestream for LiveAnalytics does not support updating partial measure values in a record.

Version must be 1 or greater, or you will receive a ValidationException error.

epochTimeUnit "MILLISECONDS" | "SECONDS" | "MICROSECONDS" | "NANOSECONDS"

The granularity of the time units used. Default is MILLISECONDS .

Required if TimeFieldType is specified as EPOCH .

multiMeasureMappings List<Property Map>
Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
singleMeasureMappings List<Property Map>
Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
timeFieldType "EPOCH" | "TIMESTAMP_FORMAT"

The type of time value used.

The default is EPOCH .

timestampFormat String

How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z' .

Required if TimeFieldType is specified as TIMESTAMP_FORMAT .
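
To tie these fields together, here is a rough TypeScript sketch of a Timestream for LiveAnalytics target. All ARNs, field paths, and names are placeholders, and the dimensionMappings and singleMeasureMappings entry shapes follow the PipeDimensionMapping and PipeSingleMeasureMapping types documented elsewhere on this page.

import * as aws_native from "@pulumi/aws-native";

// Writes one record per event; the time, dimension, and measure values
// are read from hypothetical JSON paths into the source event body.
const timestreamPipe = new aws_native.pipes.Pipe("timestreamPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:timestream:us-east-1:123456789123:database/demoDb/table/demoTable",
    targetParameters: {
        timestreamParameters: {
            timeValue: "$.body.timestamp",
            timeFieldType: "TIMESTAMP_FORMAT",
            timestampFormat: "yyyy-MM-dd'T'HH:mm:ss'Z'",
            versionValue: "1",
            dimensionMappings: [{
                dimensionName: "deviceId",
                dimensionValue: "$.body.deviceId",
                dimensionValueType: "VARCHAR",
            }],
            singleMeasureMappings: [{
                measureName: "temperature",
                measureValue: "$.body.temperature",
                measureValueType: "DOUBLE",
            }],
        },
    },
});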

PipeTimeFieldType
, PipeTimeFieldTypeArgs

Epoch
EPOCH
TimestampFormat
TIMESTAMP_FORMAT
PipeTimeFieldTypeEpoch
EPOCH
PipeTimeFieldTypeTimestampFormat
TIMESTAMP_FORMAT
Epoch
EPOCH
TimestampFormat
TIMESTAMP_FORMAT
Epoch
EPOCH
TimestampFormat
TIMESTAMP_FORMAT
EPOCH
EPOCH
TIMESTAMP_FORMAT
TIMESTAMP_FORMAT
"EPOCH"
EPOCH
"TIMESTAMP_FORMAT"
TIMESTAMP_FORMAT

Package Details

Repository
AWS Native pulumi/pulumi-aws-native
License
Apache-2.0
