Try AWS Native preview for resources not in the classic version.
aws.sagemaker.Model
Provides a SageMaker model resource.
Example Usage
Basic usage:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const assumeRole = aws.iam.getPolicyDocument({
statements: [{
actions: ["sts:AssumeRole"],
principals: [{
type: "Service",
identifiers: ["sagemaker.amazonaws.com"],
}],
}],
});
const exampleRole = new aws.iam.Role("example", {assumeRolePolicy: assumeRole.then(assumeRole => assumeRole.json)});
const test = aws.sagemaker.getPrebuiltEcrImage({
repositoryName: "kmeans",
});
const example = new aws.sagemaker.Model("example", {
name: "my-model",
executionRoleArn: exampleRole.arn,
primaryContainer: {
image: test.then(test => test.registryPath),
},
});
import pulumi
import pulumi_aws as aws
assume_role = aws.iam.get_policy_document(statements=[{
"actions": ["sts:AssumeRole"],
"principals": [{
"type": "Service",
"identifiers": ["sagemaker.amazonaws.com"],
}],
}])
example_role = aws.iam.Role("example", assume_role_policy=assume_role.json)
test = aws.sagemaker.get_prebuilt_ecr_image(repository_name="kmeans")
example = aws.sagemaker.Model("example",
name="my-model",
execution_role_arn=example_role.arn,
primary_container={
"image": test.registry_path,
})
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/sagemaker"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
assumeRole, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
Statements: []iam.GetPolicyDocumentStatement{
{
Actions: []string{
"sts:AssumeRole",
},
Principals: []iam.GetPolicyDocumentStatementPrincipal{
{
Type: "Service",
Identifiers: []string{
"sagemaker.amazonaws.com",
},
},
},
},
},
}, nil)
if err != nil {
return err
}
exampleRole, err := iam.NewRole(ctx, "example", &iam.RoleArgs{
AssumeRolePolicy: pulumi.String(assumeRole.Json),
})
if err != nil {
return err
}
test, err := sagemaker.GetPrebuiltEcrImage(ctx, &sagemaker.GetPrebuiltEcrImageArgs{
RepositoryName: "kmeans",
}, nil)
if err != nil {
return err
}
_, err = sagemaker.NewModel(ctx, "example", &sagemaker.ModelArgs{
Name: pulumi.String("my-model"),
ExecutionRoleArn: exampleRole.Arn,
PrimaryContainer: &sagemaker.ModelPrimaryContainerArgs{
Image: pulumi.String(test.RegistryPath),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var assumeRole = Aws.Iam.GetPolicyDocument.Invoke(new()
{
Statements = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
{
Actions = new[]
{
"sts:AssumeRole",
},
Principals = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
{
Type = "Service",
Identifiers = new[]
{
"sagemaker.amazonaws.com",
},
},
},
},
},
});
var exampleRole = new Aws.Iam.Role("example", new()
{
AssumeRolePolicy = assumeRole.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
});
var test = Aws.Sagemaker.GetPrebuiltEcrImage.Invoke(new()
{
RepositoryName = "kmeans",
});
var example = new Aws.Sagemaker.Model("example", new()
{
Name = "my-model",
ExecutionRoleArn = exampleRole.Arn,
PrimaryContainer = new Aws.Sagemaker.Inputs.ModelPrimaryContainerArgs
{
Image = test.Apply(getPrebuiltEcrImageResult => getPrebuiltEcrImageResult.RegistryPath),
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.iam.IamFunctions;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementPrincipalArgs;
import com.pulumi.aws.iam.Role;
import com.pulumi.aws.iam.RoleArgs;
import com.pulumi.aws.sagemaker.SagemakerFunctions;
import com.pulumi.aws.sagemaker.inputs.GetPrebuiltEcrImageArgs;
import com.pulumi.aws.sagemaker.Model;
import com.pulumi.aws.sagemaker.ModelArgs;
import com.pulumi.aws.sagemaker.inputs.ModelPrimaryContainerArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var assumeRole = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
.statements(GetPolicyDocumentStatementArgs.builder()
.actions("sts:AssumeRole")
.principals(GetPolicyDocumentStatementPrincipalArgs.builder()
.type("Service")
.identifiers("sagemaker.amazonaws.com")
.build())
.build())
.build());
var exampleRole = new Role("exampleRole", RoleArgs.builder()
.assumeRolePolicy(assumeRole.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
.build());
final var test = SagemakerFunctions.getPrebuiltEcrImage(GetPrebuiltEcrImageArgs.builder()
.repositoryName("kmeans")
.build());
var example = new Model("example", ModelArgs.builder()
.name("my-model")
.executionRoleArn(exampleRole.arn())
.primaryContainer(ModelPrimaryContainerArgs.builder()
.image(test.applyValue(getPrebuiltEcrImageResult -> getPrebuiltEcrImageResult.registryPath()))
.build())
.build());
}
}
resources:
example:
type: aws:sagemaker:Model
properties:
name: my-model
executionRoleArn: ${exampleRole.arn}
primaryContainer:
image: ${test.registryPath}
exampleRole:
type: aws:iam:Role
name: example
properties:
assumeRolePolicy: ${assumeRole.json}
variables:
assumeRole:
fn::invoke:
Function: aws:iam:getPolicyDocument
Arguments:
statements:
- actions:
- sts:AssumeRole
principals:
- type: Service
identifiers:
- sagemaker.amazonaws.com
test:
fn::invoke:
Function: aws:sagemaker:getPrebuiltEcrImage
Arguments:
repositoryName: kmeans
Inference Execution Config
- mode
- (Required) How containers in a multi-container endpoint are run. The following values are valid: Serial and Direct.
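For illustration, here is a minimal TypeScript sketch of a multi-container model that sets this mode; the role ARN and image URIs are hypothetical placeholders, not values from this page.
import * as aws from "@pulumi/aws";

// Hypothetical execution role and ECR images, shown only to illustrate inferenceExecutionConfig.
const pipeline = new aws.sagemaker.Model("pipeline-example", {
    executionRoleArn: "arn:aws:iam::123456789012:role/sagemaker-execution-role",
    containers: [
        { image: "123456789012.dkr.ecr.us-east-1.amazonaws.com/preprocess:latest" },
        { image: "123456789012.dkr.ecr.us-east-1.amazonaws.com/predict:latest" },
    ],
    inferenceExecutionConfig: {
        mode: "Serial", // containers run one after another; "Direct" lets each container be invoked individually
    },
});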
Create Model Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Model(name: string, args: ModelArgs, opts?: CustomResourceOptions);
@overload
def Model(resource_name: str,
args: ModelArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Model(resource_name: str,
opts: Optional[ResourceOptions] = None,
execution_role_arn: Optional[str] = None,
containers: Optional[Sequence[ModelContainerArgs]] = None,
enable_network_isolation: Optional[bool] = None,
inference_execution_config: Optional[ModelInferenceExecutionConfigArgs] = None,
name: Optional[str] = None,
primary_container: Optional[ModelPrimaryContainerArgs] = None,
tags: Optional[Mapping[str, str]] = None,
vpc_config: Optional[ModelVpcConfigArgs] = None)
func NewModel(ctx *Context, name string, args ModelArgs, opts ...ResourceOption) (*Model, error)
public Model(string name, ModelArgs args, CustomResourceOptions? opts = null)
type: aws:sagemaker:Model
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args ModelArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args ModelArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args ModelArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args ModelArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args ModelArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var examplemodelResourceResourceFromSagemakermodel = new Aws.Sagemaker.Model("examplemodelResourceResourceFromSagemakermodel", new()
{
ExecutionRoleArn = "string",
Containers = new[]
{
new Aws.Sagemaker.Inputs.ModelContainerArgs
{
ContainerHostname = "string",
Environment =
{
{ "string", "string" },
},
Image = "string",
ImageConfig = new Aws.Sagemaker.Inputs.ModelContainerImageConfigArgs
{
RepositoryAccessMode = "string",
RepositoryAuthConfig = new Aws.Sagemaker.Inputs.ModelContainerImageConfigRepositoryAuthConfigArgs
{
RepositoryCredentialsProviderArn = "string",
},
},
Mode = "string",
ModelDataSource = new Aws.Sagemaker.Inputs.ModelContainerModelDataSourceArgs
{
S3DataSources = new[]
{
new Aws.Sagemaker.Inputs.ModelContainerModelDataSourceS3DataSourceArgs
{
CompressionType = "string",
S3DataType = "string",
S3Uri = "string",
},
},
},
ModelDataUrl = "string",
ModelPackageName = "string",
},
},
EnableNetworkIsolation = false,
InferenceExecutionConfig = new Aws.Sagemaker.Inputs.ModelInferenceExecutionConfigArgs
{
Mode = "string",
},
Name = "string",
PrimaryContainer = new Aws.Sagemaker.Inputs.ModelPrimaryContainerArgs
{
ContainerHostname = "string",
Environment =
{
{ "string", "string" },
},
Image = "string",
ImageConfig = new Aws.Sagemaker.Inputs.ModelPrimaryContainerImageConfigArgs
{
RepositoryAccessMode = "string",
RepositoryAuthConfig = new Aws.Sagemaker.Inputs.ModelPrimaryContainerImageConfigRepositoryAuthConfigArgs
{
RepositoryCredentialsProviderArn = "string",
},
},
Mode = "string",
ModelDataSource = new Aws.Sagemaker.Inputs.ModelPrimaryContainerModelDataSourceArgs
{
S3DataSources = new[]
{
new Aws.Sagemaker.Inputs.ModelPrimaryContainerModelDataSourceS3DataSourceArgs
{
CompressionType = "string",
S3DataType = "string",
S3Uri = "string",
},
},
},
ModelDataUrl = "string",
ModelPackageName = "string",
},
Tags =
{
{ "string", "string" },
},
VpcConfig = new Aws.Sagemaker.Inputs.ModelVpcConfigArgs
{
SecurityGroupIds = new[]
{
"string",
},
Subnets = new[]
{
"string",
},
},
});
example, err := sagemaker.NewModel(ctx, "examplemodelResourceResourceFromSagemakermodel", &sagemaker.ModelArgs{
ExecutionRoleArn: pulumi.String("string"),
Containers: sagemaker.ModelContainerArray{
&sagemaker.ModelContainerArgs{
ContainerHostname: pulumi.String("string"),
Environment: pulumi.StringMap{
"string": pulumi.String("string"),
},
Image: pulumi.String("string"),
ImageConfig: &sagemaker.ModelContainerImageConfigArgs{
RepositoryAccessMode: pulumi.String("string"),
RepositoryAuthConfig: &sagemaker.ModelContainerImageConfigRepositoryAuthConfigArgs{
RepositoryCredentialsProviderArn: pulumi.String("string"),
},
},
Mode: pulumi.String("string"),
ModelDataSource: &sagemaker.ModelContainerModelDataSourceArgs{
S3DataSources: sagemaker.ModelContainerModelDataSourceS3DataSourceArray{
&sagemaker.ModelContainerModelDataSourceS3DataSourceArgs{
CompressionType: pulumi.String("string"),
S3DataType: pulumi.String("string"),
S3Uri: pulumi.String("string"),
},
},
},
ModelDataUrl: pulumi.String("string"),
ModelPackageName: pulumi.String("string"),
},
},
EnableNetworkIsolation: pulumi.Bool(false),
InferenceExecutionConfig: &sagemaker.ModelInferenceExecutionConfigArgs{
Mode: pulumi.String("string"),
},
Name: pulumi.String("string"),
PrimaryContainer: &sagemaker.ModelPrimaryContainerArgs{
ContainerHostname: pulumi.String("string"),
Environment: pulumi.StringMap{
"string": pulumi.String("string"),
},
Image: pulumi.String("string"),
ImageConfig: &sagemaker.ModelPrimaryContainerImageConfigArgs{
RepositoryAccessMode: pulumi.String("string"),
RepositoryAuthConfig: &sagemaker.ModelPrimaryContainerImageConfigRepositoryAuthConfigArgs{
RepositoryCredentialsProviderArn: pulumi.String("string"),
},
},
Mode: pulumi.String("string"),
ModelDataSource: &sagemaker.ModelPrimaryContainerModelDataSourceArgs{
S3DataSources: sagemaker.ModelPrimaryContainerModelDataSourceS3DataSourceArray{
&sagemaker.ModelPrimaryContainerModelDataSourceS3DataSourceArgs{
CompressionType: pulumi.String("string"),
S3DataType: pulumi.String("string"),
S3Uri: pulumi.String("string"),
},
},
},
ModelDataUrl: pulumi.String("string"),
ModelPackageName: pulumi.String("string"),
},
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
VpcConfig: &sagemaker.ModelVpcConfigArgs{
SecurityGroupIds: pulumi.StringArray{
pulumi.String("string"),
},
Subnets: pulumi.StringArray{
pulumi.String("string"),
},
},
})
var examplemodelResourceResourceFromSagemakermodel = new Model("examplemodelResourceResourceFromSagemakermodel", ModelArgs.builder()
.executionRoleArn("string")
.containers(ModelContainerArgs.builder()
.containerHostname("string")
.environment(Map.of("string", "string"))
.image("string")
.imageConfig(ModelContainerImageConfigArgs.builder()
.repositoryAccessMode("string")
.repositoryAuthConfig(ModelContainerImageConfigRepositoryAuthConfigArgs.builder()
.repositoryCredentialsProviderArn("string")
.build())
.build())
.mode("string")
.modelDataSource(ModelContainerModelDataSourceArgs.builder()
.s3DataSources(ModelContainerModelDataSourceS3DataSourceArgs.builder()
.compressionType("string")
.s3DataType("string")
.s3Uri("string")
.build())
.build())
.modelDataUrl("string")
.modelPackageName("string")
.build())
.enableNetworkIsolation(false)
.inferenceExecutionConfig(ModelInferenceExecutionConfigArgs.builder()
.mode("string")
.build())
.name("string")
.primaryContainer(ModelPrimaryContainerArgs.builder()
.containerHostname("string")
.environment(Map.of("string", "string"))
.image("string")
.imageConfig(ModelPrimaryContainerImageConfigArgs.builder()
.repositoryAccessMode("string")
.repositoryAuthConfig(ModelPrimaryContainerImageConfigRepositoryAuthConfigArgs.builder()
.repositoryCredentialsProviderArn("string")
.build())
.build())
.mode("string")
.modelDataSource(ModelPrimaryContainerModelDataSourceArgs.builder()
.s3DataSources(ModelPrimaryContainerModelDataSourceS3DataSourceArgs.builder()
.compressionType("string")
.s3DataType("string")
.s3Uri("string")
.build())
.build())
.modelDataUrl("string")
.modelPackageName("string")
.build())
.tags(Map.of("string", "string"))
.vpcConfig(ModelVpcConfigArgs.builder()
.securityGroupIds("string")
.subnets("string")
.build())
.build());
examplemodel_resource_resource_from_sagemakermodel = aws.sagemaker.Model("examplemodelResourceResourceFromSagemakermodel",
execution_role_arn="string",
containers=[{
"containerHostname": "string",
"environment": {
"string": "string",
},
"image": "string",
"imageConfig": {
"repositoryAccessMode": "string",
"repositoryAuthConfig": {
"repositoryCredentialsProviderArn": "string",
},
},
"mode": "string",
"modelDataSource": {
"s3DataSources": [{
"compressionType": "string",
"s3DataType": "string",
"s3Uri": "string",
}],
},
"modelDataUrl": "string",
"modelPackageName": "string",
}],
enable_network_isolation=False,
inference_execution_config={
"mode": "string",
},
name="string",
primary_container={
"containerHostname": "string",
"environment": {
"string": "string",
},
"image": "string",
"imageConfig": {
"repositoryAccessMode": "string",
"repositoryAuthConfig": {
"repositoryCredentialsProviderArn": "string",
},
},
"mode": "string",
"modelDataSource": {
"s3DataSources": [{
"compressionType": "string",
"s3DataType": "string",
"s3Uri": "string",
}],
},
"modelDataUrl": "string",
"modelPackageName": "string",
},
tags={
"string": "string",
},
vpc_config={
"securityGroupIds": ["string"],
"subnets": ["string"],
})
const examplemodelResourceResourceFromSagemakermodel = new aws.sagemaker.Model("examplemodelResourceResourceFromSagemakermodel", {
executionRoleArn: "string",
containers: [{
containerHostname: "string",
environment: {
string: "string",
},
image: "string",
imageConfig: {
repositoryAccessMode: "string",
repositoryAuthConfig: {
repositoryCredentialsProviderArn: "string",
},
},
mode: "string",
modelDataSource: {
s3DataSources: [{
compressionType: "string",
s3DataType: "string",
s3Uri: "string",
}],
},
modelDataUrl: "string",
modelPackageName: "string",
}],
enableNetworkIsolation: false,
inferenceExecutionConfig: {
mode: "string",
},
name: "string",
primaryContainer: {
containerHostname: "string",
environment: {
string: "string",
},
image: "string",
imageConfig: {
repositoryAccessMode: "string",
repositoryAuthConfig: {
repositoryCredentialsProviderArn: "string",
},
},
mode: "string",
modelDataSource: {
s3DataSources: [{
compressionType: "string",
s3DataType: "string",
s3Uri: "string",
}],
},
modelDataUrl: "string",
modelPackageName: "string",
},
tags: {
string: "string",
},
vpcConfig: {
securityGroupIds: ["string"],
subnets: ["string"],
},
});
type: aws:sagemaker:Model
properties:
containers:
- containerHostname: string
environment:
string: string
image: string
imageConfig:
repositoryAccessMode: string
repositoryAuthConfig:
repositoryCredentialsProviderArn: string
mode: string
modelDataSource:
s3DataSources:
- compressionType: string
s3DataType: string
s3Uri: string
modelDataUrl: string
modelPackageName: string
enableNetworkIsolation: false
executionRoleArn: string
inferenceExecutionConfig:
mode: string
name: string
primaryContainer:
containerHostname: string
environment:
string: string
image: string
imageConfig:
repositoryAccessMode: string
repositoryAuthConfig:
repositoryCredentialsProviderArn: string
mode: string
modelDataSource:
s3DataSources:
- compressionType: string
s3DataType: string
s3Uri: string
modelDataUrl: string
modelPackageName: string
tags:
string: string
vpcConfig:
securityGroupIds:
- string
subnets:
- string
Model Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Model resource accepts the following input properties:
- executionRoleArn string
- A role that SageMaker can assume to access model artifacts and docker images for deployment.
- containers List<ModelContainer>
- Specifies containers in the inference pipeline. If not specified, the primaryContainer argument is required. Fields are documented below.
- enableNetworkIsolation bool
- Isolates the model container. No inbound or outbound network calls can be made to or from the model container.
- inferenceExecutionConfig ModelInferenceExecutionConfig
- Specifies details of how containers in a multi-container endpoint are called. See Inference Execution Config.
- name string
- The name of the model (must be unique). If omitted, this provider will assign a random, unique name.
- primaryContainer ModelPrimaryContainer
- The primary docker image containing inference code that is used when the model is deployed for predictions. If not specified, the containers argument is required. Fields are documented below.
- tags Map<string, string>
- A map of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- vpcConfig ModelVpcConfig
- Specifies the VPC that you want your model to connect to. VpcConfig is used in hosting services and in batch transform.
Outputs
All input properties are implicitly available as output properties. Additionally, the Model resource produces the following output properties:
- arn string
- The Amazon Resource Name (ARN) assigned by AWS to this model.
- id string
- The provider-assigned unique ID for this managed resource.
- tagsAll Map<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
Look up Existing Model Resource
Get an existing Model resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: ModelState, opts?: CustomResourceOptions): Model
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
arn: Optional[str] = None,
containers: Optional[Sequence[ModelContainerArgs]] = None,
enable_network_isolation: Optional[bool] = None,
execution_role_arn: Optional[str] = None,
inference_execution_config: Optional[ModelInferenceExecutionConfigArgs] = None,
name: Optional[str] = None,
primary_container: Optional[ModelPrimaryContainerArgs] = None,
tags: Optional[Mapping[str, str]] = None,
tags_all: Optional[Mapping[str, str]] = None,
vpc_config: Optional[ModelVpcConfigArgs] = None) -> Model
func GetModel(ctx *Context, name string, id IDInput, state *ModelState, opts ...ResourceOption) (*Model, error)
public static Model Get(string name, Input<string> id, ModelState? state, CustomResourceOptions? opts = null)
public static Model get(String name, Output<String> id, ModelState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
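As a usage sketch, an existing model can be looked up by its logical name and ID in TypeScript; the ID below is a placeholder (for this resource the ID is the model name, as shown in the Import section).
import * as aws from "@pulumi/aws";

// Look up an existing SageMaker model by logical name and provider ID (placeholder value).
const existing = aws.sagemaker.Model.get("existing-model", "model-foo");

// The looked-up resource exposes the same outputs as a created one.
export const existingModelArn = existing.arn;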
The following state arguments are supported:
- arn string
- The Amazon Resource Name (ARN) assigned by AWS to this model.
- containers List<ModelContainer>
- Specifies containers in the inference pipeline. If not specified, the primaryContainer argument is required. Fields are documented below.
- enableNetworkIsolation bool
- Isolates the model container. No inbound or outbound network calls can be made to or from the model container.
- executionRoleArn string
- A role that SageMaker can assume to access model artifacts and docker images for deployment.
- inferenceExecutionConfig ModelInferenceExecutionConfig
- Specifies details of how containers in a multi-container endpoint are called. See Inference Execution Config.
- name string
- The name of the model (must be unique). If omitted, this provider will assign a random, unique name.
- primaryContainer ModelPrimaryContainer
- The primary docker image containing inference code that is used when the model is deployed for predictions. If not specified, the containers argument is required. Fields are documented below.
- tags Map<string, string>
- A map of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tagsAll Map<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- vpcConfig ModelVpcConfig
- Specifies the VPC that you want your model to connect to. VpcConfig is used in hosting services and in batch transform.
Supporting Types
ModelContainer, ModelContainerArgs
- containerHostname string
- The DNS host name for the container.
- environment Map<string, string>
- Environment variables for the Docker container. A list of key value pairs.
- image string
- The registry path where the inference code image is stored in Amazon ECR.
- imageConfig ModelContainerImageConfig
- Specifies whether the model container is in Amazon ECR or a private Docker registry accessible from your Amazon Virtual Private Cloud (VPC). For more information see Using a Private Docker Registry for Real-Time Inference Containers. See Image Config.
- mode string
- Whether the container hosts a single model (SingleModel) or multiple models (MultiModel). The default value is SingleModel.
- modelDataSource ModelContainerModelDataSource
- The location of model data to deploy. Use this for uncompressed model deployment. For information about how to deploy an uncompressed model, see Deploying uncompressed models in the AWS SageMaker Developer Guide.
- modelDataUrl string
- The URL for the S3 location where model artifacts are stored.
- modelPackageName string
- The Amazon Resource Name (ARN) of the model package to use to create the model.
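As a hedged sketch of the MultiModel mode in TypeScript, assuming a multi-model-capable serving image and an S3 prefix holding several model artifacts (all names below are placeholders):
import * as aws from "@pulumi/aws";

const multiModel = new aws.sagemaker.Model("multi-model-example", {
    executionRoleArn: "arn:aws:iam::123456789012:role/sagemaker-execution-role", // placeholder
    primaryContainer: {
        image: "123456789012.dkr.ecr.us-east-1.amazonaws.com/multi-model-server:latest", // placeholder
        mode: "MultiModel",
        // With MultiModel, modelDataUrl typically points at an S3 prefix containing many artifacts.
        modelDataUrl: "s3://example-bucket/multi-model-artifacts/",
    },
});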
ModelContainerImageConfig, ModelContainerImageConfigArgs
- repositoryAccessMode string
- Specifies whether the model container is in Amazon ECR or a private Docker registry accessible from your Amazon Virtual Private Cloud (VPC). Allowed values are: Platform and Vpc.
- repositoryAuthConfig ModelContainerImageConfigRepositoryAuthConfig
- Specifies an authentication configuration for the private docker registry where your model image is hosted. Specify a value for this property only if you specified Vpc as the value for the RepositoryAccessMode field, and the private Docker registry where the model image is hosted requires authentication. See Repository Auth Config.
ModelContainerImageConfigRepositoryAuthConfig, ModelContainerImageConfigRepositoryAuthConfigArgs
- repositoryCredentialsProviderArn string
- The Amazon Resource Name (ARN) of an AWS Lambda function that provides credentials to authenticate to the private Docker registry where your model image is hosted. For information about how to create an AWS Lambda function, see Create a Lambda function with the console in the AWS Lambda Developer Guide.
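Combining the two blocks above, here is a minimal TypeScript sketch of a container pulled from a private registry reachable through the model's VPC; the registry URL and ARNs are placeholders.
import * as aws from "@pulumi/aws";

const privateRegistryModel = new aws.sagemaker.Model("private-registry-example", {
    executionRoleArn: "arn:aws:iam::123456789012:role/sagemaker-execution-role", // placeholder
    containers: [{
        image: "registry.internal.example.com/inference:latest", // placeholder private image
        imageConfig: {
            repositoryAccessMode: "Vpc",
            repositoryAuthConfig: {
                // Placeholder Lambda function that returns credentials for the private registry.
                repositoryCredentialsProviderArn: "arn:aws:lambda:us-east-1:123456789012:function:registry-creds",
            },
        },
    }],
});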
ModelContainerModelDataSource, ModelContainerModelDataSourceArgs
- s3DataSources List<ModelContainerModelDataSourceS3DataSource>
- The S3 location of model data to deploy.
ModelContainerModelDataSourceS3DataSource, ModelContainerModelDataSourceS3DataSourceArgs
- compressionType string
- How the model data is prepared. Allowed values are: None and Gzip.
- s3DataType string
- The type of model data to deploy. Allowed values are: S3Object and S3Prefix.
- s3Uri string
- The S3 path of model data to deploy.
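For example, a minimal TypeScript sketch of deploying uncompressed artifacts from an S3 prefix via modelDataSource; the bucket, image, and role are placeholders, and the same fields exist on both containers entries and primaryContainer.
import * as aws from "@pulumi/aws";

const uncompressed = new aws.sagemaker.Model("uncompressed-example", {
    executionRoleArn: "arn:aws:iam::123456789012:role/sagemaker-execution-role", // placeholder
    primaryContainer: {
        image: "123456789012.dkr.ecr.us-east-1.amazonaws.com/inference:latest", // placeholder
        modelDataSource: {
            s3DataSources: [{
                s3Uri: "s3://example-bucket/models/my-model/", // placeholder prefix
                s3DataType: "S3Prefix",
                compressionType: "None",
            }],
        },
    },
});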
ModelInferenceExecutionConfig, ModelInferenceExecutionConfigArgs
- mode string
- (Required) How containers in a multi-container endpoint are run. The following values are valid: Serial and Direct.
ModelPrimaryContainer, ModelPrimaryContainerArgs
These fields mirror the ModelContainer fields documented above.
- containerHostname string
- environment Map<string, string>
- image string
- imageConfig ModelPrimaryContainerImageConfig
- mode string
- modelDataSource ModelPrimaryContainerModelDataSource
- modelDataUrl string
- modelPackageName string
ModelPrimaryContainerImageConfig, ModelPrimaryContainerImageConfigArgs
- repositoryAccessMode string
- Specifies whether the model container is in Amazon ECR or a private Docker registry accessible from your Amazon Virtual Private Cloud (VPC). Allowed values are: Platform and Vpc.
- repositoryAuthConfig ModelPrimaryContainerImageConfigRepositoryAuthConfig
- Specifies an authentication configuration for the private docker registry where your model image is hosted. Specify a value for this property only if you specified Vpc as the value for the RepositoryAccessMode field, and the private Docker registry where the model image is hosted requires authentication. See Repository Auth Config.
ModelPrimaryContainerImageConfigRepositoryAuthConfig, ModelPrimaryContainerImageConfigRepositoryAuthConfigArgs
- repositoryCredentialsProviderArn string
- The Amazon Resource Name (ARN) of an AWS Lambda function that provides credentials to authenticate to the private Docker registry where your model image is hosted. For information about how to create an AWS Lambda function, see Create a Lambda function with the console in the AWS Lambda Developer Guide.
ModelPrimaryContainerModelDataSource, ModelPrimaryContainerModelDataSourceArgs
- s3DataSources List<ModelPrimaryContainerModelDataSourceS3DataSource>
- The S3 location of model data to deploy.
ModelPrimaryContainerModelDataSourceS3DataSource, ModelPrimaryContainerModelDataSourceS3DataSourceArgs
- compressionType string
- How the model data is prepared. Allowed values are: None and Gzip.
- s3DataType string
- The type of model data to deploy. Allowed values are: S3Object and S3Prefix.
- s3Uri string
- The S3 path of model data to deploy.
ModelVpcConfig, ModelVpcConfigArgs
- securityGroupIds List<string>
- subnets List<string>
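A minimal TypeScript sketch of a network-isolated model attached to a VPC; the subnet and security group IDs, role, and image are placeholders.
import * as aws from "@pulumi/aws";

const isolated = new aws.sagemaker.Model("isolated-example", {
    executionRoleArn: "arn:aws:iam::123456789012:role/sagemaker-execution-role", // placeholder
    enableNetworkIsolation: true,
    primaryContainer: {
        image: "123456789012.dkr.ecr.us-east-1.amazonaws.com/inference:latest", // placeholder
    },
    vpcConfig: {
        subnets: ["subnet-0123456789abcdef0"],       // placeholder subnet IDs
        securityGroupIds: ["sg-0123456789abcdef0"],  // placeholder security group IDs
    },
});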
Import
Using pulumi import, import models using the name. For example:
$ pulumi import aws:sagemaker/model:Model test_model model-foo
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.