oci.DataFlow.getInvokeRuns
This data source provides the list of Invoke Runs in Oracle Cloud Infrastructure Data Flow service.
Lists all runs of an application in the specified compartment. The query must include compartmentId, and only one parameter other than compartmentId may also be included. If the query does not include compartmentId, or includes compartmentId along with two or more other parameters, an error is returned.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as oci from "@pulumi/oci";
const testInvokeRuns = oci.DataFlow.getInvokeRuns({
    compartmentId: compartmentId,
    applicationId: testApplication.id,
    displayName: invokeRunDisplayName,
    displayNameStartsWith: invokeRunDisplayNameStartsWith,
    ownerPrincipalId: testOwnerPrincipal.id,
    poolId: testPool.id,
    state: invokeRunState,
    timeCreatedGreaterThan: invokeRunTimeCreatedGreaterThan,
});
import pulumi
import pulumi_oci as oci
test_invoke_runs = oci.DataFlow.get_invoke_runs(
    compartment_id=compartment_id,
    application_id=test_application["id"],
    display_name=invoke_run_display_name,
    display_name_starts_with=invoke_run_display_name_starts_with,
    owner_principal_id=test_owner_principal["id"],
    pool_id=test_pool["id"],
    state=invoke_run_state,
    time_created_greater_than=invoke_run_time_created_greater_than)
package main
import (
	"github.com/pulumi/pulumi-oci/sdk/go/oci/DataFlow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := DataFlow.GetInvokeRuns(ctx, &DataFlow.GetInvokeRunsArgs{
			CompartmentId:          compartmentId,
			ApplicationId:          pulumi.StringRef(testApplication.Id),
			DisplayName:            pulumi.StringRef(invokeRunDisplayName),
			DisplayNameStartsWith:  pulumi.StringRef(invokeRunDisplayNameStartsWith),
			OwnerPrincipalId:       pulumi.StringRef(testOwnerPrincipal.Id),
			PoolId:                 pulumi.StringRef(testPool.Id),
			State:                  pulumi.StringRef(invokeRunState),
			TimeCreatedGreaterThan: pulumi.StringRef(invokeRunTimeCreatedGreaterThan),
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Oci = Pulumi.Oci;
return await Deployment.RunAsync(() =>
{
    var testInvokeRuns = Oci.DataFlow.GetInvokeRuns.Invoke(new()
    {
        CompartmentId = compartmentId,
        ApplicationId = testApplication.Id,
        DisplayName = invokeRunDisplayName,
        DisplayNameStartsWith = invokeRunDisplayNameStartsWith,
        OwnerPrincipalId = testOwnerPrincipal.Id,
        PoolId = testPool.Id,
        State = invokeRunState,
        TimeCreatedGreaterThan = invokeRunTimeCreatedGreaterThan,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.oci.DataFlow.DataFlowFunctions;
import com.pulumi.oci.DataFlow.inputs.GetInvokeRunsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var testInvokeRuns = DataFlowFunctions.getInvokeRuns(GetInvokeRunsArgs.builder()
            .compartmentId(compartmentId)
            .applicationId(testApplication.id())
            .displayName(invokeRunDisplayName)
            .displayNameStartsWith(invokeRunDisplayNameStartsWith)
            .ownerPrincipalId(testOwnerPrincipal.id())
            .poolId(testPool.id())
            .state(invokeRunState)
            .timeCreatedGreaterThan(invokeRunTimeCreatedGreaterThan)
            .build());
    }
}
variables:
  testInvokeRuns:
    fn::invoke:
      Function: oci:DataFlow:getInvokeRuns
      Arguments:
        compartmentId: ${compartmentId}
        applicationId: ${testApplication.id}
        displayName: ${invokeRunDisplayName}
        displayNameStartsWith: ${invokeRunDisplayNameStartsWith}
        ownerPrincipalId: ${testOwnerPrincipal.id}
        poolId: ${testPool.id}
        state: ${invokeRunState}
        timeCreatedGreaterThan: ${invokeRunTimeCreatedGreaterThan}
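The generated examples above pass every supported filter for completeness; per the constraint described earlier, an actual query should include compartmentId plus at most one other parameter. A minimal compliant sketch in TypeScript (compartmentId and testApplication are placeholder values defined elsewhere in your program):

import * as oci from "@pulumi/oci";

// List the runs of a single application: compartmentId plus exactly one
// additional parameter satisfies the documented query constraint.
const runsForApplication = oci.DataFlow.getInvokeRuns({
    compartmentId: compartmentId,
    applicationId: testApplication.id,
});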
Using getInvokeRuns
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getInvokeRuns(args: GetInvokeRunsArgs, opts?: InvokeOptions): Promise<GetInvokeRunsResult>
function getInvokeRunsOutput(args: GetInvokeRunsOutputArgs, opts?: InvokeOptions): Output<GetInvokeRunsResult>
def get_invoke_runs(application_id: Optional[str] = None,
                    compartment_id: Optional[str] = None,
                    display_name: Optional[str] = None,
                    display_name_starts_with: Optional[str] = None,
                    filters: Optional[Sequence[_dataflow.GetInvokeRunsFilter]] = None,
                    owner_principal_id: Optional[str] = None,
                    pool_id: Optional[str] = None,
                    state: Optional[str] = None,
                    time_created_greater_than: Optional[str] = None,
                    opts: Optional[InvokeOptions] = None) -> GetInvokeRunsResult
def get_invoke_runs_output(application_id: Optional[pulumi.Input[str]] = None,
                           compartment_id: Optional[pulumi.Input[str]] = None,
                           display_name: Optional[pulumi.Input[str]] = None,
                           display_name_starts_with: Optional[pulumi.Input[str]] = None,
                           filters: Optional[pulumi.Input[Sequence[pulumi.Input[_dataflow.GetInvokeRunsFilterArgs]]]] = None,
                           owner_principal_id: Optional[pulumi.Input[str]] = None,
                           pool_id: Optional[pulumi.Input[str]] = None,
                           state: Optional[pulumi.Input[str]] = None,
                           time_created_greater_than: Optional[pulumi.Input[str]] = None,
                           opts: Optional[InvokeOptions] = None) -> Output[GetInvokeRunsResult]
func GetInvokeRuns(ctx *Context, args *GetInvokeRunsArgs, opts ...InvokeOption) (*GetInvokeRunsResult, error)
func GetInvokeRunsOutput(ctx *Context, args *GetInvokeRunsOutputArgs, opts ...InvokeOption) GetInvokeRunsResultOutput
> Note: This function is named GetInvokeRuns in the Go SDK.
public static class GetInvokeRuns
{
public static Task<GetInvokeRunsResult> InvokeAsync(GetInvokeRunsArgs args, InvokeOptions? opts = null)
public static Output<GetInvokeRunsResult> Invoke(GetInvokeRunsInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetInvokeRunsResult> getInvokeRuns(GetInvokeRunsArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
fn::invoke:
  function: oci:DataFlow/getInvokeRuns:getInvokeRuns
  arguments:
    # arguments dictionary
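For instance, a minimal TypeScript sketch of the two forms (compartmentId is a placeholder configuration value):

import * as oci from "@pulumi/oci";

// Direct form: plain arguments, Promise-wrapped result.
oci.DataFlow.getInvokeRuns({ compartmentId: compartmentId })
    .then(result => console.log(`found ${result.runs.length} runs`));

// Output form: accepts Input-wrapped arguments (for example, outputs of
// other resources) and returns an Output-wrapped result.
const runsOutput = oci.DataFlow.getInvokeRunsOutput({
    compartmentId: compartmentId,
});
export const runCount = runsOutput.apply(r => r.runs.length);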
The following arguments are supported:
Property names below are shown in the camelCase form used by the TypeScript SDK; the C# and Go SDKs use the equivalent PascalCase names (for example CompartmentId), the Python SDK uses snake_case (compartment_id), and types map to each language's conventions (string/String/str; lists as [], List<>, or Sequence[]).

- compartmentId string - The OCID of the compartment.
- applicationId string - The ID of the application.
- displayName string - The query parameter for the Spark application name.
- displayNameStartsWith string - The displayName prefix.
- filters GetInvokeRunsFilter[]
- ownerPrincipalId string - The OCID of the user who created the resource.
- poolId string - The ID of the pool.
- state string - The LifecycleState of the run.
- timeCreatedGreaterThan string - The epoch time that the resource was created.
getInvokeRuns Result
The following output properties are available:
- compartmentId string - The OCID of a compartment.
- id string - The provider-assigned unique ID for this managed resource.
- runs GetInvokeRunsRun[] - The list of runs.
- applicationId string - The application ID.
- displayName string - A user-friendly name. This name is not necessarily unique.
- displayNameStartsWith string
- filters GetInvokeRunsFilter[]
- ownerPrincipalId string - The OCID of the user who created the resource.
- poolId string - The OCID of a pool. A unique ID identifying a Data Flow pool resource.
- state string - The current state of this run.
- timeCreatedGreaterThan string
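The returned runs list can be inspected like any other result. A brief TypeScript sketch that exports the state of each matching run (reusing the testInvokeRuns query from the example at the top of this page):

export const runStates = testInvokeRuns.then(result =>
    result.runs.map(run => ({ id: run.id, state: run.state })));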
Supporting Types
GetInvokeRunsFilter
GetInvokeRunsRun
As above, names are shown in the camelCase form used by the TypeScript SDK; the other SDKs follow their own naming and type conventions.

- applicationId string - The ID of the application.
- applicationLogConfigs GetInvokeRunsRunApplicationLogConfig[] - Logging details of Application logs for the Data Flow Run.
- archiveUri string - A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments string[] - The arguments passed to the running application as command line arguments. An argument is either plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map, else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as ${name}, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe".
- asynchronous boolean
- className string - The class for the application.
- compartmentId string - The OCID of the compartment.
- configuration {[key: string]: any} - The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" }. Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- dataReadInBytes string - The data read by the run in bytes.
- dataWrittenInBytes string - The data written by the run in bytes.
- definedTags {[key: string]: any} - Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- displayName string - The query parameter for the Spark application name.
- driverShape string - The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfigs GetInvokeRunsRunDriverShapeConfig[] - This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute string - The input used for the spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and the main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10. Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, or parameters during application create/update or run create/submit, the Data Flow service will use information derived from the execute input only.
- executorShape string - The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfigs GetInvokeRunsRunExecutorShapeConfig[] - This is used to configure the shape of the driver or executor if a flexible shape is used.
- fileUri string - An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- freeformTags {[key: string]: any} - Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- id string - The ID of a run.
- idleTimeoutInMinutes string - The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time. Note: This parameter is currently only applicable for Runs of type SESSION. The default value is 2880 minutes (2 days).
- language string - The Spark language.
- lifecycleDetails string - The detailed messages about the lifecycle state.
- logsBucketUri string - An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes string - The maximum duration in minutes for which an Application should run. A Data Flow Run would be terminated once it reaches this duration from the time it transitions to the IN_PROGRESS state.
- metastoreId string - The OCID of the Oracle Cloud Infrastructure Hive Metastore.
- numExecutors number - The number of executor VMs requested.
- opcParentRptUrl string
- opcRequestId string - Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- ownerPrincipalId string - The OCID of the user who created the resource.
- ownerUserName string - The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters GetInvokeRunsRunParameter[] - An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10" }, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}" } ]
- poolId string - The ID of the pool.
- privateEndpointDnsZones string[] - An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- privateEndpointId string - The OCID of a private endpoint.
- privateEndpointMaxHostCount number - The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- privateEndpointNsgIds string[] - An array of network security group OCIDs.
- privateEndpointSubnetId string - The OCID of a subnet.
- runDurationInMilliseconds string - The duration of the run in milliseconds.
- sparkVersion string - The Spark version utilized to run the application.
- state string - The LifecycleState of the run.
- timeCreated string - The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated string - The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- totalOcpu number - The total number of OCPUs requested by the run.
- type string - The Spark application processing type.
- warehouseBucketUri string - An Oracle Cloud Infrastructure URI of the bucket to be used as the default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
GetInvokeRunsRunApplicationLogConfig
- logGroupId string - The log group id for where log objects will be for Data Flow Runs.
- logId string - The log id of the log object the Application Logs of Data Flow Run will be shipped to.
GetInvokeRunsRunDriverShapeConfig
- memoryInGbs number - The amount of memory used for the driver or executors.
- ocpus number - The total number of OCPUs used for the driver or executors. See here for details.
GetInvokeRunsRunExecutorShapeConfig
- memoryInGbs number - The amount of memory used for the driver or executors.
- ocpus number - The total number of OCPUs used for the driver or executors. See here for details.
GetInvokeRunsRunParameter
Package Details
- Repository: oci pulumi/pulumi-oci
- License: Apache-2.0
- Notes: This Pulumi package is based on the oci Terraform Provider.