1. Packages
  2. Opentelekomcloud Provider
  3. API Docs
  4. MrsJobV1
opentelekomcloud 1.36.37 published on Thursday, Apr 24, 2025 by opentelekomcloud

opentelekomcloud.MrsJobV1

Explore with Pulumi AI

An up-to-date reference of the API arguments for MRS jobs is available at the documentation portal.

Manages a job resource within OpenTelekomCloud MRS.

Example Usage

import * as pulumi from "@pulumi/pulumi";
import * as opentelekomcloud from "@pulumi/opentelekomcloud";

const job1 = new opentelekomcloud.MrsJobV1("job1", {
    arguments: "wordcount",
    clusterId: "ef43d2ff-1ecf-4f13-bd0c-0004c429a058",
    input: "s3a://wordcount/input/",
    jarPath: "s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar",
    jobLog: "s3a://wordcount/log/",
    jobName: "test_mapreduce_job1",
    jobType: 1,
    output: "s3a://wordcount/output/",
});
Copy
import pulumi
import pulumi_opentelekomcloud as opentelekomcloud

job1 = opentelekomcloud.MrsJobV1("job1",
    arguments="wordcount",
    cluster_id="ef43d2ff-1ecf-4f13-bd0c-0004c429a058",
    input="s3a://wordcount/input/",
    jar_path="s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar",
    job_log="s3a://wordcount/log/",
    job_name="test_mapreduce_job1",
    job_type=1,
    output="s3a://wordcount/output/")
Copy
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/opentelekomcloud/opentelekomcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := opentelekomcloud.NewMrsJobV1(ctx, "job1", &opentelekomcloud.MrsJobV1Args{
			Arguments: pulumi.String("wordcount"),
			ClusterId: pulumi.String("ef43d2ff-1ecf-4f13-bd0c-0004c429a058"),
			Input:     pulumi.String("s3a://wordcount/input/"),
			JarPath:   pulumi.String("s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar"),
			JobLog:    pulumi.String("s3a://wordcount/log/"),
			JobName:   pulumi.String("test_mapreduce_job1"),
			JobType:   pulumi.Float64(1),
			Output:    pulumi.String("s3a://wordcount/output/"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Opentelekomcloud = Pulumi.Opentelekomcloud;

return await Deployment.RunAsync(() => 
{
    var job1 = new Opentelekomcloud.MrsJobV1("job1", new()
    {
        Arguments = "wordcount",
        ClusterId = "ef43d2ff-1ecf-4f13-bd0c-0004c429a058",
        Input = "s3a://wordcount/input/",
        JarPath = "s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar",
        JobLog = "s3a://wordcount/log/",
        JobName = "test_mapreduce_job1",
        JobType = 1,
        Output = "s3a://wordcount/output/",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.opentelekomcloud.MrsJobV1;
import com.pulumi.opentelekomcloud.MrsJobV1Args;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var job1 = new MrsJobV1("job1", MrsJobV1Args.builder()
            .arguments("wordcount")
            .clusterId("ef43d2ff-1ecf-4f13-bd0c-0004c429a058")
            .input("s3a://wordcount/input/")
            .jarPath("s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar")
            .jobLog("s3a://wordcount/log/")
            .jobName("test_mapreduce_job1")
            .jobType(1)
            .output("s3a://wordcount/output/")
            .build());

    }
}
Copy
resources:
  job1:
    type: opentelekomcloud:MrsJobV1
    properties:
      arguments: wordcount
      clusterId: ef43d2ff-1ecf-4f13-bd0c-0004c429a058
      input: s3a://wordcount/input/
      jarPath: s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar
      jobLog: s3a://wordcount/log/
      jobName: test_mapreduce_job1
      jobType: 1
      output: s3a://wordcount/output/
Copy

Create MrsJobV1 Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new MrsJobV1(name: string, args: MrsJobV1Args, opts?: CustomResourceOptions);
@overload
def MrsJobV1(resource_name: str,
             args: MrsJobV1Args,
             opts: Optional[ResourceOptions] = None)

@overload
def MrsJobV1(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             job_type: Optional[float] = None,
             job_name: Optional[str] = None,
             cluster_id: Optional[str] = None,
             jar_path: Optional[str] = None,
             region: Optional[str] = None,
             is_public: Optional[bool] = None,
             is_protected: Optional[bool] = None,
             input: Optional[str] = None,
             hive_script_path: Optional[str] = None,
             arguments: Optional[str] = None,
             mrs_job_v1_id: Optional[str] = None,
             output: Optional[str] = None,
             job_log: Optional[str] = None,
             timeouts: Optional[MrsJobV1TimeoutsArgs] = None)
func NewMrsJobV1(ctx *Context, name string, args MrsJobV1Args, opts ...ResourceOption) (*MrsJobV1, error)
public MrsJobV1(string name, MrsJobV1Args args, CustomResourceOptions? opts = null)
public MrsJobV1(String name, MrsJobV1Args args)
public MrsJobV1(String name, MrsJobV1Args args, CustomResourceOptions options)
type: opentelekomcloud:MrsJobV1
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. MrsJobV1Args
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. MrsJobV1Args
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. MrsJobV1Args
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. MrsJobV1Args
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. MrsJobV1Args
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var mrsJobV1Resource = new Opentelekomcloud.MrsJobV1("mrsJobV1Resource", new()
{
    JobType = 0,
    JobName = "string",
    ClusterId = "string",
    JarPath = "string",
    Region = "string",
    IsPublic = false,
    IsProtected = false,
    Input = "string",
    HiveScriptPath = "string",
    Arguments = "string",
    MrsJobV1Id = "string",
    Output = "string",
    JobLog = "string",
    Timeouts = new Opentelekomcloud.Inputs.MrsJobV1TimeoutsArgs
    {
        Create = "string",
        Delete = "string",
        Update = "string",
    },
});
Copy
example, err := opentelekomcloud.NewMrsJobV1(ctx, "mrsJobV1Resource", &opentelekomcloud.MrsJobV1Args{
	JobType:        pulumi.Float64(0),
	JobName:        pulumi.String("string"),
	ClusterId:      pulumi.String("string"),
	JarPath:        pulumi.String("string"),
	Region:         pulumi.String("string"),
	IsPublic:       pulumi.Bool(false),
	IsProtected:    pulumi.Bool(false),
	Input:          pulumi.String("string"),
	HiveScriptPath: pulumi.String("string"),
	Arguments:      pulumi.String("string"),
	MrsJobV1Id:     pulumi.String("string"),
	Output:         pulumi.String("string"),
	JobLog:         pulumi.String("string"),
	Timeouts: &opentelekomcloud.MrsJobV1TimeoutsArgs{
		Create: pulumi.String("string"),
		Delete: pulumi.String("string"),
		Update: pulumi.String("string"),
	},
})
Copy
var mrsJobV1Resource = new MrsJobV1("mrsJobV1Resource", MrsJobV1Args.builder()
    .jobType(0)
    .jobName("string")
    .clusterId("string")
    .jarPath("string")
    .region("string")
    .isPublic(false)
    .isProtected(false)
    .input("string")
    .hiveScriptPath("string")
    .arguments("string")
    .mrsJobV1Id("string")
    .output("string")
    .jobLog("string")
    .timeouts(MrsJobV1TimeoutsArgs.builder()
        .create("string")
        .delete("string")
        .update("string")
        .build())
    .build());
Copy
mrs_job_v1_resource = opentelekomcloud.MrsJobV1("mrsJobV1Resource",
    job_type=0,
    job_name="string",
    cluster_id="string",
    jar_path="string",
    region="string",
    is_public=False,
    is_protected=False,
    input="string",
    hive_script_path="string",
    arguments="string",
    mrs_job_v1_id="string",
    output="string",
    job_log="string",
    timeouts={
        "create": "string",
        "delete": "string",
        "update": "string",
    })
Copy
const mrsJobV1Resource = new opentelekomcloud.MrsJobV1("mrsJobV1Resource", {
    jobType: 0,
    jobName: "string",
    clusterId: "string",
    jarPath: "string",
    region: "string",
    isPublic: false,
    isProtected: false,
    input: "string",
    hiveScriptPath: "string",
    arguments: "string",
    mrsJobV1Id: "string",
    output: "string",
    jobLog: "string",
    timeouts: {
        create: "string",
        "delete": "string",
        update: "string",
    },
});
Copy
type: opentelekomcloud:MrsJobV1
properties:
    arguments: string
    clusterId: string
    hiveScriptPath: string
    input: string
    isProtected: false
    isPublic: false
    jarPath: string
    jobLog: string
    jobName: string
    jobType: 0
    mrsJobV1Id: string
    output: string
    region: string
    timeouts:
        create: string
        delete: string
        update: string
Copy

MrsJobV1 Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The MrsJobV1 resource accepts the following input properties:

ClusterId This property is required. string
Cluster ID
JarPath This property is required. string
Path of the .jar package or .sql file for program execution. The parameter must meet the following requirements: contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql, while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
JobName This property is required. string

Job name. Contains only 1 to 64 characters, consisting of letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

JobType This property is required. double

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

Arguments string
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
HiveScriptPath string
SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
Input string
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
IsProtected bool
Whether a job is protected. Possible values: true or false. The current version does not support this function.
IsPublic bool
Whether a job is public. Possible values: true or false. The current version does not support this function.
JobLog string
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
MrsJobV1Id string
Output string
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
Region string
Timeouts MrsJobV1Timeouts
ClusterId This property is required. string
Cluster ID
JarPath This property is required. string
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
JobName This property is required. string

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

JobType This property is required. float64

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

Arguments string
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
HiveScriptPath string
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
Input string
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
IsProtected bool
Whether a job is protected. Possible values: true or false. The current version does not support this function.
IsPublic bool
Whether a job is public. Possible values: true or false. The current version does not support this function.
JobLog string
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
MrsJobV1Id string
Output string
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
Region string
Timeouts MrsJobV1TimeoutsArgs
clusterId This property is required. String
Cluster ID
jarPath This property is required. String
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
jobName This property is required. String

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

jobType This property is required. Double

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

arguments String
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
hiveScriptPath String
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input String
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
isProtected Boolean
Whether a job is protected. Possible values: true or false. The current version does not support this function.
isPublic Boolean
Whether a job is public. Possible values: true or false. The current version does not support this function.
jobLog String
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
mrsJobV1Id String
output String
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region String
timeouts MrsJobV1Timeouts
clusterId This property is required. string
Cluster ID
jarPath This property is required. string
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
jobName This property is required. string

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

jobType This property is required. number

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

arguments string
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
hiveScriptPath string
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input string
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
isProtected boolean
Whether a job is protected. Possible values: true or false. The current version does not support this function.
isPublic boolean
Whether a job is public. Possible values: true or false. The current version does not support this function.
jobLog string
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
mrsJobV1Id string
output string
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region string
timeouts MrsJobV1Timeouts
cluster_id This property is required. str
Cluster ID
jar_path This property is required. str
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
job_name This property is required. str

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

job_type This property is required. float

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

arguments str
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
hive_script_path str
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input str
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
is_protected bool
Whether a job is protected. Possible values: true or false. The current version does not support this function.
is_public bool
Whether a job is public. Possible values: true or false. The current version does not support this function.
job_log str
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
mrs_job_v1_id str
output str
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region str
timeouts MrsJobV1TimeoutsArgs
clusterId This property is required. String
Cluster ID
jarPath This property is required. String
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
jobName This property is required. String

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

jobType This property is required. Number

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

arguments String
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
hiveScriptPath String
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input String
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
isProtected Boolean
Whether a job is protected. Possible values: true or false. The current version does not support this function.
isPublic Boolean
Whether a job is public. Possible values: true or false. The current version does not support this function.
jobLog String
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
mrsJobV1Id String
output String
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region String
timeouts Property Map

Outputs

All input properties are implicitly available as output properties. Additionally, the MrsJobV1 resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
JobState string
Id string
The provider-assigned unique ID for this managed resource.
JobState string
id String
The provider-assigned unique ID for this managed resource.
jobState String
id string
The provider-assigned unique ID for this managed resource.
jobState string
id str
The provider-assigned unique ID for this managed resource.
job_state str
id String
The provider-assigned unique ID for this managed resource.
jobState String

Look up Existing MrsJobV1 Resource

Get an existing MrsJobV1 resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: MrsJobV1State, opts?: CustomResourceOptions): MrsJobV1
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        arguments: Optional[str] = None,
        cluster_id: Optional[str] = None,
        hive_script_path: Optional[str] = None,
        input: Optional[str] = None,
        is_protected: Optional[bool] = None,
        is_public: Optional[bool] = None,
        jar_path: Optional[str] = None,
        job_log: Optional[str] = None,
        job_name: Optional[str] = None,
        job_state: Optional[str] = None,
        job_type: Optional[float] = None,
        mrs_job_v1_id: Optional[str] = None,
        output: Optional[str] = None,
        region: Optional[str] = None,
        timeouts: Optional[MrsJobV1TimeoutsArgs] = None) -> MrsJobV1
func GetMrsJobV1(ctx *Context, name string, id IDInput, state *MrsJobV1State, opts ...ResourceOption) (*MrsJobV1, error)
public static MrsJobV1 Get(string name, Input<string> id, MrsJobV1State? state, CustomResourceOptions? opts = null)
public static MrsJobV1 get(String name, Output<String> id, MrsJobV1State state, CustomResourceOptions options)
resources:
  _:
    type: opentelekomcloud:MrsJobV1
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Arguments string
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
ClusterId string
Cluster ID
HiveScriptPath string
SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql (the extension is case-insensitive).
Input string
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
IsProtected bool
Whether a job is protected. Possible values: true, false. The current version does not support this function.
IsPublic bool
Whether a job is public. Possible values: true, false. The current version does not support this function.
JarPath string
Path of the .jar package or .sql file for program execution. The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql, while MapReduce and Spark Jar must end with .jar (the extensions .sql and .jar are case-insensitive).
JobLog string
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
JobName string

Job name. Contains only 1 to 64 characters: letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

JobState string
JobType double

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

MrsJobV1Id string
Output string
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
Region string
Timeouts MrsJobV1Timeouts
Arguments string
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
ClusterId string
Cluster ID
HiveScriptPath string
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
Input string
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
IsProtected bool
Whether a job is protected. Possible values: true, false. The current version does not support this function.
IsPublic bool
Whether a job is public. Possible values: true, false. The current version does not support this function.
JarPath string
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
JobLog string
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
JobName string

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

JobState string
JobType float64

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

MrsJobV1Id string
Output string
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
Region string
Timeouts MrsJobV1TimeoutsArgs
arguments String
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
clusterId String
Cluster ID
hiveScriptPath String
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input String
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
isProtected Boolean
Whether a job is protected. Possible values: true, false. The current version does not support this function.
isPublic Boolean
Whether a job is public. Possible values: true, false. The current version does not support this function.
jarPath String
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
jobLog String
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
jobName String

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

jobState String
jobType Double

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

mrsJobV1Id String
output String
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region String
timeouts MrsJobV1Timeouts
arguments string
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
clusterId string
Cluster ID
hiveScriptPath string
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input string
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
isProtected boolean
Whether a job is protected. Possible values: true, false. The current version does not support this function.
isPublic boolean
Whether a job is public. Possible values: true, false. The current version does not support this function.
jarPath string
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
jobLog string
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
jobName string

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

jobState string
jobType number

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

mrsJobV1Id string
output string
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region string
timeouts MrsJobV1Timeouts
arguments str
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
cluster_id str
Cluster ID
hive_script_path str
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input str
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
is_protected bool
Whether a job is protected. Possible values: true, false. The current version does not support this function.
is_public bool
Whether a job is public. Possible values: true, false. The current version does not support this function.
jar_path str
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
job_log str
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
job_name str

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

job_state str
job_type float

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

mrs_job_v1_id str
output str
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region str
timeouts MrsJobV1TimeoutsArgs
arguments String
Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
clusterId String
Cluster ID
hiveScriptPath String
SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
input String
Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
isProtected Boolean
Whether a job is protected. Possible values: true, false. The current version does not support this function.
isPublic Boolean
Whether a job is public. Possible values: true, false. The current version does not support this function.
jarPath String
Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
jobLog String
Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
jobName String

Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).

Note: Identical job names are allowed but not recommended.

jobState String
jobType Number

Job type

  • 1: MapReduce
  • 2: Spark
  • 3: Hive Script
  • 4: HiveQL (not supported currently)
  • 5: DistCp, importing and exporting data.
  • 6: Spark Script
  • 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)

Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.

mrsJobV1Id String
output String
Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
region String
timeouts Property Map

Supporting Types

MrsJobV1Timeouts, MrsJobV1TimeoutsArgs

Create string
Delete string
Update string
Create string
Delete string
Update string
create String
delete String
update String
create string
delete string
update string
create str
delete str
update str
create String
delete String
update String

Package Details

Repository
opentelekomcloud opentelekomcloud/terraform-provider-opentelekomcloud
License
Notes
This Pulumi package is based on the opentelekomcloud Terraform Provider.