Reference documentation and code samples for the Cloud Dataproc V1beta2 API class Google::Cloud::Dataproc::V1beta2::OrderedJob.
A job executed by the workflow.
Inherits
- Object
Extended By
- Google::Protobuf::MessageExts::ClassMethods
Includes
- Google::Protobuf::MessageExts
Methods
#hadoop_job
def hadoop_job() -> ::Google::Cloud::Dataproc::V1beta2::HadoopJob
- (::Google::Cloud::Dataproc::V1beta2::HadoopJob) — Optional. Job is a Hadoop job.
#hadoop_job=
def hadoop_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::HadoopJob
- value (::Google::Cloud::Dataproc::V1beta2::HadoopJob) — Optional. Job is a Hadoop job.
- (::Google::Cloud::Dataproc::V1beta2::HadoopJob) — Optional. Job is a Hadoop job.
#hive_job
def hive_job() -> ::Google::Cloud::Dataproc::V1beta2::HiveJob
- (::Google::Cloud::Dataproc::V1beta2::HiveJob) — Optional. Job is a Hive job.
#hive_job=
def hive_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::HiveJob
- value (::Google::Cloud::Dataproc::V1beta2::HiveJob) — Optional. Job is a Hive job.
- (::Google::Cloud::Dataproc::V1beta2::HiveJob) — Optional. Job is a Hive job.
#labels
def labels() -> ::Google::Protobuf::Map{::String => ::String}
-
(::Google::Protobuf::Map{::String => ::String}) — Optional. The labels to associate with this job.
Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
Label values must be between 1 and 63 characters long, and must conform to the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
No more than 32 labels can be associated with a given job.
#labels=
def labels=(value) -> ::Google::Protobuf::Map{::String => ::String}
-
value (::Google::Protobuf::Map{::String => ::String}) — Optional. The labels to associate with this job.
Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
Label values must be between 1 and 63 characters long, and must conform to the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
No more than 32 labels can be associated with a given job.
-
(::Google::Protobuf::Map{::String => ::String}) — Optional. The labels to associate with this job.
Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
Label values must be between 1 and 63 characters long, and must conform to the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
No more than 32 labels can be associated with a given job.
#pig_job
def pig_job() -> ::Google::Cloud::Dataproc::V1beta2::PigJob
- (::Google::Cloud::Dataproc::V1beta2::PigJob) — Optional. Job is a Pig job.
#pig_job=
def pig_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::PigJob
- value (::Google::Cloud::Dataproc::V1beta2::PigJob) — Optional. Job is a Pig job.
- (::Google::Cloud::Dataproc::V1beta2::PigJob) — Optional. Job is a Pig job.
#prerequisite_step_ids
def prerequisite_step_ids() -> ::Array<::String>
- (::Array<::String>) — Optional. The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow.
#prerequisite_step_ids=
def prerequisite_step_ids=(value) -> ::Array<::String>
- value (::Array<::String>) — Optional. The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow.
- (::Array<::String>) — Optional. The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow.
#presto_job
def presto_job() -> ::Google::Cloud::Dataproc::V1beta2::PrestoJob
- (::Google::Cloud::Dataproc::V1beta2::PrestoJob) — Optional. Job is a Presto job.
#presto_job=
def presto_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::PrestoJob
- value (::Google::Cloud::Dataproc::V1beta2::PrestoJob) — Optional. Job is a Presto job.
- (::Google::Cloud::Dataproc::V1beta2::PrestoJob) — Optional. Job is a Presto job.
#pyspark_job
def pyspark_job() -> ::Google::Cloud::Dataproc::V1beta2::PySparkJob
- (::Google::Cloud::Dataproc::V1beta2::PySparkJob) — Optional. Job is a PySpark job.
#pyspark_job=
def pyspark_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::PySparkJob
- value (::Google::Cloud::Dataproc::V1beta2::PySparkJob) — Optional. Job is a PySpark job.
- (::Google::Cloud::Dataproc::V1beta2::PySparkJob) — Optional. Job is a PySpark job.
#scheduling
def scheduling() -> ::Google::Cloud::Dataproc::V1beta2::JobScheduling
- (::Google::Cloud::Dataproc::V1beta2::JobScheduling) — Optional. Job scheduling configuration.
#scheduling=
def scheduling=(value) -> ::Google::Cloud::Dataproc::V1beta2::JobScheduling
- value (::Google::Cloud::Dataproc::V1beta2::JobScheduling) — Optional. Job scheduling configuration.
- (::Google::Cloud::Dataproc::V1beta2::JobScheduling) — Optional. Job scheduling configuration.
#spark_job
def spark_job() -> ::Google::Cloud::Dataproc::V1beta2::SparkJob
- (::Google::Cloud::Dataproc::V1beta2::SparkJob) — Optional. Job is a Spark job.
#spark_job=
def spark_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::SparkJob
- value (::Google::Cloud::Dataproc::V1beta2::SparkJob) — Optional. Job is a Spark job.
- (::Google::Cloud::Dataproc::V1beta2::SparkJob) — Optional. Job is a Spark job.
#spark_r_job
def spark_r_job() -> ::Google::Cloud::Dataproc::V1beta2::SparkRJob
- (::Google::Cloud::Dataproc::V1beta2::SparkRJob) — Optional. Job is a SparkR job.
#spark_r_job=
def spark_r_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::SparkRJob
- value (::Google::Cloud::Dataproc::V1beta2::SparkRJob) — Optional. Job is a SparkR job.
- (::Google::Cloud::Dataproc::V1beta2::SparkRJob) — Optional. Job is a SparkR job.
#spark_sql_job
def spark_sql_job() -> ::Google::Cloud::Dataproc::V1beta2::SparkSqlJob
- (::Google::Cloud::Dataproc::V1beta2::SparkSqlJob) — Optional. Job is a SparkSql job.
#spark_sql_job=
def spark_sql_job=(value) -> ::Google::Cloud::Dataproc::V1beta2::SparkSqlJob
- value (::Google::Cloud::Dataproc::V1beta2::SparkSqlJob) — Optional. Job is a SparkSql job.
- (::Google::Cloud::Dataproc::V1beta2::SparkSqlJob) — Optional. Job is a SparkSql job.
#step_id
def step_id() -> ::String
-
(::String) — Required. The step id. The id must be unique among all jobs
within the template.
The step id is used as a prefix for the job id, as the job
goog-dataproc-workflow-step-id
label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
#step_id=
def step_id=(value) -> ::String
-
value (::String) — Required. The step id. The id must be unique among all jobs
within the template.
The step id is used as a prefix for the job id, as the job
goog-dataproc-workflow-step-id
label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
-
(::String) — Required. The step id. The id must be unique among all jobs
within the template.
The step id is used as a prefix for the job id, as the job
goog-dataproc-workflow-step-id
label, and in the prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.