Reference documentation and code samples for the Cloud Dataproc V1 API class Google::Cloud::Dataproc::V1::PySparkBatch.
A configuration for running an Apache PySpark batch workload.
Inherits
- Object
Extended By
- Google::Protobuf::MessageExts::ClassMethods
Includes
- Google::Protobuf::MessageExts
Methods
#archive_uris
def archive_uris() -> ::Array<::String>
Returns
-
(::Array<::String>) — Optional. HCFS URIs of archives to be extracted into the working directory
of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
#archive_uris=
def archive_uris=(value) -> ::Array<::String>
Parameter
-
value (::Array<::String>) — Optional. HCFS URIs of archives to be extracted into the working directory
of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
Returns
-
(::Array<::String>) — Optional. HCFS URIs of archives to be extracted into the working directory
of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
#args
def args() -> ::Array<::String>
Returns
-
(::Array<::String>) — Optional. The arguments to pass to the driver. Do not include arguments
that can be set as batch properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
#args=
def args=(value) -> ::Array<::String>
Parameter
-
value (::Array<::String>) — Optional. The arguments to pass to the driver. Do not include arguments
that can be set as batch properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
Returns
-
(::Array<::String>) — Optional. The arguments to pass to the driver. Do not include arguments
that can be set as batch properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
#file_uris
def file_uris() -> ::Array<::String>
Returns
- (::Array<::String>) — Optional. HCFS URIs of files to be placed in the working directory of each executor.
#file_uris=
def file_uris=(value) -> ::Array<::String>
Parameter
- value (::Array<::String>) — Optional. HCFS URIs of files to be placed in the working directory of each executor.
Returns
- (::Array<::String>) — Optional. HCFS URIs of files to be placed in the working directory of each executor.
#jar_file_uris
def jar_file_uris() -> ::Array<::String>
Returns
- (::Array<::String>) — Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
#jar_file_uris=
def jar_file_uris=(value) -> ::Array<::String>
Parameter
- value (::Array<::String>) — Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
Returns
- (::Array<::String>) — Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
#main_python_file_uri
def main_python_file_uri() -> ::String
Returns
- (::String) — Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
#main_python_file_uri=
def main_python_file_uri=(value) -> ::String
Parameter
- value (::String) — Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
Returns
- (::String) — Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
#python_file_uris
def python_file_uris() -> ::Array<::String>
Returns
-
(::Array<::String>) — Optional. HCFS file URIs of Python files to pass to the PySpark
framework. Supported file types: .py, .egg, and .zip.
#python_file_uris=
def python_file_uris=(value) -> ::Array<::String>
Parameter
-
value (::Array<::String>) — Optional. HCFS file URIs of Python files to pass to the PySpark
framework. Supported file types: .py, .egg, and .zip.
Returns
-
(::Array<::String>) — Optional. HCFS file URIs of Python files to pass to the PySpark
framework. Supported file types: .py, .egg, and .zip.