Class PySparkJob (4.36.0)

public final class PySparkJob extends GeneratedMessageV3 implements PySparkJobOrBuilder

A Dataproc job for running Apache PySpark applications on YARN.

Protobuf type google.cloud.dataproc.v1.PySparkJob

Implements

PySparkJobOrBuilder
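
A PySparkJob message is normally assembled through its generated builder. The sketch below is a minimal, hypothetical example; the bucket and file names are placeholders, and the surrounding job-submission plumbing (SubmitJobRequest, JobControllerClient) is omitted.

import com.google.cloud.dataproc.v1.PySparkJob;

public class PySparkJobExample {
  public static void main(String[] args) {
    // Hypothetical GCS URIs; replace with real paths in your project.
    PySparkJob job =
        PySparkJob.newBuilder()
            .setMainPythonFileUri("gs://my-bucket/jobs/word_count.py") // required field
            .addPythonFileUris("gs://my-bucket/jobs/helpers.py")       // optional repeated field
            .addArgs("--input=gs://my-bucket/data/input.txt")
            .putProperties("spark.executor.memory", "4g")              // map<string, string> properties
            .build();

    System.out.println(job.getMainPythonFileUri());
  }
}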

Static Fields

ARCHIVE_URIS_FIELD_NUMBER

public static final int ARCHIVE_URIS_FIELD_NUMBER
Field Value
Type: int

ARGS_FIELD_NUMBER

public static final int ARGS_FIELD_NUMBER
Field Value
Type: int

FILE_URIS_FIELD_NUMBER

public static final int FILE_URIS_FIELD_NUMBER
Field Value
Type: int

JAR_FILE_URIS_FIELD_NUMBER

public static final int JAR_FILE_URIS_FIELD_NUMBER
Field Value
Type: int

LOGGING_CONFIG_FIELD_NUMBER

public static final int LOGGING_CONFIG_FIELD_NUMBER
Field Value
Type: int

MAIN_PYTHON_FILE_URI_FIELD_NUMBER

public static final int MAIN_PYTHON_FILE_URI_FIELD_NUMBER
Field Value
Type: int

PROPERTIES_FIELD_NUMBER

public static final int PROPERTIES_FIELD_NUMBER
Field Value
Type: int

PYTHON_FILE_URIS_FIELD_NUMBER

public static final int PYTHON_FILE_URIS_FIELD_NUMBER
Field Value
Type: int

Static Methods

getDefaultInstance()

public static PySparkJob getDefaultInstance()
Returns
Type: PySparkJob

getDescriptor()

public static final Descriptors.Descriptor getDescriptor()
Returns
Type: Descriptor

newBuilder()

public static PySparkJob.Builder newBuilder()
Returns
Type: PySparkJob.Builder

newBuilder(PySparkJob prototype)

public static PySparkJob.Builder newBuilder(PySparkJob prototype)
Parameter
prototype (PySparkJob)
Returns
Type: PySparkJob.Builder

parseDelimitedFrom(InputStream input)

public static PySparkJob parseDelimitedFrom(InputStream input)
Parameter
input (InputStream)
Returns
Type: PySparkJob
Exceptions
Type: IOException

parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)

public static PySparkJob parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
input (InputStream)
extensionRegistry (ExtensionRegistryLite)
Returns
Type: PySparkJob
Exceptions
Type: IOException
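
A sketch of how the delimited variants are typically paired: writeDelimitedTo (inherited from the protobuf message base class) length-prefixes each message so that several can be read back in sequence with parseDelimitedFrom. The URIs are placeholders.

import com.google.cloud.dataproc.v1.PySparkJob;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class DelimitedExample {
  public static void main(String[] args) throws IOException {
    PySparkJob first = PySparkJob.newBuilder().setMainPythonFileUri("gs://bucket/a.py").build();
    PySparkJob second = PySparkJob.newBuilder().setMainPythonFileUri("gs://bucket/b.py").build();

    // writeDelimitedTo prefixes each message with its size...
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    first.writeDelimitedTo(out);
    second.writeDelimitedTo(out);

    // ...so parseDelimitedFrom can read them back one at a time.
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    System.out.println(PySparkJob.parseDelimitedFrom(in).getMainPythonFileUri()); // gs://bucket/a.py
    System.out.println(PySparkJob.parseDelimitedFrom(in).getMainPythonFileUri()); // gs://bucket/b.py
  }
}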

parseFrom(byte[] data)

public static PySparkJob parseFrom(byte[] data)
Parameter
data (byte[])
Returns
Type: PySparkJob
Exceptions
Type: InvalidProtocolBufferException

parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)

public static PySparkJob parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
Parameters
data (byte[])
extensionRegistry (ExtensionRegistryLite)
Returns
Type: PySparkJob
Exceptions
Type: InvalidProtocolBufferException

parseFrom(ByteString data)

public static PySparkJob parseFrom(ByteString data)
Parameter
data (ByteString)
Returns
Type: PySparkJob
Exceptions
Type: InvalidProtocolBufferException

parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)

public static PySparkJob parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
Parameters
data (ByteString)
extensionRegistry (ExtensionRegistryLite)
Returns
Type: PySparkJob
Exceptions
Type: InvalidProtocolBufferException

parseFrom(CodedInputStream input)

public static PySparkJob parseFrom(CodedInputStream input)
Parameter
input (CodedInputStream)
Returns
Type: PySparkJob
Exceptions
Type: IOException

parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)

public static PySparkJob parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
input (CodedInputStream)
extensionRegistry (ExtensionRegistryLite)
Returns
Type: PySparkJob
Exceptions
Type: IOException

parseFrom(InputStream input)

public static PySparkJob parseFrom(InputStream input)
Parameter
input (InputStream)
Returns
Type: PySparkJob
Exceptions
Type: IOException

parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)

public static PySparkJob parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
input (InputStream)
extensionRegistry (ExtensionRegistryLite)
Returns
Type: PySparkJob
Exceptions
Type: IOException

parseFrom(ByteBuffer data)

public static PySparkJob parseFrom(ByteBuffer data)
Parameter
data (ByteBuffer)
Returns
Type: PySparkJob
Exceptions
Type: InvalidProtocolBufferException

parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)

public static PySparkJob parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
Parameters
data (ByteBuffer)
extensionRegistry (ExtensionRegistryLite)
Returns
Type: PySparkJob
Exceptions
Type: InvalidProtocolBufferException

parser()

public static Parser<PySparkJob> parser()
Returns
Type: Parser<PySparkJob>
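
A small round-trip sketch tying the static parse methods to serialization: toByteArray (inherited from the message base class) produces bytes that either parseFrom(byte[]) or the Parser returned by parser() can decode. The URI is a placeholder.

import com.google.cloud.dataproc.v1.PySparkJob;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParseRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    PySparkJob original =
        PySparkJob.newBuilder().setMainPythonFileUri("gs://bucket/job.py").build();

    byte[] bytes = original.toByteArray();

    // Either the static parseFrom or the reusable Parser works here.
    PySparkJob viaStatic = PySparkJob.parseFrom(bytes);
    PySparkJob viaParser = PySparkJob.parser().parseFrom(bytes);

    System.out.println(viaStatic.equals(viaParser)); // true: same field contents
  }
}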

Methods

containsProperties(String key)

public boolean containsProperties(String key)

Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];

Parameter
key (String)
Returns
Type: boolean

equals(Object obj)

public boolean equals(Object obj)
Parameter
obj (Object)
Returns
Type: boolean
Overrides

getArchiveUris(int index)

public String getArchiveUris(int index)

Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the element to return.
Returns
Type: String
Description: The archiveUris at the given index.

getArchiveUrisBytes(int index)

public ByteString getArchiveUrisBytes(int index)

Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the value to return.
Returns
Type: ByteString
Description: The bytes of the archiveUris at the given index.

getArchiveUrisCount()

public int getArchiveUrisCount()

Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: int
Description: The count of archiveUris.

getArchiveUrisList()

public ProtocolStringList getArchiveUrisList()

Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: ProtocolStringList
Description: A list containing the archiveUris.

getArgs(int index)

public String getArgs(int index)

Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the element to return.
Returns
Type: String
Description: The args at the given index.

getArgsBytes(int index)

public ByteString getArgsBytes(int index)

Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the value to return.
Returns
Type: ByteString
Description: The bytes of the args at the given index.

getArgsCount()

public int getArgsCount()

Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: int
Description: The count of args.

getArgsList()

public ProtocolStringList getArgsList()

Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: ProtocolStringList
Description: A list containing the args.
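
A brief sketch of how the repeated-field accessors relate to one another, shown with args; the archiveUris, fileUris, jarFileUris, and pythonFileUris accessors follow the same pattern. The argument values are placeholders.

import com.google.cloud.dataproc.v1.PySparkJob;

public class RepeatedFieldExample {
  public static void main(String[] args) {
    PySparkJob job =
        PySparkJob.newBuilder()
            .addArgs("--input=gs://bucket/in.txt")
            .addArgs("--output=gs://bucket/out")
            .build();

    // Count, indexed access, and the full list all view the same repeated field.
    System.out.println(job.getArgsCount());   // 2
    System.out.println(job.getArgs(0));       // --input=gs://bucket/in.txt
    for (String arg : job.getArgsList()) {    // ProtocolStringList implements List<String>
      System.out.println(arg);
    }
  }
}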

getDefaultInstanceForType()

public PySparkJob getDefaultInstanceForType()
Returns
Type: PySparkJob

getFileUris(int index)

public String getFileUris(int index)

Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.

repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the element to return.
Returns
Type: String
Description: The fileUris at the given index.

getFileUrisBytes(int index)

public ByteString getFileUrisBytes(int index)

Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.

repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the value to return.
Returns
Type: ByteString
Description: The bytes of the fileUris at the given index.

getFileUrisCount()

public int getFileUrisCount()

Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.

repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: int
Description: The count of fileUris.

getFileUrisList()

public ProtocolStringList getFileUrisList()

Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.

repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: ProtocolStringList
Description: A list containing the fileUris.

getJarFileUris(int index)

public String getJarFileUris(int index)

Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.

repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the element to return.
Returns
Type: String
Description: The jarFileUris at the given index.

getJarFileUrisBytes(int index)

public ByteString getJarFileUrisBytes(int index)

Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.

repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the value to return.
Returns
Type: ByteString
Description: The bytes of the jarFileUris at the given index.

getJarFileUrisCount()

public int getJarFileUrisCount()

Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.

repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: int
Description: The count of jarFileUris.

getJarFileUrisList()

public ProtocolStringList getJarFileUrisList()

Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.

repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: ProtocolStringList
Description: A list containing the jarFileUris.

getLoggingConfig()

public LoggingConfig getLoggingConfig()

Optional. The runtime log config for job execution.

.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: LoggingConfig
Description: The loggingConfig.

getLoggingConfigOrBuilder()

public LoggingConfigOrBuilder getLoggingConfigOrBuilder()

Optional. The runtime log config for job execution.

.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: LoggingConfigOrBuilder

getMainPythonFileUri()

public String getMainPythonFileUri()

Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.

string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];

Returns
Type: String
Description: The mainPythonFileUri.

getMainPythonFileUriBytes()

public ByteString getMainPythonFileUriBytes()

Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.

string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];

Returns
Type: ByteString
Description: The bytes for mainPythonFileUri.
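
The Bytes accessor returns the same value as the String accessor, just as a UTF-8 ByteString. A short sketch with a placeholder URI:

import com.google.cloud.dataproc.v1.PySparkJob;
import com.google.protobuf.ByteString;

public class MainUriExample {
  public static void main(String[] args) {
    PySparkJob job =
        PySparkJob.newBuilder().setMainPythonFileUri("gs://bucket/driver.py").build();

    String uri = job.getMainPythonFileUri();
    ByteString uriBytes = job.getMainPythonFileUriBytes();

    // The ByteString is simply the UTF-8 encoding of the string field.
    System.out.println(uri.equals(uriBytes.toStringUtf8())); // true
  }
}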

getParserForType()

public Parser<PySparkJob> getParserForType()
Returns
Type: Parser<PySparkJob>
Overrides

getProperties() (deprecated)

public Map<String,String> getProperties()

Use getPropertiesMap() instead.

Returns
Type: Map<String,String>

getPropertiesCount()

public int getPropertiesCount()

Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: int

getPropertiesMap()

public Map<String,String> getPropertiesMap()

Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: Map<String,String>

getPropertiesOrDefault(String key, String defaultValue)

public String getPropertiesOrDefault(String key, String defaultValue)

Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];

Parameters
key (String)
defaultValue (String)
Returns
Type: String

getPropertiesOrThrow(String key)

public String getPropertiesOrThrow(String key)

Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];

Parameter
key (String)
Returns
Type: String
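
A sketch of the map-field accessors for properties, using a placeholder Spark property:

import com.google.cloud.dataproc.v1.PySparkJob;

public class PropertiesExample {
  public static void main(String[] args) {
    PySparkJob job =
        PySparkJob.newBuilder()
            .putProperties("spark.executor.memory", "4g") // builder-side map mutation
            .build();

    System.out.println(job.containsProperties("spark.executor.memory"));         // true
    System.out.println(job.getPropertiesOrDefault("spark.driver.memory", "2g")); // falls back to 2g
    System.out.println(job.getPropertiesMap().size());                           // 1
    // getPropertiesOrThrow throws IllegalArgumentException for a missing key.
    System.out.println(job.getPropertiesOrThrow("spark.executor.memory"));       // 4g
  }
}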

getPythonFileUris(int index)

public String getPythonFileUris(int index)

Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the element to return.
Returns
Type: String
Description: The pythonFileUris at the given index.

getPythonFileUrisBytes(int index)

public ByteString getPythonFileUrisBytes(int index)

Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];

Parameter
index (int): The index of the value to return.
Returns
Type: ByteString
Description: The bytes of the pythonFileUris at the given index.

getPythonFileUrisCount()

public int getPythonFileUrisCount()

Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: int
Description: The count of pythonFileUris.

getPythonFileUrisList()

public ProtocolStringList getPythonFileUrisList()

Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: ProtocolStringList
Description: A list containing the pythonFileUris.

getSerializedSize()

public int getSerializedSize()
Returns
Type: int
Overrides

hasLoggingConfig()

public boolean hasLoggingConfig()

Optional. The runtime log config for job execution.

.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];

Returns
Type: boolean
Description: Whether the loggingConfig field is set.
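
For a message-typed field such as logging_config, the has method distinguishes "unset" from "set to defaults". A minimal sketch:

import com.google.cloud.dataproc.v1.LoggingConfig;
import com.google.cloud.dataproc.v1.PySparkJob;

public class LoggingConfigExample {
  public static void main(String[] args) {
    PySparkJob withoutConfig = PySparkJob.getDefaultInstance();
    System.out.println(withoutConfig.hasLoggingConfig()); // false
    // getLoggingConfig() still returns LoggingConfig.getDefaultInstance() rather than null.

    PySparkJob withConfig =
        PySparkJob.newBuilder()
            .setLoggingConfig(LoggingConfig.getDefaultInstance())
            .build();
    System.out.println(withConfig.hasLoggingConfig());     // true
  }
}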

hashCode()

public int hashCode()
Returns
Type: int
Overrides

internalGetFieldAccessorTable()

protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Returns
Type: FieldAccessorTable
Overrides

internalGetMapFieldReflection(int number)

protected MapFieldReflectionAccessor internalGetMapFieldReflection(int number)
Parameter
number (int)
Returns
Type: com.google.protobuf.MapFieldReflectionAccessor
Overrides
com.google.protobuf.GeneratedMessageV3.internalGetMapFieldReflection(int)

isInitialized()

public final boolean isInitialized()
Returns
Type: boolean
Overrides

newBuilderForType()

public PySparkJob.Builder newBuilderForType()
Returns
Type: PySparkJob.Builder

newBuilderForType(GeneratedMessageV3.BuilderParent parent)

protected PySparkJob.Builder newBuilderForType(GeneratedMessageV3.BuilderParent parent)
Parameter
parent (BuilderParent)
Returns
Type: PySparkJob.Builder
Overrides

newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)

protected Object newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
Parameter
unused (UnusedPrivateParameter)
Returns
Type: Object
Overrides

toBuilder()

public PySparkJob.Builder toBuilder()
Returns
Type: PySparkJob.Builder
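
Because PySparkJob instances are immutable, toBuilder is the usual way to derive a modified copy. A sketch with placeholder values:

import com.google.cloud.dataproc.v1.PySparkJob;

public class ToBuilderExample {
  public static void main(String[] args) {
    PySparkJob base =
        PySparkJob.newBuilder().setMainPythonFileUri("gs://bucket/job.py").build();

    // toBuilder copies all fields; the original message is left untouched.
    PySparkJob tuned =
        base.toBuilder().putProperties("spark.executor.cores", "2").build();

    System.out.println(base.getPropertiesCount());  // 0
    System.out.println(tuned.getPropertiesCount()); // 1
  }
}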

writeTo(CodedOutputStream output)

public void writeTo(CodedOutputStream output)
Parameter
output (CodedOutputStream)
Overrides
Exceptions
Type: IOException
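
A sketch of writeTo with an explicit CodedOutputStream; in practice toByteArray() is often simpler, but the CodedOutputStream form lets several writes share one buffer. The flush call matters because CodedOutputStream buffers internally.

import com.google.cloud.dataproc.v1.PySparkJob;
import com.google.protobuf.CodedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class WriteToExample {
  public static void main(String[] args) throws IOException {
    PySparkJob job =
        PySparkJob.newBuilder().setMainPythonFileUri("gs://bucket/job.py").build();

    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    CodedOutputStream coded = CodedOutputStream.newInstance(buffer);
    job.writeTo(coded);
    coded.flush(); // push buffered bytes into the underlying stream before using them

    System.out.println(buffer.size() == job.getSerializedSize()); // true
  }
}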