public final class PySparkJob extends GeneratedMessageV3 implements PySparkJobOrBuilder
A Dataproc job for running Apache PySpark applications on YARN.
Protobuf type google.cloud.dataproc.v1.PySparkJob
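A minimal sketch of assembling a PySparkJob through its generated builder; the gs:// URIs, arguments, and property values are hypothetical placeholders, not values taken from this reference.

```java
import com.google.cloud.dataproc.v1.PySparkJob;

public class BuildPySparkJobExample {
  public static void main(String[] args) {
    // main_python_file_uri is the only REQUIRED field; all others are OPTIONAL.
    // The gs:// URIs below are hypothetical placeholders.
    PySparkJob job =
        PySparkJob.newBuilder()
            .setMainPythonFileUri("gs://my-bucket/driver.py")
            .addPythonFileUris("gs://my-bucket/helpers.zip")
            .addJarFileUris("gs://my-bucket/connector.jar")
            .addArgs("--input")
            .addArgs("gs://my-bucket/data/")
            .putProperties("spark.executor.memory", "4g")
            .build();

    System.out.println(job.getMainPythonFileUri());
  }
}
```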
Inherited Members
com.google.protobuf.GeneratedMessageV3.<ListT>makeMutableCopy(ListT)
com.google.protobuf.GeneratedMessageV3.<ListT>makeMutableCopy(ListT,int)
com.google.protobuf.GeneratedMessageV3.<T>emptyList(java.lang.Class<T>)
com.google.protobuf.GeneratedMessageV3.internalGetMapFieldReflection(int)
Static Fields
ARCHIVE_URIS_FIELD_NUMBER
public static final int ARCHIVE_URIS_FIELD_NUMBER
Field Value: int
ARGS_FIELD_NUMBER
public static final int ARGS_FIELD_NUMBER
Field Value: int
FILE_URIS_FIELD_NUMBER
public static final int FILE_URIS_FIELD_NUMBER
Field Value: int
JAR_FILE_URIS_FIELD_NUMBER
public static final int JAR_FILE_URIS_FIELD_NUMBER
Field Value: int
LOGGING_CONFIG_FIELD_NUMBER
public static final int LOGGING_CONFIG_FIELD_NUMBER
Field Value: int
MAIN_PYTHON_FILE_URI_FIELD_NUMBER
public static final int MAIN_PYTHON_FILE_URI_FIELD_NUMBER
Field Value: int
PROPERTIES_FIELD_NUMBER
public static final int PROPERTIES_FIELD_NUMBER
Field Value: int
PYTHON_FILE_URIS_FIELD_NUMBER
public static final int PYTHON_FILE_URIS_FIELD_NUMBER
Field Value: int
Static Methods
getDefaultInstance()
public static PySparkJob getDefaultInstance()
getDescriptor()
public static final Descriptors.Descriptor getDescriptor()
newBuilder()
public static PySparkJob.Builder newBuilder()
newBuilder(PySparkJob prototype)
public static PySparkJob.Builder newBuilder(PySparkJob prototype)
parseDelimitedFrom(InputStream input)
public static PySparkJob parseDelimitedFrom(InputStream input)
parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static PySparkJob parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(byte[] data)
public static PySparkJob parseFrom(byte[] data)
Parameter: data (byte[])
parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
public static PySparkJob parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
parseFrom(ByteString data)
public static PySparkJob parseFrom(ByteString data)
parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
public static PySparkJob parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
parseFrom(CodedInputStream input)
public static PySparkJob parseFrom(CodedInputStream input)
parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public static PySparkJob parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(InputStream input)
public static PySparkJob parseFrom(InputStream input)
parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static PySparkJob parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(ByteBuffer data)
public static PySparkJob parseFrom(ByteBuffer data)
parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
public static PySparkJob parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
parser()
public static Parser<PySparkJob> parser()
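A short sketch of the static parsing entry points listed above, round-tripping a message through a byte array; the URI is a hypothetical placeholder.

```java
import com.google.cloud.dataproc.v1.PySparkJob;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParsePySparkJobExample {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Serialize a message to bytes (toByteArray() is inherited from AbstractMessageLite),
    // then restore it with the static parseFrom(byte[]) overload shown above.
    byte[] bytes =
        PySparkJob.newBuilder()
            .setMainPythonFileUri("gs://my-bucket/driver.py") // hypothetical URI
            .build()
            .toByteArray();

    PySparkJob fromBytes = PySparkJob.parseFrom(bytes);

    // parser() returns a Parser<PySparkJob> usable wherever a parser instance is expected.
    PySparkJob viaParser = PySparkJob.parser().parseFrom(bytes);

    System.out.println(fromBytes.equals(viaParser)); // true: value equality over all fields
  }
}
```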
Methods
containsProperties(String key)
public boolean containsProperties(String key)
Optional. A mapping of property names to values, used to configure PySpark.
Properties that conflict with values set by the Dataproc API might be
overwritten. Can include properties set in
/etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
Parameter: key (String)
equals(Object obj)
public boolean equals(Object obj)
Parameter: obj (Object)
Overrides
getArchiveUris(int index)
public String getArchiveUris(int index)
Optional. HCFS URIs of archives to be extracted into the working directory
of each executor. Supported file types:
.jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the element to return.
Returns: String, the archiveUris at the given index.
getArchiveUrisBytes(int index)
public ByteString getArchiveUrisBytes(int index)
Optional. HCFS URIs of archives to be extracted into the working directory
of each executor. Supported file types:
.jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the value to return.
Returns: ByteString, the bytes of the archiveUris at the given index.
getArchiveUrisCount()
public int getArchiveUrisCount()
Optional. HCFS URIs of archives to be extracted into the working directory
of each executor. Supported file types:
.jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Returns: int, the count of archiveUris.
getArchiveUrisList()
public ProtocolStringList getArchiveUrisList()
Optional. HCFS URIs of archives to be extracted into the working directory
of each executor. Supported file types:
.jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
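A sketch of reading the repeated archiveUris field through the count, index, and list accessors documented above; the same pattern applies to args, fileUris, jarFileUris, and pythonFileUris. The URIs are hypothetical.

```java
import com.google.cloud.dataproc.v1.PySparkJob;

public class ReadArchiveUrisExample {
  public static void main(String[] args) {
    PySparkJob job =
        PySparkJob.newBuilder()
            .setMainPythonFileUri("gs://my-bucket/driver.py") // hypothetical URI
            .addArchiveUris("gs://my-bucket/deps.tar.gz")
            .addArchiveUris("gs://my-bucket/native-libs.zip")
            .build();

    // Index-based access pairs getArchiveUrisCount() with getArchiveUris(int).
    for (int i = 0; i < job.getArchiveUrisCount(); i++) {
      System.out.println(job.getArchiveUris(i));
    }

    // getArchiveUrisList() exposes the same values as an unmodifiable ProtocolStringList.
    for (String uri : job.getArchiveUrisList()) {
      System.out.println(uri);
    }
  }
}
```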
getArgs(int index)
public String getArgs(int index)
Optional. The arguments to pass to the driver. Do not include arguments,
such as --conf, that can be set as job properties, since a collision may
occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the element to return.
Returns: String, the args at the given index.
getArgsBytes(int index)
public ByteString getArgsBytes(int index)
Optional. The arguments to pass to the driver. Do not include arguments,
such as --conf, that can be set as job properties, since a collision may
occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the value to return.
Returns: ByteString, the bytes of the args at the given index.
getArgsCount()
public int getArgsCount()
Optional. The arguments to pass to the driver. Do not include arguments,
such as --conf, that can be set as job properties, since a collision may
occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Returns: int, the count of args.
getArgsList()
public ProtocolStringList getArgsList()
Optional. The arguments to pass to the driver. Do not include arguments,
such as --conf, that can be set as job properties, since a collision may
occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
getDefaultInstanceForType()
public PySparkJob getDefaultInstanceForType()
getFileUris(int index)
public String getFileUris(int index)
Optional. HCFS URIs of files to be placed in the working directory of
each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the element to return.
Returns: String, the fileUris at the given index.
getFileUrisBytes(int index)
public ByteString getFileUrisBytes(int index)
Optional. HCFS URIs of files to be placed in the working directory of
each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the value to return.
Returns: ByteString, the bytes of the fileUris at the given index.
getFileUrisCount()
public int getFileUrisCount()
Optional. HCFS URIs of files to be placed in the working directory of
each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Returns: int, the count of fileUris.
getFileUrisList()
public ProtocolStringList getFileUrisList()
Optional. HCFS URIs of files to be placed in the working directory of
each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
getJarFileUris(int index)
public String getJarFileUris(int index)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the element to return.
Returns: String, the jarFileUris at the given index.
getJarFileUrisBytes(int index)
public ByteString getJarFileUrisBytes(int index)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the value to return.
Returns: ByteString, the bytes of the jarFileUris at the given index.
getJarFileUrisCount()
public int getJarFileUrisCount()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Returns: int, the count of jarFileUris.
getJarFileUrisList()
public ProtocolStringList getJarFileUrisList()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
getLoggingConfig()
public LoggingConfig getLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
getLoggingConfigOrBuilder()
public LoggingConfigOrBuilder getLoggingConfigOrBuilder()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
getMainPythonFileUri()
public String getMainPythonFileUri()
Required. The HCFS URI of the main Python file to use as the driver. Must
be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
Returns: String, the mainPythonFileUri.
getMainPythonFileUriBytes()
public ByteString getMainPythonFileUriBytes()
Required. The HCFS URI of the main Python file to use as the driver. Must
be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
Returns: ByteString, the bytes for mainPythonFileUri.
getParserForType()
public Parser<PySparkJob> getParserForType()
Overrides
getProperties() (deprecated)
public Map<String,String> getProperties()
getPropertiesCount()
public int getPropertiesCount()
Optional. A mapping of property names to values, used to configure PySpark.
Properties that conflict with values set by the Dataproc API might be
overwritten. Can include properties set in
/etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
Returns: int
getPropertiesMap()
public Map<String,String> getPropertiesMap()
Optional. A mapping of property names to values, used to configure PySpark.
Properties that conflict with values set by the Dataproc API might be
overwritten. Can include properties set in
/etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
getPropertiesOrDefault(String key, String defaultValue)
public String getPropertiesOrDefault(String key, String defaultValue)
Optional. A mapping of property names to values, used to configure PySpark.
Properties that conflict with values set by the Dataproc API might be
overwritten. Can include properties set in
/etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
Returns: String
getPropertiesOrThrow(String key)
public String getPropertiesOrThrow(String key)
Optional. A mapping of property names to values, used to configure PySpark.
Properties that conflict with values set by the Dataproc API might be
overwritten. Can include properties set in
/etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
Parameter: key (String)
Returns: String
getPythonFileUris(int index)
public String getPythonFileUris(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark
framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the element to return.
Returns: String, the pythonFileUris at the given index.
getPythonFileUrisBytes(int index)
public ByteString getPythonFileUrisBytes(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark
framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameter: index (int), the index of the value to return.
Returns: ByteString, the bytes of the pythonFileUris at the given index.
getPythonFileUrisCount()
public int getPythonFileUrisCount()
Optional. HCFS file URIs of Python files to pass to the PySpark
framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Returns: int, the count of pythonFileUris.
getPythonFileUrisList()
public ProtocolStringList getPythonFileUrisList()
Optional. HCFS file URIs of Python files to pass to the PySpark
framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
getSerializedSize()
public int getSerializedSize()
Returns: int
Overrides
hasLoggingConfig()
public boolean hasLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
Returns: boolean, whether the loggingConfig field is set.
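A sketch of checking loggingConfig presence before reading it; the putDriverLogLevels call and LoggingConfig.Level enum are assumed from the google.cloud.dataproc.v1.LoggingConfig message, and the URI is hypothetical.

```java
import com.google.cloud.dataproc.v1.LoggingConfig;
import com.google.cloud.dataproc.v1.PySparkJob;

public class LoggingConfigPresenceExample {
  public static void main(String[] args) {
    // driver_log_levels accessors are assumed from the LoggingConfig message.
    LoggingConfig logging =
        LoggingConfig.newBuilder()
            .putDriverLogLevels("root", LoggingConfig.Level.INFO)
            .build();

    PySparkJob job =
        PySparkJob.newBuilder()
            .setMainPythonFileUri("gs://my-bucket/driver.py") // hypothetical URI
            .setLoggingConfig(logging)
            .build();

    // For message-typed fields, check presence before reading; an unset field
    // would otherwise return LoggingConfig.getDefaultInstance().
    if (job.hasLoggingConfig()) {
      System.out.println(job.getLoggingConfig().getDriverLogLevelsMap());
    }
  }
}
```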
hashCode()
public int hashCode()
Returns: int
Overrides
internalGetFieldAccessorTable()
protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Overrides
internalGetMapFieldReflection(int number)
protected MapFieldReflectionAccessor internalGetMapFieldReflection(int number)
Parameter: number (int)
Returns: com.google.protobuf.MapFieldReflectionAccessor
Overrides
com.google.protobuf.GeneratedMessageV3.internalGetMapFieldReflection(int)
isInitialized()
public final boolean isInitialized()
Overrides
newBuilderForType()
public PySparkJob.Builder newBuilderForType()
newBuilderForType(GeneratedMessageV3.BuilderParent parent)
protected PySparkJob.Builder newBuilderForType(GeneratedMessageV3.BuilderParent parent)
Overrides
newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
protected Object newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
Returns: Object
Overrides
toBuilder()
public PySparkJob.Builder toBuilder()
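A sketch of toBuilder(), copying an existing message into a builder so a modified variant can be built; the field values are hypothetical.

```java
import com.google.cloud.dataproc.v1.PySparkJob;

public class ToBuilderExample {
  public static void main(String[] args) {
    PySparkJob original =
        PySparkJob.newBuilder()
            .setMainPythonFileUri("gs://my-bucket/driver.py") // hypothetical URI
            .addArgs("--mode=daily")
            .build();

    // Messages are immutable; toBuilder() copies the current field values into a
    // new builder, leaving the original untouched.
    PySparkJob variant =
        original.toBuilder()
            .clearArgs()
            .addArgs("--mode=backfill")
            .build();

    System.out.println(original.getArgs(0)); // --mode=daily
    System.out.println(variant.getArgs(0));  // --mode=backfill
  }
}
```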
writeTo(CodedOutputStream output)
public void writeTo(CodedOutputStream output)
Overrides
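A sketch pairing writeTo(CodedOutputStream) with parseFrom(InputStream) for a serialization round trip; the URI is hypothetical.

```java
import com.google.cloud.dataproc.v1.PySparkJob;
import com.google.protobuf.CodedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class WriteToExample {
  public static void main(String[] args) throws IOException {
    PySparkJob job =
        PySparkJob.newBuilder()
            .setMainPythonFileUri("gs://my-bucket/driver.py") // hypothetical URI
            .build();

    // writeTo(CodedOutputStream) serializes the message; flush() pushes the
    // buffered bytes into the underlying stream before they are read back.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    CodedOutputStream coded = CodedOutputStream.newInstance(out);
    job.writeTo(coded);
    coded.flush();

    PySparkJob restored = PySparkJob.parseFrom(new ByteArrayInputStream(out.toByteArray()));
    System.out.println(job.equals(restored)); // true
  }
}
```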