public final class SparkSqlJob extends GeneratedMessageV3 implements SparkSqlJobOrBuilder
A Dataproc job for running Apache Spark SQL queries.
Protobuf type google.cloud.dataproc.v1.SparkSqlJob
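The example below is a minimal, non-authoritative sketch of assembling a SparkSqlJob through its builder; the gs:// URIs, variable names, and property values are placeholders chosen for illustration, not values taken from this reference.

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;

public class BuildSparkSqlJob {
  public static void main(String[] args) {
    // All URIs and values below are illustrative placeholders.
    SparkSqlJob job = SparkSqlJob.newBuilder()
        .setQueryFileUri("gs://my-bucket/spark-sql/queries.sql")
        .putScriptVariables("run_date", "2024-01-01")
        .putProperties("spark.sql.shuffle.partitions", "200")
        .addJarFileUris("gs://my-bucket/jars/custom-udfs.jar")
        .build();
    System.out.println(job);
  }
}
```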
Fields
JAR_FILE_URIS_FIELD_NUMBER
public static final int JAR_FILE_URIS_FIELD_NUMBER
Field Value
LOGGING_CONFIG_FIELD_NUMBER
public static final int LOGGING_CONFIG_FIELD_NUMBER
Field Value
PROPERTIES_FIELD_NUMBER
public static final int PROPERTIES_FIELD_NUMBER
Field Value
QUERY_FILE_URI_FIELD_NUMBER
public static final int QUERY_FILE_URI_FIELD_NUMBER
Field Value
QUERY_LIST_FIELD_NUMBER
public static final int QUERY_LIST_FIELD_NUMBER
Field Value
SCRIPT_VARIABLES_FIELD_NUMBER
public static final int SCRIPT_VARIABLES_FIELD_NUMBER
Field Value
Methods
containsProperties(String key)
public boolean containsProperties(String key)
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameter
Returns
containsScriptVariables(String key)
public boolean containsScriptVariables(String key)
Optional. Mapping of query variable names to values (equivalent to the
Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameter
Returns
equals(Object obj)
public boolean equals(Object obj)
Parameter
Returns
Overrides
getDefaultInstance()
public static SparkSqlJob getDefaultInstance()
Returns
getDefaultInstanceForType()
public SparkSqlJob getDefaultInstanceForType()
Returns
getDescriptor()
public static final Descriptors.Descriptor getDescriptor()
Returns
getJarFileUris(int index)
public String getJarFileUris(int index)
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
Parameter
Name | Description
---- | -----------
index | int. The index of the element to return.
Returns
Type | Description
---- | -----------
String | The jarFileUris at the given index.
getJarFileUrisBytes(int index)
public ByteString getJarFileUrisBytes(int index)
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
Parameter
Name | Description
---- | -----------
index | int. The index of the value to return.
Returns
Type | Description
---- | -----------
ByteString | The bytes of the jarFileUris at the given index.
getJarFileUrisCount()
public int getJarFileUrisCount()
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
Returns
Type | Description
---- | -----------
int | The count of jarFileUris.
getJarFileUrisList()
public ProtocolStringList getJarFileUrisList()
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
Returns
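As a brief illustration of the repeated jar_file_uris accessors above, the sketch below iterates a job's jar URIs both by index and through the list view; the helper class and method names are invented for this example.

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;

class JarFileUriInspection {
  static void printJarFileUris(SparkSqlJob job) {
    // Index-based access pairs with getJarFileUrisCount().
    for (int i = 0; i < job.getJarFileUrisCount(); i++) {
      System.out.println(job.getJarFileUris(i));
    }
    // The list view exposes the same elements as a ProtocolStringList.
    for (String uri : job.getJarFileUrisList()) {
      System.out.println(uri);
    }
  }
}
```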
getLoggingConfig()
public LoggingConfig getLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
Returns
getLoggingConfigOrBuilder()
public LoggingConfigOrBuilder getLoggingConfigOrBuilder()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
Returns
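A small sketch of how hasLoggingConfig() (documented later in this section) and getLoggingConfig() are typically combined; the helper name is invented, and the explicit fallback to LoggingConfig.getDefaultInstance() reflects the usual protobuf pattern rather than guidance from this reference.

```java
import com.google.cloud.dataproc.v1.LoggingConfig;
import com.google.cloud.dataproc.v1.SparkSqlJob;

class LoggingConfigInspection {
  static LoggingConfig loggingConfigOrDefault(SparkSqlJob job) {
    // hasLoggingConfig() reports whether the optional message field was set;
    // when it was not, fall back to the default LoggingConfig instance.
    return job.hasLoggingConfig()
        ? job.getLoggingConfig()
        : LoggingConfig.getDefaultInstance();
  }
}
```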
getParserForType()
public Parser<SparkSqlJob> getParserForType()
Returns
Overrides
getProperties()
public Map<String,String> getProperties()
Returns
getPropertiesCount()
public int getPropertiesCount()
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Returns
getPropertiesMap()
public Map<String,String> getPropertiesMap()
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Returns
getPropertiesOrDefault(String key, String defaultValue)
public String getPropertiesOrDefault(String key, String defaultValue)
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameters
Returns
getPropertiesOrThrow(String key)
public String getPropertiesOrThrow(String key)
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameter
Returns
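The sketch below shows one way to read the properties map through the accessors above; the property key and fallback value are placeholders, and the note about getPropertiesOrThrow failing on a missing key reflects standard protobuf map-accessor behavior.

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;
import java.util.Map;

class PropertiesInspection {
  static String shufflePartitions(SparkSqlJob job) {
    // getPropertiesMap() returns the full map; getPropertiesCount() its size.
    Map<String, String> properties = job.getPropertiesMap();
    System.out.println("property count: " + job.getPropertiesCount());

    // getPropertiesOrThrow throws for missing keys, so guard it with
    // containsProperties, or use getPropertiesOrDefault for a fallback.
    if (job.containsProperties("spark.sql.shuffle.partitions")) {
      return job.getPropertiesOrThrow("spark.sql.shuffle.partitions");
    }
    return job.getPropertiesOrDefault("spark.sql.shuffle.partitions", "200");
  }
}
```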
getQueriesCase()
public SparkSqlJob.QueriesCase getQueriesCase()
Returns
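Since query_file_uri and query_list belong to the queries oneof, getQueriesCase() identifies which source is set. The sketch below is an assumed usage pattern: the QUERIES_NOT_SET constant follows the standard protobuf naming for an unset oneof, and it assumes QueryList exposes a getQueriesCount() accessor for its repeated queries field.

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;

class QuerySourceInspection {
  static String describeQuerySource(SparkSqlJob job) {
    switch (job.getQueriesCase()) {
      case QUERY_FILE_URI:
        // The job reads its SQL from an HCFS script.
        return "script: " + job.getQueryFileUri();
      case QUERY_LIST:
        // The job carries its queries inline in a QueryList message.
        return "inline queries: " + job.getQueryList().getQueriesCount();
      case QUERIES_NOT_SET:
      default:
        return "no query source set";
    }
  }
}
```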
getQueryFileUri()
public String getQueryFileUri()
The HCFS URI of the script that contains SQL queries.
string query_file_uri = 1;
Returns
Type | Description
---- | -----------
String | The queryFileUri.
getQueryFileUriBytes()
public ByteString getQueryFileUriBytes()
The HCFS URI of the script that contains SQL queries.
string query_file_uri = 1;
Returns
Type | Description
---- | -----------
ByteString | The bytes for queryFileUri.
getQueryList()
public QueryList getQueryList()
A list of queries.
.google.cloud.dataproc.v1.QueryList query_list = 2;
Returns
getQueryListOrBuilder()
public QueryListOrBuilder getQueryListOrBuilder()
A list of queries.
.google.cloud.dataproc.v1.QueryList query_list = 2;
Returns
getScriptVariables()
public Map<String,String> getScriptVariables()
Returns
getScriptVariablesCount()
public int getScriptVariablesCount()
Optional. Mapping of query variable names to values (equivalent to the
Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Returns
getScriptVariablesMap()
public Map<String,String> getScriptVariablesMap()
Optional. Mapping of query variable names to values (equivalent to the
Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Returns
getScriptVariablesOrDefault(String key, String defaultValue)
public String getScriptVariablesOrDefault(String key, String defaultValue)
Optional. Mapping of query variable names to values (equivalent to the
Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameters
Returns
getScriptVariablesOrThrow(String key)
public String getScriptVariablesOrThrow(String key)
Optional. Mapping of query variable names to values (equivalent to the
Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameter
Returns
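A short sketch combining the script-variable accessors above; the variable name parameter and the empty-string fallback are placeholders chosen for this example, and the helper class is hypothetical.

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;

class ScriptVariableInspection {
  static String resolveVariable(SparkSqlJob job, String name) {
    // Equivalent to reading a value that a `SET name="value";` statement
    // would define for the Spark SQL session.
    if (job.containsScriptVariables(name)) {
      return job.getScriptVariablesOrThrow(name);
    }
    // Fall back to an empty string when the variable was never defined.
    return job.getScriptVariablesOrDefault(name, "");
  }
}
```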
getSerializedSize()
public int getSerializedSize()
Returns
Overrides
getUnknownFields()
public final UnknownFieldSet getUnknownFields()
Returns
Overrides
hasLoggingConfig()
public boolean hasLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
Returns
Type | Description
---- | -----------
boolean | Whether the loggingConfig field is set.
hasQueryFileUri()
public boolean hasQueryFileUri()
The HCFS URI of the script that contains SQL queries.
string query_file_uri = 1;
Returns
Type | Description
---- | -----------
boolean | Whether the queryFileUri field is set.
hasQueryList()
public boolean hasQueryList()
A list of queries.
.google.cloud.dataproc.v1.QueryList query_list = 2;
Returns
Type | Description
---- | -----------
boolean | Whether the queryList field is set.
hashCode()
public int hashCode()
Returns
Overrides
internalGetFieldAccessorTable()
protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Returns
Overrides
internalGetMapField(int number)
protected MapField internalGetMapField(int number)
Parameter
Returns
Overrides
isInitialized()
public final boolean isInitialized()
Returns
Overrides
newBuilder()
public static SparkSqlJob.Builder newBuilder()
Returns
newBuilder(SparkSqlJob prototype)
public static SparkSqlJob.Builder newBuilder(SparkSqlJob prototype)
Parameter
Returns
newBuilderForType()
public SparkSqlJob.Builder newBuilderForType()
Returns
newBuilderForType(GeneratedMessageV3.BuilderParent parent)
protected SparkSqlJob.Builder newBuilderForType(GeneratedMessageV3.BuilderParent parent)
Parameter
Returns
Overrides
newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
protected Object newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
Parameter
Returns
Overrides
parseDelimitedFrom(InputStream input)
public static SparkSqlJob parseDelimitedFrom(InputStream input)
Parameter
Returns
Exceptions
parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(byte[] data)
public static SparkSqlJob parseFrom(byte[] data)
Parameter
Name | Description
---- | -----------
data | byte[]
Returns
Exceptions
parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(ByteString data)
public static SparkSqlJob parseFrom(ByteString data)
Parameter
Returns
Exceptions
parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(CodedInputStream input)
public static SparkSqlJob parseFrom(CodedInputStream input)
Parameter
Returns
Exceptions
parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(InputStream input)
public static SparkSqlJob parseFrom(InputStream input)
Parameter
Returns
Exceptions
parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(ByteBuffer data)
public static SparkSqlJob parseFrom(ByteBuffer data)
Parameter
Returns
Exceptions
parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
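The parseFrom overloads above pair naturally with the message's serialized form; the round-trip sketch below assumes toByteArray(), inherited from the protobuf message base classes, and is illustrative rather than prescribed by this reference.

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;
import com.google.protobuf.InvalidProtocolBufferException;

class SerializationRoundTrip {
  static SparkSqlJob roundTrip(SparkSqlJob job) throws InvalidProtocolBufferException {
    // Serialize to bytes, then restore an equal SparkSqlJob instance.
    byte[] bytes = job.toByteArray();
    return SparkSqlJob.parseFrom(bytes);
  }
}
```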
parser()
public static Parser<SparkSqlJob> parser()
Returns
toBuilder()
public SparkSqlJob.Builder toBuilder()
Returns
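Because generated messages are immutable, toBuilder() is the usual way to derive a modified copy; the helper below is a hypothetical example, not part of the generated API.

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;

class ToBuilderExample {
  static SparkSqlJob withExtraProperty(SparkSqlJob original, String key, String value) {
    // Copy the current state into a Builder, add one property, and build a
    // new instance; the original message is left unchanged.
    return original.toBuilder()
        .putProperties(key, value)
        .build();
  }
}
```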
writeTo(CodedOutputStream output)
public void writeTo(CodedOutputStream output)
Parameter
Overrides
Exceptions