public final class SparkSqlJob extends GeneratedMessageV3 implements SparkSqlJobOrBuilder
A Dataproc job for running Apache Spark SQL queries.
Protobuf type google.cloud.dataproc.v1.SparkSqlJob
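Instances are immutable and are created through the nested SparkSqlJob.Builder. A minimal construction sketch; the builder setters follow the standard protobuf-generated naming (set/put/add plus the field name), and the bucket paths, variable names, and property values below are placeholders:

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;

public class BuildSparkSqlJob {
  public static void main(String[] args) {
    SparkSqlJob job = SparkSqlJob.newBuilder()
        // Hypothetical HCFS location of the SQL script.
        .setQueryFileUri("gs://my-bucket/queries.sql")
        // Substituted like the Spark SQL command: SET run_date="2024-01-01";
        .putScriptVariables("run_date", "2024-01-01")
        // Forwarded to Spark SQL's SparkConf.
        .putProperties("spark.sql.shuffle.partitions", "100")
        // Extra jar added to the Spark CLASSPATH.
        .addJarFileUris("gs://my-bucket/udfs.jar")
        .build();
    System.out.println(job);
  }
}
```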
Static Fields
JAR_FILE_URIS_FIELD_NUMBER
public static final int JAR_FILE_URIS_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
LOGGING_CONFIG_FIELD_NUMBER
public static final int LOGGING_CONFIG_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
PROPERTIES_FIELD_NUMBER
public static final int PROPERTIES_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
QUERY_FILE_URI_FIELD_NUMBER
public static final int QUERY_FILE_URI_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
QUERY_LIST_FIELD_NUMBER
public static final int QUERY_LIST_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
SCRIPT_VARIABLES_FIELD_NUMBER
public static final int SCRIPT_VARIABLES_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
Static Methods
getDefaultInstance()
public static SparkSqlJob getDefaultInstance()
getDescriptor()
public static final Descriptors.Descriptor getDescriptor()
newBuilder()
public static SparkSqlJob.Builder newBuilder()
newBuilder(SparkSqlJob prototype)
public static SparkSqlJob.Builder newBuilder(SparkSqlJob prototype)
parseDelimitedFrom(InputStream input)
public static SparkSqlJob parseDelimitedFrom(InputStream input)
parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(byte[] data)
public static SparkSqlJob parseFrom(byte[] data)
Parameter
| Name | Description |
| --- | --- |
| data | byte[] |
parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
parseFrom(ByteString data)
public static SparkSqlJob parseFrom(ByteString data)
parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
parseFrom(CodedInputStream input)
public static SparkSqlJob parseFrom(CodedInputStream input)
parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(InputStream input)
public static SparkSqlJob parseFrom(InputStream input)
parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(ByteBuffer data)
public static SparkSqlJob parseFrom(ByteBuffer data)
parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
public static SparkSqlJob parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
parser()
public static Parser<SparkSqlJob> parser()
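A quick serialize-and-parse round trip; parseFrom(byte[]) throws InvalidProtocolBufferException if the bytes are not a valid SparkSqlJob message. The URI below is a placeholder:

```java
import com.google.cloud.dataproc.v1.SparkSqlJob;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParseSparkSqlJob {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    SparkSqlJob original = SparkSqlJob.newBuilder()
        .setQueryFileUri("gs://my-bucket/queries.sql") // placeholder URI
        .build();

    // Serialize to the wire format and parse it back.
    byte[] bytes = original.toByteArray();
    SparkSqlJob parsed = SparkSqlJob.parseFrom(bytes);

    System.out.println(parsed.equals(original)); // true
  }
}
```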
Methods
containsProperties(String key)
public boolean containsProperties(String key)
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameter
| Name | Description |
| --- | --- |
| key | String |
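A read-side sketch for the properties map, assuming an existing SparkSqlJob instance named job; the property keys are placeholders:

```java
// Fall back to a default when the key may be absent.
String partitions = job.getPropertiesOrDefault("spark.sql.shuffle.partitions", "200");

// Or guard with containsProperties before getPropertiesOrThrow,
// which throws IllegalArgumentException for missing keys.
if (job.containsProperties("spark.executor.memory")) {
  String memory = job.getPropertiesOrThrow("spark.executor.memory");
  System.out.println(memory);
}
```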
containsScriptVariables(String key)
public boolean containsScriptVariables(String key)
Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameter
| Name | Description |
| --- | --- |
| key | String |
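A sketch that prints each script variable as its Spark SQL equivalent, assuming an existing SparkSqlJob instance named job:

```java
// Each entry behaves like the Spark SQL statement: SET <name>="<value>";
job.getScriptVariablesMap().forEach(
    (name, value) -> System.out.printf("SET %s=\"%s\";%n", name, value));
```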
equals(Object obj)
public boolean equals(Object obj)
Parameter
| Name | Description |
| --- | --- |
| obj | Object |
Overrides
getDefaultInstanceForType()
public SparkSqlJob getDefaultInstanceForType()
getJarFileUris(int index)
public String getJarFileUris(int index)
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
Parameter
| Name | Description |
| --- | --- |
| index | int. The index of the element to return. |
Returns
| Type | Description |
| --- | --- |
| String | The jarFileUris at the given index. |
getJarFileUrisBytes(int index)
public ByteString getJarFileUrisBytes(int index)
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
Parameter
| Name | Description |
| --- | --- |
| index | int. The index of the value to return. |
Returns
| Type | Description |
| --- | --- |
| ByteString | The bytes of the jarFileUris at the given index. |
getJarFileUrisCount()
public int getJarFileUrisCount()
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
Returns
| Type | Description |
| --- | --- |
| int | The count of jarFileUris. |
getJarFileUrisList()
public ProtocolStringList getJarFileUrisList()
Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];
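A small iteration sketch over the jar URIs, assuming an existing SparkSqlJob instance named job:

```java
// getJarFileUrisList() returns a read-only ProtocolStringList view.
for (String jarUri : job.getJarFileUrisList()) {
  System.out.println(jarUri);
}

// Index-based access mirrors the list: valid indexes are 0 to getJarFileUrisCount() - 1.
if (job.getJarFileUrisCount() > 0) {
  String first = job.getJarFileUris(0);
  System.out.println(first);
}
```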
getLoggingConfig()
public LoggingConfig getLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
getLoggingConfigOrBuilder()
public LoggingConfigOrBuilder getLoggingConfigOrBuilder()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
getParserForType()
public Parser<SparkSqlJob> getParserForType()
Overrides
getProperties()
public Map<String,String> getProperties()
getPropertiesCount()
public int getPropertiesCount()
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Returns
| Type | Description |
| --- | --- |
| int | |
getPropertiesMap()
public Map<String,String> getPropertiesMap()
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
getPropertiesOrDefault(String key, String defaultValue)
public String getPropertiesOrDefault(String key, String defaultValue)
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
getPropertiesOrThrow(String key)
public String getPropertiesOrThrow(String key)
Optional. A mapping of property names to values, used to configure
Spark SQL's SparkConf. Properties that conflict with values set by the
Dataproc API may be overwritten.
map<string, string> properties = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameter
| Name | Description |
| --- | --- |
| key | String |
getQueriesCase()
public SparkSqlJob.QueriesCase getQueriesCase()
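query_file_uri and query_list belong to the queries oneof, so at most one of them is set; getQueriesCase() reports which. A dispatch sketch, assuming an existing SparkSqlJob instance named job, the standard generated case-constant names, and that QueryList exposes its repeated queries field via getQueriesList():

```java
switch (job.getQueriesCase()) {
  case QUERY_FILE_URI:
    System.out.println("Script: " + job.getQueryFileUri());
    break;
  case QUERY_LIST:
    System.out.println("Inline queries: " + job.getQueryList().getQueriesList());
    break;
  case QUERIES_NOT_SET:
    System.out.println("No queries configured.");
    break;
}
```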
getQueryFileUri()
public String getQueryFileUri()
The HCFS URI of the script that contains SQL queries.
string query_file_uri = 1;
Returns
| Type | Description |
| --- | --- |
| String | The queryFileUri. |
getQueryFileUriBytes()
public ByteString getQueryFileUriBytes()
The HCFS URI of the script that contains SQL queries.
string query_file_uri = 1;
Returns
| Type | Description |
| --- | --- |
| ByteString | The bytes for queryFileUri. |
getQueryList()
public QueryList getQueryList()
A list of queries.
.google.cloud.dataproc.v1.QueryList query_list = 2;
Returns
| Type | Description |
| --- | --- |
| QueryList | The queryList. |
getQueryListOrBuilder()
public QueryListOrBuilder getQueryListOrBuilder()
A list of queries.
.google.cloud.dataproc.v1.QueryList query_list = 2;
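A construction sketch using inline queries instead of a script file, assuming the dataproc v1 classes (SparkSqlJob, QueryList) are imported; addQueries is assumed from QueryList's repeated string queries field, and the SQL text is a placeholder:

```java
SparkSqlJob job = SparkSqlJob.newBuilder()
    .setQueryList(
        QueryList.newBuilder()
            .addQueries("SHOW DATABASES;")
            .addQueries("SELECT COUNT(*) FROM my_table;")
            .build())
    .build();
```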
getScriptVariables()
public Map<String,String> getScriptVariables()
getScriptVariablesCount()
public int getScriptVariablesCount()
Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Returns
| Type | Description |
| --- | --- |
| int | |
getScriptVariablesMap()
public Map<String,String> getScriptVariablesMap()
Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
getScriptVariablesOrDefault(String key, String defaultValue)
public String getScriptVariablesOrDefault(String key, String defaultValue)
Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
getScriptVariablesOrThrow(String key)
public String getScriptVariablesOrThrow(String key)
Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
map<string, string> script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameter
| Name | Description |
| --- | --- |
| key | String |
getSerializedSize()
public int getSerializedSize()
Returns
| Type | Description |
| --- | --- |
| int | |
Overrides
getUnknownFields()
public final UnknownFieldSet getUnknownFields()
Overrides
hasLoggingConfig()
public boolean hasLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
Returns
| Type | Description |
| --- | --- |
| boolean | Whether the loggingConfig field is set. |
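A guarded-access sketch, assuming an existing SparkSqlJob instance named job; getLoggingConfig() returns the default LoggingConfig instance when the field is unset, so check hasLoggingConfig() first:

```java
if (job.hasLoggingConfig()) {
  LoggingConfig loggingConfig = job.getLoggingConfig();
  System.out.println(loggingConfig);
}
```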
hasQueryFileUri()
public boolean hasQueryFileUri()
The HCFS URI of the script that contains SQL queries.
string query_file_uri = 1;
Returns
| Type | Description |
| --- | --- |
| boolean | Whether the queryFileUri field is set. |
hasQueryList()
public boolean hasQueryList()
A list of queries.
.google.cloud.dataproc.v1.QueryList query_list = 2;
Returns
| Type | Description |
| --- | --- |
| boolean | Whether the queryList field is set. |
hashCode()
public int hashCode()
Returns
| Type | Description |
| --- | --- |
| int | |
Overrides
internalGetFieldAccessorTable()
protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Overrides
internalGetMapField(int number)
protected MapField internalGetMapField(int number)
Parameter
| Name | Description |
| --- | --- |
| number | int |
Overrides
isInitialized()
public final boolean isInitialized()
Overrides
newBuilderForType()
public SparkSqlJob.Builder newBuilderForType()
newBuilderForType(GeneratedMessageV3.BuilderParent parent)
protected SparkSqlJob.Builder newBuilderForType(GeneratedMessageV3.BuilderParent parent)
Overrides
newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
protected Object newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
Overrides
toBuilder()
public SparkSqlJob.Builder toBuilder()
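Because messages are immutable, toBuilder() is the idiomatic way to derive a modified copy. A sketch, assuming an existing SparkSqlJob instance named job; the property override is a placeholder:

```java
SparkSqlJob updated = job.toBuilder()
    .putProperties("spark.executor.memory", "4g")
    .build();
```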
writeTo(CodedOutputStream output)
public void writeTo(CodedOutputStream output)
Overrides