public final class DataSourceDefinition extends GeneratedMessageV3 implements DataSourceDefinitionOrBuilder
Represents the data source definition.
Protobuf type google.cloud.bigquery.datatransfer.v1.DataSourceDefinition
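Because DataSourceDefinition is a generated protobuf message, instances are normally constructed through the generated builder. A minimal construction sketch, assuming the com.google.cloud.bigquery.datatransfer.v1 package; all resource names and e-mail addresses below are illustrative placeholders:

import com.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition;
import com.google.protobuf.Duration;

public class DataSourceDefinitionExample {
  public static void main(String[] args) {
    // All values below are placeholders, not real resources.
    DataSourceDefinition definition =
        DataSourceDefinition.newBuilder()
            .setName("projects/my-project/locations/us/dataSourceDefinitions/my_source")
            .setServiceAccount("connector-sa@my-project.iam.gserviceaccount.com")
            .setSupportEmail("support@example.com")
            .addSupportedLocationIds("us")
            .setRunTimeOffset(Duration.newBuilder().setSeconds(30L * 60L).build())
            .setDisabled(false)
            .build();
    System.out.println(definition.getName());
  }
}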
Fields
DATA_SOURCE_FIELD_NUMBER
public static final int DATA_SOURCE_FIELD_NUMBER
Field Value
DISABLED_FIELD_NUMBER
public static final int DISABLED_FIELD_NUMBER
Field Value
NAME_FIELD_NUMBER
public static final int NAME_FIELD_NUMBER
Field Value
RUN_TIME_OFFSET_FIELD_NUMBER
public static final int RUN_TIME_OFFSET_FIELD_NUMBER
Field Value
SERVICE_ACCOUNT_FIELD_NUMBER
public static final int SERVICE_ACCOUNT_FIELD_NUMBER
Field Value
SUPPORTED_LOCATION_IDS_FIELD_NUMBER
public static final int SUPPORTED_LOCATION_IDS_FIELD_NUMBER
Field Value
SUPPORT_EMAIL_FIELD_NUMBER
public static final int SUPPORT_EMAIL_FIELD_NUMBER
Field Value
TRANSFER_CONFIG_PUBSUB_TOPIC_FIELD_NUMBER
public static final int TRANSFER_CONFIG_PUBSUB_TOPIC_FIELD_NUMBER
Field Value
TRANSFER_RUN_PUBSUB_TOPIC_FIELD_NUMBER
public static final int TRANSFER_RUN_PUBSUB_TOPIC_FIELD_NUMBER
Field Value
Methods
equals(Object obj)
public boolean equals(Object obj)
Parameter
Returns
Overrides
getDataSource()
public DataSource getDataSource()
Data source metadata.
.google.cloud.bigquery.datatransfer.v1.DataSource data_source = 1;
Returns
getDataSourceOrBuilder()
public DataSourceOrBuilder getDataSourceOrBuilder()
Data source metadata.
.google.cloud.bigquery.datatransfer.v1.DataSource data_source = 1;
Returns
getDefaultInstance()
public static DataSourceDefinition getDefaultInstance()
Returns
getDefaultInstanceForType()
public DataSourceDefinition getDefaultInstanceForType()
Returns
getDescriptor()
public static final Descriptors.Descriptor getDescriptor()
Returns
getDisabled()
public boolean getDisabled()
Is the data source disabled? If true, data_source is not visible.
The API will also stop returning any data transfer configs and/or runs
associated with the data source. This setting has higher priority
than whitelisted_project_ids.
bool disabled = 5;
Returns
getName()
public String getName()
The resource name of the data source definition.
Data source definition names have the form
projects/{project_id}/locations/{location}/dataSourceDefinitions/{data_source_id}.
string name = 21;
Returns
getNameBytes()
public ByteString getNameBytes()
The resource name of the data source definition.
Data source definition names have the form
projects/{project_id}/locations/{location}/dataSourceDefinitions/{data_source_id}.
string name = 21;
Returns
getParserForType()
public Parser<DataSourceDefinition> getParserForType()
Returns
Overrides
getRunTimeOffset()
public Duration getRunTimeOffset()
Duration which should be added to schedule_time to calculate
run_time when a job is scheduled. Only applicable for automatically
scheduled transfer runs. Used to start a run early on a data source that
supports continuous data refresh to compensate for unknown timezone
offsets. Use a negative number to start a run late for data sources not
supporting continuous data refresh.
.google.protobuf.Duration run_time_offset = 16;
Returns
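The offset is a plain protobuf Duration. A minimal sketch of setting it on an existing message (the 30-minute value is illustrative only; the field comment above governs how the sign is interpreted):

import com.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition;
import com.google.protobuf.Duration;

public class RunTimeOffsetExample {
  // Sketch only: run_time_offset is a Duration added to schedule_time
  // to derive run_time, as described in the field comment.
  static DataSourceDefinition withThirtyMinuteOffset(DataSourceDefinition definition) {
    Duration thirtyMinutes = Duration.newBuilder().setSeconds(30L * 60L).build();
    return definition.toBuilder().setRunTimeOffset(thirtyMinutes).build();
  }
}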
getRunTimeOffsetOrBuilder()
public DurationOrBuilder getRunTimeOffsetOrBuilder()
Duration which should be added to schedule_time to calculate
run_time when a job is scheduled. Only applicable for automatically
scheduled transfer runs. Used to start a run early on a data source that
supports continuous data refresh to compensate for unknown timezone
offsets. Use a negative number to start a run late for data sources not
supporting continuous data refresh.
.google.protobuf.Duration run_time_offset = 16;
Returns
getSerializedSize()
public int getSerializedSize()
Returns
Overrides
getServiceAccount()
public String getServiceAccount()
When a service account is specified, BigQuery will share the created dataset
with the given service account. Also, this service account will be
eligible to perform status updates and message logging for data transfer
runs for the corresponding data_source_id.
string service_account = 2;
Returns
getServiceAccountBytes()
public ByteString getServiceAccountBytes()
When a service account is specified, BigQuery will share the created dataset
with the given service account. Also, this service account will be
eligible to perform status updates and message logging for data transfer
runs for the corresponding data_source_id.
string service_account = 2;
Returns
getSupportEmail()
public String getSupportEmail()
Support e-mail address of the OAuth client's Brand, which contains the
consent screen data.
string support_email = 22;
Returns
getSupportEmailBytes()
public ByteString getSupportEmailBytes()
Support e-mail address of the OAuth client's Brand, which contains the
consent screen data.
string support_email = 22;
Returns
getSupportedLocationIds(int index)
public String getSupportedLocationIds(int index)
Supported location_ids used for deciding in which locations Pub/Sub topics
need to be created. If custom Pub/Sub topics are used and they contain
'{location}', the location_ids will be used for validating the topics by
replacing the '{location}' with the individual location in the list. The
valid values are the "location_id" field of the response of GET
https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations
In addition, if the data source needs to support all available regions,
supported_location_ids can be set to "global" (a single string element).
When "global" is specified:
1) the data source implementation is supposed to stage the data in the proper
region of the destination dataset;
2) the data source developer should be aware of the implications (e.g., network
traffic latency, potential charges associated with cross-region traffic,
etc.) of supporting the "global" region;
repeated string supported_location_ids = 23;
Parameter
Returns
getSupportedLocationIdsBytes(int index)
public ByteString getSupportedLocationIdsBytes(int index)
Supported location_ids used for deciding in which locations Pub/Sub topics
need to be created. If custom Pub/Sub topics are used and they contain
'{location}', the location_ids will be used for validating the topics by
replacing the '{location}' with the individual location in the list. The
valid values are the "location_id" field of the response of GET
https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations
In addition, if the data source needs to support all available regions,
supported_location_ids can be set to "global" (a single string element).
When "global" is specified:
1) the data source implementation is supposed to stage the data in the proper
region of the destination dataset;
2) the data source developer should be aware of the implications (e.g., network
traffic latency, potential charges associated with cross-region traffic,
etc.) of supporting the "global" region;
repeated string supported_location_ids = 23;
Parameter
Returns
getSupportedLocationIdsCount()
public int getSupportedLocationIdsCount()
Supported location_ids used for deciding in which locations Pub/Sub topics
need to be created. If custom Pub/Sub topics are used and they contain
'{location}', the location_ids will be used for validating the topics by
replacing the '{location}' with the individual location in the list. The
valid values are the "location_id" field of the response of GET
https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations
In addition, if the data source needs to support all available regions,
supported_location_ids can be set to "global" (a single string element).
When "global" is specified:
1) the data source implementation is supposed to stage the data in the proper
region of the destination dataset;
2) the data source developer should be aware of the implications (e.g., network
traffic latency, potential charges associated with cross-region traffic,
etc.) of supporting the "global" region;
repeated string supported_location_ids = 23;
Returns
getSupportedLocationIdsList()
public ProtocolStringList getSupportedLocationIdsList()
Supported location_ids used for deciding in which locations Pub/Sub topics
need to be created. If custom Pub/Sub topics are used and they contain
'{location}', the location_ids will be used for validating the topics by
replacing the '{location}' with the individual location in the list. The
valid values are the "location_id" field of the response of GET
https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations
In addition, if the data source needs to support all available regions,
supported_location_ids can be set to "global" (a single string element).
When "global" is specified:
1) the data source implementation is supposed to stage the data in the proper
region of the destination dataset;
2) the data source developer should be aware of the implications (e.g., network
traffic latency, potential charges associated with cross-region traffic,
etc.) of supporting the "global" region;
repeated string supported_location_ids = 23;
Returns
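The returned ProtocolStringList implements List&lt;String&gt;, so the repeated field can be iterated directly. A short sketch checking for the special "global" value described in the field comment above:

import com.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition;

public class SupportedLocationsExample {
  // Sketch only: returns true if the definition declares the single
  // "global" location rather than an explicit list of location_ids.
  static boolean supportsAllRegions(DataSourceDefinition definition) {
    for (String locationId : definition.getSupportedLocationIdsList()) {
      if ("global".equals(locationId)) {
        return true;
      }
    }
    return false;
  }
}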
getTransferConfigPubsubTopic()
public String getTransferConfigPubsubTopic()
The Pub/Sub topic to use for broadcasting a message for transfer config. If
empty, a message will not be broadcasted. Both this topic and
transfer_run_pubsub_topic are auto-generated if none of them is provided
when creating the definition. It is recommended to provide
transfer_config_pubsub_topic if a user-owned transfer_run_pubsub_topic is
provided. Otherwise, it will be set to empty. If "{location}" is found in
the value, it means the data source wants to handle messages separately
for datasets in different regions; {location} will be replaced with the
actual dataset location to form the actual topic name. For example,
projects/connector/topics/scheduler-{location} could become
projects/connector/topics/scheduler-us. If "{location}" is not found, then
we will use the input value as topic name.
string transfer_config_pubsub_topic = 12;
Returns
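The "{location}" substitution is performed by the BigQuery Data Transfer service, not by this class; the sketch below only illustrates the effect described in the field comment, using the example topic name from that comment:

public class TopicPlaceholderExample {
  public static void main(String[] args) {
    // Illustration only: a template containing "{location}" is expanded
    // per dataset location; otherwise the value is used verbatim.
    String template = "projects/connector/topics/scheduler-{location}";
    String datasetLocation = "us";
    String topic = template.contains("{location}")
        ? template.replace("{location}", datasetLocation)
        : template;
    System.out.println(topic); // projects/connector/topics/scheduler-us
  }
}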
getTransferConfigPubsubTopicBytes()
public ByteString getTransferConfigPubsubTopicBytes()
The Pub/Sub topic to use for broadcasting a message for transfer config. If
empty, a message will not be broadcasted. Both this topic and
transfer_run_pubsub_topic are auto-generated if none of them is provided
when creating the definition. It is recommended to provide
transfer_config_pubsub_topic if a user-owned transfer_run_pubsub_topic is
provided. Otherwise, it will be set to empty. If "{location}" is found in
the value, it means the data source wants to handle messages separately
for datasets in different regions; {location} will be replaced with the
actual dataset location to form the actual topic name. For example,
projects/connector/topics/scheduler-{location} could become
projects/connector/topics/scheduler-us. If "{location}" is not found, then
we will use the input value as topic name.
string transfer_config_pubsub_topic = 12;
Returns
getTransferRunPubsubTopic()
public String getTransferRunPubsubTopic()
The Pub/Sub topic to be used for broadcasting a message when a transfer run
is created. Both this topic and transfer_config_pubsub_topic can be
set to a custom topic. By default, both topics are auto-generated if none
of them is provided when creating the definition. However, if one topic is
manually set, the other topic has to be manually set as well. The only
difference is that transfer_run_pubsub_topic must be a non-empty Pub/Sub
topic, but transfer_config_pubsub_topic can be set to empty. The comments
about "{location}" for transfer_config_pubsub_topic apply here too.
string transfer_run_pubsub_topic = 13;
Returns
getTransferRunPubsubTopicBytes()
public ByteString getTransferRunPubsubTopicBytes()
The Pub/Sub topic to be used for broadcasting a message when a transfer run
is created. Both this topic and transfer_config_pubsub_topic can be
set to a custom topic. By default, both topics are auto-generated if none
of them is provided when creating the definition. However, if one topic is
manually set, the other topic has to be manually set as well. The only
difference is that transfer_run_pubsub_topic must be a non-empty Pub/Sub
topic, but transfer_config_pubsub_topic can be set to empty. The comments
about "{location}" for transfer_config_pubsub_topic apply here too.
string transfer_run_pubsub_topic = 13;
Returns
getUnknownFields()
public final UnknownFieldSet getUnknownFields()
Returns
Overrides
hasDataSource()
public boolean hasDataSource()
Data source metadata.
.google.cloud.bigquery.datatransfer.v1.DataSource data_source = 1;
Returns
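Message-typed fields such as data_source and run_time_offset have has* accessors, while the corresponding get* methods return a default instance when the field is unset. A minimal sketch (getDataSourceId() is assumed from the DataSource message):

import com.google.cloud.bigquery.datatransfer.v1.DataSource;
import com.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition;

public class MessageFieldExample {
  // Sketch only: guard get* with has* for message-typed fields.
  static String describeDataSource(DataSourceDefinition definition) {
    if (definition.hasDataSource()) {
      DataSource dataSource = definition.getDataSource();
      return dataSource.getDataSourceId();
    }
    return "(data_source not set)";
  }
}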
hasRunTimeOffset()
public boolean hasRunTimeOffset()
Duration which should be added to schedule_time to calculate
run_time when a job is scheduled. Only applicable for automatically
scheduled transfer runs. Used to start a run early on a data source that
supports continuous data refresh to compensate for unknown timezone
offsets. Use a negative number to start a run late for data sources not
supporting continuous data refresh.
.google.protobuf.Duration run_time_offset = 16;
Returns
hashCode()
public int hashCode()
Returns
Overrides
internalGetFieldAccessorTable()
protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Returns
Overrides
isInitialized()
public final boolean isInitialized()
Returns
Overrides
newBuilder()
public static DataSourceDefinition.Builder newBuilder()
Returns
newBuilder(DataSourceDefinition prototype)
public static DataSourceDefinition.Builder newBuilder(DataSourceDefinition prototype)
Parameter
Returns
newBuilderForType()
public DataSourceDefinition.Builder newBuilderForType()
Returns
newBuilderForType(GeneratedMessageV3.BuilderParent parent)
protected DataSourceDefinition.Builder newBuilderForType(GeneratedMessageV3.BuilderParent parent)
Parameter
Returns
Overrides
parseDelimitedFrom(InputStream input)
public static DataSourceDefinition parseDelimitedFrom(InputStream input)
Parameter
Returns
Exceptions
parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static DataSourceDefinition parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(byte[] data)
public static DataSourceDefinition parseFrom(byte[] data)
Parameter
Name | Description
data | byte[]
Returns
Exceptions
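A minimal round-trip sketch: serialize a message with toByteArray() (inherited from the protobuf runtime) and parse it back with parseFrom(byte[]); the resource name below is a placeholder:

import com.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParseFromExample {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    DataSourceDefinition original =
        DataSourceDefinition.newBuilder()
            .setName("projects/my-project/locations/us/dataSourceDefinitions/my_source")
            .build();
    // Serialize to bytes, then parse back into an equal message.
    byte[] bytes = original.toByteArray();
    DataSourceDefinition parsed = DataSourceDefinition.parseFrom(bytes);
    System.out.println(parsed.equals(original)); // true
  }
}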
parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
public static DataSourceDefinition parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(ByteString data)
public static DataSourceDefinition parseFrom(ByteString data)
Parameter
Returns
Exceptions
parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
public static DataSourceDefinition parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(CodedInputStream input)
public static DataSourceDefinition parseFrom(CodedInputStream input)
Parameter
Returns
Exceptions
parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public static DataSourceDefinition parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(InputStream input)
public static DataSourceDefinition parseFrom(InputStream input)
Parameter
Returns
Exceptions
parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static DataSourceDefinition parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parseFrom(ByteBuffer data)
public static DataSourceDefinition parseFrom(ByteBuffer data)
Parameter
Returns
Exceptions
parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
public static DataSourceDefinition parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
Parameters
Returns
Exceptions
parser()
public static Parser<DataSourceDefinition> parser()
Returns
toBuilder()
public DataSourceDefinition.Builder toBuilder()
Returns
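Because the message is immutable, toBuilder() is the usual way to produce a modified copy. A minimal sketch:

import com.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition;

public class ToBuilderExample {
  // Sketch only: toBuilder() copies the message into a builder so a
  // modified copy can be built without mutating the original.
  static DataSourceDefinition disable(DataSourceDefinition definition) {
    return definition.toBuilder().setDisabled(true).build();
  }
}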
writeTo(CodedOutputStream output)
public void writeTo(CodedOutputStream output)
Parameter
Overrides
Exceptions