安排从 Amazon Redshift 到 BigQuery 的周期性加载作业。
深入探索
如需查看包含此代码示例的详细文档,请参阅以下内容:
代码示例
Java
试用此示例之前,请按照 BigQuery 快速入门:使用客户端库中的 Java 设置说明进行操作。如需了解详情,请参阅 BigQuery Java API 参考文档。
如需向 BigQuery 进行身份验证,请设置应用默认凭据。 如需了解详情,请参阅为客户端库设置身份验证。
import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * Sample: creates a BigQuery Data Transfer Service config that schedules a
 * recurring (every 24 hours) load job from Amazon Redshift into BigQuery,
 * using the "redshift" data source.
 */
public class CreateRedshiftTransfer {

  public static void main(String[] args) throws IOException {
    // TODO(developer): Replace these variables before running the sample.
    final String projectId = "MY_PROJECT_ID";
    String datasetId = "MY_DATASET_ID";
    String datasetRegion = "US";
    String jdbcUrl = "MY_JDBC_URL_CONNECTION_REDSHIFT";
    String dbUserName = "MY_USERNAME";
    String dbPassword = "MY_PASSWORD";
    String accessKeyId = "MY_AWS_ACCESS_KEY_ID";
    // AWS secret access key (paired with the access key id above).
    String secretAccessKey = "MY_AWS_SECRET_ACCESS_ID";
    String s3Bucket = "MY_S3_BUCKET_URI";
    String redShiftSchema = "MY_REDSHIFT_SCHEMA";
    String tableNamePatterns = "*";
    String vpcAndReserveIpRange = "MY_VPC_AND_IP_RANGE";

    // Connection/migration parameters expected by the "redshift" data source.
    // Keys must match the data source's parameter names exactly.
    Map<String, Value> params = new HashMap<>();
    params.put("jdbc_url", Value.newBuilder().setStringValue(jdbcUrl).build());
    params.put("database_username", Value.newBuilder().setStringValue(dbUserName).build());
    params.put("database_password", Value.newBuilder().setStringValue(dbPassword).build());
    params.put("access_key_id", Value.newBuilder().setStringValue(accessKeyId).build());
    params.put("secret_access_key", Value.newBuilder().setStringValue(secretAccessKey).build());
    params.put("s3_bucket", Value.newBuilder().setStringValue(s3Bucket).build());
    params.put("redshift_schema", Value.newBuilder().setStringValue(redShiftSchema).build());
    params.put("table_name_patterns", Value.newBuilder().setStringValue(tableNamePatterns).build());
    params.put(
        "migration_infra_cidr", Value.newBuilder().setStringValue(vpcAndReserveIpRange).build());

    TransferConfig transferConfig =
        TransferConfig.newBuilder()
            .setDestinationDatasetId(datasetId)
            .setDatasetRegion(datasetRegion)
            .setDisplayName("Your Redshift Config Name")
            .setDataSourceId("redshift")
            .setParams(Struct.newBuilder().putAllFields(params).build())
            .setSchedule("every 24 hours")
            .build();

    createRedshiftTransfer(projectId, transferConfig);
  }

  /**
   * Creates the given transfer config under the project via the Data Transfer
   * Service API and prints the resulting config resource name.
   *
   * @param projectId the Google Cloud project that owns the transfer config
   * @param transferConfig a fully built Redshift {@link TransferConfig}
   * @throws IOException if the service client cannot be created
   */
  public static void createRedshiftTransfer(String projectId, TransferConfig transferConfig)
      throws IOException {
    // try-with-resources ensures the gRPC client is shut down cleanly.
    try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
      ProjectName parent = ProjectName.of(projectId);
      CreateTransferConfigRequest request =
          CreateTransferConfigRequest.newBuilder()
              .setParent(parent.toString())
              .setTransferConfig(transferConfig)
              .build();
      TransferConfig config = client.createTransferConfig(request);
      System.out.println("Cloud redshift transfer created successfully :" + config.getName());
    } catch (ApiException ex) {
      // Sample-style best-effort reporting; println (not print) so the message
      // is newline-terminated, with a separator before the exception details.
      System.out.println("Cloud redshift transfer was not created. " + ex.toString());
    }
  }
}
后续步骤
如需搜索和过滤其他 Google Cloud 产品的代码示例,请参阅 Google Cloud 示例浏览器。