Example: Using the HBase API in a Java "Hello World" application

This example is a very simple "hello world" application that uses the Cloud Bigtable HBase client library for Java to illustrate how to do the following:

  • Connect to a Cloud Bigtable instance.
  • Create a new table.
  • Write data to the table.
  • Read the data back.
  • Delete the table.

Running the sample program

This example uses the HBase API to communicate with Cloud Bigtable. You can find the code for this example in the java/hello-world directory of the GoogleCloudPlatform/cloud-bigtable-examples GitHub repository.

To run this sample program, follow the instructions on the GitHub page.

Using the HBase API

The sample application connects to Cloud Bigtable and demonstrates some simple operations.

Installing and importing the client library

This sample uses the Cloud Bigtable HBase client for Java, as well as Maven. See the instructions for using the client libraries.

The sample imports the following libraries:

import com.google.cloud.bigtable.hbase.BigtableConfiguration;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

Connecting to Cloud Bigtable

Use the BigtableConfiguration class to connect to Cloud Bigtable.

// Create the Bigtable connection, use try-with-resources to make sure it gets closed
try (Connection connection = BigtableConfiguration.connect(projectId, instanceId)) {

  // The admin API lets us create, manage and delete tables
  Admin admin = connection.getAdmin();
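
If you want to adjust HBase settings before opening the connection, the client also lets you build the configuration first and create the connection yourself. The following is a minimal sketch of that alternative; it assumes the BigtableConfiguration.configure method and the standard HBase ConnectionFactory, and it needs two extra imports (org.apache.hadoop.conf.Configuration and org.apache.hadoop.hbase.client.ConnectionFactory).

// Build a Configuration first, then open the connection explicitly.
// Additional HBase client settings could be applied to config before connecting.
Configuration config = BigtableConfiguration.configure(projectId, instanceId);

try (Connection connection = ConnectionFactory.createConnection(config)) {
  Admin admin = connection.getAdmin();
  // ... use the connection exactly as in the rest of this example ...
}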

Creating a table

Use the Admin API to create the table.

// Create a table with a single column family
HTableDescriptor descriptor = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
descriptor.addFamily(new HColumnDescriptor(COLUMN_FAMILY_NAME));

print("Create table " + descriptor.getNameAsString());
admin.createTable(descriptor);
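
The column family above is created with its default settings. If, for example, you want to keep only the most recent version of each cell, you can configure the family before creating the table. The following is a minimal sketch of that variation, reusing the TABLE_NAME and COLUMN_FAMILY_NAME constants defined in this sample:

// Keep only the most recent version of each cell in this column family.
HColumnDescriptor family = new HColumnDescriptor(COLUMN_FAMILY_NAME);
family.setMaxVersions(1);

HTableDescriptor tableWithGc = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
tableWithGc.addFamily(family);
admin.createTable(tableWithGc);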

Writing rows to a table

Use the Table class to put rows into the table.

// Retrieve the table we just created so we can do some reads and writes
Table table = connection.getTable(TableName.valueOf(TABLE_NAME));

// Write some rows to the table
print("Write some greetings to the table");
for (int i = 0; i < GREETINGS.length; i++) {
  // Each row has a unique row key.
  //
  // Note: This example uses sequential numeric IDs for simplicity, but
  // this can result in poor performance in a production application.
  // Since rows are stored in sorted order by key, sequential keys can
  // result in poor distribution of operations across nodes.
  //
  // For more information about how to design a Bigtable schema for the
  // best performance, see the documentation:
  //
  //     https://cloud.google.com/bigtable/docs/schema-design
  String rowKey = "greeting" + i;

  // Put a single row into the table. We could also pass a list of Puts to write a batch.
  Put put = new Put(Bytes.toBytes(rowKey));
  put.addColumn(COLUMN_FAMILY_NAME, COLUMN_NAME, Bytes.toBytes(GREETINGS[i]));
  table.put(put);
}
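
As the comment above notes, you can also write several rows in a single call by passing a list of Put objects. The following sketch shows that batch variant; it assumes java.util.ArrayList and java.util.List in addition to the imports shown earlier:

// Build all the Puts first, then send them to the table in one batch call.
List<Put> puts = new ArrayList<>();
for (int i = 0; i < GREETINGS.length; i++) {
  Put put = new Put(Bytes.toBytes("greeting" + i));
  put.addColumn(COLUMN_FAMILY_NAME, COLUMN_NAME, Bytes.toBytes(GREETINGS[i]));
  puts.add(put);
}
table.put(puts);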

Reading a row by its row key

Use a row's key to retrieve the row directly.

// Get the first greeting by row key
String rowKey = "greeting0";
Result getResult = table.get(new Get(Bytes.toBytes(rowKey)));
String greeting = Bytes.toString(getResult.getValue(COLUMN_FAMILY_NAME, COLUMN_NAME));
System.out.println("Get a single greeting by row key");
System.out.printf("\t%s = %s\n", rowKey, greeting);
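
By default, a Get returns every column of the row. If you only need one column, or if the row might not exist, you can narrow the request and check the result. A minimal sketch:

// Request only the column we care about, and handle a missing row.
Get get = new Get(Bytes.toBytes(rowKey));
get.addColumn(COLUMN_FAMILY_NAME, COLUMN_NAME);

Result result = table.get(get);
if (result.isEmpty()) {
  System.out.println("Row " + rowKey + " was not found");
} else {
  System.out.println(Bytes.toString(result.getValue(COLUMN_FAMILY_NAME, COLUMN_NAME)));
}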

Scanning all table rows

Use the Scan class to retrieve a range of rows.

// Now scan across all rows.
Scan scan = new Scan();

print("Scan for all greetings:");
ResultScanner scanner = table.getScanner(scan);
for (Result row : scanner) {
  byte[] valueBytes = row.getValue(COLUMN_FAMILY_NAME, COLUMN_NAME);
  System.out.println('\t' + Bytes.toString(valueBytes));
}
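
A scan does not have to cover the entire table. You can limit it to a range of row keys, and it is a good idea to close the ResultScanner when you are done with it. The following sketch restricts the scan to keys from "greeting0" (inclusive) to "greeting2" (exclusive) and uses try-with-resources to close the scanner:

// Scan only the rows whose keys fall in the range [greeting0, greeting2).
Scan rangeScan = new Scan();
rangeScan.setStartRow(Bytes.toBytes("greeting0"));
rangeScan.setStopRow(Bytes.toBytes("greeting2"));

try (ResultScanner rangeScanner = table.getScanner(rangeScan)) {
  for (Result row : rangeScanner) {
    System.out.println(Bytes.toString(row.getValue(COLUMN_FAMILY_NAME, COLUMN_NAME)));
  }
}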

Deleting a table

Use the Admin API to delete the table.

// Clean up by disabling and then deleting the table
print("Delete the table");
admin.disableTable(table.getName());
admin.deleteTable(table.getName());
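
If an earlier step failed, disableTable and deleteTable will throw because the table does not exist or is already disabled. One way to make the cleanup step more forgiving is to check the table's state first, as in this sketch:

// Delete the table only if it actually exists, disabling it first if needed.
TableName tableName = TableName.valueOf(TABLE_NAME);
if (admin.tableExists(tableName)) {
  if (admin.isTableEnabled(tableName)) {
    admin.disableTable(tableName);
  }
  admin.deleteTable(tableName);
}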

The complete code sample

Here is the full example without the comments.

package com.example.cloud.bigtable.helloworld;

import com.google.cloud.bigtable.hbase.BigtableConfiguration;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class HelloWorld {

  private static final byte[] TABLE_NAME = Bytes.toBytes("Hello-Bigtable");
  private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("cf1");
  private static final byte[] COLUMN_NAME = Bytes.toBytes("greeting");

  private static final String[] GREETINGS = {
    "Hello World!", "Hello Cloud Bigtable!", "Hello HBase!"
  };

  private static void doHelloWorld(String projectId, String instanceId) {

    try (Connection connection = BigtableConfiguration.connect(projectId, instanceId)) {

      Admin admin = connection.getAdmin();

      HTableDescriptor descriptor = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
      descriptor.addFamily(new HColumnDescriptor(COLUMN_FAMILY_NAME));

      print("Create table " + descriptor.getNameAsString());
      admin.createTable(descriptor);

      Table table = connection.getTable(TableName.valueOf(TABLE_NAME));

      print("Write some greetings to the table");
      for (int i = 0; i < GREETINGS.length; i++) {
        String rowKey = "greeting" + i;

        Put put = new Put(Bytes.toBytes(rowKey));
        put.addColumn(COLUMN_FAMILY_NAME, COLUMN_NAME, Bytes.toBytes(GREETINGS[i]));
        table.put(put);
      }

      String rowKey = "greeting0";
      Result getResult = table.get(new Get(Bytes.toBytes(rowKey)));
      String greeting = Bytes.toString(getResult.getValue(COLUMN_FAMILY_NAME, COLUMN_NAME));
      System.out.println("Get a single greeting by row key");
      System.out.printf("\t%s = %s\n", rowKey, greeting);

      Scan scan = new Scan();

      print("Scan for all greetings:");
      ResultScanner scanner = table.getScanner(scan);
      for (Result row : scanner) {
        byte[] valueBytes = row.getValue(COLUMN_FAMILY_NAME, COLUMN_NAME);
        System.out.println('\t' + Bytes.toString(valueBytes));
      }

      print("Delete the table");
      admin.disableTable(table.getName());
      admin.deleteTable(table.getName());

    } catch (IOException e) {
      System.err.println("Exception while running HelloWorld: " + e.getMessage());
      e.printStackTrace();
      System.exit(1);
    }

    System.exit(0);
  }

  private static void print(String msg) {
    System.out.println("HelloWorld: " + msg);
  }

  public static void main(String[] args) {
    String projectId = requiredProperty("bigtable.projectID");
    String instanceId = requiredProperty("bigtable.instanceID");

    doHelloWorld(projectId, instanceId);
  }

  private static String requiredProperty(String prop) {
    String value = System.getProperty(prop);
    if (value == null) {
      throw new IllegalArgumentException("Missing required system property: " + prop);
    }
    return value;
  }
}