Data Catalog client libraries

This page shows how to get started with the Cloud Client Libraries for the Data Catalog API. Read more about the client libraries for Cloud APIs, including the older Google API Client Libraries, in Client Libraries Explained.

Install the client library

C#

For more information, see Setting Up a C# Development Environment.

Install-Package Google.Cloud.DataCatalog.V1

Go

For more information, see Setting Up a Go Development Environment.

go get cloud.google.com/go/datacatalog/apiv1

Java

For more information, see Setting Up a Java Development Environment.

If you are using Maven, add this to your pom.xml file:

Maven:
<dependency>
    <groupId>com.google.cloud</groupId>
    <artifactId>google-cloud-datacatalog</artifactId>
    <version>insert datacatalog-library-version here</version>
</dependency>
If you are using Gradle, add this to your dependencies:
implementation group: 'com.google.cloud', name: 'google-cloud-datacatalog', version: 'insert datacatalog-library-version here'

Node.js

For more information, see Setting Up a Node.js Development Environment.

npm install --save @google-cloud/datacatalog

PHP

For more information, see Using PHP on Google Cloud.

composer require google/cloud-data-catalog

Python

For more information, see Setting Up a Python Development Environment.

pip install --upgrade google-cloud-datacatalog

Ruby

For more information, see Setting Up a Ruby Development Environment.

gem install google-cloud-data_catalog

Set up authentication

When you use client libraries, you use Application Default Credentials (ADC) to authenticate. For information about setting up ADC, see Provide credentials for Application Default Credentials. For information about using ADC with client libraries, see Authenticate using client libraries.

Use the client library

The following example shows how to use the client library.

Java

For more information, see the Data Catalog Java API reference documentation.

To authenticate to Data Catalog, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

import com.google.cloud.datacatalog.v1.DataCatalogClient;
import com.google.cloud.datacatalog.v1.Entry;
import com.google.cloud.datacatalog.v1.LookupEntryRequest;

public class LookupEntryBigQueryDataset {

  /**
   * Looks up the Data Catalog entry that refers to a BigQuery dataset.
   *
   * @param projectId The project ID to which the Dataset belongs, e.g. 'my-project'
   * @param datasetId The dataset ID to which the Catalog Entry refers, e.g. 'my_dataset'
   */
  public static void lookupEntry(String projectId, String datasetId) {
    // String projectId = "my-project"
    // String datasetId = "my_dataset"

    // Build the linked-resource name under which the source Google Cloud
    // Platform service (BigQuery) exposes the dataset.
    final String datasetResource =
        String.format("//bigquery.googleapis.com/projects/%s/datasets/%s", projectId, datasetId);
    final LookupEntryRequest lookupRequest =
        LookupEntryRequest.newBuilder().setLinkedResource(datasetResource).build();

    // Looking the entry up by its SQL name yields the same result:
    // String sqlResource = String.format("bigquery.dataset.`%s`.`%s`", projectId, datasetId);
    // LookupEntryRequest lookupRequest =
    //     LookupEntryRequest.newBuilder().setSqlResource(sqlResource).build();

    // The client only needs to be created once and can be reused for many
    // requests; the try-with-resources block guarantees "close" is called so
    // any remaining background resources are cleaned up safely.
    try (DataCatalogClient client = DataCatalogClient.create()) {
      Entry entry = client.lookupEntry(lookupRequest);
      System.out.printf("Entry name: %s\n", entry.getName());
    } catch (Exception e) {
      System.out.print("Error during lookupEntryBigQueryDataset:\n" + e.toString());
    }
  }
}

Node.js

For more information, see the Data Catalog Node.js API reference documentation.

To authenticate to Data Catalog, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

// -------------------------------
// Import required modules.
// -------------------------------
const {DataCatalogClient} = require('@google-cloud/datacatalog').v1;
const datacatalog = new DataCatalogClient();

/**
 * Looks up the Data Catalog entry for a BigQuery dataset by its
 * linked-resource name.
 * @returns {Promise<object>} The entry returned by the Data Catalog API.
 */
const lookup = async () => {
  // TODO(developer): Uncomment the following lines before running the sample.
  // const projectId = 'my-project'
  // const datasetId = 'my_dataset'
  const resourceName = `//bigquery.googleapis.com/projects/${projectId}/datasets/${datasetId}`;
  const request = {linkedResource: resourceName};
  const [result] = await datacatalog.lookupEntry(request);
  return result;
};

// Top-level `await` is not valid in a CommonJS module (this file uses
// `require`), so drive the async function with a .then()/.catch() chain.
// The .catch() also ensures a failed lookup is reported rather than
// surfacing as an unhandled promise rejection.
lookup()
  .then((response) => {
    console.log(response);
  })
  .catch((err) => {
    console.error('lookupEntry failed:', err);
    process.exitCode = 1;
  });

Python

For more information, see the Data Catalog Python API reference documentation.

To authenticate to Data Catalog, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

from google.cloud import datacatalog_v1

# Initialize the client. It only needs to be created once and is reused
# for every lookup below.
datacatalog = datacatalog_v1.DataCatalogClient()

# TODO(developer): replace these with the resources you want to look up.
bigquery_project_id = "my_bigquery_project"
dataset_id = "my_dataset"
table_id = "my_table"
pubsub_project_id = "my_pubsub_project"
topic_id = "my_topic"


# To facilitate testing, we replace values with alternatives
# provided by the testing harness. When this snippet runs outside the
# harness, `override_values` is never defined, which previously raised
# NameError before any lookup ran — fall back to an empty mapping instead.
try:
    override_values
except NameError:
    override_values = {}

bigquery_project_id = override_values.get(
    "bigquery_project_id", bigquery_project_id
)
dataset_id = override_values.get("dataset_id", dataset_id)
table_id = override_values.get("table_id", table_id)
pubsub_project_id = override_values.get("pubsub_project_id", pubsub_project_id)
topic_id = override_values.get("topic_id", topic_id)

# BigQuery Dataset via linked_resource
resource_name = f"//bigquery.googleapis.com/projects/{bigquery_project_id}/datasets/{dataset_id}"

entry = datacatalog.lookup_entry(request={"linked_resource": resource_name})
print(
    f"Retrieved entry {entry.name} for BigQuery Dataset resource {entry.linked_resource}"
)

# BigQuery Dataset via sql_resource
sql_resource = f"bigquery.dataset.`{bigquery_project_id}`.`{dataset_id}`"

entry = datacatalog.lookup_entry(request={"sql_resource": sql_resource})
print(
    f"Retrieved entry {entry.name} for BigQuery Dataset resource {entry.linked_resource}"
)

# BigQuery Table via linked_resource
resource_name = (
    f"//bigquery.googleapis.com/projects/{bigquery_project_id}/datasets/{dataset_id}"
    f"/tables/{table_id}"
)

entry = datacatalog.lookup_entry(request={"linked_resource": resource_name})
print(f"Retrieved entry {entry.name} for BigQuery Table {entry.linked_resource}")

# BigQuery Table via sql_resource
sql_resource = f"bigquery.table.`{bigquery_project_id}`.`{dataset_id}`.`{table_id}`"

entry = datacatalog.lookup_entry(request={"sql_resource": sql_resource})
print(
    f"Retrieved entry {entry.name} for BigQuery Table resource {entry.linked_resource}"
)

# Pub/Sub Topic via linked_resource
resource_name = (
    f"//pubsub.googleapis.com/projects/{pubsub_project_id}/topics/{topic_id}"
)

entry = datacatalog.lookup_entry(request={"linked_resource": resource_name})
print(
    f"Retrieved entry {entry.name} for Pub/Sub Topic resource {entry.linked_resource}"
)

# Pub/Sub Topic via sql_resource
sql_resource = f"pubsub.topic.`{pubsub_project_id}`.`{topic_id}`"

entry = datacatalog.lookup_entry(request={"sql_resource": sql_resource})
print(
    f"Retrieved entry {entry.name} for Pub/Sub Topic resource {entry.linked_resource}"
)

Additional resources