Get batch predictions with BigQuery input

Create a batch prediction job with a BigQuery table as input.

Learn more

For detailed documentation that includes this code sample, see the following:

Code sample

Java

To authenticate to AutoML Tables, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

import com.google.api.gax.longrunning.OperationFuture;
import com.google.cloud.automl.v1beta1.BatchPredictInputConfig;
import com.google.cloud.automl.v1beta1.BatchPredictOutputConfig;
import com.google.cloud.automl.v1beta1.BatchPredictRequest;
import com.google.cloud.automl.v1beta1.BatchPredictResult;
import com.google.cloud.automl.v1beta1.BigQueryDestination;
import com.google.cloud.automl.v1beta1.BigQuerySource;
import com.google.cloud.automl.v1beta1.ModelName;
import com.google.cloud.automl.v1beta1.OperationMetadata;
import com.google.cloud.automl.v1beta1.PredictionServiceClient;
import java.io.IOException;
import java.util.concurrent.ExecutionException;

abstract class TablesBatchPredictBigQuery {

  static void batchPredict() throws IOException, ExecutionException, InterruptedException {
    // TODO(developer): Replace these variables before running the sample.
    String projectId = "YOUR_PROJECT_ID";
    String modelId = "YOUR_MODEL_ID";
    String inputUri = "bq://YOUR_PROJECT_ID.bqDatasetID.bqTableId";
    String outputUri = "bq://YOUR_PROJECT_ID";
    batchPredict(projectId, modelId, inputUri, outputUri);
  }

  static void batchPredict(String projectId, String modelId, String inputUri, String outputUri)
      throws IOException, ExecutionException, InterruptedException {
    // Initialize client that will be used to send requests. This client only needs to be created
    // once, and can be reused for multiple requests. After completing all of your requests, call
    // the "close" method on the client to safely clean up any remaining background resources.
    try (PredictionServiceClient client = PredictionServiceClient.create()) {
      // Get the full path of the model.
      ModelName name = ModelName.of(projectId, "us-central1", modelId);

      // Configure the BigQuery input source
      BigQuerySource bigQuerySource = BigQuerySource.newBuilder().setInputUri(inputUri).build();
      BatchPredictInputConfig inputConfig =
          BatchPredictInputConfig.newBuilder().setBigquerySource(bigQuerySource).build();

      // Configure where to store the output in BigQuery
      BigQueryDestination bigQueryDestination =
          BigQueryDestination.newBuilder().setOutputUri(outputUri).build();
      BatchPredictOutputConfig outputConfig =
          BatchPredictOutputConfig.newBuilder().setBigqueryDestination(bigQueryDestination).build();

      // Build the request that will be sent to the API
      BatchPredictRequest request =
          BatchPredictRequest.newBuilder()
              .setName(name.toString())
              .setInputConfig(inputConfig)
              .setOutputConfig(outputConfig)
              .build();

      // Start an asynchronous request
      OperationFuture<BatchPredictResult, OperationMetadata> future =
          client.batchPredictAsync(request);

      System.out.println("Waiting for operation to complete...");
      future.get();
      System.out.println("Batch Prediction results saved to BigQuery.");
    }
  }
}

Node.js

To authenticate to AutoML Tables, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.


/**
 * Demonstrates using the AutoML client to request a batch prediction from
 * AutoML Tables using BigQuery.
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const projectId = '[PROJECT_ID]' e.g., "my-gcloud-project";
// const computeRegion = '[REGION_NAME]' e.g., "us-central1";
// const modelId = '[MODEL_ID]' e.g., "TBL4704590352927948800";
// const inputUri = '[BIGQUERY_PATH]'
// e.g., "bq://<project_id>.<dataset_id>.<table_id>",
// `The BigQuery URI containing the inputs`;
// const outputUri = '[BIGQUERY_PATH]' e.g., "bq://<project_id>",
// `The destination BigQuery URI for storing outputs`;

const automl = require('@google-cloud/automl');

// Create client for prediction service.
const automlClient = new automl.v1beta1.PredictionServiceClient();

// Get the full path of the model.
const modelFullId = automlClient.modelPath(projectId, computeRegion, modelId);

async function batchPredict() {
  // Construct request
  // Set the BigQuery input URI.
  const inputConfig = {
    bigquerySource: {
      inputUri: inputUri,
    },
  };

  // Set the BigQuery output URI.
  const outputConfig = {
    bigqueryDestination: {
      outputUri: outputUri,
    },
  };

  const [, operation] = await automlClient.batchPredict({
    name: modelFullId,
    inputConfig: inputConfig,
    outputConfig: outputConfig,
  });

  // Get the latest state of long-running operation.
  console.log(`Operation name: ${operation.name}`);
}

batchPredict();

Python

To authenticate to AutoML Tables, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

# TODO(developer): Uncomment and set the following variables
# project_id = 'PROJECT_ID_HERE'
# compute_region = 'COMPUTE_REGION_HERE'
# model_display_name = 'MODEL_DISPLAY_NAME_HERE'
# bq_input_uri = 'bq://my-project.my-dataset.my-table'
# bq_output_uri = 'bq://my-project'
# params = {}

from google.cloud import automl_v1beta1 as automl

client = automl.TablesClient(project=project_id, region=compute_region)

# Query model
response = client.batch_predict(
    bigquery_input_uri=bq_input_uri,
    bigquery_output_uri=bq_output_uri,
    model_display_name=model_display_name,
    params=params,
)
print("Making batch prediction... ")
# `response` is an async operation descriptor;
# you can register a callback for the operation to complete via `add_done_callback`:
# def callback(operation_future):
#   result = operation_future.result()
# response.add_done_callback(callback)
#
# or block the thread polling for the operation's results:
response.result()
# AutoML puts the predictions in a newly generated dataset whose name follows the pattern "prediction_" + model_id + "_" + timestamp.
# here's how to get the dataset name:
dataset_name = (
    response.metadata.batch_predict_details.output_info.bigquery_output_dataset
)

print(
    "Batch prediction complete.\nResults are in '{}' dataset.\n{}".format(
        dataset_name, response.metadata
    )
)
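
The bigquery_output_dataset value printed above has the form bq://project_id.dataset_id. The following minimal sketch is not part of the original sample; it assumes the google-cloud-bigquery client library is installed and shows one way to list the result tables that the job wrote to that dataset (the output dataset typically contains a predictions table and an errors table).

# A hedged sketch, not part of the original sample: inspect the output dataset
# with the BigQuery client library (assumes google-cloud-bigquery is installed).
from google.cloud import bigquery

bq_client = bigquery.Client(project=project_id)

# dataset_name has the form "bq://project_id.dataset_id"; the BigQuery client
# expects "project_id.dataset_id", so strip the "bq://" prefix.
output_dataset_id = dataset_name.replace("bq://", "")

# List the tables that the batch prediction job wrote to the dataset.
for table in bq_client.list_tables(output_dataset_id):
    print(table.table_id)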

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.