Creates a training pipeline for image classification by using the create_training_pipeline method.
More information
For detailed documentation that includes this code sample, see the related Vertex AI documentation.
Code sample
Java
Before trying this sample, follow the Java setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Java API reference documentation.
To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.
import com.google.cloud.aiplatform.util.ValueConverter;
import com.google.cloud.aiplatform.v1.DeployedModelRef;
import com.google.cloud.aiplatform.v1.EnvVar;
import com.google.cloud.aiplatform.v1.FilterSplit;
import com.google.cloud.aiplatform.v1.FractionSplit;
import com.google.cloud.aiplatform.v1.InputDataConfig;
import com.google.cloud.aiplatform.v1.LocationName;
import com.google.cloud.aiplatform.v1.Model;
import com.google.cloud.aiplatform.v1.Model.ExportFormat;
import com.google.cloud.aiplatform.v1.ModelContainerSpec;
import com.google.cloud.aiplatform.v1.PipelineServiceClient;
import com.google.cloud.aiplatform.v1.PipelineServiceSettings;
import com.google.cloud.aiplatform.v1.Port;
import com.google.cloud.aiplatform.v1.PredefinedSplit;
import com.google.cloud.aiplatform.v1.PredictSchemata;
import com.google.cloud.aiplatform.v1.TimestampSplit;
import com.google.cloud.aiplatform.v1.TrainingPipeline;
import com.google.cloud.aiplatform.v1.schema.trainingjob.definition.AutoMlImageClassificationInputs;
import com.google.cloud.aiplatform.v1.schema.trainingjob.definition.AutoMlImageClassificationInputs.ModelType;
import com.google.rpc.Status;
import java.io.IOException;

public class CreateTrainingPipelineImageClassificationSample {

  public static void main(String[] args) throws IOException {
    // TODO(developer): Replace these variables before running the sample.
    String trainingPipelineDisplayName = "YOUR_TRAINING_PIPELINE_DISPLAY_NAME";
    String project = "YOUR_PROJECT_ID";
    String datasetId = "YOUR_DATASET_ID";
    String modelDisplayName = "YOUR_MODEL_DISPLAY_NAME";
    createTrainingPipelineImageClassificationSample(
        project, trainingPipelineDisplayName, datasetId, modelDisplayName);
  }

  static void createTrainingPipelineImageClassificationSample(
      String project, String trainingPipelineDisplayName, String datasetId, String modelDisplayName)
      throws IOException {
    PipelineServiceSettings pipelineServiceSettings =
        PipelineServiceSettings.newBuilder()
            .setEndpoint("us-central1-aiplatform.googleapis.com:443")
            .build();

    // Initialize client that will be used to send requests. This client only needs to be created
    // once, and can be reused for multiple requests. After completing all of your requests, call
    // the "close" method on the client to safely clean up any remaining background resources.
    try (PipelineServiceClient pipelineServiceClient =
        PipelineServiceClient.create(pipelineServiceSettings)) {
      String location = "us-central1";
      String trainingTaskDefinition =
          "gs://google-cloud-aiplatform/schema/trainingjob/definition/"
              + "automl_image_classification_1.0.0.yaml";
      LocationName locationName = LocationName.of(project, location);

      // Training task inputs for a single-label AutoML image classification model.
      AutoMlImageClassificationInputs autoMlImageClassificationInputs =
          AutoMlImageClassificationInputs.newBuilder()
              .setModelType(ModelType.CLOUD)
              .setMultiLabel(false)
              .setBudgetMilliNodeHours(8000)
              .setDisableEarlyStopping(false)
              .build();

      InputDataConfig trainingInputDataConfig =
          InputDataConfig.newBuilder().setDatasetId(datasetId).build();
      Model model = Model.newBuilder().setDisplayName(modelDisplayName).build();
      TrainingPipeline trainingPipeline =
          TrainingPipeline.newBuilder()
              .setDisplayName(trainingPipelineDisplayName)
              .setTrainingTaskDefinition(trainingTaskDefinition)
              .setTrainingTaskInputs(ValueConverter.toValue(autoMlImageClassificationInputs))
              .setInputDataConfig(trainingInputDataConfig)
              .setModelToUpload(model)
              .build();

      // Create the training pipeline in the given project and region.
      TrainingPipeline trainingPipelineResponse =
          pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);

      System.out.println("Create Training Pipeline Image Classification Response");
      System.out.format("Name: %s\n", trainingPipelineResponse.getName());
      System.out.format("Display Name: %s\n", trainingPipelineResponse.getDisplayName());
      System.out.format(
          "Training Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
      System.out.format(
          "Training Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
      System.out.format(
          "Training Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
      System.out.format("State: %s\n", trainingPipelineResponse.getState());
      System.out.format("Create Time: %s\n", trainingPipelineResponse.getCreateTime());
      System.out.format("Start Time: %s\n", trainingPipelineResponse.getStartTime());
      System.out.format("End Time: %s\n", trainingPipelineResponse.getEndTime());
      System.out.format("Update Time: %s\n", trainingPipelineResponse.getUpdateTime());
      System.out.format("Labels: %s\n", trainingPipelineResponse.getLabelsMap());

      InputDataConfig inputDataConfig = trainingPipelineResponse.getInputDataConfig();
      System.out.println("Input Data Config");
      System.out.format("Dataset Id: %s\n", inputDataConfig.getDatasetId());
      System.out.format("Annotations Filter: %s\n", inputDataConfig.getAnnotationsFilter());

      FractionSplit fractionSplit = inputDataConfig.getFractionSplit();
      System.out.println("Fraction Split");
      System.out.format("Training Fraction: %s\n", fractionSplit.getTrainingFraction());
      System.out.format("Validation Fraction: %s\n", fractionSplit.getValidationFraction());
      System.out.format("Test Fraction: %s\n", fractionSplit.getTestFraction());

      FilterSplit filterSplit = inputDataConfig.getFilterSplit();
      System.out.println("Filter Split");
      System.out.format("Training Filter: %s\n", filterSplit.getTrainingFilter());
      System.out.format("Validation Filter: %s\n", filterSplit.getValidationFilter());
      System.out.format("Test Filter: %s\n", filterSplit.getTestFilter());

      PredefinedSplit predefinedSplit = inputDataConfig.getPredefinedSplit();
      System.out.println("Predefined Split");
      System.out.format("Key: %s\n", predefinedSplit.getKey());

      TimestampSplit timestampSplit = inputDataConfig.getTimestampSplit();
      System.out.println("Timestamp Split");
      System.out.format("Training Fraction: %s\n", timestampSplit.getTrainingFraction());
      System.out.format("Validation Fraction: %s\n", timestampSplit.getValidationFraction());
      System.out.format("Test Fraction: %s\n", timestampSplit.getTestFraction());
      System.out.format("Key: %s\n", timestampSplit.getKey());

      Model modelResponse = trainingPipelineResponse.getModelToUpload();
      System.out.println("Model To Upload");
      System.out.format("Name: %s\n", modelResponse.getName());
      System.out.format("Display Name: %s\n", modelResponse.getDisplayName());
      System.out.format("Description: %s\n", modelResponse.getDescription());
      System.out.format("Metadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
      System.out.format("Metadata: %s\n", modelResponse.getMetadata());
      System.out.format("Training Pipeline: %s\n", modelResponse.getTrainingPipeline());
      System.out.format("Artifact Uri: %s\n", modelResponse.getArtifactUri());
      System.out.format(
          "Supported Deployment Resources Types: %s\n",
          modelResponse.getSupportedDeploymentResourcesTypesList());
      System.out.format(
          "Supported Input Storage Formats: %s\n",
          modelResponse.getSupportedInputStorageFormatsList());
      System.out.format(
          "Supported Output Storage Formats: %s\n",
          modelResponse.getSupportedOutputStorageFormatsList());
      System.out.format("Create Time: %s\n", modelResponse.getCreateTime());
      System.out.format("Update Time: %s\n", modelResponse.getUpdateTime());
      System.out.format("Labels: %s\n", modelResponse.getLabelsMap());

      PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
      System.out.println("Predict Schemata");
      System.out.format("Instance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
      System.out.format("Parameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
      System.out.format("Prediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());

      for (ExportFormat exportFormat : modelResponse.getSupportedExportFormatsList()) {
        System.out.println("Supported Export Format");
        System.out.format("Id: %s\n", exportFormat.getId());
      }

      ModelContainerSpec modelContainerSpec = modelResponse.getContainerSpec();
      System.out.println("Container Spec");
      System.out.format("Image Uri: %s\n", modelContainerSpec.getImageUri());
      System.out.format("Command: %s\n", modelContainerSpec.getCommandList());
      System.out.format("Args: %s\n", modelContainerSpec.getArgsList());
      System.out.format("Predict Route: %s\n", modelContainerSpec.getPredictRoute());
      System.out.format("Health Route: %s\n", modelContainerSpec.getHealthRoute());

      for (EnvVar envVar : modelContainerSpec.getEnvList()) {
        System.out.println("Env");
        System.out.format("Name: %s\n", envVar.getName());
        System.out.format("Value: %s\n", envVar.getValue());
      }

      for (Port port : modelContainerSpec.getPortsList()) {
        System.out.println("Port");
        System.out.format("Container Port: %s\n", port.getContainerPort());
      }

      for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
        System.out.println("Deployed Model");
        System.out.format("Endpoint: %s\n", deployedModelRef.getEndpoint());
        System.out.format("Deployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
      }

      Status status = trainingPipelineResponse.getError();
      System.out.println("Error");
      System.out.format("Code: %s\n", status.getCode());
      System.out.format("Message: %s\n", status.getMessage());
    }
  }
}
Node.js
Before trying this sample, follow the Node.js setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Node.js API reference documentation.
To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.
/**
 * TODO(developer): Uncomment these variables before running the sample.
 * (Not necessary if passing values as arguments)
 */
/*
const datasetId = 'YOUR DATASET';
const modelDisplayName = 'NEW MODEL NAME';
const trainingPipelineDisplayName = 'NAME FOR TRAINING PIPELINE';
const project = 'YOUR PROJECT ID';
const location = 'us-central1';
*/

// Imports the Google Cloud Pipeline Service Client library
const aiplatform = require('@google-cloud/aiplatform');
const {definition} =
  aiplatform.protos.google.cloud.aiplatform.v1.schema.trainingjob;
const ModelType = definition.AutoMlImageClassificationInputs.ModelType;

// Specifies the location of the api endpoint
const clientOptions = {
  apiEndpoint: 'us-central1-aiplatform.googleapis.com',
};

// Instantiates a client
const {PipelineServiceClient} = aiplatform.v1;
const pipelineServiceClient = new PipelineServiceClient(clientOptions);

async function createTrainingPipelineImageClassification() {
  // Configure the parent resource
  const parent = `projects/${project}/locations/${location}`;

  // Values should match the input expected by your model.
  const trainingTaskInputsMessage =
    new definition.AutoMlImageClassificationInputs({
      multiLabel: true,
      modelType: ModelType.CLOUD,
      budgetMilliNodeHours: 8000,
      disableEarlyStopping: false,
    });
  const trainingTaskInputs = trainingTaskInputsMessage.toValue();

  const trainingTaskDefinition =
    'gs://google-cloud-aiplatform/schema/trainingjob/definition/automl_image_classification_1.0.0.yaml';
  const modelToUpload = {displayName: modelDisplayName};
  const inputDataConfig = {datasetId};
  const trainingPipeline = {
    displayName: trainingPipelineDisplayName,
    trainingTaskDefinition,
    trainingTaskInputs,
    inputDataConfig,
    modelToUpload,
  };
  const request = {parent, trainingPipeline};

  // Create training pipeline request
  const [response] =
    await pipelineServiceClient.createTrainingPipeline(request);

  console.log('Create training pipeline image classification response');
  console.log(`Name : ${response.name}`);
  console.log('Raw response:');
  console.log(JSON.stringify(response, null, 2));
}
createTrainingPipelineImageClassification();
Python
Before trying this sample, follow the Python setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Python API reference documentation.
To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.
from google.cloud import aiplatform
from google.cloud.aiplatform.gapic.schema import trainingjob


def create_training_pipeline_image_classification_sample(
    project: str,
    display_name: str,
    dataset_id: str,
    model_display_name: str,
    location: str = "us-central1",
    api_endpoint: str = "us-central1-aiplatform.googleapis.com",
):
    # The AI Platform services require regional API endpoints.
    client_options = {"api_endpoint": api_endpoint}
    # Initialize client that will be used to create and send requests.
    # This client only needs to be created once, and can be reused for multiple requests.
    client = aiplatform.gapic.PipelineServiceClient(client_options=client_options)
    training_task_inputs = trainingjob.definition.AutoMlImageClassificationInputs(
        multi_label=True,
        model_type="CLOUD",
        budget_milli_node_hours=8000,
        disable_early_stopping=False,
    ).to_value()

    training_pipeline = {
        "display_name": display_name,
        "training_task_definition": "gs://google-cloud-aiplatform/schema/trainingjob/definition/automl_image_classification_1.0.0.yaml",
        "training_task_inputs": training_task_inputs,
        "input_data_config": {"dataset_id": dataset_id},
        "model_to_upload": {"display_name": model_display_name},
    }
    parent = f"projects/{project}/locations/{location}"
    response = client.create_training_pipeline(
        parent=parent, training_pipeline=training_pipeline
    )
    print("response:", response)
What's next
To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.