Starting April 29, 2025, the Gemini 1.5 Pro and Gemini 1.5 Flash models are not available in projects that have never used them, including new projects. For details, see
Model versions and lifecycle.
# Batch text prediction with Gemini model
Performs a batch text prediction using the Gemini model and returns the location of the output.
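The job reads its prompts from JSONL files in Cloud Storage, one request per line, and writes the predictions to a Cloud Storage prefix. As a rough sketch of what a single input line could contain, the snippet below builds one request object; the `request`/`contents` wrapper schema is an assumption based on the public sample input file, and the prompt text is invented, so check the batch prediction guide linked under Explore further for the exact format.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// buildInputLine sketches one JSONL input line for a Gemini batch prediction job.
// The schema used here (a "request" object wrapping "contents") is an assumption;
// verify it against the batch prediction documentation before relying on it.
func buildInputLine(prompt string) (string, error) {
	line := map[string]interface{}{
		"request": map[string]interface{}{
			"contents": []map[string]interface{}{
				{
					"role":  "user",
					"parts": []map[string]string{{"text": prompt}},
				},
			},
		},
	}
	b, err := json.Marshal(line)
	if err != nil {
		return "", err
	}
	return string(b), nil
}

func main() {
	// Hypothetical prompt; in practice you would write one such line per request
	// to a JSONL file and upload it to the Cloud Storage bucket used as the input URI.
	line, err := buildInputLine("Explain batch prediction in one sentence.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(line)
}
```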
Explore further
---------------

For detailed documentation that includes this code sample, see the following:

- [Get batch predictions for Gemini](/vertex-ai/generative-ai/docs/model-reference/batch-prediction-api)

Code sample
-----------
[[["Facile da capire","easyToUnderstand","thumb-up"],["Il problema è stato risolto","solvedMyProblem","thumb-up"],["Altra","otherUp","thumb-up"]],[["Difficile da capire","hardToUnderstand","thumb-down"],["Informazioni o codice di esempio errati","incorrectInformationOrSampleCode","thumb-down"],["Mancano le informazioni o gli esempi di cui ho bisogno","missingTheInformationSamplesINeed","thumb-down"],["Problema di traduzione","translationIssue","thumb-down"],["Altra","otherDown","thumb-down"]],[],[],[],null,["# Batch text prediction with Gemini model\n\nPerform batch text prediction using Gemini model and returns the output location.\n\nExplore further\n---------------\n\n\nFor detailed documentation that includes this code sample, see the following:\n\n- [Get batch predictions for Gemini](/vertex-ai/generative-ai/docs/model-reference/batch-prediction-api)\n\nCode sample\n-----------\n\n### Go\n\n\nBefore trying this sample, follow the Go setup instructions in the\n[Vertex AI quickstart using\nclient libraries](/vertex-ai/docs/start/client-libraries).\n\n\nFor more information, see the\n[Vertex AI Go API\nreference documentation](/go/docs/reference/cloud.google.com/go/aiplatform/latest/apiv1).\n\n\nTo authenticate to Vertex AI, set up Application Default Credentials.\nFor more information, see\n\n[Set up authentication for a local development environment](/docs/authentication/set-up-adc-local-dev-environment).\n\n import (\n \t\"context\"\n \t\"fmt\"\n \t\"io\"\n \t\"time\"\n\n \taiplatform \"cloud.google.com/go/aiplatform/apiv1\"\n \taiplatformpb \"cloud.google.com/go/aiplatform/apiv1/aiplatformpb\"\n\n \t\"google.golang.org/api/option\"\n \t\"google.golang.org/protobuf/types/known/structpb\"\n )\n\n // batchPredictGCS submits a batch prediction job using GCS data source as its input\n func batchPredictGCS(w io.Writer, projectID, location string, inputURIs []string, outputURI string) error {\n \t// location := \"us-central1\"\n \t// inputURIs := []string{\"gs://cloud-samples-data/batch/prompt_for_batch_gemini_predict.jsonl\"}\n \t// outputURI := \"gs://\u003ccloud-bucket-name\u003e/\u003cprefix-name\u003e\"\n \tmodelName := \"gemini-2.0-flash-001\"\n \tjobName := \"batch-predict-gcs-test-001\"\n\n \tctx := context.Background()\n \tapiEndpoint := fmt.Sprintf(\"%s-aiplatform.googleapis.com:443\", location)\n \tclient, err := aiplatform.https://cloud.google.com/go/docs/reference/cloud.google.com/go/aiplatform/latest/apiv1.html#cloud_google_com_go_aiplatform_apiv1_JobClient_NewJobClient(ctx, option.WithEndpoint(apiEndpoint))\n \tif err != nil {\n \t\treturn fmt.Errorf(\"unable to create aiplatform client: %w\", err)\n \t}\n \tdefer client.Close()\n\n \tmodelParameters, err := structpb.NewValue(map[string]interface{}{\n \t\t\"temperature\": 0.2,\n \t\t\"maxOutputTokens\": 200,\n \t})\n \tif err != nil {\n \t\treturn fmt.Errorf(\"unable to convert model parameters to protobuf value: %w\", err)\n \t}\n\n \treq := &aiplatformpb.CreateBatchPredictionJobRequest{\n \t\tParent: fmt.Sprintf(\"projects/%s/locations/%s\", projectID, location),\n \t\tBatchPredictionJob: &aiplatformpb.BatchPredictionJob{\n \t\t\tDisplayName: jobName,\n \t\t\tModel: fmt.Sprintf(\"publishers/google/models/%s\", modelName),\n \t\t\tModelParameters: modelParameters,\n \t\t\t// Check the API reference for `BatchPredictionJob` for supported input and output formats:\n \t\t\t// https://cloud.google.com/vertex-ai/docs/reference/rpc/google.cloud.aiplatform.v1#google.cloud.aiplatform.v1.BatchPredictionJob\n \t\t\tInputConfig: 
&aiplatformpb.BatchPredictionJob_InputConfig{\n \t\t\t\tSource: &aiplatformpb.BatchPredictionJob_InputConfig_GcsSource{\n \t\t\t\t\tGcsSource: &aiplatformpb.GcsSource{\n \t\t\t\t\t\tUris: inputURIs,\n \t\t\t\t\t},\n \t\t\t\t},\n \t\t\t\tInstancesFormat: \"jsonl\",\n \t\t\t},\n \t\t\tOutputConfig: &aiplatformpb.BatchPredictionJob_OutputConfig{\n \t\t\t\tDestination: &aiplatformpb.BatchPredictionJob_OutputConfig_GcsDestination{\n \t\t\t\t\tGcsDestination: &aiplatformpb.GcsDestination{\n \t\t\t\t\t\tOutputUriPrefix: outputURI,\n \t\t\t\t\t},\n \t\t\t\t},\n \t\t\t\tPredictionsFormat: \"jsonl\",\n \t\t\t},\n \t\t},\n \t}\n\n \tjob, err := client.CreateBatchPredictionJob(ctx, req)\n \tif err != nil {\n \t\treturn err\n \t}\n \tfullJobId := job.GetName()\n \tfmt.Fprintf(w, \"submitted batch predict job for model %q\\n\", job.GetModel())\n \tfmt.Fprintf(w, \"job id: %q\\n\", fullJobId)\n \tfmt.Fprintf(w, \"job state: %s\\n\", job.GetState())\n \t// Example response:\n \t// submitted batch predict job for model \"publishers/google/models/gemini-2.0-flash-001\"\n \t// job id: \"projects/.../locations/.../batchPredictionJobs/1234567890000000000\"\n \t// job state: JOB_STATE_PENDING\n\n \tfor {\n \t\ttime.Sleep(5 * time.Second)\n\n \t\tjob, err := client.GetBatchPredictionJob(ctx, &aiplatformpb.GetBatchPredictionJobRequest{\n \t\t\tName: fullJobId,\n \t\t})\n \t\tif err != nil {\n \t\t\treturn fmt.Errorf(\"error: couldn't get updated job state: %w\", err)\n \t\t}\n\n \t\tif job.GetEndTime() != nil {\n \t\t\tfmt.Fprintf(w, \"batch predict job finished with state %s\\n\", job.GetState())\n \t\t\tbreak\n \t\t} else {\n \t\t\tfmt.Fprintf(w, \"batch predict job is running... job state is %s\\n\", job.GetState())\n \t\t}\n \t}\n\n \treturn nil\n }\n\nWhat's next\n-----------\n\n\nTo search and filter code samples for other Google Cloud products, see the\n[Google Cloud sample browser](/docs/samples?product=generativeaionvertexai)."]]