Go
Antes de testar essa amostra, siga as instruções de configuração para Go no
Guia de início rápido da Vertex AI: como usar bibliotecas de cliente.
Para mais informações, consulte a documentação de referência da API Vertex AI para Go.
Para autenticar na Vertex AI, configure as credenciais padrão do aplicativo (Application Default Credentials).
Para mais informações, consulte
Configurar a autenticação para um ambiente de desenvolvimento local.
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"cloud.google.com/go/vertexai/genai"
)
// functionCallsBasic opens a chat session and sends 2 messages to the model:
// - first, to convert a text into a structured function call request
// - second, to convert a structured function call response into natural language
func functionCallsBasic(w io.Writer, prompt, projectID, location, modelName string) error {
	// prompt := "What's the weather like in Boston?"
	// location := "us-central1"
	// modelName := "gemini-1.5-flash-001"
	ctx := context.Background()

	client, err := genai.NewClient(ctx, projectID, location)
	if err != nil {
		return fmt.Errorf("unable to create client: %w", err)
	}
	defer client.Close()

	model := client.GenerativeModel(modelName)

	// Describe the function's single "location" argument with an in-memory
	// OpenAPI schema.
	locationSchema := &genai.Schema{
		Type: genai.TypeObject,
		Properties: map[string]*genai.Schema{
			"location": {
				Type:        genai.TypeString,
				Description: "location",
			},
		},
	}
	weatherDecl := &genai.FunctionDeclaration{
		Name:        "getCurrentWeather",
		Description: "Get the current weather in a given location",
		Parameters:  locationSchema,
	}
	// Expose the declaration to the model as a callable tool.
	model.Tools = []*genai.Tool{
		{FunctionDeclarations: []*genai.FunctionDeclaration{weatherDecl}},
	}

	session := model.StartChat()
	fmt.Fprintf(w, "Question: %s\n", prompt)

	res, err := session.SendMessage(ctx, genai.Text(prompt))
	if err != nil {
		return err
	}
	if len(res.Candidates) == 0 || len(res.Candidates[0].Content.Parts) == 0 {
		return errors.New("empty response from model")
	}

	// The model has returned a function call to the declared function `getCurrentWeather`
	// with a value for the argument `location`.
	payload, err := json.MarshalIndent(res.Candidates[0].Content.Parts[0], "", " ")
	if err != nil {
		return fmt.Errorf("json.MarshalIndent: %w", err)
	}
	fmt.Fprintf(w, "function call generated by the model:\n%s\n\n", string(payload))

	// Fabricate a function call response, standing in for the result of a
	// call to a real service.
	weatherResp := &genai.FunctionResponse{
		Name: "getCurrentWeather",
		Response: map[string]any{
			"currentWeather": "sunny",
		},
	}
	payload, err = json.MarshalIndent(weatherResp, "", " ")
	if err != nil {
		return fmt.Errorf("json.MarshalIndent: %w", err)
	}
	fmt.Fprintf(w, "function call response sent to the model:\n%s\n\n", string(payload))

	// Feed the simulated function result back into the chat so the model can
	// phrase it as a natural-language answer.
	res, err = session.SendMessage(ctx, weatherResp)
	if err != nil {
		return err
	}
	if len(res.Candidates) == 0 || len(res.Candidates[0].Content.Parts) == 0 {
		return errors.New("empty response from model")
	}

	// The model has taken the function call response as input, and has
	// reformulated the response to the user.
	payload, err = json.MarshalIndent(res.Candidates[0].Content.Parts[0], "", " ")
	if err != nil {
		return fmt.Errorf("json.MarshalIndent: %w", err)
	}
	fmt.Fprintf(w, "Answer generated by the model:\n%s\n", string(payload))
	return nil
}
Python
Antes de testar essa amostra, siga as instruções de configuração para Python no
Guia de início rápido da Vertex AI: como usar bibliotecas de cliente.
Para mais informações, consulte a documentação de referência da API Vertex AI para Python.
Para autenticar na Vertex AI, configure as credenciais padrão do aplicativo (Application Default Credentials).
Para mais informações, consulte
Configurar a autenticação para um ambiente de desenvolvimento local.
import vertexai
from vertexai.generative_models import (
Content,
FunctionDeclaration,
GenerationConfig,
GenerativeModel,
Part,
Tool,
)
# Initialize Vertex AI
# TODO(developer): Update and un-comment below lines
# project_id = "PROJECT_ID"
vertexai.init(project=project_id, location="us-central1")

# Initialize Gemini model
model = GenerativeModel(model_name="gemini-1.0-pro-001")

# Declare the function the model may call; its parameters are expressed in
# OpenAPI JSON schema format.
function_name = "get_current_weather"
get_current_weather_func = FunctionDeclaration(
    name=function_name,
    description="Get the current weather in a given location",
    parameters={
        "type": "object",
        "properties": {"location": {"type": "string", "description": "Location"}},
    },
)

# Bundle the declaration into a Tool object the model can be handed.
weather_tool = Tool(function_declarations=[get_current_weather_func])

# Wrap the user's question in a Content object so the same prompt can be
# resent later together with the function-call exchange.
user_prompt_content = Content(
    role="user",
    parts=[Part.from_text("What is the weather like in Boston?")],
)

# Send the prompt and instruct the model to generate content using the Tool
# defined above.
response = model.generate_content(
    user_prompt_content,
    generation_config=GenerationConfig(temperature=0),
    tools=[weather_tool],
)
function_call = response.candidates[0].function_calls[0]
print(function_call)

# Check the function name that the model responded with, and make an API call to an external system
if function_call.name == function_name:
    # Extract the arguments to use in your API call
    location = function_call.args["location"]  # noqa: F841

    # Here you can use your preferred method to make an API request to fetch the current weather, for example:
    # api_response = requests.post(weather_api_url, data={"location": location})

    # In this example, we'll use synthetic data to simulate a response payload from an external API
    api_response = """{ "location": "Boston, MA", "temperature": 38, "description": "Partly Cloudy",
    "icon": "partly-cloudy", "humidity": 65, "wind": { "speed": 10, "direction": "NW" } }"""

    # Return the API response to Gemini so it can generate a model response or request another function call
    response = model.generate_content(
        [
            user_prompt_content,  # User prompt
            response.candidates[0].content,  # Function call response
            Content(
                parts=[
                    Part.from_function_response(
                        name=function_name,
                        response={
                            "content": api_response,  # Return the API response to Gemini
                        },
                    ),
                ],
            ),
        ],
        tools=[weather_tool],
    )

    # Get the model response
    print(response.text)