Parallel function calling

This code sample shows how to execute multiple function calls in parallel and return their results to the model so that it can generate a complete response.

Learn more

For detailed documentation that includes this code sample, see the following:

Code sample

Go

Before trying this sample, follow the Go setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Go API reference documentation.

To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.
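
As a minimal sketch, assuming the gcloud CLI is installed, Application Default Credentials for local development can be set up with:

gcloud auth application-default login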

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"

	"cloud.google.com/go/vertexai/genai"
)

// parallelFunctionCalling shows how to execute multiple function calls in parallel
// and return their results to the model for generating a complete response.
func parallelFunctionCalling(w io.Writer, projectID, location, modelName string) error {
	// location = "us-central1"
	// modelName = "gemini-1.5-flash-002"
	ctx := context.Background()
	client, err := genai.NewClient(ctx, projectID, location)
	if err != nil {
		return fmt.Errorf("failed to create GenAI client: %w", err)
	}
	defer client.Close()

	model := client.GenerativeModel(modelName)
	// Set temperature to 0.0 for maximum determinism in function calling.
	model.SetTemperature(0.0)

	funcName := "getCurrentWeather"
	funcDecl := &genai.FunctionDeclaration{
		Name:        funcName,
		Description: "Get the current weather in a given location",
		Parameters: &genai.Schema{
			Type: genai.TypeObject,
			Properties: map[string]*genai.Schema{
				"location": {
					Type: genai.TypeString,
					Description: "The location for which to get the weather. " +
						"It can be a city name, a city name and state, or a zip code. " +
						"Examples: 'San Francisco', 'San Francisco, CA', '95616', etc.",
				},
			},
			Required: []string{"location"},
		},
	}
	// Add the weather function to our model toolbox.
	model.Tools = []*genai.Tool{
		{
			FunctionDeclarations: []*genai.FunctionDeclaration{funcDecl},
		},
	}

	prompt := genai.Text("Get weather details in New Delhi and San Francisco?")
	resp, err := model.GenerateContent(ctx, prompt)

	if err != nil {
		return fmt.Errorf("failed to generate content: %w", err)
	}
	if len(resp.Candidates) == 0 {
		return errors.New("got empty response from model")
	} else if len(resp.Candidates[0].FunctionCalls()) == 0 {
		return errors.New("got no function call suggestions from model")
	}

	// In a production environment, consider adding validations for function names and arguments.
	for _, fnCall := range resp.Candidates[0].FunctionCalls() {
		fmt.Fprintf(w, "The model suggests to call the function %q with args: %v\n", fnCall.Name, fnCall.Args)
		// Example response:
		// The model suggests to call the function "getCurrentWeather" with args: map[location:New Delhi]
		// The model suggests to call the function "getCurrentWeather" with args: map[location:San Francisco]
	}

	// Use synthetic data to simulate responses from the external API.
	// In a real application, this would come from an actual weather API.
	mockAPIResp1, err := json.Marshal(map[string]string{
		"location":         "New Delhi",
		"temperature":      "42",
		"temperature_unit": "C",
		"description":      "Hot and humid",
		"humidity":         "65",
	})
	if err != nil {
		return fmt.Errorf("failed to marshal function response to JSON: %w", err)
	}

	mockAPIResp2, err := json.Marshal(map[string]string{
		"location":         "San Francisco",
		"temperature":      "36",
		"temperature_unit": "F",
		"description":      "Cold and cloudy",
		"humidity":         "N/A",
	})
	if err != nil {
		return fmt.Errorf("failed to marshal function response to JSON: %w", err)
	}

	// Note that the function calls don't have to be chained. We can obtain both responses
	// in parallel and return them to Gemini at once.
	funcResp1 := &genai.FunctionResponse{
		Name: funcName,
		Response: map[string]any{
			"content": mockAPIResp1,
		},
	}
	funcResp2 := &genai.FunctionResponse{
		Name: funcName,
		Response: map[string]any{
			"content": mockAPIResp2,
		},
	}

	// Return both API responses to the model allowing it to complete its response.
	resp, err = model.GenerateContent(ctx, prompt, funcResp1, funcResp2)
	if err != nil {
		return fmt.Errorf("failed to generate content: %w", err)
	}
	if len(resp.Candidates) == 0 || len(resp.Candidates[0].Content.Parts) == 0 {
		return errors.New("got empty response from model")
	}

	fmt.Fprintln(w, resp.Candidates[0].Content.Parts[0])
	// Example response:
	// The weather in New Delhi is hot and humid with a humidity of 65 and a temperature of 42°C. The weather in San Francisco ...

	return nil
}
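
A minimal sketch of how this function might be invoked from a main package; "your-project-id" is a placeholder, and the location and model name match the values suggested in the comments above:

package main

import (
	"log"
	"os"
)

func main() {
	// "your-project-id" is a placeholder; replace it with your Google Cloud project ID.
	err := parallelFunctionCalling(os.Stdout, "your-project-id", "us-central1", "gemini-1.5-flash-002")
	if err != nil {
		log.Fatal(err)
	}
}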

Python

Before trying this sample, follow the Python setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Python API reference documentation.

To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.
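
As a minimal sketch, the vertexai module used below is distributed as part of the google-cloud-aiplatform package and can be installed with pip:

pip install --upgrade google-cloud-aiplatform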

import vertexai

from vertexai.generative_models import (
    FunctionDeclaration,
    GenerativeModel,
    Part,
    Tool,
)

# TODO(developer): Update & uncomment below line
# PROJECT_ID = "your-project-id"

# Initialize Vertex AI
vertexai.init(project=PROJECT_ID, location="us-central1")

# Specify a function declaration and parameters for an API request
function_name = "get_current_weather"
get_current_weather_func = FunctionDeclaration(
    name=function_name,
    description="Get the current weather in a given location",
    parameters={
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The location for which to get the weather. \
                  It can be a city name, a city name and state, or a zip code. \
                  Examples: 'San Francisco', 'San Francisco, CA', '95616', etc.",
            },
        },
    },
)

# In this example, we'll use synthetic data to simulate a response payload from an external API
def mock_weather_api_service(location: str) -> str:
    temperature = 25 if location == "San Francisco" else 35
    return f"""{{ "location": "{location}", "temperature": {temperature}, "unit": "C" }}"""

# Define a tool that includes the above function
tools = Tool(
    function_declarations=[get_current_weather_func],
)

# Initialize Gemini model
model = GenerativeModel(
    model_name="gemini-1.5-pro-002",
    tools=[tools],
)

# Start a chat session
chat_session = model.start_chat()
response = chat_session.send_message(
    "Get weather details in New Delhi and San Francisco?"
)

function_calls = response.candidates[0].function_calls
print("Suggested finction calls:\n", function_calls)

if function_calls:
    api_responses = []
    for func in function_calls:
        if func.name == function_name:
            api_responses.append(
                {
                    "content": mock_weather_api_service(
                        location=func.args["location"]
                    )
                }
            )

    # Return the API response to Gemini
    response = chat_session.send_message(
        [
            Part.from_function_response(
                name="get_current_weather",
                response=api_responses[0],
            ),
            Part.from_function_response(
                name="get_current_weather",
                response=api_responses[1],
            ),
        ],
    )

    print(response.text)
    # Example response:
    # The current weather in New Delhi is 35°C. The current weather in San Francisco is 25°C.
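
The sample above returns exactly two function responses by index. As a sketch under the same assumptions (the api_responses list is populated in the same order as the model's suggested calls), the response parts can also be built programmatically so the code handles any number of parallel calls:

# Build one function response part per suggested call, in order.
response_parts = [
    Part.from_function_response(
        name=function_name,
        response=api_response,
    )
    for api_response in api_responses
]
response = chat_session.send_message(response_parts)
print(response.text)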

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.