googleai-tool-call-example.go

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"
	"os"
	"strings"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/googleai"
)

func main() {
	genaiKey := os.Getenv("GOOGLE_API_KEY")
	if genaiKey == "" {
		log.Fatal("please set GOOGLE_API_KEY")
	}

	ctx := context.Background()
	llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey))
	if err != nil {
		log.Fatal(err)
	}

	// Start by sending an initial question about the weather to the model,
	// adding "available tools" that include a getCurrentWeather function.
	// Throughout this sample, messageHistory collects the conversation history
	// with the model - this context is needed to ensure tool calling works
	// properly.
	messageHistory := []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeHuman, "What is the weather like in Chicago?"),
	}

	resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(availableTools))
	if err != nil {
		log.Fatal(err)
	}
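
	// At this point the model is expected to request the getCurrentWeather tool
	// rather than return a plain-text answer.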

	// Translate the model's response into a MessageContent element that can be
	// added to messageHistory.
	respchoice := resp.Choices[0]
	assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respchoice.Content)
	for _, tc := range respchoice.ToolCalls {
		assistantResponse.Parts = append(assistantResponse.Parts, tc)
	}
	messageHistory = append(messageHistory, assistantResponse)

	// "Execute" tool calls by calling the requested function.
	for _, tc := range respchoice.ToolCalls {
		switch tc.FunctionCall.Name {
		case "getCurrentWeather":
			var args struct {
				Location string `json:"location"`
			}
			if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
				log.Fatal(err)
			}
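			// The simulated weather lookup in this example only returns data for
			// Chicago; a real tool would query an actual weather service here.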
			if strings.Contains(args.Location, "Chicago") {
				toolResponse := llms.MessageContent{
					Role: llms.ChatMessageTypeTool,
					Parts: []llms.ContentPart{
						llms.ToolCallResponse{
							Name:    tc.FunctionCall.Name,
							Content: "64 and sunny",
						},
					},
				}
				messageHistory = append(messageHistory, toolResponse)
			}
		default:
			log.Fatalf("got unexpected function call: %v", tc.FunctionCall.Name)
		}
	}
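
	// Call the model again with the updated message history, which now includes
	// the tool response, so it can formulate a final natural-language answer.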
	resp, err = llm.GenerateContent(ctx, messageHistory, llms.WithTools(availableTools))
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("Response after tool call:")
	b, _ := json.MarshalIndent(resp.Choices[0], " ", " ")
	fmt.Println(string(b))
}

// availableTools simulates the tools/functions we're making available for
// the model.
var availableTools = []llms.Tool{
	{
		Type: "function",
		Function: &llms.FunctionDefinition{
			Name:        "getCurrentWeather",
			Description: "Get the current weather in a given location",
			Parameters: map[string]any{
				"type": "object",
				"properties": map[string]any{
					"location": map[string]any{
						"type":        "string",
						"description": "The city and state, e.g. San Francisco, CA",
					},
				},
				"required": []string{"location"},
			},
		},
	},
}