Commit e870501

feat(logs): Add tool call, agent logs (#24)
1 parent 00070b0 commit e870501

16 files changed: +1010 / -77 lines

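Before the per-file diffs, a condensed sketch of the agent and tool span logging this commit introduces, distilled from the new sample file below. The helper name logAgentAndTool, the elided LLM calls, and the mostly-empty Completion/Usage literals are illustrative only; the SDK calls themselves (NewAgent, NewTool, LogPrompt, LogCompletion) are the ones the sample exercises.

// Condensed sketch (assumes workflow came from traceloop.NewWorkflow and
// prompt is an sdk.Prompt, as in the sample below).
func logAgentAndTool(ctx context.Context, workflow *sdk.Workflow, prompt sdk.Prompt) {
    // An agent span grouping related LLM calls, with association properties.
    agent := workflow.NewAgent("joke_translation", map[string]string{
        "translation_type": "pirate",
    })
    defer agent.End()

    // Prompt/completion pair logged on the agent span.
    agentSpan := agent.LogPrompt(prompt)
    // ... call the LLM here ...
    agentSpan.LogCompletion(ctx, sdk.Completion{Model: "gpt-3.5-turbo"}, sdk.Usage{})

    // A tool span nested under the agent.
    tool := agent.NewTool("history_jokes", "function", sdk.ToolFunction{
        Name:        "history_jokes",
        Description: "Get some history jokes",
        Parameters:  map[string]interface{}{},
    }, map[string]string{"user_id": "user_12345"})
    defer tool.End()

    toolSpan := tool.LogPrompt(prompt)
    // ... call the LLM here ...
    toolSpan.LogCompletion(ctx, sdk.Completion{Model: "gpt-3.5-turbo"}, sdk.Usage{})
}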

.gitignore

Lines changed: 0 additions & 1 deletion
@@ -8,7 +8,6 @@
 *.so
 *.tar.gz
 /release/
-sample-app

 # Test binary, built with `go test -c`
 *.test
Lines changed: 297 additions & 0 deletions
@@ -0,0 +1,297 @@
package main

import (
    "context"
    "fmt"
    "os"

    "github.com/sashabaranov/go-openai"
    sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
)

func createJoke(ctx context.Context, workflow *sdk.Workflow, client *openai.Client) (string, error) {
    task := workflow.NewTask("joke_creation")
    defer task.End()

    // Log prompt
    prompt := sdk.Prompt{
        Vendor: "openai",
        Mode:   "chat",
        Model:  "gpt-3.5-turbo",
        Messages: []sdk.Message{
            {
                Index:   0,
                Role:    "user",
                Content: "Tell me a joke about opentelemetry",
            },
        },
    }

    llmSpan := task.LogPrompt(prompt)

    // Make API call
    resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
        Model: "gpt-3.5-turbo",
        Messages: []openai.ChatCompletionMessage{
            {
                Role:    "user",
                Content: "Tell me a joke about opentelemetry",
            },
        },
    })
    if err != nil {
        return "", fmt.Errorf("CreateChatCompletion error: %w", err)
    }

    // Log completion
    var completionMsgs []sdk.Message
    for _, choice := range resp.Choices {
        completionMsgs = append(completionMsgs, sdk.Message{
            Index:   choice.Index,
            Content: choice.Message.Content,
            Role:    choice.Message.Role,
        })
    }

    llmSpan.LogCompletion(ctx, sdk.Completion{
        Model:    resp.Model,
        Messages: completionMsgs,
    }, sdk.Usage{
        TotalTokens:      resp.Usage.TotalTokens,
        CompletionTokens: resp.Usage.CompletionTokens,
        PromptTokens:     resp.Usage.PromptTokens,
    })

    return resp.Choices[0].Message.Content, nil
}

func translateJokeToPirate(ctx context.Context, workflow *sdk.Workflow, client *openai.Client, joke string) (string, error) {
    // Log prompt
    piratePrompt := fmt.Sprintf("Translate the below joke to pirate-like english:\n\n%s", joke)
    prompt := sdk.Prompt{
        Vendor: "openai",
        Mode:   "chat",
        Model:  "gpt-3.5-turbo",
        Messages: []sdk.Message{
            {
                Index:   0,
                Role:    "user",
                Content: piratePrompt,
            },
        },
    }

    agent := workflow.NewAgent("joke_translation", map[string]string{
        "translation_type": "pirate",
    })
    defer agent.End()

    llmSpan := agent.LogPrompt(prompt)

    // Make API call
    resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
        Model: "gpt-3.5-turbo",
        Messages: []openai.ChatCompletionMessage{
            {
                Role:    "user",
                Content: piratePrompt,
            },
        },
    })
    if err != nil {
        return "", fmt.Errorf("CreateChatCompletion error: %w", err)
    }

    // Log completion
    var completionMsgs []sdk.Message
    for _, choice := range resp.Choices {
        completionMsgs = append(completionMsgs, sdk.Message{
            Index:   choice.Index,
            Content: choice.Message.Content,
            Role:    choice.Message.Role,
        })
    }

    llmSpan.LogCompletion(ctx, sdk.Completion{
        Model:    resp.Model,
        Messages: completionMsgs,
    }, sdk.Usage{
        TotalTokens:      resp.Usage.TotalTokens,
        CompletionTokens: resp.Usage.CompletionTokens,
        PromptTokens:     resp.Usage.PromptTokens,
    })

    // Call history jokes tool
    _, err = historyJokesTool(ctx, agent, client)
    if err != nil {
        fmt.Printf("Warning: history_jokes_tool error: %v\n", err)
    }

    return resp.Choices[0].Message.Content, nil
}

func historyJokesTool(ctx context.Context, agent *sdk.Agent, client *openai.Client) (string, error) {
    // Log prompt
    prompt := sdk.Prompt{
        Vendor: "openai",
        Mode:   "chat",
        Model:  "gpt-3.5-turbo",
        Messages: []sdk.Message{
            {
                Index:   0,
                Role:    "user",
                Content: "get some history jokes",
            },
        },
    }

    tool := agent.NewTool("history_jokes", "function", sdk.ToolFunction{
        Name:        "history_jokes",
        Description: "Get some history jokes",
        Parameters:  map[string]interface{}{},
    }, map[string]string{
        "user_id": "user_12345",
    })
    defer tool.End()

    llmSpan := tool.LogPrompt(prompt)

    // Make API call
    resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
        Model: "gpt-3.5-turbo",
        Messages: []openai.ChatCompletionMessage{
            {
                Role:    "user",
                Content: "get some history jokes",
            },
        },
    })
    if err != nil {
        return "", fmt.Errorf("CreateChatCompletion error: %w", err)
    }

    // Log completion
    var completionMsgs []sdk.Message
    for _, choice := range resp.Choices {
        completionMsgs = append(completionMsgs, sdk.Message{
            Index:   choice.Index,
            Content: choice.Message.Content,
            Role:    choice.Message.Role,
        })
    }

    llmSpan.LogCompletion(ctx, sdk.Completion{
        Model:    resp.Model,
        Messages: completionMsgs,
    }, sdk.Usage{
        TotalTokens:      resp.Usage.TotalTokens,
        CompletionTokens: resp.Usage.CompletionTokens,
        PromptTokens:     resp.Usage.PromptTokens,
    })

    return resp.Choices[0].Message.Content, nil
}

func generateSignature(ctx context.Context, workflow *sdk.Workflow, client *openai.Client, joke string) (string, error) {
    task := workflow.NewTask("signature_generation")
    defer task.End()

    signaturePrompt := "add a signature to the joke:\n\n" + joke

    // Log prompt
    prompt := sdk.Prompt{
        Vendor: "openai",
        Mode:   "completion",
        Model:  "davinci-002",
        Messages: []sdk.Message{
            {
                Index:   0,
                Role:    "user",
                Content: signaturePrompt,
            },
        },
    }

    llmSpan := task.LogPrompt(prompt)

    // Make API call
    resp, err := client.CreateCompletion(ctx, openai.CompletionRequest{
        Model:  "davinci-002",
        Prompt: signaturePrompt,
    })
    if err != nil {
        return "", fmt.Errorf("CreateCompletion error: %w", err)
    }

    // Log completion
    llmSpan.LogCompletion(ctx, sdk.Completion{
        Model: resp.Model,
        Messages: []sdk.Message{
            {
                Index:   0,
                Role:    "assistant",
                Content: resp.Choices[0].Text,
            },
        },
    }, sdk.Usage{
        TotalTokens:      resp.Usage.TotalTokens,
        CompletionTokens: resp.Usage.CompletionTokens,
        PromptTokens:     resp.Usage.PromptTokens,
    })

    return resp.Choices[0].Text, nil
}

func runJokeWorkflow() {
    ctx := context.Background()

    // Initialize Traceloop SDK
    traceloop, err := sdk.NewClient(ctx, sdk.Config{
        APIKey: os.Getenv("TRACELOOP_API_KEY"),
    })
    if err != nil {
        fmt.Printf("NewClient error: %v\n", err)
        return
    }
    defer func() { traceloop.Shutdown(ctx) }()

    client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

    // Create workflow
    wf := traceloop.NewWorkflow(ctx, sdk.WorkflowAttributes{
        Name: "go-joke_generator",
        AssociationProperties: map[string]string{
            "user_id": "user_12345",
            "chat_id": "chat_1234",
        },
    })
    defer wf.End()

    // Execute workflow steps
    fmt.Println("Creating joke...")
    engJoke, err := createJoke(ctx, wf, client)
    if err != nil {
        fmt.Printf("Error creating joke: %v\n", err)
        return
    }
    fmt.Printf("\nEnglish joke:\n%s\n\n", engJoke)

    fmt.Println("Translating to pirate...")
    pirateJoke, err := translateJokeToPirate(ctx, wf, client, engJoke)
    if err != nil {
        fmt.Printf("Error translating joke: %v\n", err)
        return
    }
    fmt.Printf("\nPirate joke:\n%s\n\n", pirateJoke)

    fmt.Println("Generating signature...")
    signature, err := generateSignature(ctx, wf, client, pirateJoke)
    if err != nil {
        fmt.Printf("Error generating signature: %v\n", err)
        return
    }

    // Combine result
    result := pirateJoke + "\n\n" + signature
    fmt.Printf("\n=== Final Result ===\n%s\n", result)
}

sample-app/main.go

Lines changed: 17 additions & 10 deletions
@@ -22,7 +22,17 @@ func main() {
         runToolCallingExample()
         return
     }
-
+
+    if len(os.Args) > 1 && os.Args[1] == "recipe-agent" {
+        runRecipeAgent()
+        return
+    }
+
+    if len(os.Args) > 1 && os.Args[1] == "joke-workflow" {
+        runJokeWorkflow()
+        return
+    }
+
     // Default to workflow example using prompt registry
     workflowExample()
 }
@@ -61,16 +71,17 @@ func workflowExample() {
     }
 
     // Log the prompt
-    llmSpan, err := traceloop.LogPrompt(
+    workflowName := "example-workflow"
+    llmSpan := traceloop.LogPrompt(
         ctx,
         sdk.Prompt{
             Vendor:   "openai",
             Mode:     "chat",
             Model:    request.Model,
             Messages: promptMsgs,
         },
-        sdk.WorkflowAttributes{
-            Name: "example-workflow",
+        sdk.ContextAttributes{
+            WorkflowName: &workflowName,
             AssociationProperties: map[string]string{
                 "user_id": "demo-user",
             },
@@ -103,18 +114,14 @@ func workflowExample() {
     }
 
     // Log the completion
-    err = llmSpan.LogCompletion(ctx, sdk.Completion{
+    llmSpan.LogCompletion(ctx, sdk.Completion{
         Model:    resp.Model,
         Messages: completionMsgs,
     }, sdk.Usage{
         TotalTokens:      resp.Usage.TotalTokens,
         CompletionTokens: resp.Usage.CompletionTokens,
         PromptTokens:     resp.Usage.PromptTokens,
     })
-    if err != nil {
-        fmt.Printf("LogCompletion error: %v\n", err)
-        return
-    }
 
     fmt.Println(resp.Choices[0].Message.Content)
-}
+}
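With the dispatch added to main.go above, the new sample is selected by a command-line argument. Assuming the sample app is run from its module directory and the keys referenced in the new file are exported, an invocation would look something like:

    export TRACELOOP_API_KEY=<your-traceloop-key>
    export OPENAI_API_KEY=<your-openai-key>
    go run . joke-workflow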
