1 change: 0 additions & 1 deletion .gitignore
@@ -8,7 +8,6 @@
*.so
*.tar.gz
/release/
sample-app

# Test binary, built with `go test -c`
*.test
300 changes: 300 additions & 0 deletions sample-app/generate_joke_workflow_example.go
@@ -0,0 +1,300 @@
package main

import (
"context"
"fmt"
"os"

"github.com/sashabaranov/go-openai"
sdk "github.com/traceloop/go-openllmetry/traceloop-sdk"
)

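// createJoke asks the chat model for an OpenTelemetry joke inside a "joke_creation" task span.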
func createJoke(ctx context.Context, workflow *sdk.Workflow, client *openai.Client) (string, error) {
task := workflow.NewTask("joke_creation")
defer task.End()

// Log prompt
prompt := sdk.Prompt{
Vendor: "openai",
Mode: "chat",
Model: "gpt-3.5-turbo",
Messages: []sdk.Message{
{
Index: 0,
Role: "user",
Content: "Tell me a joke about opentelemetry",
},
},
}

llmSpan, err := task.LogPrompt(prompt)
if err != nil {
return "", fmt.Errorf("LogPrompt error: %w", err)
}

// Make API call
resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
Model: "gpt-3.5-turbo",
Messages: []openai.ChatCompletionMessage{
{
Role: "user",
Content: "Tell me a joke about opentelemetry",
},
},
})
if err != nil {
return "", fmt.Errorf("CreateChatCompletion error: %w", err)
}

// Log completion
var completionMsgs []sdk.Message
for _, choice := range resp.Choices {
completionMsgs = append(completionMsgs, sdk.Message{
Index: choice.Index,
Content: choice.Message.Content,
Role: choice.Message.Role,
})
}

llmSpan.LogCompletion(ctx, sdk.Completion{
Model: resp.Model,
Messages: completionMsgs,
}, sdk.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
})

return resp.Choices[0].Message.Content, nil
}

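// translateJokeToPirate rewrites the joke in pirate speak, recording the step as an agent span
// via LogAgent, then invokes historyJokesTool as a nested tool call in the same workflow.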
func translateJokeToPirate(ctx context.Context, traceloop *sdk.Traceloop, workflow *sdk.Workflow, client *openai.Client, joke string) (string, error) {
// Log prompt
piratePrompt := fmt.Sprintf("Translate the below joke to pirate-like english:\n\n%s", joke)
prompt := sdk.Prompt{
Vendor: "openai",
Mode: "chat",
Model: "gpt-3.5-turbo",
Messages: []sdk.Message{
{
Index: 0,
Role: "user",
Content: piratePrompt,
},
},
}

llmSpan, err := traceloop.LogAgent(ctx, sdk.AgentAttributes{
Name: "joke_translation",
}, prompt, workflow.Attributes)
if err != nil {
return "", fmt.Errorf("LogPrompt error: %w", err)
}

// Make API call
resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
Model: "gpt-3.5-turbo",
Messages: []openai.ChatCompletionMessage{
{
Role: "user",
Content: piratePrompt,
},
},
})
if err != nil {
return "", fmt.Errorf("CreateChatCompletion error: %w", err)
}

// Log completion
var completionMsgs []sdk.Message
for _, choice := range resp.Choices {
completionMsgs = append(completionMsgs, sdk.Message{
Index: choice.Index,
Content: choice.Message.Content,
Role: choice.Message.Role,
})
}

llmSpan.LogCompletion(ctx, sdk.Completion{
Model: resp.Model,
Messages: completionMsgs,
}, sdk.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
})

// Call history jokes tool
_, err = historyJokesTool(ctx, traceloop, workflow, client)
if err != nil {
fmt.Printf("Warning: history_jokes_tool error: %v\n", err)
}

return resp.Choices[0].Message.Content, nil
}

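// historyJokesTool requests history jokes from the chat model and records the call as a tool span via LogToolCall.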
func historyJokesTool(ctx context.Context, traceloop *sdk.Traceloop, workflow *sdk.Workflow, client *openai.Client) (string, error) {
// Log prompt
prompt := sdk.Prompt{
Vendor: "openai",
Mode: "chat",
Model: "gpt-3.5-turbo",
Messages: []sdk.Message{
{
Index: 0,
Role: "user",
Content: "get some history jokes",
},
},
}

llmSpan, err := traceloop.LogToolCall(ctx, sdk.ToolCallAttributes{
Name: "history_jokes",
}, prompt, workflow.Attributes)
if err != nil {
return "", fmt.Errorf("LogPrompt error: %w", err)
}

// Make API call
resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
Model: "gpt-3.5-turbo",
Messages: []openai.ChatCompletionMessage{
{
Role: "user",
Content: "get some history jokes",
},
},
})
if err != nil {
return "", fmt.Errorf("CreateChatCompletion error: %w", err)
}

// Log completion
var completionMsgs []sdk.Message
for _, choice := range resp.Choices {
completionMsgs = append(completionMsgs, sdk.Message{
Index: choice.Index,
Content: choice.Message.Content,
Role: choice.Message.Role,
})
}

llmSpan.LogCompletion(ctx, sdk.Completion{
Model: resp.Model,
Messages: completionMsgs,
}, sdk.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
})

return resp.Choices[0].Message.Content, nil
}

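// generateSignature adds a signature to the joke using the legacy completions API inside a "signature_generation" task span.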
func generateSignature(ctx context.Context, workflow *sdk.Workflow, client *openai.Client, joke string) (string, error) {
task := workflow.NewTask("signature_generation")
defer task.End()

signaturePrompt := "add a signature to the joke:\n\n" + joke

// Log prompt
prompt := sdk.Prompt{
Vendor: "openai",
Mode: "completion",
Model: "davinci-002",
Messages: []sdk.Message{
{
Index: 0,
Role: "user",
Content: signaturePrompt,
},
},
}

llmSpan, err := task.LogPrompt(prompt)
if err != nil {
return "", fmt.Errorf("LogPrompt error: %w", err)
}

// Make API call
resp, err := client.CreateCompletion(ctx, openai.CompletionRequest{
Model: "davinci-002",
Prompt: signaturePrompt,
})
if err != nil {
return "", fmt.Errorf("CreateCompletion error: %w", err)
}

// Log completion
llmSpan.LogCompletion(ctx, sdk.Completion{
Model: resp.Model,
Messages: []sdk.Message{
{
Index: 0,
Role: "assistant",
Content: resp.Choices[0].Text,
},
},
}, sdk.Usage{
TotalTokens: resp.Usage.TotalTokens,
CompletionTokens: resp.Usage.CompletionTokens,
PromptTokens: resp.Usage.PromptTokens,
})

return resp.Choices[0].Text, nil
}

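// runJokeWorkflow wires the steps together under a single Traceloop workflow:
// create a joke, translate it to pirate speak, and sign it.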
func runJokeWorkflow() {
ctx := context.Background()

// Initialize Traceloop SDK
traceloop, err := sdk.NewClient(ctx, sdk.Config{
APIKey: os.Getenv("TRACELOOP_API_KEY"),
})
if err != nil {
fmt.Printf("NewClient error: %v\n", err)
return
}
defer func() { traceloop.Shutdown(ctx) }()

// Create OpenAI client
client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

// Create workflow
wf := traceloop.NewWorkflow(ctx, sdk.WorkflowAttributes{
Name: "go-joke_generator",
AssociationProperties: map[string]string{
"user_id": "user_12345",
"chat_id": "chat_1234",
},
})
defer wf.End()

// Execute workflow steps
fmt.Println("Creating joke...")
engJoke, err := createJoke(ctx, wf, client)
if err != nil {
fmt.Printf("Error creating joke: %v\n", err)
return
}
fmt.Printf("\nEnglish joke:\n%s\n\n", engJoke)

fmt.Println("Translating to pirate...")
pirateJoke, err := translateJokeToPirate(ctx, traceloop, wf, client, engJoke)
if err != nil {
fmt.Printf("Error translating joke: %v\n", err)
return
}
fmt.Printf("\nPirate joke:\n%s\n\n", pirateJoke)

fmt.Println("Generating signature...")
signature, err := generateSignature(ctx, wf, client, pirateJoke)
if err != nil {
fmt.Printf("Error generating signature: %v\n", err)
return
}

// Combine result
result := pirateJoke + "\n\n" + signature
fmt.Printf("\n=== Final Result ===\n%s\n", result)
}
3 changes: 2 additions & 1 deletion sample-app/main.go
@@ -24,7 +24,8 @@ func main() {
}

// Default to the joke workflow example (the prompt registry example is kept below)
workflowExample()
// workflowExample()
runJokeWorkflow()
}

func workflowExample() {
1 change: 1 addition & 0 deletions semconv-ai/attributes.go
@@ -23,6 +23,7 @@ const (
LLMCompletions = attribute.Key("llm.completions")
LLMChatStopSequence = attribute.Key("llm.chat.stop_sequences")
LLMRequestFunctions = attribute.Key("llm.request.functions")
LLMAgentName = attribute.Key("llm.agent.name")

// Vector DB
VectorDBVendor = attribute.Key("vector_db.vendor")
56 changes: 56 additions & 0 deletions traceloop-sdk/sdk.go
@@ -184,6 +184,62 @@ func (llmSpan *LLMSpan) LogCompletion(ctx context.Context, completion Completion
return nil
}

// LogToolCall logs a tool call with the specified name and prompt
func (instance *Traceloop) LogToolCall(ctx context.Context, attrs ToolCallAttributes, prompt Prompt, workflowAttrs WorkflowAttributes) (LLMSpan, error) {
spanName := fmt.Sprintf("%s.tool", attrs.Name)
_, span := instance.getTracer().Start(ctx, spanName)

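// Annotate the span with the prompt metadata plus the Traceloop workflow and tool identity.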
spanAttrs := []attribute.KeyValue{
semconvai.LLMVendor.String(prompt.Vendor),
semconvai.LLMRequestModel.String(prompt.Model),
semconvai.LLMRequestType.String(prompt.Mode),
semconvai.TraceloopWorkflowName.String(workflowAttrs.Name),
semconvai.TraceloopSpanKind.String("tool"),
semconvai.TraceloopEntityName.String(attrs.Name),
}

// Add association properties if provided
for key, value := range workflowAttrs.AssociationProperties {
spanAttrs = append(spanAttrs, attribute.String("traceloop.association.properties."+key, value))
}

span.SetAttributes(spanAttrs...)
setMessagesAttribute(span, "llm.prompts", prompt.Messages)
setToolsAttribute(span, prompt.Tools)

return LLMSpan{
span: span,
}, nil
}

// LogAgent logs an agent invocation with the specified name and prompt
func (instance *Traceloop) LogAgent(ctx context.Context, attrs AgentAttributes, prompt Prompt, workflowAttrs WorkflowAttributes) (LLMSpan, error) {
spanName := fmt.Sprintf("%s.agent", attrs.Name)
_, span := instance.getTracer().Start(ctx, spanName)

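// Annotate the span with the prompt metadata plus the Traceloop workflow and agent identity.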
spanAttrs := []attribute.KeyValue{
semconvai.LLMVendor.String(prompt.Vendor),
semconvai.LLMRequestModel.String(prompt.Model),
semconvai.LLMRequestType.String(prompt.Mode),
semconvai.TraceloopWorkflowName.String(workflowAttrs.Name),
semconvai.TraceloopSpanKind.String("agent"),
semconvai.LLMAgentName.String(attrs.Name),
}

// Add association properties if provided
for key, value := range workflowAttrs.AssociationProperties {
spanAttrs = append(spanAttrs, attribute.String("traceloop.association.properties."+key, value))
}

span.SetAttributes(spanAttrs...)
setMessagesAttribute(span, "llm.prompts", prompt.Messages)
setToolsAttribute(span, prompt.Tools)

return LLMSpan{
span: span,
}, nil
}

func (instance *Traceloop) Shutdown(ctx context.Context) {
if instance.tracerProvider != nil {
instance.tracerProvider.Shutdown(ctx)