Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .changeset/three-loops-occur.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
'posthog-node': minor
'@posthog/ai': minor
'@posthog/core': patch
---

Track LLM analytics (LLMA) trace_id on exceptions and exception_id on traces
1 change: 1 addition & 0 deletions packages/ai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
"@anthropic-ai/sdk": "^0.67.0",
"@google/genai": "^1.29.0",
"@langchain/core": "^1.0.0",
"@posthog/core": "workspace:*",
"ai": "^5.0.87",
"langchain": "^1.0.0",
"openai": "^6.8.1",
Expand Down
12 changes: 5 additions & 7 deletions packages/ai/src/anthropic/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import {
sendEventToPosthog,
extractAvailableToolCalls,
extractPosthogParams,
sendEventWithErrorToPosthog,
} from '../utils'
import type { FormattedContentItem, FormattedTextContent, FormattedFunctionCall, FormattedMessage } from '../types'

Expand Down Expand Up @@ -224,9 +225,8 @@ export class WrappedMessages extends AnthropicOriginal.Messages {
usage,
tools: availableTools,
})
} catch (error: any) {
// error handling
await sendEventToPosthog({
} catch (error: unknown) {
const enrichedError = await sendEventWithErrorToPosthog({
client: this.phClient,
...posthogParams,
model: anthropicParams.model,
Expand All @@ -236,14 +236,13 @@ export class WrappedMessages extends AnthropicOriginal.Messages {
latency: 0,
baseURL: this.baseURL,
params: body,
httpStatus: error?.status ? error.status : 500,
usage: {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
error: error,
})
throw enrichedError
}
})()

Expand Down Expand Up @@ -299,7 +298,6 @@ export class WrappedMessages extends AnthropicOriginal.Messages {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
})
throw error
Expand Down
17 changes: 7 additions & 10 deletions packages/ai/src/gemini/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import {
formatResponseGemini,
extractPosthogParams,
toContentString,
sendEventWithErrorToPosthog,
} from '../utils'
import { sanitizeGemini } from '../sanitization'
import type { TokenUsage, FormattedContent, FormattedContentItem, FormattedMessage } from '../types'
Expand Down Expand Up @@ -86,7 +87,7 @@ export class WrappedModels {
return response
} catch (error: unknown) {
const latency = (Date.now() - startTime) / 1000
await sendEventToPosthog({
const enrichedError = await sendEventWithErrorToPosthog({
client: this.phClient,
...posthogParams,
model: geminiParams.model,
Expand All @@ -96,15 +97,13 @@ export class WrappedModels {
latency,
baseURL: 'https://generativelanguage.googleapis.com',
params: params as GenerateContentParameters & MonitoringParams,
httpStatus: (error as { status?: number })?.status ?? 500,
usage: {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
error: error,
})
throw error
throw enrichedError
}
}

Expand Down Expand Up @@ -212,7 +211,7 @@ export class WrappedModels {
})
} catch (error: unknown) {
const latency = (Date.now() - startTime) / 1000
await sendEventToPosthog({
const enrichedError = await sendEventWithErrorToPosthog({
client: this.phClient,
...posthogParams,
model: geminiParams.model,
Expand All @@ -222,15 +221,13 @@ export class WrappedModels {
latency,
baseURL: 'https://generativelanguage.googleapis.com',
params: params as GenerateContentParameters & MonitoringParams,
httpStatus: (error as { status?: number })?.status ?? 500,
usage: {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
error: error,
})
throw error
throw enrichedError
}
}

Expand Down
37 changes: 14 additions & 23 deletions packages/ai/src/openai/azure.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
import OpenAIOrignal, { AzureOpenAI } from 'openai'
import { PostHog } from 'posthog-node'
import { AIEvent, formatResponseOpenAI, MonitoringParams, sendEventToPosthog, withPrivacyMode } from '../utils'
import {
AIEvent,
formatResponseOpenAI,
MonitoringParams,
sendEventToPosthog,
sendEventWithErrorToPosthog,
withPrivacyMode,
} from '../utils'
import type { APIPromise } from 'openai'
import type { Stream } from 'openai/streaming'
import type { ParsedResponse } from 'openai/resources/responses/responses'
Expand Down Expand Up @@ -220,12 +227,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
usage,
})
} catch (error: unknown) {
const httpStatus =
error && typeof error === 'object' && 'status' in error
? ((error as { status?: number }).status ?? 500)
: 500

await sendEventToPosthog({
const enrichedError = await sendEventWithErrorToPosthog({
client: this.phClient,
...posthogParams,
model: openAIParams.model,
Expand All @@ -235,11 +237,10 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
latency: 0,
baseURL: this.baseURL,
params: body,
httpStatus,
usage: { inputTokens: 0, outputTokens: 0 },
isError: true,
error: JSON.stringify(error),
error: error,
})
throw enrichedError
}
})()

Expand Down Expand Up @@ -295,7 +296,6 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
})
throw error
Expand Down Expand Up @@ -397,12 +397,7 @@ export class WrappedResponses extends AzureOpenAI.Responses {
usage,
})
} catch (error: unknown) {
const httpStatus =
error && typeof error === 'object' && 'status' in error
? ((error as { status?: number }).status ?? 500)
: 500

await sendEventToPosthog({
const enrichedError = await sendEventWithErrorToPosthog({
client: this.phClient,
...posthogParams,
//@ts-expect-error
Expand All @@ -413,11 +408,10 @@ export class WrappedResponses extends AzureOpenAI.Responses {
latency: 0,
baseURL: this.baseURL,
params: body,
httpStatus,
usage: { inputTokens: 0, outputTokens: 0 },
isError: true,
error: JSON.stringify(error),
error: error,
})
throw enrichedError
}
})()

Expand Down Expand Up @@ -474,7 +468,6 @@ export class WrappedResponses extends AzureOpenAI.Responses {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
})
throw error
Expand Down Expand Up @@ -533,7 +526,6 @@ export class WrappedResponses extends AzureOpenAI.Responses {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
})
throw error
Expand Down Expand Up @@ -602,7 +594,6 @@ export class WrappedEmbeddings extends AzureOpenAI.Embeddings {
usage: {
inputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
})
throw error
Expand Down
Loading
Loading