Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
222 changes: 222 additions & 0 deletions packages/ai/src/openai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ const Chat = OpenAIOrignal.Chat
const Completions = Chat.Completions
const Responses = OpenAIOrignal.Responses
const Embeddings = OpenAIOrignal.Embeddings
const Audio = OpenAIOrignal.Audio
const Transcriptions = OpenAIOrignal.Audio.Transcriptions

type ChatCompletion = OpenAIOrignal.ChatCompletion
type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
Expand All @@ -46,6 +48,7 @@ export class PostHogOpenAI extends OpenAIOrignal {
public chat: WrappedChat
public responses: WrappedResponses
public embeddings: WrappedEmbeddings
public audio: WrappedAudio

constructor(config: MonitoringOpenAIConfig) {
const { posthog, ...openAIConfig } = config
Expand All @@ -54,6 +57,7 @@ export class PostHogOpenAI extends OpenAIOrignal {
this.chat = new WrappedChat(this, this.phClient)
this.responses = new WrappedResponses(this, this.phClient)
this.embeddings = new WrappedEmbeddings(this, this.phClient)
this.audio = new WrappedAudio(this, this.phClient)
}
}

Expand Down Expand Up @@ -651,6 +655,224 @@ export class WrappedEmbeddings extends Embeddings {
}
}

/**
 * Drop-in replacement for the OpenAI `Audio` namespace that routes the
 * `transcriptions` sub-resource through the PostHog-instrumented wrapper.
 * All other audio sub-resources behave exactly as in the base SDK.
 */
export class WrappedAudio extends Audio {
  public transcriptions: WrappedTranscriptions

  constructor(parentClient: PostHogOpenAI, phClient: PostHog) {
    super(parentClient)
    // Swap in the instrumented transcriptions client so every
    // `audio.transcriptions.create(...)` call is captured in PostHog.
    this.transcriptions = new WrappedTranscriptions(parentClient, phClient)
  }
}

export class WrappedTranscriptions extends Transcriptions {
private readonly phClient: PostHog
private readonly baseURL: string

constructor(client: OpenAIOrignal, phClient: PostHog) {
super(client)
this.phClient = phClient
this.baseURL = client.baseURL
}

// --- Overload #1: Non-streaming
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming<'json' | undefined> &
MonitoringParams,
options?: RequestOptions
): APIPromise<OpenAIOrignal.Audio.Transcriptions.Transcription>

// --- Overload #2: Non-streaming
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming<'verbose_json'> & MonitoringParams,
options?: RequestOptions
): APIPromise<OpenAIOrignal.Audio.Transcriptions.TranscriptionVerbose>

// --- Overload #3: Non-streaming
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming<'srt' | 'vtt' | 'text'> &
MonitoringParams,
options?: RequestOptions
): APIPromise<string>

// --- Overload #4: Non-streaming
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming,
options?: RequestOptions
): APIPromise<OpenAIOrignal.Audio.Transcriptions.Transcription>

// --- Overload #5: Streaming
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParamsStreaming & MonitoringParams,
options?: RequestOptions
): APIPromise<Stream<OpenAIOrignal.Audio.Transcriptions.TranscriptionStreamEvent>>

// --- Overload #6: Streaming
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParamsStreaming & MonitoringParams,
options?: RequestOptions
): APIPromise<
| OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateResponse
| string
| Stream<OpenAIOrignal.Audio.Transcriptions.TranscriptionStreamEvent>
>

// --- Overload #7: Generic base
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParams & MonitoringParams,
options?: RequestOptions
): APIPromise<
| OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateResponse
| string
| Stream<OpenAIOrignal.Audio.Transcriptions.TranscriptionStreamEvent>
>

// --- Implementation Signature
public create(
body: OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParams & MonitoringParams,
options?: RequestOptions
): APIPromise<
| OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateResponse
| string
| Stream<OpenAIOrignal.Audio.Transcriptions.TranscriptionStreamEvent>
> {
const { providerParams: openAIParams, posthogParams } =
extractPosthogParams<OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParams>(body)
const startTime = Date.now()

const parentPromise = openAIParams.stream
? super.create(openAIParams, options)
: super.create(openAIParams, options)
Comment on lines +742 to +744
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

style: redundant ternary - both branches call the same function

Suggested change
const parentPromise = openAIParams.stream
? super.create(openAIParams, options)
: super.create(openAIParams, options)
const parentPromise = super.create(openAIParams, options)
Prompt To Fix With AI
This is a comment left during a code review.
Path: packages/ai/src/openai/index.ts
Line: 742:744

Comment:
**style:** redundant ternary - both branches call the same function

```suggestion
    const parentPromise = super.create(openAIParams, options)
```

How can I resolve this? If you propose a fix, please make it concise.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is neccessary for TypeScript to figure out which overloaded super.create function to call, and removing this will result in a typescript error:

Argument of type 'TranscriptionCreateParams' is not assignable to parameter of type 'TranscriptionCreateParamsStreaming'.
      Type 'TranscriptionCreateParamsNonStreaming<AudioResponseFormat | undefined>' is not assignable to type 'TranscriptionCreateParamsStreaming'.
        Types of property 'stream' are incompatible.
          Type 'false | null | undefined' is not assignable to type 'true'.
            Type 'undefined' is not assignable to type 'true'.ts(2769)


if (openAIParams.stream) {
return parentPromise.then((value) => {
if ('tee' in value && typeof (value as any).tee === 'function') {
const [stream1, stream2] = (value as any).tee()
;(async () => {
try {
let finalContent: string = ''
let usage: {
inputTokens?: number
outputTokens?: number
} = {
inputTokens: 0,
outputTokens: 0,
}

const doneEvent: OpenAIOrignal.Audio.Transcriptions.TranscriptionTextDoneEvent['type'] =
'transcript.text.done'
for await (const chunk of stream1) {
if (chunk.type === doneEvent && 'text' in chunk && chunk.text && chunk.text.length > 0) {
finalContent = chunk.text
}
if ('usage' in chunk && chunk.usage) {
usage = {
inputTokens: chunk.usage?.type === 'tokens' ? (chunk.usage.input_tokens ?? 0) : 0,
outputTokens: chunk.usage?.type === 'tokens' ? (chunk.usage.output_tokens ?? 0) : 0,
}
}
}

const latency = (Date.now() - startTime) / 1000
const availableTools = extractAvailableToolCalls('openai', openAIParams)
await sendEventToPosthog({
client: this.phClient,
...posthogParams,
model: openAIParams.model,
provider: 'openai',
input: openAIParams.prompt,
output: finalContent,
latency,
baseURL: this.baseURL,
params: body,
httpStatus: 200,
usage,
tools: availableTools,
})
} catch (error: unknown) {
const httpStatus =
error && typeof error === 'object' && 'status' in error
? ((error as { status?: number }).status ?? 500)
: 500

await sendEventToPosthog({
client: this.phClient,
...posthogParams,
model: openAIParams.model,
provider: 'openai',
input: openAIParams.prompt,
output: [],
latency: 0,
baseURL: this.baseURL,
params: body,
httpStatus,
usage: { inputTokens: 0, outputTokens: 0 },
isError: true,
error: JSON.stringify(error),
})
}
})()

return stream2
}
return value
}) as APIPromise<Stream<OpenAIOrignal.Audio.Transcriptions.TranscriptionStreamEvent>>
} else {
const wrappedPromise = parentPromise.then(
async (result) => {
if ('text' in result) {
const latency = (Date.now() - startTime) / 1000
await sendEventToPosthog({
client: this.phClient,
...posthogParams,
model: String(openAIParams.model ?? ''),
provider: 'openai',
input: openAIParams.prompt,
output: result.text,
latency,
baseURL: this.baseURL,
params: body,
httpStatus: 200,
usage: {
inputTokens: result.usage?.type === 'tokens' ? (result.usage.input_tokens ?? 0) : 0,
outputTokens: result.usage?.type === 'tokens' ? (result.usage.output_tokens ?? 0) : 0,
},
})
return result
}
},
Comment on lines +820 to +842
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

logic: function returns undefined when result doesn't have text property (e.g., 'srt', 'vtt', 'text' response formats)

Suggested change
const wrappedPromise = parentPromise.then(
async (result) => {
if ('text' in result) {
const latency = (Date.now() - startTime) / 1000
await sendEventToPosthog({
client: this.phClient,
...posthogParams,
model: String(openAIParams.model ?? ''),
provider: 'openai',
input: openAIParams.prompt,
output: result.text,
latency,
baseURL: this.baseURL,
params: body,
httpStatus: 200,
usage: {
inputTokens: result.usage?.type === 'tokens' ? (result.usage.input_tokens ?? 0) : 0,
outputTokens: result.usage?.type === 'tokens' ? (result.usage.output_tokens ?? 0) : 0,
},
})
return result
}
},
const wrappedPromise = parentPromise.then(
async (result) => {
if ('text' in result) {
const latency = (Date.now() - startTime) / 1000
await sendEventToPosthog({
client: this.phClient,
...posthogParams,
model: String(openAIParams.model ?? ''),
provider: 'openai',
input: openAIParams.prompt,
output: result.text,
latency,
baseURL: this.baseURL,
params: body,
httpStatus: 200,
usage: {
inputTokens: result.usage?.type === 'tokens' ? (result.usage.input_tokens ?? 0) : 0,
outputTokens: result.usage?.type === 'tokens' ? (result.usage.output_tokens ?? 0) : 0,
},
})
}
return result
},
Prompt To Fix With AI
This is a comment left during a code review.
Path: packages/ai/src/openai/index.ts
Line: 820:842

Comment:
**logic:** function returns `undefined` when result doesn't have `text` property (e.g., 'srt', 'vtt', 'text' response formats)

```suggestion
      const wrappedPromise = parentPromise.then(
        async (result) => {
          if ('text' in result) {
            const latency = (Date.now() - startTime) / 1000
            await sendEventToPosthog({
              client: this.phClient,
              ...posthogParams,
              model: String(openAIParams.model ?? ''),
              provider: 'openai',
              input: openAIParams.prompt,
              output: result.text,
              latency,
              baseURL: this.baseURL,
              params: body,
              httpStatus: 200,
              usage: {
                inputTokens: result.usage?.type === 'tokens' ? (result.usage.input_tokens ?? 0) : 0,
                outputTokens: result.usage?.type === 'tokens' ? (result.usage.output_tokens ?? 0) : 0,
              },
            })
          }
          return result
        },
```

How can I resolve this? If you propose a fix, please make it concise.

async (error: unknown) => {
const httpStatus =
error && typeof error === 'object' && 'status' in error
? ((error as { status?: number }).status ?? 500)
: 500

await sendEventToPosthog({
client: this.phClient,
...posthogParams,
model: String(openAIParams.model ?? ''),
provider: 'openai',
input: openAIParams.prompt,
output: [],
latency: 0,
baseURL: this.baseURL,
params: body,
httpStatus,
usage: {
inputTokens: 0,
outputTokens: 0,
},
isError: true,
error: JSON.stringify(error),
})
throw error
}
) as APIPromise<OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateResponse>

return wrappedPromise
}
}
}

export default PostHogOpenAI

// Named alias so consumers can `import { OpenAI }` as a drop-in replacement
// for the upstream SDK's export name.
export { PostHogOpenAI as OpenAI }
3 changes: 3 additions & 0 deletions packages/ai/src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatComplet
type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams
type ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams
type EmbeddingCreateParams = OpenAIOrignal.EmbeddingCreateParams
type TranscriptionCreateParams = OpenAIOrignal.Audio.Transcriptions.TranscriptionCreateParams
type AnthropicTool = AnthropicOriginal.Tool

// limit large outputs by truncating to 200kb (approx 200k bytes)
Expand Down Expand Up @@ -77,6 +78,7 @@ export const getModelParams = (
| ResponseCreateParams
| ResponseCreateParamsWithTools
| EmbeddingCreateParams
| TranscriptionCreateParams
) &
MonitoringParams)
| null
Expand Down Expand Up @@ -402,6 +404,7 @@ export type SendEventToPosthogParams = {
| ResponseCreateParams
| ResponseCreateParamsWithTools
| EmbeddingCreateParams
| TranscriptionCreateParams
) &
MonitoringParams
isError?: boolean
Expand Down