Commit af745f8
🤖 fix: emit stream-error for pre-stream API failures
When sendMessage failed before the stream started (e.g., invalid model, missing API key), the error was shown only as a toast, which could easily be missed. The user would see their message in the chat but no response, with no indication of what went wrong.

Now, when streamWithHistory fails, we emit a stream-error chat event so the error is visible directly in the conversation. This provides clear feedback when:

- Model validation fails (invalid format, non-existent model)
- The API key is not configured
- The provider is not supported
- Any other pre-stream error occurs

The toast is still shown for immediate feedback, but the stream-error ensures the error is also visible in the chat history.

_Generated with mux_
1 parent b2e8690 commit af745f8

2 files changed: +65 −6
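For orientation before the diff: the new `emitStreamError` helper below constructs a `StreamErrorMessage`. A plausible sketch of that type, inferred only from the fields the commit assigns (the real definition lives elsewhere in the codebase and may differ):

```typescript
// Hypothetical sketch of StreamErrorMessage, inferred from the fields the diff
// below assigns; the actual type is defined elsewhere in the codebase.
interface StreamErrorMessage {
  type: "stream-error";
  // Synthetic for pre-stream failures, since no assistant message was created yet
  messageId: string;
  // Human-readable message produced by formatSendMessageError
  error: string;
  // Machine-readable category, also produced by formatSendMessageError
  errorType: string;
}
```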

src/node/services/agentSession.ts: 41 additions & 6 deletions

```diff
@@ -18,7 +18,10 @@ import type {
   ImagePart,
 } from "@/common/orpc/types";
 import type { SendMessageError } from "@/common/types/errors";
-import { createUnknownSendMessageError } from "@/node/services/utils/sendMessageError";
+import {
+  createUnknownSendMessageError,
+  formatSendMessageError,
+} from "@/node/services/utils/sendMessageError";
 import type { Result } from "@/common/types/result";
 import { Ok, Err } from "@/common/types/result";
 import { enforceThinkingPolicy } from "@/browser/utils/thinking/policy";
@@ -427,9 +430,11 @@ export class AgentSession {
     }
 
     if (!options?.model || options.model.trim().length === 0) {
-      return Err(
-        createUnknownSendMessageError("No model specified. Please select a model using /model.")
+      const error = createUnknownSendMessageError(
+        "No model specified. Please select a model using /model."
       );
+      this.emitStreamError(error);
+      return Err(error);
     }
 
     return this.streamWithHistory(options.model, options);
@@ -483,12 +488,16 @@
   ): Promise<Result<void, SendMessageError>> {
     const commitResult = await this.partialService.commitToHistory(this.workspaceId);
     if (!commitResult.success) {
-      return Err(createUnknownSendMessageError(commitResult.error));
+      const error = createUnknownSendMessageError(commitResult.error);
+      this.emitStreamError(error);
+      return Err(error);
     }
 
     const historyResult = await this.historyService.getHistory(this.workspaceId);
     if (!historyResult.success) {
-      return Err(createUnknownSendMessageError(historyResult.error));
+      const error = createUnknownSendMessageError(historyResult.error);
+      this.emitStreamError(error);
+      return Err(error);
     }
 
     // Enforce thinking policy for the specified model (single source of truth)
@@ -497,7 +506,7 @@
       ? enforceThinkingPolicy(modelString, options.thinkingLevel)
       : undefined;
 
-    return this.aiService.streamMessage(
+    const result = await this.aiService.streamMessage(
       historyResult.data,
       this.workspaceId,
       modelString,
@@ -509,6 +518,32 @@
       options?.providerOptions,
       options?.mode
     );
+
+    // If stream failed to start, emit a stream-error so the user sees the error in the chat UI
+    // (not just a toast which might be missed). The error is already displayed in the chat via
+    // stream-error handling, which shows up as an error message in the conversation.
+    if (!result.success) {
+      this.emitStreamError(result.error);
+    }
+
+    return result;
+  }
+
+  /**
+   * Emit a stream-error chat event for pre-stream failures.
+   * This ensures errors that occur before stream-start are visible in the chat UI,
+   * not just as a toast that might be dismissed or missed.
+   */
+  private emitStreamError(error: SendMessageError): void {
+    const { message, errorType } = formatSendMessageError(error);
+    const streamError: StreamErrorMessage = {
+      type: "stream-error",
+      // Use a synthetic messageId since no assistant message was created
+      messageId: `error-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`,
+      error: message,
+      errorType,
+    };
+    this.emitChatEvent(streamError);
   }
 
   private attachAiListeners(): void {
```
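The commit imports `formatSendMessageError` but its implementation is not part of the diff. As a minimal sketch, assuming a discriminated-union `SendMessageError` whose variants roughly match the failure scenarios named in the commit message (all variant and field names here are hypothetical, not the real union from `@/common/types/errors`):

```typescript
// Hypothetical sketch only; the real SendMessageError union and
// formatSendMessageError live in @/common/types/errors and
// @/node/services/utils/sendMessageError respectively.
type SendMessageError =
  | { kind: "invalid_model"; message: string }
  | { kind: "api_key_missing"; provider: string }
  | { kind: "unknown"; message: string };

function formatSendMessageError(error: SendMessageError): { message: string; errorType: string } {
  // Map each structured error onto the display fields emitStreamError assigns
  switch (error.kind) {
    case "invalid_model":
      return { message: error.message, errorType: "invalid_model" };
    case "api_key_missing":
      return { message: `API key not configured for ${error.provider}`, errorType: "auth" };
    case "unknown":
      return { message: error.message, errorType: "unknown" };
  }
}
```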

tests/ipc/sendMessage.errors.test.ts: 24 additions & 0 deletions

```diff
@@ -96,6 +96,30 @@ describeIntegration("sendMessage error handling tests", () => {
     15000
   );
 
+  test.concurrent(
+    "should emit stream-error when model validation fails early",
+    async () => {
+      await withSharedWorkspace("openai", async ({ env, workspaceId, collector }) => {
+        // Send a message with an invalid model format (causes early validation failure)
+        const result = await sendMessage(env, workspaceId, "Hello", {
+          model: "invalid-model-without-provider",
+        });
+
+        // IPC call fails immediately (pre-stream validation error)
+        expect(result.success).toBe(false);
+
+        // Should still emit stream-error so it shows in the chat UI
+        // This is important because the user message is displayed before stream starts
+        const errorEvent = await collector.waitForEvent("stream-error", 3000);
+        expect(errorEvent).toBeDefined();
+        if (errorEvent?.type === "stream-error") {
+          expect(errorEvent.error).toBeDefined();
+        }
+      });
+    },
+    15000
+  );
+
   test.concurrent(
     "should fail with non-existent model",
     async () => {
```
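The new test exercises the invalid-model-format path. A hypothetical companion test (not part of this commit) could cover the "No model specified" guard from the first hunk in the same way, assuming an empty model string reaches that branch:

```typescript
// Hypothetical companion test, not part of this commit. Assumes an empty model
// string trips the "No model specified" guard and triggers emitStreamError.
test.concurrent(
  "should emit stream-error when no model is specified",
  async () => {
    await withSharedWorkspace("openai", async ({ env, workspaceId, collector }) => {
      const result = await sendMessage(env, workspaceId, "Hello", { model: "" });

      // Pre-stream guard rejects immediately...
      expect(result.success).toBe(false);

      // ...and the error still surfaces in the chat as a stream-error event
      const errorEvent = await collector.waitForEvent("stream-error", 3000);
      expect(errorEvent).toBeDefined();
    });
  },
  15000
);
```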
