
Commit ef7fa34

Merge pull request #3216 from continuedev/nate/quick-edit-8e63
fix lag
2 parents 9acfdcb + fd513ed commit ef7fa34

File tree

5 files changed: +105 −91 lines changed

gui/src/context/IdeMessenger.ts

+19-11
@@ -37,14 +37,14 @@ export interface IIdeMessenger {
     messageType: T,
     data: FromWebviewProtocol[T][0],
     cancelToken?: AbortSignal,
-  ): FromWebviewProtocol[T][1];
+  ): AsyncGenerator<unknown[]>;
 
   llmStreamChat(
     modelTitle: string,
     cancelToken: AbortSignal | undefined,
     messages: ChatMessage[],
     options?: LLMFullCompletionOptions,
-  ): AsyncGenerator<ChatMessage, PromptLog, unknown>;
+  ): AsyncGenerator<ChatMessage[], PromptLog, unknown>;
 
   ide: IDE;
 }
@@ -147,11 +147,20 @@ export class IdeMessenger implements IIdeMessenger {
     });
   }
 
+  /**
+   * Because of weird type stuff, we're actually yielding an array of the things
+   * that are streamed. For example, if the return type here says
+   * AsyncGenerator<ChatMessage>, then it's actually AsyncGenerator<ChatMessage[]>.
+   * This needs to be handled by the caller.
+   *
+   * Using unknown for now to make this more explicit
+   */
   async *streamRequest<T extends keyof FromWebviewProtocol>(
     messageType: T,
     data: FromWebviewProtocol[T][0],
     cancelToken?: AbortSignal,
-  ): FromWebviewProtocol[T][1] {
+  ): AsyncGenerator<unknown[]> {
+    // ): FromWebviewProtocol[T][1] {
     const messageId = uuidv4();
 
     this.post(messageType, data, messageId);
@@ -181,17 +190,16 @@ export class IdeMessenger implements IIdeMessenger {
 
     while (!done) {
       if (buffer.length > index) {
-        const chunk = buffer[index];
-        index++;
-        yield chunk;
+        const chunks = buffer.slice(index);
+        index = buffer.length;
+        yield chunks;
       }
       await new Promise((resolve) => setTimeout(resolve, 50));
     }
 
-    while (buffer.length > index) {
-      const chunk = buffer[index];
-      index++;
-      yield chunk;
+    if (buffer.length > index) {
+      const chunks = buffer.slice(index);
+      yield chunks;
     }
 
     return returnVal;
@@ -202,7 +210,7 @@ export class IdeMessenger implements IIdeMessenger {
     cancelToken: AbortSignal | undefined,
     messages: ChatMessage[],
     options: LLMFullCompletionOptions = {},
-  ): AsyncGenerator<ChatMessage, PromptLog> {
+  ): AsyncGenerator<ChatMessage[], PromptLog> {
     const gen = this.streamRequest(
       "llm/streamChat",
       {
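The heart of the lag fix is in streamRequest above: the old loop yielded a single buffered chunk per iteration and slept 50 ms between iterations, so a consumer could never drain faster than one chunk per 50 ms no matter how quickly chunks arrived. The new loop slices out everything buffered since the last yield and hands it over as one array. A minimal standalone sketch of that batching pattern, with illustrative names (batchedStream, isDone) that are not part of the actual IdeMessenger code:

async function* batchedStream<T>(
  buffer: T[],
  isDone: () => boolean,
): AsyncGenerator<T[]> {
  let index = 0;
  while (!isDone()) {
    if (buffer.length > index) {
      // Drain every chunk that arrived since the last yield, in one batch
      const chunks = buffer.slice(index);
      index = buffer.length;
      yield chunks;
    }
    // The poll interval stays at 50 ms, but throughput is no longer
    // capped at one chunk per tick
    await new Promise((resolve) => setTimeout(resolve, 50));
  }
  // Flush whatever arrived between the last tick and completion
  if (buffer.length > index) {
    yield buffer.slice(index);
  }
}

Note that the trailing flush also shrank from a while loop to a single if: one slice now empties the buffer, so no loop is needed.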

gui/src/redux/slices/sessionSlice.ts

+68-66
@@ -10,21 +10,21 @@ import {
   ApplyState,
   ChatHistoryItem,
   ChatMessage,
+  CodeToEdit,
+  ContextItem,
   ContextItemWithId,
   FileSymbolMap,
-  Session,
+  MessageModes,
   PromptLog,
-  CodeToEdit,
+  Session,
   ToolCall,
-  ContextItem,
-  MessageModes,
 } from "core";
 import { incrementalParseJson } from "core/util/incrementalParseJson";
 import { renderChatMessage } from "core/util/messageContent";
 import { v4 as uuidv4 } from "uuid";
+import { RootState } from "../store";
 import { streamResponseThunk } from "../thunks/streamResponse";
 import { findCurrentToolCall } from "../util";
-import { RootState } from "../store";
 
 // We need this to handle reorderings (e.g. a mid-array deletion) of the messages array.
 // The proper fix is adding a UUID to all chat messages, but this is the temp workaround.
@@ -284,72 +284,74 @@ export const sessionSlice = createSlice({
       state.streamAborter.abort();
       state.streamAborter = new AbortController();
     },
-    streamUpdate: (state, action: PayloadAction<ChatMessage>) => {
+    streamUpdate: (state, action: PayloadAction<ChatMessage[]>) => {
       if (state.history.length) {
-        const lastMessage = state.history[state.history.length - 1];
-
-        if (
-          action.payload.role &&
-          (lastMessage.message.role !== action.payload.role ||
-            // This is when a tool call comes after assistant text
-            (lastMessage.message.content !== "" &&
-              action.payload.role === "assistant" &&
-              action.payload.toolCalls?.length))
-        ) {
-          const baseHistoryItem = getBaseHistoryItem();
-
-          // Create a new message
-          const historyItem: ChatHistoryItemWithMessageId = {
-            ...baseHistoryItem,
-            message: { ...baseHistoryItem.message, ...action.payload },
-          };
-
-          if (action.payload.role === "assistant" && action.payload.toolCalls) {
-            const [_, parsedArgs] = incrementalParseJson(
-              action.payload.toolCalls[0].function.arguments,
-            );
-            historyItem.toolCallState = {
-              status: "generating",
-              toolCall: action.payload.toolCalls[0] as ToolCall,
-              toolCallId: action.payload.toolCalls[0].id,
-              parsedArgs,
+        for (const message of action.payload) {
+          const lastMessage = state.history[state.history.length - 1];
+
+          if (
+            message.role &&
+            (lastMessage.message.role !== message.role ||
+              // This is when a tool call comes after assistant text
+              (lastMessage.message.content !== "" &&
+                message.role === "assistant" &&
+                message.toolCalls?.length))
+          ) {
+            const baseHistoryItem = getBaseHistoryItem();
+
+            // Create a new message
+            const historyItem: ChatHistoryItemWithMessageId = {
+              ...baseHistoryItem,
+              message: { ...baseHistoryItem.message, ...message },
             };
-          }
 
-          state.history.push(historyItem);
-        } else {
-          // Add to the existing message
-          const msg = state.history[state.history.length - 1].message;
-          if (action.payload.content) {
-            msg.content += renderChatMessage(action.payload);
-          } else if (
-            action.payload.role === "assistant" &&
-            action.payload.toolCalls &&
-            msg.role === "assistant"
-          ) {
-            if (!msg.toolCalls) {
-              msg.toolCalls = [];
+            if (message.role === "assistant" && message.toolCalls) {
+              const [_, parsedArgs] = incrementalParseJson(
+                message.toolCalls[0].function.arguments,
+              );
+              historyItem.toolCallState = {
+                status: "generating",
+                toolCall: message.toolCalls[0] as ToolCall,
+                toolCallId: message.toolCalls[0].id,
+                parsedArgs,
+              };
             }
-            action.payload.toolCalls.forEach((toolCall, i) => {
-              if (msg.toolCalls.length <= i) {
-                msg.toolCalls.push(toolCall);
-              } else {
-                msg.toolCalls[i].function.arguments +=
-                  toolCall.function.arguments;
-
-                const [_, parsedArgs] = incrementalParseJson(
-                  msg.toolCalls[i].function.arguments,
-                );
-
-                state.history[
-                  state.history.length - 1
-                ].toolCallState.parsedArgs = parsedArgs;
-                state.history[
-                  state.history.length - 1
-                ].toolCallState.toolCall.function.arguments +=
-                  toolCall.function.arguments;
+
+            state.history.push(historyItem);
+          } else {
+            // Add to the existing message
+            const msg = state.history[state.history.length - 1].message;
+            if (message.content) {
+              msg.content += renderChatMessage(message);
+            } else if (
+              message.role === "assistant" &&
+              message.toolCalls &&
+              msg.role === "assistant"
+            ) {
+              if (!msg.toolCalls) {
+                msg.toolCalls = [];
               }
-            });
+              message.toolCalls.forEach((toolCall, i) => {
+                if (msg.toolCalls.length <= i) {
+                  msg.toolCalls.push(toolCall);
+                } else {
+                  msg.toolCalls[i].function.arguments +=
+                    toolCall.function.arguments;
+
+                  const [_, parsedArgs] = incrementalParseJson(
+                    msg.toolCalls[i].function.arguments,
+                  );
+
+                  state.history[
+                    state.history.length - 1
+                  ].toolCallState.parsedArgs = parsedArgs;
+                  state.history[
+                    state.history.length - 1
+                  ].toolCallState.toolCall.function.arguments +=
+                    toolCall.function.arguments;
+                }
+              });
+            }
           }
         }
       }
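With the reducer now accepting ChatMessage[], one dispatch folds an entire batch into history inside a single Immer produce, so store subscribers are notified once per batch instead of once per chunk. An illustrative dispatch, not taken from the PR, showing the batched payload shape (assuming streamUpdate is the slice action imported elsewhere in this diff):

// Two content deltas, appended to the last history item in a single
// Redux state update (and thus a single React re-render)
dispatch(
  streamUpdate([
    { role: "assistant", content: "Hello, " },
    { role: "assistant", content: "world!" },
  ]),
);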

gui/src/redux/thunks/streamNormalInput.ts

+3-8
@@ -1,6 +1,7 @@
 import { createAsyncThunk } from "@reduxjs/toolkit";
 import { ChatMessage, PromptLog } from "core";
 import { selectCurrentToolCall } from "../selectors/selectCurrentToolCall";
+import { selectDefaultModel } from "../slices/configSlice";
 import {
   abortStream,
   addPromptCompletionPair,
@@ -10,7 +11,6 @@ import {
 } from "../slices/sessionSlice";
 import { ThunkApiType } from "../store";
 import { callTool } from "./callTool";
-import { selectDefaultModel } from "../slices/configSlice";
 
 export const streamNormalInput = createAsyncThunk<
   void,
@@ -56,14 +56,9 @@ export const streamNormalInput = createAsyncThunk<
       break;
     }
 
-    const update = next.value as ChatMessage;
-    dispatch(streamUpdate(update));
+    const updates = next.value as ChatMessage[];
+    dispatch(streamUpdate(updates));
     next = await gen.next();
-
-    // There has been lag when streaming tool calls. This is a temporary solution
-    if (update.role === "assistant" && update.toolCalls) {
-      await new Promise((resolve) => setTimeout(resolve, 10));
-    }
   }
 
   // Attach prompt log
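The deleted setTimeout was the earlier stopgap: it paused 10 ms after every tool-call chunk to keep the UI responsive, which batching makes unnecessary. The surrounding loop keeps manual gen.next() calls rather than for-await because the generator's return value (the PromptLog) is only exposed on the final { done: true } result, which for-await discards. A sketch of that consumption pattern, assuming the new AsyncGenerator<ChatMessage[], PromptLog> shape:

let next = await gen.next();
while (!next.done) {
  // next.value is a whole batch of chunks; one dispatch covers all of them
  dispatch(streamUpdate(next.value as ChatMessage[]));
  next = await gen.next();
}
// Once done, next.value carries the generator's return value
const promptLog = next.value as PromptLog;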

gui/src/redux/thunks/streamResponseAfterToolCall.ts

+2-2
@@ -2,6 +2,7 @@ import { createAsyncThunk } from "@reduxjs/toolkit";
 import { ChatMessage, ContextItem } from "core";
 import { constructMessages } from "core/llm/constructMessages";
 import { renderContextItems } from "core/util/messageContent";
+import { selectDefaultModel } from "../slices/configSlice";
 import {
   addContextItemsAtIndex,
   setActive,
@@ -11,7 +12,6 @@ import { ThunkApiType } from "../store";
 import { handleErrors } from "./handleErrors";
 import { resetStateForNewMessage } from "./resetStateForNewMessage";
 import { streamNormalInput } from "./streamNormalInput";
-import { selectDefaultModel } from "../slices/configSlice";
 
 export const streamResponseAfterToolCall = createAsyncThunk<
   void,
@@ -39,7 +39,7 @@ export const streamResponseAfterToolCall = createAsyncThunk<
       toolCallId,
     };
 
-    dispatch(streamUpdate(newMessage));
+    dispatch(streamUpdate([newMessage]));
     dispatch(
       addContextItemsAtIndex({
         index: initialHistory.length,

gui/src/redux/thunks/streamSlashCommand.ts

+13-4
@@ -5,9 +5,9 @@ import {
   RangeInFile,
   SlashCommandDescription,
 } from "core";
-import { ThunkApiType } from "../store";
-import { abortStream, streamUpdate } from "../slices/sessionSlice";
 import { selectDefaultModel } from "../slices/configSlice";
+import { abortStream, streamUpdate } from "../slices/sessionSlice";
+import { ThunkApiType } from "../store";
 
 export const streamSlashCommand = createAsyncThunk<
   void,
@@ -62,8 +62,17 @@ export const streamSlashCommand = createAsyncThunk<
       dispatch(abortStream());
       break;
     }
-    if (typeof update === "string") {
-      dispatch(streamUpdate(update));
+    for (const item of update) {
+      if (typeof item === "string") {
+        dispatch(
+          streamUpdate([
+            {
+              role: "assistant",
+              content: item,
+            },
+          ]),
+        );
+      }
     }
   }
   clearInterval(checkActiveInterval);
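Slash-command streams can yield plain strings rather than ChatMessage objects, so the new loop wraps each string item as an assistant message before dispatching. A hypothetical helper, not in the PR, expressing the same normalization:

// Strings become assistant messages; anything already message-shaped
// passes through unchanged
const toChatMessage = (item: string | ChatMessage): ChatMessage =>
  typeof item === "string" ? { role: "assistant", content: item } : item;

The diff's inline version additionally skips non-string items entirely, matching the previous behavior of only dispatching string updates.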

0 commit comments
