fix(chat): prevent React Maximum update depth exceeded during streaming
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
Root cause: Each LLM delta (text/thinking) triggered a synchronous setState via
updateMessages → chatStore.setState. With the Kimi thinking model emitting many
deltas per frame, this caused a React render storm that hit the maximum update
depth limit.

Fix (two-layer approach):
1. streamStore: Buffer text/thinking deltas locally and flush to the store via
   setTimeout(0), batching multiple deltas per frame.
2. chatStore: Microtask batching in injectChatStore.updateMessages to coalesce
   rapid successive updates.

Verified: a 2-round conversation (4 messages) with the Kimi thinking model
completes without crashing. Previously it crashed 100% of the time on the
2nd message.
This commit is contained in:
@@ -247,6 +247,41 @@ export const useStreamStore = create<StreamState>()(
|
||||
});
|
||||
set({ isStreaming: true, activeRunId: null });
|
||||
|
||||
// ── Delta buffering ──
|
||||
// Accumulate text/thinking deltas in local buffers and flush to store
|
||||
// at ~60fps intervals. This prevents React "Maximum update depth exceeded"
|
||||
// when the LLM emits many small deltas per frame (e.g. Kimi thinking).
|
||||
let textBuffer = '';
|
||||
let thinkBuffer = '';
|
||||
let flushTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
|
||||
const flushBuffers = () => {
|
||||
flushTimer = null;
|
||||
const text = textBuffer;
|
||||
const think = thinkBuffer;
|
||||
textBuffer = '';
|
||||
thinkBuffer = '';
|
||||
|
||||
if (text || think) {
|
||||
_chat?.updateMessages(msgs =>
|
||||
msgs.map(m => {
|
||||
if (m.id !== assistantId) return m;
|
||||
return {
|
||||
...m,
|
||||
...(text ? { content: m.content + text } : {}),
|
||||
...(think ? { thinkingContent: (m.thinkingContent || '') + think } : {}),
|
||||
};
|
||||
})
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const scheduleFlush = () => {
|
||||
if (flushTimer === null) {
|
||||
flushTimer = setTimeout(flushBuffers, 0);
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
const client = getClient();
|
||||
const connectionState = useConnectionStore.getState().connectionState;
|
||||
@@ -265,22 +300,12 @@ export const useStreamStore = create<StreamState>()(
|
||||
content,
|
||||
{
|
||||
onDelta: (delta: string) => {
  // Buffer the delta instead of writing to the store synchronously.
  // As written, this handler both updated the store per delta AND
  // buffered it, appending each delta twice and reintroducing the
  // per-delta setState storm the buffering exists to prevent.
  // flushBuffers() applies the accumulated text on the next timer tick.
  textBuffer += delta;
  scheduleFlush();
},
|
||||
onThinkingDelta: (delta: string) => {
  // Same batching as onDelta, but for the model's thinking stream.
  // The stale per-delta updateMessages call is removed — it appended
  // each thinking delta twice (once directly, once via the flush) and
  // defeated the buffering fix.
  thinkBuffer += delta;
  scheduleFlush();
},
|
||||
onTool: (tool: string, input: string, output: string) => {
|
||||
if (output) {
|
||||
@@ -412,6 +437,13 @@ export const useStreamStore = create<StreamState>()(
|
||||
);
|
||||
},
|
||||
onComplete: (inputTokens?: number, outputTokens?: number) => {
|
||||
// Flush any remaining buffered deltas before finalizing
|
||||
if (flushTimer !== null) {
|
||||
clearTimeout(flushTimer);
|
||||
flushTimer = null;
|
||||
}
|
||||
flushBuffers();
|
||||
|
||||
const currentMsgs = _chat?.getMessages();
|
||||
|
||||
if (currentMsgs) {
|
||||
@@ -507,6 +539,14 @@ export const useStreamStore = create<StreamState>()(
|
||||
)
|
||||
);
|
||||
} catch (err: unknown) {
|
||||
// Flush remaining buffers on error
|
||||
if (flushTimer !== null) {
|
||||
clearTimeout(flushTimer);
|
||||
flushTimer = null;
|
||||
}
|
||||
textBuffer = '';
|
||||
thinkBuffer = '';
|
||||
|
||||
const errorMessage = err instanceof Error ? err.message : '无法连接 Gateway';
|
||||
_chat?.updateMessages(msgs =>
|
||||
msgs.map(m =>
|
||||
|
||||
Reference in New Issue
Block a user