fix(chat): prevent React Maximum update depth exceeded during streaming
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

Root cause: Each LLM delta (text/thinking) triggered a synchronous
setState via updateMessages → chatStore.setState. With Kimi thinking
model emitting many deltas per frame, this caused a React render storm
that hit the maximum update depth limit.

Fix (two-layer approach):
1. streamStore: Buffer text/thinking deltas locally and flush to store
   via setTimeout(0), batching multiple deltas per frame
2. chatStore: Microtask batching in injectChatStore.updateMessages to
   coalesce rapid successive updates

Verified: 2-round conversation (4 messages) with Kimi thinking model
completes without crashing. Previously crashed 100% on 2nd message.
This commit is contained in:
iven
2026-04-08 14:47:43 +08:00
parent ade534d1ce
commit 0b0ab00b9c
2 changed files with 96 additions and 20 deletions

View File

@@ -247,6 +247,41 @@ export const useStreamStore = create<StreamState>()(
});
set({ isStreaming: true, activeRunId: null });
// ── Delta buffering ──
// Accumulate text/thinking deltas in local buffers and flush to store
// on the next macrotask tick (setTimeout 0). This prevents React "Maximum update depth exceeded"
// when the LLM emits many small deltas per frame (e.g. Kimi thinking).
let textBuffer = '';
let thinkBuffer = '';
let flushTimer: ReturnType<typeof setTimeout> | null = null;
// Move all buffered text/thinking deltas into the assistant message in one store
// update. Buffers are snapshotted and reset up front so deltas arriving during
// the flush land in the next batch rather than being lost or double-applied.
const flushBuffers = () => {
  flushTimer = null;
  const pendingText = textBuffer;
  const pendingThink = thinkBuffer;
  textBuffer = '';
  thinkBuffer = '';
  if (!pendingText && !pendingThink) return;
  _chat?.updateMessages(msgs =>
    msgs.map(m =>
      m.id !== assistantId
        ? m
        : {
            ...m,
            ...(pendingText ? { content: m.content + pendingText } : {}),
            ...(pendingThink
              ? { thinkingContent: (m.thinkingContent || '') + pendingThink }
              : {}),
          }
    )
  );
};
// Arm a one-shot macrotask flush; calls made while a flush is already
// scheduled are no-ops, so many deltas coalesce into a single flush.
const scheduleFlush = () => {
  if (flushTimer !== null) return;
  flushTimer = setTimeout(flushBuffers, 0);
};
try {
const client = getClient();
const connectionState = useConnectionStore.getState().connectionState;
@@ -265,22 +300,12 @@ export const useStreamStore = create<StreamState>()(
content,
{
onDelta: (delta: string) => {
  // Buffer the delta locally; it is flushed to the store in a batch
  // (see flushBuffers) instead of triggering one setState per delta.
  textBuffer += delta;
  scheduleFlush();
},
onThinkingDelta: (delta: string) => {
  // Same buffering strategy for thinking-model reasoning deltas.
  thinkBuffer += delta;
  scheduleFlush();
},
onTool: (tool: string, input: string, output: string) => {
if (output) {
@@ -412,6 +437,13 @@ export const useStreamStore = create<StreamState>()(
);
},
onComplete: (inputTokens?: number, outputTokens?: number) => {
// Flush any remaining buffered deltas before finalizing
if (flushTimer !== null) {
clearTimeout(flushTimer);
flushTimer = null;
}
flushBuffers();
const currentMsgs = _chat?.getMessages();
if (currentMsgs) {
@@ -507,6 +539,14 @@ export const useStreamStore = create<StreamState>()(
)
);
} catch (err: unknown) {
// Flush remaining buffers on error
if (flushTimer !== null) {
clearTimeout(flushTimer);
flushTimer = null;
}
textBuffer = '';
thinkBuffer = '';
const errorMessage = err instanceof Error ? err.message : '无法连接 Gateway';
_chat?.updateMessages(msgs =>
msgs.map(m =>

View File

@@ -306,15 +306,51 @@ setMessageStoreChatStore({
});
// 2. Inject chatStore into streamStore for message mutations
injectChatStore({
addMessage: (msg) => useChatStore.getState().addMessage(msg as Message),
updateMessages: (updater) => {
// Uses microtask batching to prevent React "Maximum update depth exceeded" when
// the LLM emits many deltas per frame (e.g. Kimi thinking model).
// Non-delta updates (onComplete etc.) flush immediately via _flushPending().
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let _pendingUpdater: ((msgs: any[]) => any[]) | null = null;
let _microtaskScheduled = false;
function _flushPending() {
_microtaskScheduled = false;
const batchedUpdater = _pendingUpdater;
_pendingUpdater = null;
if (batchedUpdater) {
const msgs = useChatStore.getState().messages as unknown[];
const updated = updater(msgs as Parameters<typeof updater>[0]);
const updated = batchedUpdater(msgs);
useChatStore.setState({ messages: updated as Message[] });
}
}
injectChatStore({
  addMessage: (msg) => {
    // addMessage must be synchronous for immediate visibility
    _flushPending(); // flush any pending batched updates first
    useChatStore.getState().addMessage(msg as Message);
  },
  updateMessages: (updater) => {
    // Accumulate updaters by composition — only the latest state matters,
    // and a single microtask applies them all in arrival order.
    const prevUpdater = _pendingUpdater;
    _pendingUpdater = prevUpdater
      ? (msgs: unknown[]) => updater(prevUpdater(msgs) as Parameters<typeof updater>[0])
      : updater;
    if (!_microtaskScheduled) {
      _microtaskScheduled = true;
      void Promise.resolve().then(_flushPending);
    }
  },
  getMessages: () => {
    // Flush pending updates before reading to ensure consistency
    _flushPending();
    return useChatStore.getState().messages;
  },
  setChatStoreState: (partial) => {
    // Ordering matters: pending message updates must land before the partial.
    _flushPending();
    useChatStore.setState(partial as Partial<ChatState>);
  },
});
// 3. Sync streamStore state to chatStore mirrors