Files
zclaw_openfang/desktop/src/store/chatStore.ts
iven b56d1a4c34
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
feat(chat): LLM 动态对话建议 — 替换硬编码关键词匹配
AI 回复结束后,将最近对话发给 LLM 生成 3 个上下文相关的后续问题,
替换原有的"继续深入分析"等泛泛默认建议。

变更:
- llm-service.ts: 添加 suggestions 提示模板 + llmSuggest() 辅助函数
- streamStore.ts: SSE 流式请求 via SaaS relay,response.text() 一次性
  读取避免 Tauri WebView2 ReadableStream 兼容问题,失败降级到关键词
- chatStore.ts: suggestionsLoading 状态镜像
- SuggestionChips.tsx: loading 骨架动画
- ChatArea.tsx: 传递 loading prop
2026-04-23 11:41:50 +08:00

409 lines
13 KiB
TypeScript

import { create } from 'zustand';
import { persist } from 'zustand/middleware';
import type { ChatModeType, ChatModeConfig, Subtask } from '../components/ai';
import type { ToolCallStep } from '../components/ai';
import {
useConversationStore,
resolveGatewayAgentId,
toChatAgent,
DEFAULT_AGENT,
type Agent,
type AgentProfileLike,
type Conversation,
} from './chat/conversationStore';
import { useMessageStore, setMessageStoreChatStore } from './chat/messageStore';
import { useStreamStore, injectChatStore } from './chat/streamStore';
// ---------------------------------------------------------------------------
// Backward-compatibility surface.
// chatStore was split into conversation/message/stream stores; these
// re-exports keep old `import { ... } from './store/chatStore'` sites working.
// ---------------------------------------------------------------------------
export type { Agent, AgentProfileLike, Conversation };
export { toChatAgent };
export { DEFAULT_AGENT };
export { resolveGatewayAgentId };
export { useConversationStore } from './chat/conversationStore';
export { useMessageStore } from './chat/messageStore';
export { useStreamStore } from './chat/streamStore';
// ---------------------------------------------------------------------------
// Message types
// ---------------------------------------------------------------------------

/** Metadata for a file attached to a chat message. */
export interface MessageFile {
  /** Display name of the file. */
  name: string;
  /** File location — assumed workspace/absolute path; TODO confirm against producer. */
  path?: string;
  /** Size — presumably in bytes; verify against the attachment producer. */
  size?: number;
  /** Presumably a MIME type string — confirm. */
  type?: string;
}
/** A fenced code block extracted from (or attached to) a message. */
export interface CodeBlock {
  /** Language tag (e.g. from the code fence) — all fields optional. */
  language?: string;
  /** Suggested or original filename for the snippet, if known. */
  filename?: string;
  /** Raw source text of the block. */
  content?: string;
}
/**
 * One entry in the chat transcript. A single interface covers all roles;
 * which optional fields are populated depends on `role`
 * (tool → tool*, hand → hand*, workflow → workflow*).
 */
export interface Message {
  id: string;
  /** Sender kind; 'hand' and 'workflow' are app-specific roles — semantics defined by producers elsewhere. */
  role: 'user' | 'assistant' | 'tool' | 'hand' | 'workflow' | 'system';
  content: string;
  /**
   * Creation time. NOTE(review): typed as Date, but messages round-trip
   * through persistence (IndexedDB/localStorage) — confirm rehydrated
   * values are revived to Date and not left as strings.
   */
  timestamp: Date;
  /** Id of the streaming run that produced this message, if any. */
  runId?: string;
  /** True while deltas are still being appended to `content`. */
  streaming?: boolean;
  // Tool-role fields
  toolName?: string;
  toolInput?: string;
  toolOutput?: string;
  /** Error text attached to a failed message/step. */
  error?: string;
  // Hand-role fields
  handName?: string;
  handStatus?: string;
  handResult?: unknown;
  // Workflow-role fields
  workflowId?: string;
  workflowStep?: string;
  workflowStatus?: string;
  workflowResult?: unknown;
  /** Attachments shown with the message. */
  files?: MessageFile[];
  /** Code blocks extracted from the content, if pre-parsed. */
  codeBlocks?: CodeBlock[];
  /** Model "thinking"/reasoning text kept separate from `content`. */
  thinkingContent?: string;
  /** Subtasks rendered under this message; mutated via addSubtask/updateSubtask. */
  subtasks?: Subtask[];
  /** Tool-call step timeline for this message. */
  toolSteps?: ToolCallStep[];
  /** Locally-added before server confirmation — presumably reconciled later; confirm. */
  optimistic?: boolean;
}
// ---------------------------------------------------------------------------
// ChatState — messages + facade delegation
// ---------------------------------------------------------------------------

/**
 * Facade store contract. Owns the visible transcript (`messages`) and keeps
 * read-only mirrors of streamStore/messageStore state so legacy selectors on
 * this store keep working; most actions delegate to the specialised stores.
 */
interface ChatState {
  /** Transcript of the active conversation. */
  messages: Message[];
  // Mirrors of streamStore flags (synced by a subscription below).
  isStreaming: boolean;
  isLoading: boolean;
  // Mirrors of messageStore token totals.
  totalInputTokens: number;
  totalOutputTokens: number;
  /** Mirror of streamStore's chat mode (persisted there, not here). */
  chatMode: ChatModeType;
  /** Follow-up suggestion chips (generated in streamStore). */
  suggestions: string[];
  /** True while suggestions are being generated (drives loading skeleton). */
  suggestionsLoading: boolean;

  // Local transcript mutations
  addMessage: (message: Message) => void;
  updateMessage: (id: string, updates: Partial<Message>) => void;
  setIsLoading: (loading: boolean) => void;

  // Conversation coordination (delegated to conversationStore)
  setCurrentAgent: (agent: Agent) => void;
  syncAgents: (profiles: AgentProfileLike[]) => void;
  setCurrentModel: (model: string) => void;

  // Streaming (delegated to streamStore)
  sendMessage: (content: string) => Promise<void>;
  /** Returns an unsubscribe function. */
  initStreamListener: () => () => void;
  cancelStream: () => void;

  // Conversation lifecycle
  newConversation: () => void;
  switchConversation: (id: string) => void;
  deleteConversation: (id: string) => void;

  // Token accounting (delegated to messageStore)
  addTokenUsage: (inputTokens: number, outputTokens: number) => void;
  getTotalTokens: () => { input: number; output: number; total: number };

  // Misc delegations to streamStore
  searchSkills: (query: string) => { results: Array<{ id: string; name: string; description: string }>; totalAvailable: number };
  setChatMode: (mode: ChatModeType) => void;
  getChatModeConfig: () => ChatModeConfig;
  setSuggestions: (suggestions: string[]) => void;

  // Per-message subtask mutations
  addSubtask: (messageId: string, task: Subtask) => void;
  updateSubtask: (messageId: string, taskId: string, updates: Partial<Subtask>) => void;
}
// Facade store: owns `messages`, mirrors streamStore/messageStore state, and
// delegates everything else. The persist wrapper stores nothing (see
// partialize) — it exists only for the onRehydrateStorage lifecycle hook.
export const useChatStore = create<ChatState>()(
  persist(
    (set, get) => ({
      messages: [],
      // Mirrors from streamStore for backward compat selectors
      isStreaming: false,
      isLoading: false,
      chatMode: 'thinking' as ChatModeType,
      suggestions: [],
      suggestionsLoading: false,
      totalInputTokens: 0,
      totalOutputTokens: 0,

      // Append one message to the transcript.
      addMessage: (message: Message) =>
        set((state) => ({ messages: [...state.messages, message] })),

      // Shallow-merge `updates` into the message whose id matches.
      updateMessage: (id, updates) =>
        set((state) => ({
          messages: state.messages.map((m) =>
            m.id === id ? { ...m, ...updates } : m
          ),
        })),

      setIsLoading: (loading) => set({ isLoading: loading }),

      // ── Facade: conversation coordination ──

      // Hand the current transcript to conversationStore, which returns the
      // message list to show for the newly selected agent.
      // NOTE(review): unlike newConversation/switchConversation this does NOT
      // call cancelStream() first — it only resets the isStreaming mirror.
      // Confirm an in-flight stream can't keep writing into the new view.
      setCurrentAgent: (agent: Agent) => {
        const messages = get().messages;
        const result = useConversationStore.getState().setCurrentAgent(agent, messages);
        set({
          messages: result.messages as Message[],
          isStreaming: false,
        });
      },

      // Push agent profiles into conversationStore; if our transcript is
      // still empty, adopt the current conversation's persisted messages
      // (covers conversationStore hydrating after this store).
      syncAgents: (profiles: AgentProfileLike[]) => {
        useConversationStore.getState().syncAgents(profiles);
        const convStore = useConversationStore.getState();
        let messages = get().messages;
        if (messages.length === 0 && convStore.currentConversationId && convStore.conversations.length > 0) {
          const conv = convStore.conversations.find(c => c.id === convStore.currentConversationId);
          if (conv && conv.messages.length > 0) {
            // Shallow-copy each message so later in-place edits don't mutate
            // the persisted conversation objects.
            messages = conv.messages.map(m => ({ ...m })) as Message[];
          }
        }
        set({ messages });
      },

      setCurrentModel: (model: string) => {
        useConversationStore.getState().setCurrentModel(model);
      },

      // Archive the current transcript and start an empty one; cancels any
      // in-flight stream first so deltas can't land in the new conversation.
      newConversation: () => {
        if (get().isStreaming) {
          useStreamStore.getState().cancelStream();
        }
        const messages = get().messages;
        useConversationStore.getState().newConversation(messages);
        set({ messages: [], isStreaming: false });
      },

      // Save the current transcript, then load the target conversation's.
      switchConversation: (id: string) => {
        if (get().isStreaming) {
          useStreamStore.getState().cancelStream();
        }
        const messages = get().messages;
        const result = useConversationStore.getState().switchConversation(id, messages);
        if (result) {
          set({ messages: result.messages as Message[], isStreaming: false });
        }
      },

      // Delete a conversation; clear the transcript only when
      // conversationStore reports the deleted one was current.
      // NOTE(review): no cancelStream() here — verify deleting the active
      // conversation mid-stream is handled upstream.
      deleteConversation: (id: string) => {
        const convStore = useConversationStore.getState();
        const result = convStore.deleteConversation(id, convStore.currentConversationId);
        if (result.resetMessages) {
          set({ messages: [], isStreaming: false });
        }
      },

      // ── Token tracking — delegated to messageStore ──

      // Forward to messageStore (source of truth) and bump the local mirrors
      // so selectors on this store update in the same tick.
      addTokenUsage: (inputTokens: number, outputTokens: number) => {
        useMessageStore.getState().addTokenUsage(inputTokens, outputTokens);
        set((state) => ({
          totalInputTokens: state.totalInputTokens + inputTokens,
          totalOutputTokens: state.totalOutputTokens + outputTokens,
        }));
      },
      getTotalTokens: () => useMessageStore.getState().getTotalTokens(),

      // ── Streaming — delegated to streamStore ──
      sendMessage: (content: string) => useStreamStore.getState().sendMessage(content),
      initStreamListener: () => useStreamStore.getState().initStreamListener(),
      cancelStream: () => useStreamStore.getState().cancelStream(),
      searchSkills: (query: string) => useStreamStore.getState().searchSkills(query),

      // chatMode is owned (and persisted) by streamStore; mirror it here.
      setChatMode: (mode: ChatModeType) => {
        useStreamStore.getState().setChatMode(mode);
        set({ chatMode: mode });
      },
      getChatModeConfig: () => useStreamStore.getState().getChatModeConfig(),

      // Suggestions originate in streamStore; mirror for local selectors.
      setSuggestions: (suggestions: string[]) => {
        useStreamStore.getState().setSuggestions(suggestions);
        set({ suggestions });
      },

      // ── Subtask mutations (message-level) ──

      // Append a subtask to the given message's subtasks list.
      addSubtask: (messageId: string, task: Subtask) =>
        set((state) => ({
          messages: state.messages.map((m) =>
            m.id === messageId
              ? { ...m, subtasks: [...(m.subtasks || []), task] }
              : m
          ),
        })),

      // Shallow-merge `updates` into one subtask of one message.
      updateSubtask: (messageId: string, taskId: string, updates: Partial<Subtask>) =>
        set((state) => ({
          messages: state.messages.map((m) =>
            m.id === messageId
              ? {
                  ...m,
                  subtasks: (m.subtasks || []).map((t) =>
                    t.id === taskId ? { ...t, ...updates } : t
                  ),
                }
              : m
          ),
        })),
    }),
    {
      name: 'zclaw-chat-storage',
      partialize: (_state) => ({
        // chatMode is persisted in streamStore — nothing else to persist here.
        // Keeping the persist wrapper for onRehydrateStorage lifecycle.
      }),
      onRehydrateStorage: () => {
        // Wait for conversationStore to finish IndexedDB rehydration
        // before syncing messages. IndexedDB is async and won't be ready
        // in a fixed setTimeout.
        let done = false;
        // Idempotent: copies the current conversation's messages into this
        // store exactly once, whichever trigger (fast path, subscription,
        // or safety timeout) fires first.
        function syncMessages() {
          if (done) return;
          done = true;
          const convStore = useConversationStore.getState();
          if (convStore.currentConversationId && convStore.conversations.length > 0) {
            const conv = convStore.conversations.find(c => c.id === convStore.currentConversationId);
            if (conv && conv.messages.length > 0) {
              useChatStore.setState({
                messages: conv.messages.map(m => ({ ...m })) as Message[],
              });
            }
          }
        }
        // If conversationStore already hydrated (fast path), sync immediately
        if (useConversationStore.persist.hasHydrated()) {
          syncMessages();
          return;
        }
        // Otherwise subscribe and wait for hydration to complete
        const unsub = useConversationStore.subscribe(() => {
          if (useConversationStore.persist.hasHydrated()) {
            unsub();
            syncMessages();
          }
        });
        // Safety timeout: if IDB is broken/slow, give up after 5s
        setTimeout(() => {
          if (!done) {
            unsub();
            syncMessages();
          }
        }, 5000);
        // NOTE: Do NOT return a cleanup function here.
        // chatStore rehydrates from localStorage (fast), and the returned
        // function fires immediately — before conversationStore finishes
        // its slow IndexedDB rehydration. Returning cleanup would tear down
        // the subscription prematurely.
      },
    },
  ),
);
// ---------------------------------------------------------------------------
// Cross-store wiring
// ---------------------------------------------------------------------------
// 1. Hand messageStore a late-bound view of chatStore's subtask mutators.
//    The getter indirection defers the useChatStore lookup to call time,
//    avoiding any module-evaluation-order dependency between the stores.
setMessageStoreChatStore({
  getState: () => {
    const { addSubtask, updateSubtask } = useChatStore.getState();
    return { addSubtask, updateSubtask };
  },
});
// 2. Inject chatStore into streamStore for message mutations.
// Delta updates are coalesced per microtask so a chatty model (e.g. a Kimi
// thinking model emitting many deltas per frame) triggers at most one React
// setState per tick — this is what prevents "Maximum update depth exceeded".
// Non-delta operations (addMessage/getMessages/setChatStoreState) drain the
// queue synchronously first so they always observe consistent state.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let queuedUpdater: ((msgs: any[]) => any[]) | null = null;
let flushScheduled = false;

// Apply whatever updater has accumulated to chatStore, then clear the queue.
function flushMessageQueue() {
  flushScheduled = false;
  const pending = queuedUpdater;
  queuedUpdater = null;
  if (!pending) return;
  const current = useChatStore.getState().messages as unknown[];
  useChatStore.setState({ messages: pending(current) as Message[] });
}

injectChatStore({
  addMessage: (msg) => {
    // Must be synchronous for immediate visibility; drain queued deltas first
    // so ordering relative to in-flight updates is preserved.
    flushMessageQueue();
    useChatStore.getState().addMessage(msg as Message);
  },
  updateMessages: (updater) => {
    // Compose onto whatever is already queued — the queue is a single
    // function that replays every updater in arrival order.
    const earlier = queuedUpdater;
    queuedUpdater = earlier
      ? (msgs: unknown[]) => updater(earlier(msgs) as Parameters<typeof updater>[0])
      : updater;
    if (!flushScheduled) {
      flushScheduled = true;
      void Promise.resolve().then(flushMessageQueue);
    }
  },
  getMessages: () => {
    // Read-your-writes: drain the queue before handing back messages.
    flushMessageQueue();
    return useChatStore.getState().messages;
  },
  setChatStoreState: (partial) => {
    flushMessageQueue();
    useChatStore.setState(partial as Partial<ChatState>);
  },
});
// 3. Mirror streamStore's UI flags into chatStore so legacy selectors like
//    useChatStore(s => s.isStreaming) stay live after the store split.
const unsubStream = useStreamStore.subscribe((state) => {
  const mirror = useChatStore.getState();
  const next: Partial<ChatState> = {};
  let dirty = false;
  if (mirror.isStreaming !== state.isStreaming) {
    next.isStreaming = state.isStreaming;
    dirty = true;
  }
  if (mirror.isLoading !== state.isLoading) {
    next.isLoading = state.isLoading;
    dirty = true;
  }
  if (mirror.chatMode !== state.chatMode) {
    next.chatMode = state.chatMode;
    dirty = true;
  }
  if (mirror.suggestions !== state.suggestions) {
    next.suggestions = state.suggestions;
    dirty = true;
  }
  if (mirror.suggestionsLoading !== state.suggestionsLoading) {
    next.suggestionsLoading = state.suggestionsLoading;
    dirty = true;
  }
  // Skip the setState entirely when nothing changed to avoid re-render churn.
  if (dirty) {
    useChatStore.setState(next);
  }
});
// 4. Sync messageStore tokens to chatStore mirror
const unsubTokens = useMessageStore.subscribe((state) => {
const chat = useChatStore.getState();
if (
chat.totalInputTokens !== state.totalInputTokens ||
chat.totalOutputTokens !== state.totalOutputTokens
) {
useChatStore.setState({
totalInputTokens: state.totalInputTokens,
totalOutputTokens: state.totalOutputTokens,
});
}
});
// HMR: tear down the cross-store subscriptions before this module instance is
// swapped out, otherwise every hot reload would stack another pair of
// listeners on the stores.
import.meta.hot?.dispose(() => {
  unsubStream();
  unsubTokens();
});
// Dev-only: publish the stores on window so E2E tests can drive them directly.
if (import.meta.env.DEV && typeof window !== 'undefined') {
  const host = window as unknown as Record<string, unknown>;
  // Reuse an existing registry if another module already created one
  // (|| kept deliberately — the slot is either an object or undefined).
  const registry = (host.__ZCLAW_STORES__ as Record<string, unknown>) || {};
  host.__ZCLAW_STORES__ = registry;
  registry.chat = useChatStore;
  registry.message = useMessageStore;
  registry.stream = useStreamStore;
}