feat(desktop): DeerFlow visual redesign + stream hang fix + intelligence client
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

DeerFlow frontend visual overhaul:
- Card-style input box (white rounded card, textarea top, actions bottom)
- Dropdown mode selector (闪速/思考/Pro/Ultra with icons+descriptions)
- Colored quick-action chips (小惊喜/写作/研究/收集/学习)
- Minimal top bar (title + token count + export)
- Warm gray color system (#faf9f6 bg, #f5f4f1 sidebar, #e8e6e1 border)
- DeerFlow-style sidebar (新对话/对话/智能体 nav)
- Reasoning block, tool call chain, task progress visualization
- Streaming text, model selector, suggestion chips components
- Resizable artifact panel with drag handle
- Virtualized message list for 100+ messages

Bug fixes:
- Stream hang: GatewayClient onclose code 1000 now calls onComplete
- WebView2 textarea border: CSS !important override for UA styles
- Gateway stream event handling (response/phase/tool_call types)

Intelligence client:
- Unified client with fallback drivers (compactor/heartbeat/identity/memory/reflection)
- Gateway API types and type conversions
This commit is contained in:
iven
2026-04-01 22:03:07 +08:00
parent e3b93ff96d
commit 73ff5e8c5e
43 changed files with 4817 additions and 905 deletions

View File

@@ -10,6 +10,9 @@ import { useConnectionStore } from './connectionStore';
import { createLogger } from '../lib/logger';
import { speechSynth } from '../lib/speech-synth';
import { generateRandomString } from '../lib/crypto-utils';
import type { ChatModeType, ChatModeConfig, Subtask } from '../components/ai';
import type { ToolCallStep } from '../components/ai';
import { CHAT_MODES } from '../components/ai';
const log = createLogger('ChatStore');
@@ -49,6 +52,12 @@ export interface Message {
// Output files and code blocks
files?: MessageFile[];
codeBlocks?: CodeBlock[];
// AI Enhancement fields (DeerFlow-inspired)
thinkingContent?: string; // Extended thinking/reasoning content
subtasks?: Subtask[]; // Sub-agent task tracking
toolSteps?: ToolCallStep[]; // Tool call steps chain (DeerFlow-inspired)
// Optimistic message flag (Phase 4: DeerFlow-inspired 3-phase optimistic rendering)
optimistic?: boolean; // true = awaiting server confirmation, false/undefined = confirmed
}
export interface Conversation {
@@ -90,6 +99,14 @@ interface ChatState {
// Token usage tracking
totalInputTokens: number;
totalOutputTokens: number;
// Chat mode (DeerFlow-inspired)
chatMode: ChatModeType;
// Follow-up suggestions
suggestions: string[];
// Artifacts (DeerFlow-inspired)
artifacts: import('../components/ai/ArtifactPanel').ArtifactFile[];
selectedArtifactId: string | null;
artifactPanelOpen: boolean;
addMessage: (message: Message) => void;
updateMessage: (id: string, updates: Partial<Message>) => void;
@@ -105,6 +122,17 @@ interface ChatState {
addTokenUsage: (inputTokens: number, outputTokens: number) => void;
getTotalTokens: () => { input: number; output: number; total: number };
searchSkills: (query: string) => { results: Array<{ id: string; name: string; description: string }>; totalAvailable: number };
// Chat mode and suggestions (DeerFlow-inspired)
setChatMode: (mode: ChatModeType) => void;
getChatModeConfig: () => ChatModeConfig;
setSuggestions: (suggestions: string[]) => void;
addSubtask: (messageId: string, task: Subtask) => void;
updateSubtask: (messageId: string, taskId: string, updates: Partial<Subtask>) => void;
// Artifact management (DeerFlow-inspired)
addArtifact: (artifact: import('../components/ai/ArtifactPanel').ArtifactFile) => void;
selectArtifact: (id: string | null) => void;
setArtifactPanelOpen: (open: boolean) => void;
clearArtifacts: () => void;
}
function generateConvId(): string {
@@ -189,6 +217,44 @@ function upsertActiveConversation(
return [nextConversation, ...conversations];
}
/**
* Generate follow-up suggestions based on assistant response content.
* Uses keyword heuristics to suggest contextually relevant follow-ups.
*/
/**
 * Derive up to three follow-up suggestion strings from an assistant reply.
 *
 * Matches the reply (case-insensitively) against a fixed keyword table and
 * collects the associated suggestion for each table row that hits; if fewer
 * than three rows match, generic fallback suggestions are appended so the
 * result always contains three entries.
 */
function generateFollowUpSuggestions(content: string): string[] {
  const lower = content.toLowerCase();

  // Keyword table: first matching rows win, in declaration order.
  const patterns: Array<{ keywords: string[]; suggestion: string }> = [
    { keywords: ['代码', 'code', 'function', '函数', '实现'], suggestion: '解释这段代码的工作原理' },
    { keywords: ['错误', 'error', 'bug', '问题'], suggestion: '如何调试这个问题?' },
    { keywords: ['数据', 'data', '分析', '统计'], suggestion: '可视化这些数据' },
    { keywords: ['步骤', 'step', '流程', '方案'], suggestion: '详细说明第一步该怎么做' },
    { keywords: ['可以', '建议', '推荐', '试试'], suggestion: '还有其他方案吗?' },
    { keywords: ['文件', 'file', '保存', '写入'], suggestion: '查看生成的文件内容' },
    { keywords: ['搜索', 'search', '查找', 'research'], suggestion: '搜索更多相关信息' },
  ];

  const picked: string[] = [];
  for (const { keywords, suggestion } of patterns) {
    const hit = keywords.some((kw) => lower.includes(kw));
    if (hit && !picked.includes(suggestion)) {
      picked.push(suggestion);
    }
    // Stop scanning once we have three contextual suggestions.
    if (picked.length >= 3) break;
  }

  // Pad with generic follow-ups (skipping duplicates) until three entries exist.
  const fallbacks = ['继续深入分析', '换个角度看看', '用简单的话解释'];
  for (const fallback of fallbacks) {
    if (picked.length >= 3) break;
    if (!picked.includes(fallback)) {
      picked.push(fallback);
    }
  }

  return picked;
}
export const useChatStore = create<ChatState>()(
persist(
(set, get) => ({
@@ -203,6 +269,11 @@ export const useChatStore = create<ChatState>()(
sessionKey: null,
totalInputTokens: 0,
totalOutputTokens: 0,
chatMode: 'thinking' as ChatModeType,
suggestions: [],
artifacts: [],
selectedArtifactId: null,
artifactPanelOpen: false,
addMessage: (message: Message) =>
set((state) => ({ messages: [...state.messages, message] })),
@@ -331,6 +402,8 @@ export const useChatStore = create<ChatState>()(
sendMessage: async (content: string) => {
const { addMessage, currentAgent, sessionKey } = get();
// Clear stale suggestions when user sends a new message
set({ suggestions: [] });
const effectiveSessionKey = sessionKey || crypto.randomUUID();
const effectiveAgentId = resolveGatewayAgentId(currentAgent);
const agentId = currentAgent?.id || 'zclaw-main';
@@ -386,11 +459,14 @@ export const useChatStore = create<ChatState>()(
}
// Add user message (original content for display)
// Mark as optimistic -- will be cleared when server confirms via onComplete
const streamStartTime = Date.now();
const userMsg: Message = {
id: `user_${Date.now()}`,
id: `user_${streamStartTime}`,
role: 'user',
content,
timestamp: new Date(),
timestamp: new Date(streamStartTime),
optimistic: true,
};
addMessage(userMsg);
@@ -421,6 +497,11 @@ export const useChatStore = create<ChatState>()(
// Declare runId before chatStream so callbacks can access it
let runId = `run_${Date.now()}`;
// F5: Persist sessionKey before starting stream to survive page reload mid-stream
if (!get().sessionKey) {
set({ sessionKey: effectiveSessionKey });
}
// Try streaming first (ZCLAW WebSocket)
const result = await client.chatStream(
enhancedContent,
@@ -436,17 +517,22 @@ export const useChatStore = create<ChatState>()(
}));
},
onTool: (tool: string, input: string, output: string) => {
const toolMsg: Message = {
id: `tool_${Date.now()}_${generateRandomString(4)}`,
role: 'tool',
content: output || input,
timestamp: new Date(),
runId,
const step: ToolCallStep = {
id: `step_${Date.now()}_${generateRandomString(4)}`,
toolName: tool,
toolInput: input,
toolOutput: output,
input,
output,
status: output ? 'completed' : 'running',
timestamp: new Date(),
};
set((state) => ({ messages: [...state.messages, toolMsg] }));
// Add step to the streaming assistant message's toolSteps
set((s) => ({
messages: s.messages.map((m) =>
m.id === assistantId
? { ...m, toolSteps: [...(m.toolSteps || []), step] }
: m
),
}));
},
onHand: (name: string, status: string, result?: unknown) => {
const handMsg: Message = {
@@ -492,9 +578,16 @@ export const useChatStore = create<ChatState>()(
isStreaming: false,
conversations,
currentConversationId: currentConvId,
messages: state.messages.map((m) =>
m.id === assistantId ? { ...m, streaming: false, runId } : m
),
messages: state.messages.map((m) => {
if (m.id === assistantId) {
return { ...m, streaming: false, runId };
}
// Clear optimistic flag on user messages (server confirmed)
if (m.optimistic) {
return { ...m, optimistic: false };
}
return m;
}),
});
// Track token usage if provided (KernelClient provides these)
@@ -520,6 +613,16 @@ export const useChatStore = create<ChatState>()(
});
}
});
// Generate follow-up suggestions (DeerFlow-inspired)
const assistantMsg = get().messages.find(m => m.id === assistantId);
if (assistantMsg?.content) {
const content = assistantMsg.content;
const suggestions = generateFollowUpSuggestions(content);
if (suggestions.length > 0) {
get().setSuggestions(suggestions);
}
}
},
onError: (error: string) => {
set((state) => ({
@@ -527,7 +630,9 @@ export const useChatStore = create<ChatState>()(
messages: state.messages.map((m) =>
m.id === assistantId
? { ...m, content: `⚠️ ${error}`, streaming: false, error }
: m
: m.role === 'user' && m.optimistic && m.timestamp.getTime() >= streamStartTime
? { ...m, optimistic: false }
: m
),
}));
},
@@ -535,6 +640,9 @@ export const useChatStore = create<ChatState>()(
{
sessionKey: effectiveSessionKey,
agentId: effectiveAgentId,
thinking_enabled: get().getChatModeConfig().thinking_enabled,
reasoning_effort: get().getChatModeConfig().reasoning_effort,
plan_mode: get().getChatModeConfig().plan_mode,
}
);
@@ -566,7 +674,9 @@ export const useChatStore = create<ChatState>()(
streaming: false,
error: errorMessage,
}
: m
: m.role === 'user' && m.optimistic && m.timestamp.getTime() >= streamStartTime
? { ...m, optimistic: false }
: m
),
}));
}
@@ -592,6 +702,50 @@ export const useChatStore = create<ChatState>()(
};
},
// Chat mode (DeerFlow-inspired)
setChatMode: (mode: ChatModeType) => set({ chatMode: mode }),
getChatModeConfig: () => CHAT_MODES[get().chatMode].config,
setSuggestions: (suggestions: string[]) => set({ suggestions }),
addSubtask: (messageId: string, task: Subtask) =>
set((state) => ({
messages: state.messages.map((m) =>
m.id === messageId
? { ...m, subtasks: [...(m.subtasks || []), task] }
: m
),
})),
updateSubtask: (messageId: string, taskId: string, updates: Partial<Subtask>) =>
set((state) => ({
messages: state.messages.map((m) =>
m.id === messageId
? {
...m,
subtasks: (m.subtasks || []).map((t) =>
t.id === taskId ? { ...t, ...updates } : t
),
}
: m
),
})),
// Artifact management (DeerFlow-inspired)
addArtifact: (artifact) =>
set((state) => ({
artifacts: [...state.artifacts, artifact],
selectedArtifactId: artifact.id,
artifactPanelOpen: true,
})),
selectArtifact: (id) => set({ selectedArtifactId: id }),
setArtifactPanelOpen: (open) => set({ artifactPanelOpen: open }),
clearArtifacts: () => set({ artifacts: [], selectedArtifactId: null, artifactPanelOpen: false }),
initStreamListener: () => {
const client = getClient();
@@ -629,31 +783,51 @@ export const useChatStore = create<ChatState>()(
),
}));
} else if (delta.stream === 'tool') {
const toolMsg: Message = {
id: `tool_${Date.now()}_${generateRandomString(4)}`,
role: 'tool',
content: delta.toolOutput || '',
// Add tool step to the streaming assistant message (DeerFlow-inspired steps chain)
const step: ToolCallStep = {
id: `step_${Date.now()}_${generateRandomString(4)}`,
toolName: delta.tool || 'unknown',
input: delta.toolInput,
output: delta.toolOutput,
status: delta.toolOutput ? 'completed' : 'running',
timestamp: new Date(),
runId: delta.runId,
toolName: delta.tool,
toolInput: delta.toolInput,
toolOutput: delta.toolOutput,
};
set((s) => ({ messages: [...s.messages, toolMsg] }));
set((s) => ({
messages: s.messages.map((m) =>
m.id === streamingMsg.id
? { ...m, toolSteps: [...(m.toolSteps || []), step] }
: m
),
}));
} else if (delta.stream === 'lifecycle') {
if (delta.phase === 'end' || delta.phase === 'error') {
set((s) => ({
isStreaming: false,
messages: s.messages.map((m) =>
m.id === streamingMsg.id
? {
...m,
streaming: false,
error: delta.phase === 'error' ? delta.error : undefined,
}
: m
),
messages: s.messages.map((m) => {
if (m.id === streamingMsg.id) {
return {
...m,
streaming: false,
error: delta.phase === 'error' ? delta.error : undefined,
};
}
// Clear optimistic flag on user messages (server confirmed)
if (m.optimistic) {
return { ...m, optimistic: false };
}
return m;
}),
}));
// Generate follow-up suggestions on stream end
if (delta.phase === 'end') {
const completedMsg = get().messages.find(m => m.id === streamingMsg.id);
if (completedMsg?.content) {
const suggestions = generateFollowUpSuggestions(completedMsg.content);
if (suggestions.length > 0) {
get().setSuggestions(suggestions);
}
}
}
}
} else if (delta.stream === 'hand') {
// Handle Hand trigger events from ZCLAW
@@ -699,6 +873,7 @@ export const useChatStore = create<ChatState>()(
currentModel: state.currentModel,
currentAgentId: state.currentAgent?.id,
currentConversationId: state.currentConversationId,
chatMode: state.chatMode,
}),
onRehydrateStorage: () => (state) => {
// Rehydrate Date objects from JSON strings
@@ -709,6 +884,7 @@ export const useChatStore = create<ChatState>()(
for (const msg of conv.messages) {
msg.timestamp = new Date(msg.timestamp);
msg.streaming = false; // Never restore streaming state
msg.optimistic = false; // Never restore optimistic flag (server already confirmed)
}
}
}