fix(desktop): DeerFlow UI — ChatArea refactor + ai-elements + dead CSS cleanup

ChatArea retry button now uses setInput instead of calling sendToGateway directly,
fix bootstrap spinner getting stuck for non-logged-in users,
remove dead CSS (aurora-title/sidebar-open/quick-action-chips),
add ai components (ReasoningBlock/StreamingText/ChatMode/ModelSelector/TaskProgress),
add ClassroomPlayer + ResizableChatLayout + artifact panel

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-04-02 19:24:44 +08:00
parent d40c4605b2
commit 28299807b6
70 changed files with 4938 additions and 618 deletions

View File

@@ -103,10 +103,6 @@ interface ChatState {
chatMode: ChatModeType;
// Follow-up suggestions
suggestions: string[];
// Artifacts (DeerFlow-inspired)
artifacts: import('../components/ai/ArtifactPanel').ArtifactFile[];
selectedArtifactId: string | null;
artifactPanelOpen: boolean;
addMessage: (message: Message) => void;
updateMessage: (id: string, updates: Partial<Message>) => void;
@@ -128,11 +124,6 @@ interface ChatState {
setSuggestions: (suggestions: string[]) => void;
addSubtask: (messageId: string, task: Subtask) => void;
updateSubtask: (messageId: string, taskId: string, updates: Partial<Subtask>) => void;
// Artifact management (DeerFlow-inspired)
addArtifact: (artifact: import('../components/ai/ArtifactPanel').ArtifactFile) => void;
selectArtifact: (id: string | null) => void;
setArtifactPanelOpen: (open: boolean) => void;
clearArtifacts: () => void;
}
function generateConvId(): string {
@@ -271,10 +262,6 @@ export const useChatStore = create<ChatState>()(
totalOutputTokens: 0,
chatMode: 'thinking' as ChatModeType,
suggestions: [],
artifacts: [],
selectedArtifactId: null,
artifactPanelOpen: false,
addMessage: (message: Message) =>
set((state) => ({ messages: [...state.messages, message] })),
@@ -401,6 +388,10 @@ export const useChatStore = create<ChatState>()(
},
sendMessage: async (content: string) => {
// Concurrency guard: prevent rapid double-click bypassing UI-level isStreaming check.
// React re-render is async — two clicks within the same frame both read isStreaming=false.
if (get().isStreaming) return;
const { addMessage, currentAgent, sessionKey } = get();
// Clear stale suggestions when user sends a new message
set({ suggestions: [] });
@@ -436,27 +427,10 @@ export const useChatStore = create<ChatState>()(
// Context compaction is handled by the kernel (AgentLoop with_compaction_threshold).
// Frontend no longer performs duplicate compaction — see crates/zclaw-runtime/src/compaction.rs.
// Build memory-enhanced content using layered context (L0/L1/L2)
let enhancedContent = content;
try {
const contextResult = await intelligenceClient.memory.buildContext(
agentId,
content,
500, // token budget for memory context
);
if (contextResult.systemPromptAddition) {
const systemPrompt = await intelligenceClient.identity.buildPrompt(
agentId,
contextResult.systemPromptAddition,
);
if (systemPrompt) {
enhancedContent = `<context>\n${systemPrompt}\n</context>\n\n${content}`;
}
}
} catch (err) {
log.warn('Memory context build failed, proceeding without:', err);
}
// Memory context injection is handled by backend MemoryMiddleware (before_completion),
// which injects relevant memories into the system prompt. Frontend must NOT duplicate
// this by embedding old conversation memories into the user message content — that causes
// context leaking (old conversations appearing in new chat thinking/output).
// Add user message (original content for display)
// Mark as optimistic -- will be cleared when server confirms via onComplete
@@ -504,7 +478,7 @@ export const useChatStore = create<ChatState>()(
// Try streaming first (ZCLAW WebSocket)
const result = await client.chatStream(
enhancedContent,
content,
{
onDelta: (delta: string) => {
// Update message content directly (works for both KernelClient and GatewayClient)
@@ -516,6 +490,15 @@ export const useChatStore = create<ChatState>()(
),
}));
},
onThinkingDelta: (delta: string) => {
set((s) => ({
messages: s.messages.map((m) =>
m.id === assistantId
? { ...m, thinkingContent: (m.thinkingContent || '') + delta }
: m
),
}));
},
onTool: (tool: string, input: string, output: string) => {
const step: ToolCallStep = {
id: `step_${Date.now()}_${generateRandomString(4)}`,
@@ -732,20 +715,6 @@ export const useChatStore = create<ChatState>()(
),
})),
// Artifact management (DeerFlow-inspired)
addArtifact: (artifact) =>
set((state) => ({
artifacts: [...state.artifacts, artifact],
selectedArtifactId: artifact.id,
artifactPanelOpen: true,
})),
selectArtifact: (id) => set({ selectedArtifactId: id }),
setArtifactPanelOpen: (open) => set({ artifactPanelOpen: open }),
clearArtifacts: () => set({ artifacts: [], selectedArtifactId: null, artifactPanelOpen: false }),
initStreamListener: () => {
const client = getClient();