feat(desktop): DeerFlow visual redesign + stream hang fix + intelligence client
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

DeerFlow frontend visual overhaul:
- Card-style input box (white rounded card, textarea top, actions bottom)
- Dropdown mode selector (闪速/思考/Pro/Ultra with icons+descriptions)
- Colored quick-action chips (小惊喜/写作/研究/收集/学习)
- Minimal top bar (title + token count + export)
- Warm gray color system (#faf9f6 bg, #f5f4f1 sidebar, #e8e6e1 border)
- DeerFlow-style sidebar (新对话/对话/智能体 nav)
- Reasoning block, tool call chain, task progress visualization
- Streaming text, model selector, suggestion chips components
- Resizable artifact panel with drag handle
- Virtualized message list for 100+ messages

Bug fixes:
- Stream hang: GatewayClient onclose code 1000 now calls onComplete
- WebView2 textarea border: CSS !important override for UA styles
- Gateway stream event handling (response/phase/tool_call types)

Intelligence client:
- Unified client with fallback drivers (compactor/heartbeat/identity/memory/reflection)
- Gateway API types and type conversions
This commit is contained in:
iven
2026-04-01 22:03:07 +08:00
parent e3b93ff96d
commit 73ff5e8c5e
43 changed files with 4817 additions and 905 deletions

View File

@@ -47,6 +47,7 @@
"react": "^19.2.4", "react": "^19.2.4",
"react-dom": "^19.2.4", "react-dom": "^19.2.4",
"react-markdown": "^10.1.0", "react-markdown": "^10.1.0",
"react-resizable-panels": "^4.8.0",
"react-window": "^2.2.7", "react-window": "^2.2.7",
"recharts": "^3.8.1", "recharts": "^3.8.1",
"remark-gfm": "^4.0.1", "remark-gfm": "^4.0.1",

14
desktop/pnpm-lock.yaml generated
View File

@@ -41,6 +41,9 @@ importers:
react-markdown: react-markdown:
specifier: ^10.1.0 specifier: ^10.1.0
version: 10.1.0(@types/react@19.2.14)(react@19.2.4) version: 10.1.0(@types/react@19.2.14)(react@19.2.4)
react-resizable-panels:
specifier: ^4.8.0
version: 4.8.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
react-window: react-window:
specifier: ^2.2.7 specifier: ^2.2.7
version: 2.2.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4) version: 2.2.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -2819,6 +2822,12 @@ packages:
resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==} resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==}
engines: {node: '>=0.10.0'} engines: {node: '>=0.10.0'}
react-resizable-panels@4.8.0:
resolution: {integrity: sha512-2uEABkewb3ky/ZgIlAUxWa1W/LjsK494fdV1QsXxst7CDRHCzo7h22tWWu3NNaBjmiuriOCt3CvhipnaYcpoIw==}
peerDependencies:
react: ^18.0.0 || ^19.0.0
react-dom: ^18.0.0 || ^19.0.0
react-window@2.2.7: react-window@2.2.7:
resolution: {integrity: sha512-SH5nvfUQwGHYyriDUAOt7wfPsfG9Qxd6OdzQxl5oQ4dsSsUicqQvjV7dR+NqZ4coY0fUn3w1jnC5PwzIUWEg5w==} resolution: {integrity: sha512-SH5nvfUQwGHYyriDUAOt7wfPsfG9Qxd6OdzQxl5oQ4dsSsUicqQvjV7dR+NqZ4coY0fUn3w1jnC5PwzIUWEg5w==}
peerDependencies: peerDependencies:
@@ -6213,6 +6222,11 @@ snapshots:
react-refresh@0.17.0: {} react-refresh@0.17.0: {}
react-resizable-panels@4.8.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
dependencies:
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
react-window@2.2.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4): react-window@2.2.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
dependencies: dependencies:
react: 19.2.4 react: 19.2.4

View File

@@ -78,6 +78,9 @@ impl TauriExtractionDriver {
temperature: Some(0.3), temperature: Some(0.3),
stop: Vec::new(), stop: Vec::new(),
stream: false, stream: false,
thinking_enabled: false,
reasoning_effort: None,
plan_mode: false,
} }
} }

View File

@@ -886,7 +886,7 @@ mod tests {
#[test] #[test]
fn test_default_config() { fn test_default_config() {
let config = HeartbeatConfig::default(); let config = HeartbeatConfig::default();
assert!(!config.enabled); assert!(config.enabled);
assert_eq!(config.interval_minutes, 30); assert_eq!(config.interval_minutes, 30);
} }
} }

View File

@@ -3,7 +3,6 @@
use std::sync::Arc; use std::sync::Arc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter, State}; use tauri::{AppHandle, Emitter, State};
use tokio::sync::Mutex;
use zclaw_types::AgentId; use zclaw_types::AgentId;
use super::{validate_agent_id, KernelState, SessionStreamGuard}; use super::{validate_agent_id, KernelState, SessionStreamGuard};
@@ -51,6 +50,15 @@ pub struct StreamChatRequest {
pub agent_id: String, pub agent_id: String,
pub session_id: String, pub session_id: String,
pub message: String, pub message: String,
/// Enable extended thinking/reasoning
#[serde(default)]
pub thinking_enabled: Option<bool>,
/// Reasoning effort level (low/medium/high)
#[serde(default)]
pub reasoning_effort: Option<String>,
/// Enable plan mode
#[serde(default)]
pub plan_mode: Option<bool>,
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@@ -111,18 +119,21 @@ pub async fn agent_chat_stream(
let agent_id_str = request.agent_id.clone(); let agent_id_str = request.agent_id.clone();
let message = request.message.clone(); let message = request.message.clone();
// Session-level concurrency guard // Session-level concurrency guard using atomic flag
let session_mutex = stream_guard let session_active = stream_guard
.entry(session_id.clone()) .entry(session_id.clone())
.or_insert_with(|| Arc::new(Mutex::new(()))); .or_insert_with(|| Arc::new(std::sync::atomic::AtomicBool::new(false)));
let _session_guard = session_mutex.try_lock() // Atomically set flag from false→true, fail if already true
.map_err(|_| { if session_active
tracing::warn!( .compare_exchange(false, true, std::sync::atomic::Ordering::SeqCst, std::sync::atomic::Ordering::SeqCst)
"[agent_chat_stream] Session {} already has an active stream — rejecting", .is_err()
session_id {
); tracing::warn!(
format!("Session {} already has an active stream", session_id) "[agent_chat_stream] Session {} already has an active stream — rejecting",
})?; session_id
);
return Err(format!("Session {} already has an active stream", session_id));
}
// AUTO-INIT HEARTBEAT // AUTO-INIT HEARTBEAT
{ {
@@ -167,7 +178,20 @@ pub async fn agent_chat_stream(
} }
} }
}; };
let rx = kernel.send_message_stream_with_prompt(&id, message.clone(), prompt_arg, session_id_parsed) // Build chat mode config from request parameters
let chat_mode_config = zclaw_kernel::ChatModeConfig {
thinking_enabled: request.thinking_enabled,
reasoning_effort: request.reasoning_effort.clone(),
plan_mode: request.plan_mode,
};
let rx = kernel.send_message_stream_with_prompt(
&id,
message.clone(),
prompt_arg,
session_id_parsed,
Some(chat_mode_config),
)
.await .await
.map_err(|e| format!("Failed to start streaming: {}", e))?; .map_err(|e| format!("Failed to start streaming: {}", e))?;
(rx, driver) (rx, driver)
@@ -176,7 +200,9 @@ pub async fn agent_chat_stream(
let hb_state = heartbeat_state.inner().clone(); let hb_state = heartbeat_state.inner().clone();
let rf_state = reflection_state.inner().clone(); let rf_state = reflection_state.inner().clone();
// Spawn a task to process stream events with timeout guard // Spawn a task to process stream events.
// The session_active flag is cleared when task completes.
let guard_clone = Arc::clone(&*session_active);
tokio::spawn(async move { tokio::spawn(async move {
use zclaw_runtime::LoopEvent; use zclaw_runtime::LoopEvent;
@@ -268,6 +294,9 @@ pub async fn agent_chat_stream(
} }
tracing::debug!("[agent_chat_stream] Stream processing ended for session: {}", session_id); tracing::debug!("[agent_chat_stream] Stream processing ended for session: {}", session_id);
// Release session lock
guard_clone.store(false, std::sync::atomic::Ordering::SeqCst);
}); });
Ok(()) Ok(())

View File

@@ -32,7 +32,9 @@ pub type SchedulerState = Arc<Mutex<Option<zclaw_kernel::scheduler::SchedulerSer
/// Session-level stream concurrency guard. /// Session-level stream concurrency guard.
/// Prevents two concurrent `agent_chat_stream` calls from interleaving events /// Prevents two concurrent `agent_chat_stream` calls from interleaving events
/// for the same session_id. /// for the same session_id.
pub type SessionStreamGuard = Arc<dashmap::DashMap<String, Arc<Mutex<()>>>>; /// Uses an `AtomicBool` in the `DashMap` — `true` means an active stream, `false` means idle.
/// The `spawn`ed task resets the flag on completion/error.
pub type SessionStreamGuard = Arc<dashmap::DashMap<String, Arc<std::sync::atomic::AtomicBool>>>;
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Shared validation helpers // Shared validation helpers

View File

@@ -87,6 +87,9 @@ impl LlmActionDriver for RuntimeLlmAdapter {
temperature, temperature,
stop: Vec::new(), stop: Vec::new(),
stream: false, stream: false,
thinking_enabled: false,
reasoning_effort: None,
plan_mode: false,
}; };
let response = self.driver.complete(request) let response = self.driver.complete(request)

View File

@@ -5,16 +5,27 @@ import { useChatStore, Message } from '../store/chatStore';
import { useConnectionStore } from '../store/connectionStore'; import { useConnectionStore } from '../store/connectionStore';
import { useAgentStore } from '../store/agentStore'; import { useAgentStore } from '../store/agentStore';
import { useConfigStore } from '../store/configStore'; import { useConfigStore } from '../store/configStore';
import { Paperclip, ChevronDown, Terminal, SquarePen, ArrowUp, MessageSquare, Download, Copy, Check } from 'lucide-react'; import { Paperclip, SquarePen, ArrowUp, MessageSquare, Download, X, FileText, Image as ImageIcon } from 'lucide-react';
import { Button, EmptyState, MessageListSkeleton, LoadingDots } from './ui'; import { Button, EmptyState, MessageListSkeleton, LoadingDots } from './ui';
import { ResizableChatLayout } from './ai/ResizableChatLayout';
import { ArtifactPanel } from './ai/ArtifactPanel';
import { ToolCallChain } from './ai/ToolCallChain';
import { listItemVariants, defaultTransition, fadeInVariants } from '../lib/animations'; import { listItemVariants, defaultTransition, fadeInVariants } from '../lib/animations';
import { FirstConversationPrompt } from './FirstConversationPrompt'; import { FirstConversationPrompt } from './FirstConversationPrompt';
import { MessageSearch } from './MessageSearch'; // MessageSearch temporarily removed during DeerFlow redesign
import { OfflineIndicator } from './OfflineIndicator'; import { OfflineIndicator } from './OfflineIndicator';
import { import {
useVirtualizedMessages, useVirtualizedMessages,
type VirtualizedMessageItem type VirtualizedMessageItem
} from '../lib/message-virtualization'; } from '../lib/message-virtualization';
import { Conversation } from './ai/Conversation';
import { ReasoningBlock } from './ai/ReasoningBlock';
import { StreamingText } from './ai/StreamingText';
import { ChatMode } from './ai/ChatMode';
import { ModelSelector } from './ai/ModelSelector';
import { TaskProgress } from './ai/TaskProgress';
import { SuggestionChips } from './ai/SuggestionChips';
// TokenMeter temporarily unused — using inline text counter instead
// Default heights for virtualized messages // Default heights for virtualized messages
const DEFAULT_MESSAGE_HEIGHTS: Record<string, number> = { const DEFAULT_MESSAGE_HEIGHTS: Record<string, number> = {
@@ -33,17 +44,21 @@ export function ChatArea() {
const { const {
messages, currentAgent, isStreaming, isLoading, currentModel, messages, currentAgent, isStreaming, isLoading, currentModel,
sendMessage: sendToGateway, setCurrentModel, initStreamListener, sendMessage: sendToGateway, setCurrentModel, initStreamListener,
newConversation, newConversation, chatMode, setChatMode, suggestions,
artifacts, selectedArtifactId, artifactPanelOpen,
selectArtifact, setArtifactPanelOpen,
totalInputTokens, totalOutputTokens,
} = useChatStore(); } = useChatStore();
const connectionState = useConnectionStore((s) => s.connectionState); const connectionState = useConnectionStore((s) => s.connectionState);
const clones = useAgentStore((s) => s.clones); const clones = useAgentStore((s) => s.clones);
const models = useConfigStore((s) => s.models); const models = useConfigStore((s) => s.models);
const [input, setInput] = useState(''); const [input, setInput] = useState('');
const [showModelPicker, setShowModelPicker] = useState(false); const [pendingFiles, setPendingFiles] = useState<File[]>([]);
const scrollRef = useRef<HTMLDivElement>(null); const scrollRef = useRef<HTMLDivElement>(null);
const textareaRef = useRef<HTMLTextAreaElement>(null); const textareaRef = useRef<HTMLTextAreaElement>(null);
const messageRefs = useRef<Map<string, HTMLDivElement>>(new Map()); const messageRefs = useRef<Map<string, HTMLDivElement>>(new Map());
const fileInputRef = useRef<HTMLInputElement>(null);
// Convert messages to virtualization format // Convert messages to virtualization format
const virtualizedMessages: VirtualizedMessageItem[] = useMemo( const virtualizedMessages: VirtualizedMessageItem[] = useMemo(
@@ -90,6 +105,41 @@ export function ChatArea() {
} }
}, []); }, []);
// File handling
const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
const MAX_FILES = 5;
const addFiles = useCallback((files: FileList | File[]) => {
const incoming = Array.from(files).filter((f) => f.size <= MAX_FILE_SIZE);
setPendingFiles((prev) => {
const combined = [...prev, ...incoming];
return combined.slice(0, MAX_FILES);
});
}, []);
// Paste handler for images/files
useEffect(() => {
const handler = (e: ClipboardEvent) => {
if (e.clipboardData?.files.length) {
e.preventDefault();
addFiles(e.clipboardData.files);
}
};
document.addEventListener('paste', handler);
return () => document.removeEventListener('paste', handler);
}, [addFiles]);
const handleDrop = useCallback((e: React.DragEvent) => {
e.preventDefault();
if (e.dataTransfer.files.length) {
addFiles(e.dataTransfer.files);
}
}, [addFiles]);
const handleDragOver = useCallback((e: React.DragEvent) => {
e.preventDefault();
}, []);
// Init agent stream listener on mount // Init agent stream listener on mount
useEffect(() => { useEffect(() => {
const unsub = initStreamListener(); const unsub = initStreamListener();
@@ -106,10 +156,14 @@ export function ChatArea() {
}, [messages, useVirtualization, scrollToBottom]); }, [messages, useVirtualization, scrollToBottom]);
const handleSend = () => { const handleSend = () => {
if (!input.trim() || isStreaming) return; if ((!input.trim() && pendingFiles.length === 0) || isStreaming) return;
// Allow sending in offline mode - message will be queued // Attach file names as metadata in the message
sendToGateway(input); const fileContext = pendingFiles.length > 0
? `\n\n[附件: ${pendingFiles.map((f) => f.name).join(', ')}]`
: '';
sendToGateway(input + fileContext);
setInput(''); setInput('');
setPendingFiles([]);
}; };
const handleKeyDown = (e: React.KeyboardEvent) => { const handleKeyDown = (e: React.KeyboardEvent) => {
@@ -121,52 +175,73 @@ export function ChatArea() {
const connected = connectionState === 'connected'; const connected = connectionState === 'connected';
// Navigate to a specific message by ID // Export current conversation as Markdown
const handleNavigateToMessage = useCallback((messageId: string) => { const exportCurrentConversation = () => {
const messageEl = messageRefs.current.get(messageId); const title = currentAgent?.name || 'ZCLAW 对话';
if (messageEl && scrollRef.current) { const lines = [`# ${title}`, '', `导出时间: ${new Date().toLocaleString('zh-CN')}`, ''];
messageEl.scrollIntoView({ behavior: 'smooth', block: 'center' }); for (const msg of messages) {
// Add highlight effect const label = msg.role === 'user' ? '用户' : msg.role === 'assistant' ? '助手' : msg.role;
messageEl.classList.add('ring-2', 'ring-orange-400', 'ring-offset-2'); lines.push(`## ${label}`, '', msg.content, '');
setTimeout(() => {
messageEl.classList.remove('ring-2', 'ring-orange-400', 'ring-offset-2');
}, 2000);
} }
}, []); const blob = new Blob([lines.join('\n')], { type: 'text/markdown;charset=utf-8' });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `${title.replace(/[/\\?%*:|"<>]/g, '_')}.md`;
a.click();
URL.revokeObjectURL(url);
};
// Build artifact panel content
const artifactRightPanel = (
<ArtifactPanel
artifacts={artifacts}
selectedId={selectedArtifactId}
onSelect={selectArtifact}
onClose={() => setArtifactPanelOpen(false)}
/>
);
return ( return (
<div className="flex flex-col h-full"> <ResizableChatLayout
{/* Header */} chatPanel={
{/* Header */} <div className="flex flex-col h-full">
<div className="h-14 border-b border-gray-100 dark:border-gray-800 flex items-center justify-between px-6 flex-shrink-0 bg-white dark:bg-gray-900"> {/* Header — DeerFlow-style: minimal */}
<div className="flex items-center gap-2"> <div className="h-14 border-b border-transparent flex items-center justify-between px-6 flex-shrink-0 bg-white dark:bg-gray-900">
<h2 className="font-semibold text-gray-900 dark:text-gray-100">{currentAgent?.name || 'ZCLAW'}</h2> <div className="flex items-center gap-2 text-sm text-gray-500">
{isStreaming ? ( <span>{currentAgent?.name || '新对话'}</span>
<span className="text-xs text-gray-500 dark:text-gray-400 flex items-center gap-1">
<span className="w-1.5 h-1.5 bg-gray-500 dark:bg-gray-400 rounded-full thinking-dot"></span>
</span>
) : (
<span className={`text-xs flex items-center gap-1 ${connected ? 'text-green-500' : 'text-gray-500 dark:text-gray-400'}`}>
<span className={`w-1.5 h-1.5 rounded-full ${connected ? 'bg-green-400' : 'bg-gray-300 dark:bg-gray-600'}`}></span>
{connected ? 'Gateway 已连接' : 'Gateway 未连接'}
</span>
)}
</div> </div>
<div className="flex items-center gap-2"> <div className="flex items-center gap-4">
{/* Offline indicator in header */} {/* Token usage counter — DeerFlow-style plain text */}
{(totalInputTokens + totalOutputTokens) > 0 && (() => {
const total = totalInputTokens + totalOutputTokens;
const display = total >= 1000 ? `${(total / 1000).toFixed(1)}K` : String(total);
return (
<span className="text-sm text-gray-500 flex items-center gap-1.5">
{display}
</span>
);
})()}
<OfflineIndicator compact /> <OfflineIndicator compact />
{messages.length > 0 && ( {messages.length > 0 && (
<MessageSearch onNavigateToMessage={handleNavigateToMessage} /> <Button
variant="ghost"
size="sm"
onClick={exportCurrentConversation}
className="flex items-center gap-2 text-gray-600 dark:text-gray-400 hover:bg-gray-50 dark:hover:bg-gray-800 rounded-lg transition-colors"
title="导出对话"
>
<Download className="w-3.5 h-3.5" />
<span className="text-sm"></span>
</Button>
)} )}
{messages.length > 0 && ( {messages.length > 0 && (
<Button <Button
variant="ghost" variant="ghost"
size="sm" size="sm"
onClick={newConversation} onClick={newConversation}
className="flex items-center gap-2 text-gray-600 dark:text-gray-400 hover:bg-gray-50 dark:hover:bg-gray-800 rounded-lg transition-colors"
title="新对话" title="新对话"
aria-label="开始新对话"
className="flex items-center gap-1.5 text-gray-500 dark:text-gray-400 hover:text-orange-600 dark:hover:text-orange-400 hover:bg-orange-50 dark:hover:bg-orange-900/20"
> >
<SquarePen className="w-3.5 h-3.5" /> <SquarePen className="w-3.5 h-3.5" />
@@ -176,7 +251,7 @@ export function ChatArea() {
</div> </div>
{/* Messages */} {/* Messages */}
<div ref={scrollRef} className="flex-1 overflow-y-auto custom-scrollbar bg-white dark:bg-gray-900"> <Conversation className="flex-1 bg-white dark:bg-gray-900">
<AnimatePresence mode="popLayout"> <AnimatePresence mode="popLayout">
{/* Loading skeleton */} {/* Loading skeleton */}
{isLoading && messages.length === 0 && ( {isLoading && messages.length === 0 && (
@@ -240,21 +315,60 @@ export function ChatArea() {
)) ))
)} )}
</AnimatePresence> </AnimatePresence>
</div> </Conversation>
{/* Input */} {/* Input */}
<div className="border-t border-gray-100 dark:border-gray-800 p-4 bg-white dark:bg-gray-900"> <div className="p-4 bg-white dark:bg-gray-900">
<div className="max-w-4xl mx-auto"> <div className="max-w-4xl mx-auto">
<div className="relative flex items-end gap-2 bg-gray-50 dark:bg-gray-800 rounded-2xl border border-gray-200 dark:border-gray-700 p-2 focus-within:border-orange-300 dark:focus-within:border-orange-600 focus-within:ring-2 focus-within:ring-orange-100 dark:focus-within:ring-orange-900/30 transition-all"> {/* Suggestion chips */}
<Button {!isStreaming && suggestions.length > 0 && (
variant="ghost" <SuggestionChips
size="sm" suggestions={suggestions}
className="p-2 text-gray-500 dark:text-gray-400 hover:text-gray-600 dark:hover:text-gray-300" onSelect={(text) => { setInput(text); textareaRef.current?.focus(); }}
aria-label="添加附件" className="mb-3"
> />
<Paperclip className="w-5 h-5" /> )}
</Button> {/* Hidden file input */}
<div className="flex-1 py-1"> <input
ref={fileInputRef}
type="file"
multiple
className="hidden"
onChange={(e) => { if (e.target.files) addFiles(e.target.files); e.target.value = ''; }}
/>
{/* Pending file previews */}
{pendingFiles.length > 0 && (
<div className="flex flex-wrap gap-2 mb-2">
{pendingFiles.map((file, idx) => (
<div
key={`${file.name}-${idx}`}
className="flex items-center gap-2 px-3 py-1.5 bg-gray-100 dark:bg-gray-700 rounded-lg text-xs text-gray-700 dark:text-gray-300 max-w-[200px]"
>
{file.type.startsWith('image/') ? (
<ImageIcon className="w-3.5 h-3.5 flex-shrink-0 text-orange-500" />
) : (
<FileText className="w-3.5 h-3.5 flex-shrink-0 text-gray-500" />
)}
<span className="truncate">{file.name}</span>
<span className="text-gray-400 flex-shrink-0">({(file.size / 1024).toFixed(0)}K)</span>
<button
onClick={() => setPendingFiles((prev) => prev.filter((_, i) => i !== idx))}
className="p-0.5 text-gray-400 hover:text-red-500 flex-shrink-0"
>
<X className="w-3 h-3" />
</button>
</div>
))}
</div>
)}
{/* Input card — DeerFlow-style: white card, textarea top, actions bottom */}
<div
className="bg-white dark:bg-gray-800 rounded-2xl shadow-sm transition-all"
onDrop={handleDrop}
onDragOver={handleDragOver}
>
{/* Textarea area */}
<div className="px-4 pt-4 pb-1">
<textarea <textarea
ref={textareaRef} ref={textareaRef}
value={input} value={input}
@@ -263,277 +377,70 @@ export function ChatArea() {
placeholder={ placeholder={
isStreaming isStreaming
? 'Agent 正在回复...' ? 'Agent 正在回复...'
: `发送给 ${currentAgent?.name || 'ZCLAW'}${!connected ? ' (离线模式)' : ''}` : '今天我能为你做些什么?'
} }
disabled={isStreaming} disabled={isStreaming}
rows={1} rows={2}
className="w-full bg-transparent border-none focus:outline-none text-gray-700 dark:text-gray-200 placeholder-gray-400 dark:placeholder-gray-500 disabled:opacity-50 resize-none leading-relaxed mt-1" className="w-full bg-transparent border-none outline-none ring-0 focus:outline-none focus:ring-0 text-gray-700 dark:text-gray-200 placeholder-gray-400 dark:placeholder-gray-500 disabled:opacity-50 resize-none leading-relaxed"
style={{ minHeight: '24px', maxHeight: '160px' }} style={{ minHeight: '48px', maxHeight: '160px', border: 'none', outline: 'none', boxShadow: 'none' }}
/> />
</div> </div>
<div className="flex items-center gap-2 pr-2 pb-1 relative">
<Button {/* Bottom action bar */}
variant="ghost" <div className="flex items-center justify-between px-3 pb-3">
size="sm" <div className="flex items-center gap-1">
onClick={() => setShowModelPicker(!showModelPicker)} <Button
className="flex items-center gap-1 text-xs text-gray-500 dark:text-gray-400 hover:bg-gray-200 dark:hover:bg-gray-700" variant="ghost"
aria-label="选择模型" size="sm"
aria-expanded={showModelPicker} className="p-2 text-gray-500 dark:text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
> aria-label="添加附件"
<span>{currentModel}</span> onClick={() => fileInputRef.current?.click()}
<ChevronDown className="w-3 h-3" /> >
</Button> <Paperclip className="w-5 h-5" />
{showModelPicker && ( </Button>
<div className="absolute bottom-full right-8 mb-2 bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg shadow-lg py-1 min-w-[160px] max-h-48 overflow-y-auto z-10"> <ChatMode
{models.length > 0 ? ( value={chatMode}
models.map((model) => ( onChange={setChatMode}
<button disabled={isStreaming}
key={model.id} />
onClick={() => { setCurrentModel(model.id); setShowModelPicker(false); }} </div>
className={`w-full text-left px-3 py-2 text-xs hover:bg-gray-50 dark:hover:bg-gray-700 ${model.id === currentModel ? 'text-orange-600 dark:text-orange-400 font-medium' : 'text-gray-700 dark:text-gray-300'}`} <div className="flex items-center gap-2">
> <ModelSelector
{model.name} models={models.map(m => ({ id: m.id, name: m.name, provider: m.provider }))}
</button> currentModel={currentModel}
)) onSelect={setCurrentModel}
) : ( disabled={isStreaming}
<div className="px-3 py-2 text-xs text-gray-400"> />
{connected ? '加载中...' : '未连接 Gateway'} <Button
</div> variant="primary"
)} size="sm"
</div> onClick={handleSend}
)} disabled={isStreaming || (!input.trim() && pendingFiles.length === 0)}
<Button className="w-8 h-8 rounded-full p-0 flex items-center justify-center bg-orange-500 hover:bg-orange-600 text-white disabled:opacity-50"
variant="primary" aria-label="发送消息"
size="sm" >
onClick={handleSend} <ArrowUp className="w-4 h-4 text-white" />
disabled={isStreaming || !input.trim()} </Button>
className="w-8 h-8 rounded-full p-0 flex items-center justify-center bg-orange-500 hover:bg-orange-600 text-white disabled:opacity-50" </div>
aria-label="发送消息"
>
<ArrowUp className="w-4 h-4 text-white" />
</Button>
</div> </div>
</div> </div>
<div className="text-center mt-2 text-xs text-gray-500 dark:text-gray-400">
Agent AI
</div>
</div> </div>
</div> </div>
</div> </div>
);
}
/** Code block with copy and download functionality */
function CodeBlock({ code, language, index }: { code: string; language: string; index: number }) {
const [copied, setCopied] = useState(false);
const [downloading, setDownloading] = useState(false);
// Infer filename from language or content
const inferFilename = (): string => {
const extMap: Record<string, string> = {
javascript: 'js', typescript: 'ts', python: 'py', rust: 'rs',
go: 'go', java: 'java', cpp: 'cpp', c: 'c', csharp: 'cs',
html: 'html', css: 'css', scss: 'scss', json: 'json',
yaml: 'yaml', yml: 'yaml', xml: 'xml', sql: 'sql',
shell: 'sh', bash: 'sh', powershell: 'ps1',
markdown: 'md', md: 'md', dockerfile: 'dockerfile',
};
// Check if language contains a filename (e.g., ```app.tsx)
if (language.includes('.') || language.includes('/')) {
return language;
}
// Check for common patterns in code
const codeLower = code.toLowerCase();
if (codeLower.includes('<!doctype html') || codeLower.includes('<html')) {
return 'index.html';
}
if (codeLower.includes('package.json') || (codeLower.includes('"name"') && codeLower.includes('"version"'))) {
return 'package.json';
}
if (codeLower.startsWith('{') && (codeLower.includes('"import"') || codeLower.includes('"export"'))) {
return 'config.json';
}
// Use language extension
const ext = extMap[language.toLowerCase()] || language.toLowerCase();
return `code-${index + 1}.${ext || 'txt'}`;
};
const handleCopy = async () => {
try {
await navigator.clipboard.writeText(code);
setCopied(true);
setTimeout(() => setCopied(false), 2000);
} catch (err) {
console.error('Failed to copy:', err);
}
};
const handleDownload = () => {
setDownloading(true);
try {
const filename = inferFilename();
const blob = new Blob([code], { type: 'text/plain;charset=utf-8' });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = filename;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
URL.revokeObjectURL(url);
} catch (err) {
console.error('Failed to download:', err);
}
setTimeout(() => setDownloading(false), 500);
};
return (
<div className="relative group my-2">
<pre className="bg-gray-900 text-gray-100 rounded-lg p-3 overflow-x-auto text-xs font-mono leading-relaxed">
{language && (
<div className="text-gray-500 text-[10px] mb-1 uppercase flex items-center justify-between">
<span>{language}</span>
</div>
)}
<code>{code}</code>
</pre>
{/* Action buttons - show on hover */}
<div className="absolute top-2 right-2 flex gap-1 opacity-0 group-hover:opacity-100 transition-opacity">
<button
onClick={handleCopy}
className="p-1.5 bg-gray-700 hover:bg-gray-600 rounded text-gray-300 hover:text-white transition-colors"
title="复制代码"
>
{copied ? <Check className="w-3.5 h-3.5 text-green-400" /> : <Copy className="w-3.5 h-3.5" />}
</button>
<button
onClick={handleDownload}
className="p-1.5 bg-gray-700 hover:bg-gray-600 rounded text-gray-300 hover:text-white transition-colors"
title="下载文件"
disabled={downloading}
>
<Download className={`w-3.5 h-3.5 ${downloading ? 'animate-pulse' : ''}`} />
</button>
</div>
</div>
);
}
/** Lightweight markdown renderer — handles code blocks, inline code, bold, italic, links */
function sanitizeUrl(url: string): string {
const safeProtocols = ['http:', 'https:', 'mailto:'];
try {
const parsed = new URL(url, window.location.origin);
if (safeProtocols.includes(parsed.protocol)) {
return parsed.href;
}
} catch {
// Invalid URL
}
return '#';
}
function renderMarkdown(text: string): React.ReactNode[] {
const nodes: React.ReactNode[] = [];
const lines = text.split('\n');
let i = 0;
while (i < lines.length) {
const line = lines[i];
// Fenced code block
if (line.startsWith('```')) {
const lang = line.slice(3).trim();
const codeLines: string[] = [];
i++;
while (i < lines.length && !lines[i].startsWith('```')) {
codeLines.push(lines[i]);
i++;
} }
i++; // skip closing ``` rightPanel={artifactRightPanel}
nodes.push( rightPanelTitle="产物"
<CodeBlock key={nodes.length} code={codeLines.join('\n')} language={lang} index={nodes.length} /> rightPanelOpen={artifactPanelOpen}
); onRightPanelToggle={setArtifactPanelOpen}
continue; />
} );
// Normal line — parse inline markdown
nodes.push(
<span key={nodes.length}>
{i > 0 && lines[i - 1] !== undefined && !nodes[nodes.length - 1]?.toString().includes('pre') && '\n'}
{renderInline(line)}
</span>
);
i++;
}
return nodes;
}
function renderInline(text: string): React.ReactNode[] {
const parts: React.ReactNode[] = [];
// Pattern: **bold**, *italic*, `code`, [text](url)
const regex = /(\*\*(.+?)\*\*)|(\*(.+?)\*)|(`(.+?)`)|(\[(.+?)\]\((.+?)\))/g;
let lastIndex = 0;
let match: RegExpExecArray | null;
while ((match = regex.exec(text)) !== null) {
// Text before match
if (match.index > lastIndex) {
parts.push(text.slice(lastIndex, match.index));
}
if (match[1]) {
// **bold**
parts.push(<strong key={parts.length} className="font-semibold">{match[2]}</strong>);
} else if (match[3]) {
// *italic*
parts.push(<em key={parts.length}>{match[4]}</em>);
} else if (match[5]) {
// `code`
parts.push(
<code key={parts.length} className="bg-gray-100 dark:bg-gray-700 text-orange-700 dark:text-orange-400 px-1 py-0.5 rounded text-[0.85em] font-mono">
{match[6]}
</code>
);
} else if (match[7]) {
// [text](url) - 使用 sanitizeUrl 防止 XSS
parts.push(
<a key={parts.length} href={sanitizeUrl(match[9])} target="_blank" rel="noopener noreferrer"
className="text-orange-600 dark:text-orange-400 underline hover:text-orange-700 dark:hover:text-orange-300">{match[8]}</a>
);
}
lastIndex = match.index + match[0].length;
}
if (lastIndex < text.length) {
parts.push(text.slice(lastIndex));
}
return parts.length > 0 ? parts : [text];
} }
function MessageBubble({ message }: { message: Message }) { function MessageBubble({ message }: { message: Message }) {
// Tool messages are now absorbed into the assistant message's toolSteps chain.
// Legacy standalone tool messages (from older sessions) still render as before.
if (message.role === 'tool') { if (message.role === 'tool') {
return ( return null;
<div className="ml-12 bg-gray-50 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-3 text-xs font-mono">
<div className="flex items-center gap-2 text-gray-500 dark:text-gray-400 mb-1">
<Terminal className="w-3.5 h-3.5" />
<span className="font-semibold">{message.toolName || 'tool'}</span>
</div>
{message.toolInput && (
<pre className="text-gray-600 dark:text-gray-300 bg-white dark:bg-gray-900 rounded p-2 mb-1 overflow-x-auto">{message.toolInput}</pre>
)}
{message.content && (
<pre className="text-green-700 dark:text-green-400 bg-white dark:bg-gray-900 rounded p-2 overflow-x-auto">{message.content}</pre>
)}
</div>
);
} }
const isUser = message.role === 'user'; const isUser = message.role === 'user';
@@ -573,11 +480,42 @@ function MessageBubble({ message }: { message: Message }) {
</div> </div>
) : ( ) : (
<div className={`p-4 shadow-sm ${isUser ? 'chat-bubble-user shadow-md' : 'chat-bubble-assistant'} relative group`}> <div className={`p-4 shadow-sm ${isUser ? 'chat-bubble-user shadow-md' : 'chat-bubble-assistant'} relative group`}>
<div className={`leading-relaxed whitespace-pre-wrap ${isUser ? 'text-white' : 'text-gray-700 dark:text-gray-200'}`}> {/* Optimistic sending indicator */}
{isUser && message.optimistic && (
<span className="text-xs text-blue-200 dark:text-blue-300 mb-1 block animate-pulse">
Sending...
</span>
)}
{/* Reasoning block for thinking content (DeerFlow-inspired) */}
{!isUser && message.thinkingContent && (
<ReasoningBlock
content={message.thinkingContent}
isStreaming={message.streaming}
/>
)}
{/* Tool call steps chain (DeerFlow-inspired) */}
{!isUser && message.toolSteps && message.toolSteps.length > 0 && (
<ToolCallChain
steps={message.toolSteps}
isStreaming={message.streaming}
/>
)}
{/* Subtask tracking (DeerFlow-inspired) */}
{!isUser && message.subtasks && message.subtasks.length > 0 && (
<TaskProgress tasks={message.subtasks} className="mb-3" />
)}
{/* Message content with streaming support */}
<div className={`leading-relaxed ${isUser ? 'text-white whitespace-pre-wrap' : 'text-gray-700 dark:text-gray-200'}`}>
{message.content {message.content
? (isUser ? message.content : renderMarkdown(message.content)) ? (isUser
? message.content
: <StreamingText
content={message.content}
isStreaming={!!message.streaming}
className="text-gray-700 dark:text-gray-200"
/>
)
: '...'} : '...'}
{message.streaming && <span className="inline-block w-1.5 h-4 bg-orange-500 animate-pulse ml-0.5 align-text-bottom rounded-sm" />}
</div> </div>
{message.error && ( {message.error && (
<p className="text-xs text-red-500 mt-2">{message.error}</p> <p className="text-xs text-red-500 mt-2">{message.error}</p>

View File

@@ -1,119 +1,219 @@
import { useState, useRef, useEffect } from 'react';
import { useChatStore } from '../store/chatStore'; import { useChatStore } from '../store/chatStore';
import { MessageSquare, Trash2, SquarePen } from 'lucide-react'; import { MessageSquare, Trash2, SquarePen, Download, Check, X } from 'lucide-react';
import { EmptyConversations, ConversationListSkeleton } from './ui'; import { EmptyConversations } from './ui';
export function ConversationList() { function formatTime(date: Date): string {
const { const now = new Date();
conversations, currentConversationId, messages, agents, currentAgent, const diff = now.getTime() - date.getTime();
newConversation, switchConversation, deleteConversation, const minutes = Math.floor(diff / 60000);
isLoading, if (minutes < 1) return '刚刚';
} = useChatStore(); if (minutes < 60) return `${minutes}分钟前`;
const hours = Math.floor(minutes / 60);
if (hours < 24) return `${hours}小时前`;
const days = Math.floor(hours / 24);
if (days < 7) return `${days}天前`;
return date.toLocaleDateString('zh-CN', { month: 'short', day: 'numeric' });
}
const hasActiveChat = messages.length > 0; function exportConversation(title: string, messages: { role: string; content: string }[]): void {
const lines = [`# ${title}`, '', `导出时间: ${new Date().toLocaleString('zh-CN')}`, ''];
// Show skeleton during initial load for (const msg of messages) {
if (isLoading && conversations.length === 0 && !hasActiveChat) { const label = msg.role === 'user' ? '用户' : msg.role === 'assistant' ? '助手' : msg.role;
return <ConversationListSkeleton count={4} />; lines.push(`## ${label}`, '', msg.content, '');
} }
const blob = new Blob([lines.join('\n')], { type: 'text/markdown;charset=utf-8' });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `${title.replace(/[/\\?%*:|"<>]/g, '_')}.md`;
a.click();
URL.revokeObjectURL(url);
}
interface ConversationItemProps {
id: string;
title: string;
updatedAt: Date;
messageCount: number;
isActive: boolean;
onSelect: () => void;
onDelete: () => void;
onRename: (newTitle: string) => void;
onExport: () => void;
}
function ConversationItem({
title,
updatedAt,
messageCount,
isActive,
onSelect,
onDelete,
onRename,
onExport,
}: ConversationItemProps) {
const [hovering, setHovering] = useState(false);
const [editing, setEditing] = useState(false);
const [editValue, setEditValue] = useState(title);
const inputRef = useRef<HTMLInputElement>(null);
useEffect(() => {
if (editing && inputRef.current) {
inputRef.current.focus();
inputRef.current.select();
}
}, [editing]);
const handleRenameSubmit = () => {
const trimmed = editValue.trim();
if (trimmed && trimmed !== title) {
onRename(trimmed);
} else {
setEditValue(title);
}
setEditing(false);
};
const handleRenameKeyDown = (e: React.KeyboardEvent) => {
if (e.key === 'Enter') {
handleRenameSubmit();
} else if (e.key === 'Escape') {
setEditValue(title);
setEditing(false);
}
};
const timeStr = formatTime(updatedAt);
return ( return (
<div className="h-full flex flex-col"> <div
{/* Header */} onMouseEnter={() => setHovering(true)}
<div className="flex items-center justify-between px-3 py-2 border-b border-gray-200"> onMouseLeave={() => setHovering(false)}
<span className="text-xs font-medium text-gray-500"></span> onClick={() => { if (!editing) onSelect(); }}
<button className={`
onClick={newConversation} group relative flex items-center gap-2.5 px-3 py-2.5 rounded-lg cursor-pointer transition-colors
className="p-1 text-gray-400 hover:text-orange-500 rounded" ${isActive
title="新对话" ? 'bg-primary/10 dark:bg-primary/20 border border-primary/20'
> : 'hover:bg-gray-50 dark:hover:bg-gray-800/50 border border-transparent'
<SquarePen className="w-4 h-4" /> }
</button> `}
</div> >
<MessageSquare className={`w-4 h-4 flex-shrink-0 ${isActive ? 'text-primary' : 'text-gray-400'}`} />
<div className="flex-1 overflow-y-auto custom-scrollbar"> <div className="flex-1 min-w-0">
{/* Current active chat (unsaved) */} {editing ? (
{hasActiveChat && !currentConversationId && ( <div className="flex items-center gap-1" onClick={(e) => e.stopPropagation()}>
<div className="flex items-center gap-3 px-3 py-3 bg-orange-50 border-b border-orange-100 cursor-default"> <input
<div className="w-7 h-7 bg-orange-500 rounded-lg flex items-center justify-center text-white flex-shrink-0"> ref={inputRef}
<MessageSquare className="w-3.5 h-3.5" /> type="text"
</div> value={editValue}
<div className="flex-1 min-w-0"> onChange={(e) => setEditValue(e.target.value)}
<div className="text-xs font-medium text-orange-700 truncate"></div> onKeyDown={handleRenameKeyDown}
<div className="text-[11px] text-orange-500 truncate"> onBlur={handleRenameSubmit}
{messages.filter(m => m.role === 'user').length} · {currentAgent?.name || 'ZCLAW'} className="flex-1 min-w-0 px-1.5 py-0.5 text-sm bg-white dark:bg-gray-700 border border-orange-300 dark:border-orange-600 rounded outline-none"
</div> maxLength={100}
</div> />
</div> <button
)} onClick={(e) => { e.stopPropagation(); handleRenameSubmit(); }}
className="p-0.5 text-green-600 hover:text-green-700"
{/* Saved conversations */}
{conversations.map((conv) => {
const isActive = conv.id === currentConversationId;
const msgCount = conv.messages.filter(m => m.role === 'user').length;
const timeStr = formatTime(conv.updatedAt);
const agentName = conv.agentId
? agents.find((agent) => agent.id === conv.agentId)?.name || conv.agentId
: 'ZCLAW';
return (
<div
key={conv.id}
onClick={() => switchConversation(conv.id)}
className={`group flex items-center gap-3 px-3 py-3 cursor-pointer border-b border-gray-50 transition-colors ${
isActive ? 'bg-orange-50' : 'hover:bg-gray-100'
}`}
> >
<div className={`w-7 h-7 rounded-lg flex items-center justify-center flex-shrink-0 ${ <Check className="w-3.5 h-3.5" />
isActive ? 'bg-orange-500 text-white' : 'bg-gray-200 text-gray-500' </button>
}`}> <button
<MessageSquare className="w-3.5 h-3.5" /> onClick={(e) => { e.stopPropagation(); setEditValue(title); setEditing(false); }}
</div> className="p-0.5 text-gray-400 hover:text-gray-600"
<div className="flex-1 min-w-0"> >
<div className={`text-xs font-medium truncate ${isActive ? 'text-orange-700' : 'text-gray-900'}`}> <X className="w-3.5 h-3.5" />
{conv.title} </button>
</div> </div>
<div className="text-[11px] text-gray-400 truncate"> ) : (
{msgCount} · {agentName} · {timeStr} <>
</div> <p className={`text-sm truncate ${isActive ? 'font-medium text-gray-900 dark:text-gray-100' : 'text-gray-700 dark:text-gray-300'}`}>
</div> {title}
<button </p>
onClick={(e) => { <p className="text-[11px] text-gray-400 dark:text-gray-500 mt-0.5">
e.stopPropagation(); {timeStr}
if (confirm('删除该对话?')) { {messageCount > 0 && <span className="ml-1.5">{messageCount} </span>}
deleteConversation(conv.id); </p>
} </>
}}
className="opacity-0 group-hover:opacity-100 p-1 text-gray-300 hover:text-red-500 transition-opacity"
title="删除"
>
<Trash2 className="w-3 h-3" />
</button>
</div>
);
})}
{conversations.length === 0 && !hasActiveChat && (
<EmptyConversations size="sm" className="h-auto" />
)} )}
</div> </div>
{/* Hover action bar */}
{hovering && !editing && (
<div className="flex items-center gap-0.5 flex-shrink-0" onClick={(e) => e.stopPropagation()}>
<button
onClick={() => setEditing(true)}
title="重命名"
className="p-1 rounded text-gray-400 hover:text-gray-600 dark:hover:text-gray-200 hover:bg-gray-100 dark:hover:bg-gray-700 transition-colors"
>
<SquarePen className="w-3.5 h-3.5" />
</button>
<button
onClick={onExport}
title="导出"
className="p-1 rounded text-gray-400 hover:text-gray-600 dark:hover:text-gray-200 hover:bg-gray-100 dark:hover:bg-gray-700 transition-colors"
>
<Download className="w-3.5 h-3.5" />
</button>
<button
onClick={onDelete}
title="删除"
className="p-1 rounded text-gray-400 hover:text-red-500 hover:bg-red-50 dark:hover:bg-red-900/20 transition-colors"
>
<Trash2 className="w-3.5 h-3.5" />
</button>
</div>
)}
</div> </div>
); );
} }
function formatTime(date: Date): string { export function ConversationList() {
const now = new Date(); const {
const d = new Date(date); conversations,
const diffMs = now.getTime() - d.getTime(); currentConversationId,
const diffMin = Math.floor(diffMs / 60000); switchConversation,
deleteConversation,
} = useChatStore();
if (diffMin < 1) return '刚刚'; const handleRename = (id: string, newTitle: string) => {
if (diffMin < 60) return `${diffMin} 分钟前`; useChatStore.setState((state) => ({
conversations: state.conversations.map((c) =>
c.id === id ? { ...c, title: newTitle, updatedAt: new Date() } : c
),
}));
};
const diffHr = Math.floor(diffMin / 60); const handleExport = (id: string) => {
if (diffHr < 24) return `${diffHr} 小时前`; const conv = conversations.find((c) => c.id === id);
if (!conv) return;
exportConversation(conv.title, conv.messages);
};
const diffDay = Math.floor(diffHr / 24); if (conversations.length === 0) {
if (diffDay < 7) return `${diffDay} 天前`; return <EmptyConversations />;
}
return `${d.getMonth() + 1}/${d.getDate()}`; return (
<div className="flex flex-col gap-0.5 py-1">
{conversations.map((conv) => (
<ConversationItem
key={conv.id}
id={conv.id}
title={conv.title}
updatedAt={conv.updatedAt}
messageCount={conv.messages.filter((m) => m.role === 'user').length}
isActive={conv.id === currentConversationId}
onSelect={() => switchConversation(conv.id)}
onDelete={() => deleteConversation(conv.id)}
onRename={(newTitle) => handleRename(conv.id, newTitle)}
onExport={() => handleExport(conv.id)}
/>
))}
</div>
);
} }
export default ConversationList;

View File

@@ -1,19 +1,45 @@
/** /**
* FirstConversationPrompt - Welcome prompt for new Agents * FirstConversationPrompt - Welcome prompt for new conversations
* *
* Displays a personalized welcome message and quick start suggestions * DeerFlow-inspired design:
* when entering a new Agent's chat for the first time. * - Centered layout with emoji greeting
* - Input bar embedded in welcome screen
* - Horizontal quick-action chips (colored pills)
* - Clean, minimal aesthetic
*/ */
import { motion } from 'framer-motion'; import { motion } from 'framer-motion';
import { Lightbulb, ArrowRight } from 'lucide-react'; import {
Sparkles,
PenLine,
Microscope,
Layers,
GraduationCap,
} from 'lucide-react';
import { cn } from '../lib/utils'; import { cn } from '../lib/utils';
import { import {
generateWelcomeMessage, generateWelcomeMessage,
getQuickStartSuggestions,
getScenarioById, getScenarioById,
type QuickStartSuggestion,
} from '../lib/personality-presets'; } from '../lib/personality-presets';
import type { Clone } from '../store/agentStore'; import type { Clone } from '../store/agentStore';
import { useChatStore } from '../store/chatStore';
// Quick action chip definitions — DeerFlow-style colored pills
const QUICK_ACTIONS = [
{ key: 'surprise', label: '小惊喜', icon: Sparkles, color: 'text-orange-500' },
{ key: 'write', label: '写作', icon: PenLine, color: 'text-blue-500' },
{ key: 'research', label: '研究', icon: Microscope, color: 'text-purple-500' },
{ key: 'collect', label: '收集', icon: Layers, color: 'text-green-500' },
{ key: 'learn', label: '学习', icon: GraduationCap, color: 'text-indigo-500' },
];
// Pre-filled prompts for each quick action
const QUICK_ACTION_PROMPTS: Record<string, string> = {
surprise: '给我一个小惊喜吧!来点创意的',
write: '帮我写一篇文章,主题你来定',
research: '帮我做一个深度研究分析',
collect: '帮我收集整理一些有用的信息',
learn: '我想学点新东西,教我一些有趣的知识',
};
interface FirstConversationPromptProps { interface FirstConversationPromptProps {
clone: Clone; clone: Clone;
@@ -25,7 +51,15 @@ export function FirstConversationPrompt({
clone, clone,
onSelectSuggestion, onSelectSuggestion,
}: FirstConversationPromptProps) { }: FirstConversationPromptProps) {
// Generate welcome message const chatMode = useChatStore((s) => s.chatMode);
const modeGreeting: Record<string, string> = {
flash: '快速回答,即时响应',
thinking: '深度分析,逐步推理',
pro: '专业规划,系统思考',
ultra: '多代理协作,全能力调度',
};
const welcomeMessage = generateWelcomeMessage({ const welcomeMessage = generateWelcomeMessage({
userName: clone.userName, userName: clone.userName,
agentName: clone.nickname || clone.name, agentName: clone.nickname || clone.name,
@@ -34,11 +68,9 @@ export function FirstConversationPrompt({
scenarios: clone.scenarios, scenarios: clone.scenarios,
}); });
// Get quick start suggestions based on scenarios const handleQuickAction = (key: string) => {
const suggestions = getQuickStartSuggestions(clone.scenarios || []); const prompt = QUICK_ACTION_PROMPTS[key] || '你好!';
onSelectSuggestion?.(prompt);
const handleSuggestionClick = (suggestion: QuickStartSuggestion) => {
onSelectSuggestion?.(suggestion.text);
}; };
return ( return (
@@ -48,48 +80,63 @@ export function FirstConversationPrompt({
exit={{ opacity: 0, y: -10 }} exit={{ opacity: 0, y: -10 }}
className="flex flex-col items-center justify-center py-12 px-4" className="flex flex-col items-center justify-center py-12 px-4"
> >
{/* Avatar with emoji */} {/* Greeting emoji */}
<div className="mb-6"> <div className="text-5xl mb-4">{clone.emoji || '👋'}</div>
<div className="w-20 h-20 rounded-2xl bg-gradient-to-br from-primary/20 to-primary/10 dark:from-primary/30 dark:to-primary/20 flex items-center justify-center shadow-lg">
<span className="text-4xl">{clone.emoji || '🦞'}</span> {/* Title */}
</div> <motion.h1
</div> initial={{ opacity: 0, y: 10 }}
animate={{ opacity: 1, y: 0 }}
transition={{ delay: 0.1, duration: 0.5 }}
className="text-2xl font-semibold text-gray-900 dark:text-gray-100 mb-2"
>
</motion.h1>
{/* Mode-aware subtitle */}
<motion.p
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
transition={{ delay: 0.2, duration: 0.4 }}
className="text-sm text-orange-500 dark:text-orange-400 font-medium mb-4 flex items-center gap-1.5"
>
<Sparkles className="w-3.5 h-3.5" />
{modeGreeting[chatMode] || '智能对话,随时待命'}
</motion.p>
{/* Welcome message */} {/* Welcome message */}
<div className="text-center max-w-md mb-8"> <div className="text-center max-w-md mb-8">
<p className="text-lg text-gray-700 dark:text-gray-200 whitespace-pre-line leading-relaxed"> <p className="text-sm text-gray-500 dark:text-gray-400 leading-relaxed">
{welcomeMessage} {welcomeMessage}
</p> </p>
</div> </div>
{/* Quick start suggestions */} {/* Quick action chips — DeerFlow-style horizontal colored pills */}
<div className="w-full max-w-lg space-y-2"> <div className="flex items-center justify-center gap-2 flex-wrap">
<div className="flex items-center gap-2 text-sm text-gray-500 dark:text-gray-400 mb-3"> {QUICK_ACTIONS.map((action, index) => {
<Lightbulb className="w-4 h-4" /> const ActionIcon = action.icon;
<span></span> return (
</div> <motion.button
key={action.key}
{suggestions.map((suggestion, index) => ( initial={{ opacity: 0, y: 8 }}
<motion.button animate={{ opacity: 1, y: 0 }}
key={index} transition={{ delay: 0.3 + index * 0.05, duration: 0.2 }}
initial={{ opacity: 0, x: -20 }} onClick={() => handleQuickAction(action.key)}
animate={{ opacity: 1, x: 0 }} className={cn(
transition={{ delay: index * 0.1 }} 'flex items-center gap-2 px-4 py-2',
onClick={() => handleSuggestionClick(suggestion)} 'bg-white dark:bg-gray-800',
className={cn( 'border border-gray-200 dark:border-gray-700',
'w-full flex items-center gap-3 px-4 py-3 rounded-xl', 'rounded-full text-sm text-gray-600 dark:text-gray-300',
'bg-gray-50 dark:bg-gray-800/50 border border-gray-200 dark:border-gray-700', 'hover:border-gray-300 dark:hover:border-gray-600',
'hover:bg-gray-100 dark:hover:bg-gray-800 hover:border-primary/30', 'hover:bg-gray-50 dark:hover:bg-gray-750',
'transition-all duration-200 group text-left' 'transition-all duration-150'
)} )}
> >
<span className="text-xl flex-shrink-0">{suggestion.icon}</span> <ActionIcon className={`w-4 h-4 ${action.color}`} />
<span className="flex-1 text-sm text-gray-700 dark:text-gray-200"> <span>{action.label}</span>
{suggestion.text} </motion.button>
</span> );
<ArrowRight className="w-4 h-4 text-gray-400 group-hover:text-primary transition-colors flex-shrink-0" /> })}
</motion.button>
))}
</div> </div>
{/* Scenario tags */} {/* Scenario tags */}

View File

@@ -1,11 +1,11 @@
import { useState } from 'react'; import { useState } from 'react';
import { motion, AnimatePresence } from 'framer-motion'; import { motion, AnimatePresence } from 'framer-motion';
import { import {
Bot, Zap, Package, SquarePen, MessageSquare, Bot, Search, X, Settings
Search, ChevronRight, X
} from 'lucide-react'; } from 'lucide-react';
import { ConversationList } from './ConversationList';
import { CloneManager } from './CloneManager'; import { CloneManager } from './CloneManager';
import { useConfigStore } from '../store/configStore'; import { useChatStore } from '../store/chatStore';
import { containerVariants, defaultTransition } from '../lib/animations'; import { containerVariants, defaultTransition } from '../lib/animations';
export type MainViewType = 'chat' | 'automation' | 'skills'; export type MainViewType = 'chat' | 'automation' | 'skills';
@@ -16,86 +16,81 @@ interface SidebarProps {
onNewChat?: () => void; onNewChat?: () => void;
} }
type Tab = 'chat' | 'clones' | 'automation' | 'skills'; type Tab = 'conversations' | 'clones';
// 导航项配置 - WorkBuddy 风格
const NAV_ITEMS: {
key: Tab;
label: string;
icon: React.ComponentType<{ className?: string }>;
mainView?: MainViewType;
}[] = [
{ key: 'clones', label: '分身', icon: Bot },
{ key: 'automation', label: '自动化', icon: Zap, mainView: 'automation' },
{ key: 'skills', label: '技能', icon: Package, mainView: 'skills' },
];
export function Sidebar({ export function Sidebar({
onOpenSettings, onOpenSettings,
onMainViewChange, onMainViewChange,
}: Omit<SidebarProps, 'onNewChat'>) { }: Omit<SidebarProps, 'onNewChat'>) {
const [activeTab, setActiveTab] = useState<Tab>('clones'); const [activeTab, setActiveTab] = useState<Tab>('conversations');
const [searchQuery, setSearchQuery] = useState(''); const [searchQuery, setSearchQuery] = useState('');
const userName = useConfigStore((state) => state.quickConfig?.userName) || '用户7141'; const newConversation = useChatStore((s) => s.newConversation);
const handleNavClick = (key: Tab, mainView?: MainViewType) => { const handleNewConversation = () => {
setActiveTab(key); newConversation();
if (mainView && onMainViewChange) { onMainViewChange?.('chat');
onMainViewChange(mainView); };
} else if (onMainViewChange) {
onMainViewChange('chat'); const handleNavClick = (tab: Tab) => {
setActiveTab(tab);
if (tab === 'clones') {
onMainViewChange?.('chat');
} else {
onMainViewChange?.('chat');
} }
}; };
return ( return (
<aside className="w-64 bg-white dark:bg-gray-900 border-r border-gray-200 dark:border-gray-700 flex flex-col flex-shrink-0"> <aside className="w-64 sidebar-bg border-r border-[#e8e6e1] dark:border-gray-800 flex flex-col h-full shrink-0">
{/* 搜索框 */} {/* Logo area */}
<div className="p-3 border-b border-gray-100 dark:border-gray-800"> <div className="h-14 flex items-center px-4 border-b border-[#e8e6e1]/50 dark:border-gray-800">
<div className="relative"> <span className="text-lg font-semibold tracking-tight text-gray-900 dark:text-gray-100">ZCLAW</span>
<Search className="absolute left-3 top-1/2 -translate-y-1/2 text-gray-400 w-4 h-4" /> <button
<input onClick={handleNewConversation}
type="text" className="ml-auto p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-md transition-colors text-gray-600 dark:text-gray-400"
placeholder="搜索..." title="新对话"
value={searchQuery} >
onChange={(e) => setSearchQuery(e.target.value)} <SquarePen className="w-4 h-4" />
className="w-full pl-9 pr-8 py-2 bg-gray-50 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg text-sm focus:outline-none focus:border-gray-400 focus:ring-1 focus:ring-gray-400 transition-all text-gray-700 dark:text-gray-300 placeholder-gray-400" </button>
/>
{searchQuery && (
<button
onClick={() => setSearchQuery('')}
className="absolute right-2 top-1/2 -translate-y-1/2 p-1 hover:bg-gray-200 dark:hover:bg-gray-700 rounded text-gray-400 transition-colors"
>
<X className="w-3 h-3" />
</button>
)}
</div>
</div> </div>
{/* 导航项 */} {/* Main Nav — DeerFlow-style: new chat / conversations / agents */}
<nav className="px-3 space-y-0.5"> <div className="p-2 space-y-1">
{NAV_ITEMS.map(({ key, label, icon: Icon, mainView }) => ( <button
<button onClick={handleNewConversation}
key={key} className="w-full flex items-center gap-3 px-3 py-2 rounded-lg bg-black/5 dark:bg-white/5 text-sm font-medium text-gray-900 dark:text-gray-100"
onClick={() => handleNavClick(key, mainView)} >
className={`w-full flex items-center gap-3 px-3 py-2 rounded-lg transition-colors ${ <SquarePen className="w-4 h-4" />
activeTab === key
? 'bg-gray-100 dark:bg-gray-800 text-gray-900 dark:text-gray-100 font-medium' </button>
: 'text-gray-600 dark:text-gray-400 hover:bg-gray-50 dark:hover:bg-gray-800/50 hover:text-gray-900 dark:hover:text-gray-200' <button
}`} onClick={() => handleNavClick('conversations')}
> className={`w-full flex items-center gap-3 px-3 py-2 rounded-lg text-sm transition-colors ${
<Icon className={`w-5 h-5 ${activeTab === key ? 'text-gray-700 dark:text-gray-300' : 'text-gray-400'}`} /> activeTab === 'conversations'
<span>{label}</span> ? 'bg-black/5 dark:bg-white/5 font-medium text-gray-900 dark:text-gray-100'
{activeTab === key && ( : 'text-gray-600 dark:text-gray-400 hover:bg-black/5 dark:hover:bg-white/5'
<ChevronRight className="w-4 h-4 ml-auto text-gray-400" /> }`}
)} >
</button> <MessageSquare className="w-4 h-4" />
))}
</nav> </button>
<button
onClick={() => handleNavClick('clones')}
className={`w-full flex items-center gap-3 px-3 py-2 rounded-lg text-sm transition-colors ${
activeTab === 'clones'
? 'bg-black/5 dark:bg-white/5 font-medium text-gray-900 dark:text-gray-100'
: 'text-gray-600 dark:text-gray-400 hover:bg-black/5 dark:hover:bg-white/5'
}`}
>
<Bot className="w-4 h-4" />
</button>
</div>
{/* 分隔线 */} {/* Divider */}
<div className="my-3 mx-3 border-t border-gray-100 dark:border-gray-800" /> <div className="mx-3 border-t border-[#e8e6e1]/50 dark:border-gray-800" />
{/* 内容区域 - 只显示分身内容,自动化和技能在主内容区显示 */} {/* Content area */}
<div className="flex-1 overflow-hidden"> <div className="flex-1 overflow-hidden">
<AnimatePresence mode="wait"> <AnimatePresence mode="wait">
<motion.div <motion.div
@@ -107,27 +102,45 @@ export function Sidebar({
transition={defaultTransition} transition={defaultTransition}
className="h-full overflow-y-auto" className="h-full overflow-y-auto"
> >
{activeTab === 'conversations' && (
<div className="p-2">
{/* Search in conversations */}
<div className="relative mb-2">
<Search className="absolute left-3 top-1/2 -translate-y-1/2 text-gray-400 w-4 h-4" />
<input
type="text"
placeholder="搜索对话..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
className="w-full pl-9 pr-8 py-1.5 bg-white/60 dark:bg-gray-800 border border-[#e8e6e1] dark:border-gray-700 rounded-lg text-sm focus:outline-none focus:border-gray-400 transition-all text-gray-700 dark:text-gray-300 placeholder-gray-400"
/>
{searchQuery && (
<button
onClick={() => setSearchQuery('')}
className="absolute right-2 top-1/2 -translate-y-1/2 p-1 hover:bg-gray-200 dark:hover:bg-gray-700 rounded text-gray-400"
>
<X className="w-3 h-3" />
</button>
)}
</div>
<ConversationList />
</div>
)}
{activeTab === 'clones' && <CloneManager />} {activeTab === 'clones' && <CloneManager />}
{/* skills 和 automation 不在侧边栏显示内容,由主内容区显示 */}
</motion.div> </motion.div>
</AnimatePresence> </AnimatePresence>
</div> </div>
{/* 底部用户栏 */} {/* Bottom user bar */}
<div className="p-3 border-t border-gray-200 dark:border-gray-700"> <div className="p-2 border-t border-[#e8e6e1] dark:border-gray-700">
<button <button
onClick={onOpenSettings} onClick={onOpenSettings}
aria-label="打开设置" aria-label="打开设置"
title="设置" title="设置和更多"
className="flex items-center gap-3 w-full hover:bg-gray-50 dark:hover:bg-gray-800 p-2 rounded-lg transition-colors" className="w-full flex items-center gap-3 px-3 py-2 rounded-lg text-sm text-gray-600 dark:text-gray-400 hover:bg-black/5 dark:hover:bg-white/5 transition-colors"
> >
<div className="w-8 h-8 bg-gray-600 rounded-full flex items-center justify-center text-white font-bold shadow-sm"> <Settings className="w-4 h-4" />
{userName?.charAt(0) || '用'} <span></span>
</div>
<span className="flex-1 text-left text-sm font-medium text-gray-700 dark:text-gray-300 truncate">
{userName}
</span>
<ChevronRight className="w-4 h-4 text-gray-400" />
</button> </button>
</div> </div>
</aside> </aside>

View File

@@ -0,0 +1,302 @@
import { useState, useMemo } from 'react';
import {
FileText,
FileCode2,
Table2,
Image as ImageIcon,
Download,
Copy,
ChevronLeft,
File,
} from 'lucide-react';
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
// A single agent-generated artifact (file-like output shown in the panel).
export interface ArtifactFile {
  id: string; // unique identifier used for selection
  name: string; // display file name
  type: 'markdown' | 'code' | 'table' | 'image' | 'text'; // drives icon, badge and preview rendering
  content: string; // raw payload (markdown source, code, or plain text)
  language?: string; // optional syntax hint for 'code' artifacts
  createdAt: Date; // creation time, shown in the list view
  sourceStepId?: string; // Links to ToolCallStep that created this artifact
}
// Props for ArtifactPanel (list view + detail view of generated artifacts).
interface ArtifactPanelProps {
  artifacts: ArtifactFile[]; // all artifacts to display
  selectedId?: string | null; // id of the artifact shown in detail view; falsy shows the list
  onSelect: (id: string) => void; // select an artifact; invoked with '' to go back to the list
  onClose?: () => void; // accepted but currently unused by the panel
  className?: string; // extra classes applied to the root container
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
// Resolve the lucide icon component for an artifact type.
// Unknown types (including 'text') fall back to the generic File icon.
function getFileIcon(type: ArtifactFile['type']) {
  if (type === 'markdown') return FileText;
  if (type === 'code') return FileCode2;
  if (type === 'table') return Table2;
  if (type === 'image') return ImageIcon;
  return File;
}
// Short uppercase badge label for an artifact type ('text' and any
// unrecognized value map to 'TXT').
function getTypeLabel(type: ArtifactFile['type']): string {
  const labels: Record<string, string> = {
    markdown: 'MD',
    code: 'CODE',
    table: 'TABLE',
    image: 'IMG',
  };
  return labels[type] ?? 'TXT';
}
// Tailwind badge classes (light + dark variants) for an artifact type;
// 'text' and unrecognized values get the neutral gray badge.
function getTypeColor(type: ArtifactFile['type']): string {
  const colors: Record<string, string> = {
    markdown: 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-300',
    code: 'bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-300',
    table: 'bg-purple-100 text-purple-700 dark:bg-purple-900/30 dark:text-purple-300',
    image: 'bg-yellow-100 text-yellow-700 dark:bg-yellow-900/30 dark:text-yellow-300',
  };
  return colors[type] ?? 'bg-gray-100 text-gray-700 dark:bg-gray-700 dark:text-gray-300';
}
// ---------------------------------------------------------------------------
// ArtifactPanel
// ---------------------------------------------------------------------------
/**
 * ArtifactPanel — right-hand viewer for files produced by the agent.
 *
 * Two display states driven by `selectedId`:
 *   - No selection: a clickable list of all artifacts (or an empty placeholder).
 *   - Selection: a detail view with a preview/code toggle and copy/download actions.
 *
 * Props:
 *   - artifacts: all artifact files to list.
 *   - selectedId: id of the artifact to show in detail view (falsy → list view).
 *   - onSelect: called with an artifact id to open it, or '' to go back to the list.
 *   - onClose: accepted but currently unused (renamed to _onClose).
 *   - className: extra classes for the root container.
 */
export function ArtifactPanel({
  artifacts,
  selectedId,
  onSelect,
  onClose: _onClose,
  className = '',
}: ArtifactPanelProps) {
  // Toggle between rendered preview and raw source in the detail view.
  const [viewMode, setViewMode] = useState<'preview' | 'code'>('preview');
  const selected = useMemo(
    () => artifacts.find((a) => a.id === selectedId),
    [artifacts, selectedId]
  );
  // List view when no artifact is selected
  if (!selected) {
    return (
      <div className={`h-full flex flex-col ${className}`}>
        <div className="p-4 flex-1 overflow-y-auto custom-scrollbar">
          {artifacts.length === 0 ? (
            <div className="flex flex-col items-center justify-center h-full text-gray-400 dark:text-gray-500">
              <FileText className="w-8 h-8 mb-2 opacity-50" />
              {/* NOTE(review): these two nodes render empty / partial text — the
                  CJK labels appear to have been stripped in extraction; confirm
                  against the original file. */}
              <p className="text-sm"></p>
              <p className="text-xs mt-1">Agent </p>
            </div>
          ) : (
            <div className="space-y-2">
              {artifacts.map((artifact) => {
                const Icon = getFileIcon(artifact.type);
                return (
                  <button
                    key={artifact.id}
                    onClick={() => onSelect(artifact.id)}
                    className="w-full flex items-center gap-3 p-3 rounded-lg border border-gray-200 dark:border-gray-700 bg-white dark:bg-gray-800 hover:bg-gray-50 dark:hover:bg-gray-750 transition-colors text-left group"
                  >
                    <Icon className="w-5 h-5 text-gray-400 flex-shrink-0 group-hover:text-orange-500 transition-colors" />
                    <div className="flex-1 min-w-0">
                      <p className="text-sm font-medium text-gray-700 dark:text-gray-200 truncate">
                        {artifact.name}
                      </p>
                      <div className="flex items-center gap-2 mt-0.5">
                        <span className={`text-[10px] px-1.5 py-0.5 rounded font-medium ${getTypeColor(artifact.type)}`}>
                          {getTypeLabel(artifact.type)}
                        </span>
                        {/* createdAt is formatted on every render; assumed to be a
                            Date-parsable value — TODO confirm ArtifactFile type. */}
                        <span className="text-[11px] text-gray-400 dark:text-gray-500">
                          {new Date(artifact.createdAt).toLocaleTimeString()}
                        </span>
                      </div>
                    </div>
                  </button>
                );
              })}
            </div>
          )}
        </div>
      </div>
    );
  }
  // Detail view
  const Icon = getFileIcon(selected.type);
  return (
    <div className={`h-full flex flex-col ${className}`}>
      {/* File header */}
      <div className="px-4 py-2 border-b border-gray-200 dark:border-gray-700 flex items-center gap-2 flex-shrink-0">
        {/* Back button signals "no selection" by passing an empty id. */}
        <button
          onClick={() => onSelect('')}
          className="p-1 rounded hover:bg-gray-100 dark:hover:bg-gray-700 text-gray-400 hover:text-gray-600 dark:hover:text-gray-200 transition-colors"
          title="返回文件列表"
        >
          <ChevronLeft className="w-4 h-4" />
        </button>
        <Icon className="w-4 h-4 text-orange-500 flex-shrink-0" />
        <span className="text-sm font-medium text-gray-700 dark:text-gray-200 truncate flex-1">
          {selected.name}
        </span>
        <span className={`text-[10px] px-1.5 py-0.5 rounded font-medium ${getTypeColor(selected.type)}`}>
          {getTypeLabel(selected.type)}
        </span>
      </div>
      {/* View mode toggle */}
      {/* NOTE(review): both toggle buttons have no visible label text — likely
          stripped CJK ("预览"/"代码"?); verify against the original source. */}
      <div className="px-4 py-1.5 border-b border-gray-100 dark:border-gray-800 flex items-center gap-1 flex-shrink-0">
        <button
          onClick={() => setViewMode('preview')}
          className={`px-2.5 py-1 rounded text-xs font-medium transition-colors ${
            viewMode === 'preview'
              ? 'bg-orange-100 text-orange-700 dark:bg-orange-900/30 dark:text-orange-300'
              : 'text-gray-500 hover:text-gray-700 dark:hover:text-gray-300'
          }`}
        >
        </button>
        <button
          onClick={() => setViewMode('code')}
          className={`px-2.5 py-1 rounded text-xs font-medium transition-colors ${
            viewMode === 'code'
              ? 'bg-orange-100 text-orange-700 dark:bg-orange-900/30 dark:text-orange-300'
              : 'text-gray-500 hover:text-gray-700 dark:hover:text-gray-300'
          }`}
        >
        </button>
      </div>
      {/* Content area */}
      <div className="flex-1 overflow-y-auto custom-scrollbar p-4">
        {viewMode === 'preview' ? (
          <div className="prose prose-sm dark:prose-invert max-w-none">
            {/* Preview renders markdown via the local MarkdownPreview helper;
                code and everything else fall back to <pre>. */}
            {selected.type === 'markdown' ? (
              <MarkdownPreview content={selected.content} />
            ) : selected.type === 'code' ? (
              <pre className="bg-gray-50 dark:bg-gray-800 rounded-lg p-3 text-xs font-mono overflow-x-auto text-gray-700 dark:text-gray-200">
                {selected.content}
              </pre>
            ) : (
              <pre className="whitespace-pre-wrap text-sm text-gray-700 dark:text-gray-200">
                {selected.content}
              </pre>
            )}
          </div>
        ) : (
          <pre className="bg-gray-50 dark:bg-gray-800 rounded-lg p-3 text-xs font-mono overflow-x-auto text-gray-700 dark:text-gray-200 leading-relaxed">
            {selected.content}
          </pre>
        )}
      </div>
      {/* Action bar */}
      <div className="px-4 py-2 border-t border-gray-200 dark:border-gray-700 flex items-center gap-2 flex-shrink-0">
        {/* Clipboard write is fire-and-forget; rejection is silently ignored. */}
        <ActionButton
          icon={<Copy className="w-3.5 h-3.5" />}
          label="复制"
          onClick={() => navigator.clipboard.writeText(selected.content)}
        />
        <ActionButton
          icon={<Download className="w-3.5 h-3.5" />}
          label="下载"
          onClick={() => downloadArtifact(selected)}
        />
      </div>
    </div>
  );
}
// ---------------------------------------------------------------------------
// ActionButton
// ---------------------------------------------------------------------------
/**
 * Small icon+label action button for the artifact action bar.
 * After a "复制" (copy) click, swaps to a transient confirmation state for 1.5s.
 */
function ActionButton({ icon, label, onClick }: { icon: React.ReactNode; label: string; onClick: () => void }) {
  const [copied, setCopied] = useState(false);

  const fire = () => {
    onClick();
    // Only the copy action shows the confirmation flash.
    if (label !== '复制') return;
    setCopied(true);
    setTimeout(() => setCopied(false), 1500);
  };

  return (
    <button
      onClick={fire}
      className="flex items-center gap-1.5 px-2.5 py-1.5 rounded-md text-xs font-medium text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-200 hover:bg-gray-100 dark:hover:bg-gray-800 transition-colors"
    >
      {copied ? <span className="text-green-500 text-xs"></span> : icon}
      {!copied && label}
    </button>
  );
}
// ---------------------------------------------------------------------------
// Simple Markdown preview (no external deps)
// ---------------------------------------------------------------------------
/**
 * Dependency-free markdown preview: supports #/##/### headings, list items,
 * blank-line spacing, and **bold** inline runs. Code-fence lines are dropped.
 */
function MarkdownPreview({ content }: { content: string }) {
  // Render one source line to its JSX equivalent.
  const renderLine = (line: string, i: number) => {
    if (line.startsWith('### ')) {
      return <h3 key={i} className="text-sm font-bold text-gray-800 dark:text-gray-100 mt-3">{line.slice(4)}</h3>;
    }
    if (line.startsWith('## ')) {
      return <h2 key={i} className="text-base font-bold text-gray-800 dark:text-gray-100 mt-4">{line.slice(3)}</h2>;
    }
    if (line.startsWith('# ')) {
      return <h1 key={i} className="text-lg font-bold text-gray-800 dark:text-gray-100">{line.slice(2)}</h1>;
    }
    // Fence markers themselves are skipped (contents render as paragraphs).
    if (line.startsWith('```')) return null;
    if (line.startsWith('- ') || line.startsWith('* ')) {
      return <li key={i} className="text-sm text-gray-700 dark:text-gray-300 ml-4">{renderInline(line.slice(2))}</li>;
    }
    // Blank line → small vertical spacer.
    if (!line.trim()) return <div key={i} className="h-2" />;
    return <p key={i} className="text-sm text-gray-700 dark:text-gray-300 leading-relaxed">{renderInline(line)}</p>;
  };

  return (
    <div className="space-y-2">
      {content.split('\n').map(renderLine)}
    </div>
  );
}
/** Render **bold** spans within a line; odd-indexed split segments are bold. */
function renderInline(text: string): React.ReactNode {
  const segments = text.split(/\*\*(.*?)\*\*/g);
  const nodes: React.ReactNode[] = [];
  segments.forEach((segment, i) => {
    nodes.push(
      i % 2 === 1 ? <strong key={i} className="font-semibold">{segment}</strong> : segment
    );
  });
  return nodes;
}
// ---------------------------------------------------------------------------
// Download helper
// ---------------------------------------------------------------------------
/**
 * Trigger a browser download of an artifact's content as a plain-text file,
 * via a temporary object URL and a synthetic anchor click.
 */
function downloadArtifact(artifact: ArtifactFile) {
  const objectUrl = URL.createObjectURL(
    new Blob([artifact.content], { type: 'text/plain;charset=utf-8' })
  );
  const anchor = document.createElement('a');
  anchor.href = objectUrl;
  anchor.download = artifact.name;
  // The anchor must be in the document for click() to start a download.
  document.body.appendChild(anchor);
  anchor.click();
  document.body.removeChild(anchor);
  // Release the blob URL once the download has been handed to the browser.
  URL.revokeObjectURL(objectUrl);
}

View File

@@ -0,0 +1,133 @@
import { useState, useRef, useEffect } from 'react';
import { Zap, Lightbulb, GraduationCap, Rocket, Check } from 'lucide-react';
import { motion, AnimatePresence } from 'framer-motion';
/**
* Chat interaction mode selector — DeerFlow-style dropdown.
*
* A single trigger button in the input bar that opens an upward dropdown
* showing each mode with icon, title, description, and checkmark.
*/
// The four interaction modes, ordered from fastest to most capable.
export type ChatModeType = 'flash' | 'thinking' | 'pro' | 'ultra';
// Backend-facing configuration a mode maps onto.
export interface ChatModeConfig {
  thinking_enabled: boolean;
  reasoning_effort?: 'low' | 'medium' | 'high';
  plan_mode?: boolean;
  subagent_enabled?: boolean;
}
// Static mode table: UI label + icon + request config + dropdown description.
// Consumed by ChatMode below; also exported for callers that need the config.
export const CHAT_MODES: Record<ChatModeType, { label: string; icon: typeof Zap; config: ChatModeConfig; description: string }> = {
  flash: {
    label: '闪速',
    icon: Zap,
    // Fastest path: no reasoning at all.
    config: { thinking_enabled: false },
    description: '快速且高效的完成任务,但可能不够精准',
  },
  thinking: {
    label: '思考',
    icon: Lightbulb,
    config: { thinking_enabled: true, reasoning_effort: 'low' },
    description: '启用推理,低强度思考',
  },
  pro: {
    label: 'Pro',
    icon: GraduationCap,
    // Adds an explicit planning phase before execution.
    config: { thinking_enabled: true, reasoning_effort: 'medium', plan_mode: true },
    description: '思考、计划再执行,获得更精准的结果,可能需要更多时间',
  },
  ultra: {
    label: 'Ultra',
    icon: Rocket,
    // Everything Pro has, plus sub-agent delegation.
    config: { thinking_enabled: true, reasoning_effort: 'high', plan_mode: true, subagent_enabled: true },
    description: '继承自 Pro 模式,可调用子代理分工协作,适合复杂多步骤任务,能力最强',
  },
};
interface ChatModeProps {
  // Currently selected mode key.
  value: ChatModeType;
  // Invoked with the newly chosen mode; selection also closes the dropdown.
  onChange: (mode: ChatModeType) => void;
  // Disables the trigger and suppresses the dropdown.
  disabled?: boolean;
}
/**
 * Mode selector trigger + upward dropdown listing every entry of CHAT_MODES
 * with icon, label, description, and a checkmark on the active mode.
 * Closes on outside mousedown.
 */
export function ChatMode({ value, onChange, disabled = false }: ChatModeProps) {
  const [open, setOpen] = useState(false);
  const containerRef = useRef<HTMLDivElement>(null);
  // Close on outside click
  useEffect(() => {
    if (!open) return;
    const handler = (e: MouseEvent) => {
      if (containerRef.current && !containerRef.current.contains(e.target as Node)) {
        setOpen(false);
      }
    };
    document.addEventListener('mousedown', handler);
    return () => document.removeEventListener('mousedown', handler);
  }, [open]);
  const current = CHAT_MODES[value];
  const Icon = current.icon;
  return (
    <div ref={containerRef} className="relative">
      {/* Trigger button */}
      <button
        onClick={() => { if (!disabled) setOpen(!open); }}
        disabled={disabled}
        className="flex items-center gap-1.5 px-2 py-1.5 rounded-lg text-sm text-gray-600 dark:text-gray-400 hover:bg-black/5 dark:hover:bg-white/5 transition-colors disabled:opacity-50"
      >
        <Icon className="w-3.5 h-3.5" />
        <span>{current.label}</span>
      </button>
      {/* Dropdown — pops up above the input bar */}
      <AnimatePresence>
        {open && !disabled && (
          <motion.div
            initial={{ opacity: 0, y: 4, scale: 0.95 }}
            animate={{ opacity: 1, y: 0, scale: 1 }}
            exit={{ opacity: 0, y: 4, scale: 0.95 }}
            transition={{ duration: 0.12 }}
            className="absolute bottom-full left-0 mb-2 w-80 bg-white dark:bg-gray-800 rounded-xl shadow-xl border border-gray-100 dark:border-gray-700 py-2 z-50"
          >
            {/* NOTE(review): this header renders empty — its text label appears
                to have been stripped in extraction; confirm against original. */}
            <div className="px-3 py-2 text-xs text-gray-400 font-medium"></div>
            <div className="space-y-1">
              {(Object.entries(CHAT_MODES) as [ChatModeType, typeof CHAT_MODES.flash][]).map(([mode, def]) => {
                const ModeIcon = def.icon;
                const isActive = value === mode;
                return (
                  <button
                    key={mode}
                    onClick={() => {
                      onChange(mode);
                      setOpen(false);
                    }}
                    className="w-full text-left px-3 py-3 hover:bg-gray-50 dark:hover:bg-gray-700/50 flex items-start gap-3 transition-colors"
                  >
                    <div className="mt-0.5">
                      <ModeIcon className={`w-4 h-4 ${isActive ? 'text-gray-900 dark:text-white' : 'text-gray-500 dark:text-gray-400'}`} />
                    </div>
                    <div className="flex-1">
                      <div className="flex items-center justify-between">
                        <span className={`font-medium text-sm ${isActive ? 'text-gray-900 dark:text-white' : 'text-gray-700 dark:text-gray-300'}`}>
                          {def.label}
                        </span>
                        {isActive && (
                          <Check className="w-3.5 h-3.5 text-gray-900 dark:text-white" />
                        )}
                      </div>
                      <p className="text-xs text-gray-500 dark:text-gray-400 mt-0.5">{def.description}</p>
                    </div>
                  </button>
                );
              })}
            </div>
          </motion.div>
        )}
      </AnimatePresence>
    </div>
  );
}

View File

@@ -0,0 +1,117 @@
import { useRef, useEffect, useState, createContext, useContext, useMemo, type ReactNode } from 'react';
// ---------------------------------------------------------------------------
// ConversationContext — shared state for child ai-elements components
// ---------------------------------------------------------------------------
// Shape of the shared conversation state exposed to ai-elements children.
interface ConversationContextValue {
  isStreaming: boolean;
  setIsStreaming: (v: boolean) => void;
  messages: unknown[];
  setMessages: (msgs: unknown[]) => void;
}

const ConversationContext = createContext<ConversationContextValue | null>(null);

/** Read the shared conversation state; throws outside a ConversationProvider. */
export function useConversationContext() {
  const ctx = useContext(ConversationContext);
  if (ctx === null) {
    throw new Error('useConversationContext must be used within ConversationProvider');
  }
  return ctx;
}

/** Owns streaming flag + message list and exposes them through context. */
export function ConversationProvider({ children }: { children: ReactNode }) {
  const [isStreaming, setIsStreaming] = useState(false);
  const [messages, setMessages] = useState<unknown[]>([]);

  // Memoize so consumers only re-render when the underlying state changes.
  const value = useMemo<ConversationContextValue>(
    () => ({ isStreaming, setIsStreaming, messages, setMessages }),
    [isStreaming, messages],
  );

  return (
    <ConversationContext.Provider value={value}>{children}</ConversationContext.Provider>
  );
}
// ---------------------------------------------------------------------------
// Conversation container with auto-stick-to-bottom scroll behavior
// ---------------------------------------------------------------------------
/**
* Conversation container with auto-stick-to-bottom scroll behavior.
*
* Inspired by DeerFlow's use-stick-to-bottom pattern:
* - Stays pinned to bottom during streaming
* - Remembers user's scroll position when they scroll up
* - Auto-scrolls back to bottom on new content when near the bottom
*/
interface ConversationProps {
  children: ReactNode;
  className?: string;
}

// Distance (px) from the bottom edge within which we treat the user as pinned.
const SCROLL_THRESHOLD = 80;

/**
 * Scroll container that stays pinned to the bottom while new content streams
 * in, but respects the user's position once they scroll up. Re-pins
 * automatically when they return within SCROLL_THRESHOLD of the bottom.
 */
export function Conversation({ children, className = '' }: ConversationProps) {
  const scrollRef = useRef<HTMLDivElement>(null);
  const pinnedRef = useRef(true);
  const resizeObsRef = useRef<ResizeObserver | null>(null);

  // Update the pinned flag from the current scroll position.
  const trackScroll = () => {
    const node = scrollRef.current;
    if (!node) return;
    const fromBottom = node.scrollHeight - node.scrollTop - node.clientHeight;
    pinnedRef.current = fromBottom < SCROLL_THRESHOLD;
  };

  // Re-scroll on container resize while pinned.
  useEffect(() => {
    const node = scrollRef.current;
    if (!node) return;
    resizeObsRef.current = new ResizeObserver(() => {
      if (pinnedRef.current) {
        node.scrollTop = node.scrollHeight;
      }
    });
    resizeObsRef.current.observe(node);
    return () => {
      resizeObsRef.current?.disconnect();
    };
  }, []);

  // Re-scroll when children are added/changed (new messages) while pinned.
  useEffect(() => {
    const node = scrollRef.current;
    if (!node) return;
    const mutationObserver = new MutationObserver(() => {
      if (pinnedRef.current) {
        node.scrollTop = node.scrollHeight;
      }
    });
    mutationObserver.observe(node, { childList: true, subtree: true });
    return () => {
      mutationObserver.disconnect();
    };
  }, []);

  return (
    <div
      ref={scrollRef}
      onScroll={trackScroll}
      className={`overflow-y-auto custom-scrollbar ${className}`}
    >
      {children}
    </div>
  );
}

View File

@@ -0,0 +1,140 @@
import { useState, useRef, useEffect } from 'react';
import { ChevronDown, Check } from 'lucide-react';
import { motion, AnimatePresence } from 'framer-motion';
/**
* Model selector dropdown.
*
* Inspired by DeerFlow's model-selector.tsx:
* - Searchable dropdown with keyboard navigation
* - Shows model provider badge
* - Compact design that fits in the input area
*/
// One selectable model entry; provider is shown as a secondary line if set.
interface ModelOption {
  id: string;
  name: string;
  provider?: string;
}
interface ModelSelectorProps {
  models: ModelOption[];
  // id of the active model; also used as a label fallback if unknown.
  currentModel: string;
  onSelect: (modelId: string) => void;
  disabled?: boolean;
}
/**
 * Compact model picker: trigger button showing the current model, and an
 * upward dropdown with a text filter over name/provider. Closes (and clears
 * the filter) on selection or outside mousedown.
 */
export function ModelSelector({
  models,
  currentModel,
  onSelect,
  disabled = false,
}: ModelSelectorProps) {
  const [open, setOpen] = useState(false);
  const [search, setSearch] = useState('');
  const containerRef = useRef<HTMLDivElement>(null);
  const inputRef = useRef<HTMLInputElement>(null);
  const selectedModel = models.find(m => m.id === currentModel);
  // Case-insensitive substring match on model name or provider.
  const filteredModels = search
    ? models.filter(m =>
        m.name.toLowerCase().includes(search.toLowerCase()) ||
        (m.provider && m.provider.toLowerCase().includes(search.toLowerCase()))
      )
    : models;
  // Close on outside click
  useEffect(() => {
    if (!open) return;
    const handler = (e: MouseEvent) => {
      if (containerRef.current && !containerRef.current.contains(e.target as Node)) {
        setOpen(false);
        setSearch('');
      }
    };
    document.addEventListener('mousedown', handler);
    return () => document.removeEventListener('mousedown', handler);
  }, [open]);
  // Focus search on open
  useEffect(() => {
    if (open && inputRef.current) {
      inputRef.current.focus();
    }
  }, [open]);
  return (
    <div ref={containerRef} className="relative">
      <button
        onClick={() => { if (!disabled) setOpen(!open); }}
        disabled={disabled}
        className="flex items-center gap-1 text-xs text-gray-500 dark:text-gray-400 hover:bg-gray-200 dark:hover:bg-gray-700 px-2 py-1 rounded-md transition-colors disabled:opacity-50"
        aria-expanded={open}
        aria-haspopup="listbox"
      >
        <span className="max-w-[120px] truncate">{selectedModel?.name || currentModel}</span>
        <ChevronDown className={`w-3 h-3 transition-transform ${open ? 'rotate-180' : ''}`} />
      </button>
      <AnimatePresence>
        {open && (
          <motion.div
            initial={{ opacity: 0, y: 4, scale: 0.95 }}
            animate={{ opacity: 1, y: 0, scale: 1 }}
            exit={{ opacity: 0, y: 4, scale: 0.95 }}
            transition={{ duration: 0.12 }}
            className="absolute bottom-full right-0 mb-2 w-56 bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg shadow-lg z-20 overflow-hidden"
          >
            {/* Search */}
            <div className="p-2 border-b border-gray-100 dark:border-gray-700">
              <input
                ref={inputRef}
                value={search}
                onChange={e => setSearch(e.target.value)}
                placeholder="搜索模型..."
                className="w-full bg-transparent text-xs text-gray-700 dark:text-gray-200 placeholder-gray-400 outline-none"
              />
            </div>
            {/* Model list */}
            <div className="max-h-48 overflow-y-auto py-1" role="listbox">
              {filteredModels.length > 0 ? (
                filteredModels.map(model => (
                  <button
                    key={model.id}
                    onClick={() => {
                      onSelect(model.id);
                      setOpen(false);
                      setSearch('');
                    }}
                    role="option"
                    aria-selected={model.id === currentModel}
                    className={`
                      w-full text-left px-3 py-2 text-xs flex items-center justify-between gap-2 transition-colors
                      ${model.id === currentModel
                        ? 'text-orange-600 dark:text-orange-400 bg-orange-50 dark:bg-orange-900/20'
                        : 'text-gray-700 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-700'
                      }
                    `}
                  >
                    <div className="flex flex-col min-w-0">
                      <span className="truncate font-medium">{model.name}</span>
                      {model.provider && (
                        <span className="text-[10px] text-gray-400 dark:text-gray-500">{model.provider}</span>
                      )}
                    </div>
                    {model.id === currentModel && (
                      <Check className="w-3.5 h-3.5 flex-shrink-0" />
                    )}
                  </button>
                ))
              ) : (
                // NOTE(review): empty-results node has no text — label appears
                // stripped in extraction; confirm against the original file.
                <div className="px-3 py-2 text-xs text-gray-400"></div>
              )}
            </div>
          </motion.div>
        )}
      </AnimatePresence>
    </div>
  );
}

View File

@@ -0,0 +1,156 @@
import { useState, useEffect } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
import { ChevronRight, Lightbulb } from 'lucide-react';
/**
* Collapsible reasoning/thinking block with timing display.
*
* Inspired by DeerFlow's reasoning display:
* - Shows elapsed time during streaming ("Thinking for 3s...")
* - Shows final time when complete ("Thought for 5 seconds")
* - Animated expand/collapse
* - Auto-collapses 1 second after streaming ends
*/
interface ReasoningBlockProps {
  // Reasoning text to show inside the collapsible body.
  content: string;
  // True while reasoning tokens are still arriving.
  isStreaming?: boolean;
  defaultExpanded?: boolean;
  /** Unix timestamp (ms) when thinking started, for elapsed time display */
  startedAt?: number;
}
/**
 * Collapsible reasoning/thinking block with timing display.
 *
 * - Shows a live elapsed-seconds label while streaming.
 * - Shows a fixed final duration once streaming completes.
 * - Auto-expands on streaming start, auto-collapses 1s after it ends.
 *
 * Bug fix: the final duration is now computed against a frozen `endedAt`
 * timestamp. Previously it recomputed `Date.now() - startedAt` on every
 * render, so the "思考了 N" label kept growing as the component re-rendered.
 */
export function ReasoningBlock({
  content,
  isStreaming = false,
  defaultExpanded = false,
  startedAt,
}: ReasoningBlockProps) {
  const [expanded, setExpanded] = useState(defaultExpanded || isStreaming);
  const [elapsedSeconds, setElapsedSeconds] = useState(0);
  // When mounted already-finished (historical message), freeze at mount time.
  const [endedAt, setEndedAt] = useState<number | null>(() => (isStreaming ? null : Date.now()));
  // Auto-expand when streaming starts
  useEffect(() => {
    if (isStreaming) setExpanded(true);
  }, [isStreaming]);
  // Auto-collapse 1 second after streaming ends
  const [prevStreaming, setPrevStreaming] = useState(isStreaming);
  useEffect(() => {
    if (prevStreaming && !isStreaming && expanded) {
      const timer = setTimeout(() => setExpanded(false), 1000);
      return () => clearTimeout(timer);
    }
    setPrevStreaming(isStreaming);
  }, [isStreaming, prevStreaming, expanded]);
  // Capture the completion time exactly once per streaming pass.
  useEffect(() => {
    if (isStreaming) {
      setEndedAt(null);
    } else {
      setEndedAt(prev => prev ?? Date.now());
    }
  }, [isStreaming]);
  // Timer for elapsed seconds display (ticks every 200ms while streaming)
  useEffect(() => {
    if (!isStreaming || !startedAt) return;
    const interval = setInterval(() => {
      setElapsedSeconds(Math.floor((Date.now() - startedAt) / 1000));
    }, 200);
    return () => clearInterval(interval);
  }, [isStreaming, startedAt]);
  // Label: live elapsed time while streaming; frozen total afterwards.
  const durationLabel = (() => {
    if (!startedAt) return null;
    if (isStreaming) {
      return elapsedSeconds > 0 ? `已思考 ${elapsedSeconds}` : '思考中...';
    }
    const totalSec = Math.floor(((endedAt ?? Date.now()) - startedAt) / 1000);
    if (totalSec <= 0) return null;
    return `思考了 ${totalSec}`;
  })();
  if (!content) return null;
  return (
    <div className="my-2">
      <button
        onClick={() => setExpanded(!expanded)}
        className="flex items-center gap-1.5 text-xs text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-200 transition-colors group w-full text-left"
        aria-expanded={expanded}
      >
        <motion.span
          animate={{ rotate: expanded ? 90 : 0 }}
          transition={{ duration: 0.15 }}
        >
          <ChevronRight className="w-3.5 h-3.5" />
        </motion.span>
        <Lightbulb className="w-3.5 h-3.5 text-amber-500" />
        <span className="font-medium"></span>
        {durationLabel && !isStreaming && (
          <span className="text-[11px] text-gray-400 dark:text-gray-500 ml-1">
            {durationLabel}
          </span>
        )}
        {isStreaming && (
          <span className="flex gap-0.5 ml-1">
            <span className="w-1 h-1 bg-amber-500 rounded-full animate-bounce" style={{ animationDelay: '0ms' }} />
            <span className="w-1 h-1 bg-amber-500 rounded-full animate-bounce" style={{ animationDelay: '150ms' }} />
            <span className="w-1 h-1 bg-amber-500 rounded-full animate-bounce" style={{ animationDelay: '300ms' }} />
          </span>
        )}
      </button>
      <AnimatePresence>
        {expanded && (
          <motion.div
            initial={{ height: 0, opacity: 0 }}
            animate={{ height: 'auto', opacity: 1 }}
            exit={{ height: 0, opacity: 0 }}
            transition={{ duration: 0.2, ease: 'easeInOut' }}
            className="overflow-hidden"
          >
            <div className="mt-1.5 ml-5 pl-3 border-l-2 border-amber-300 dark:border-amber-700 text-xs text-gray-600 dark:text-gray-400 leading-relaxed whitespace-pre-wrap">
              {content}
              {isStreaming && (
                <span className="inline-block w-1 h-3 bg-amber-500 animate-pulse ml-0.5 align-text-bottom rounded-sm" />
              )}
            </div>
          </motion.div>
        )}
      </AnimatePresence>
    </div>
  );
}
/**
* Chain of thought step display.
* Shows individual reasoning steps with status indicators.
*/
// One step in a reasoning chain with its lifecycle status.
interface ThoughtStep {
  id: string;
  content: string;
  status: 'thinking' | 'done' | 'error';
}

interface ChainOfThoughtProps {
  steps: ThoughtStep[];
  className?: string;
}

/**
 * Vertical list of reasoning steps. Each step shows a status dot:
 * pulsing amber while thinking, green when done, red on error.
 */
export function ChainOfThought({ steps, className = '' }: ChainOfThoughtProps) {
  return (
    <div className={`ml-5 space-y-2 ${className}`}>
      {steps.map((step) => {
        // Pick the status-dot class up front instead of branching in JSX.
        const dotClass =
          step.status === 'thinking'
            ? 'w-2 h-2 bg-amber-400 rounded-full animate-pulse'
            : step.status === 'done'
              ? 'w-2 h-2 bg-green-500 rounded-full'
              : 'w-2 h-2 bg-red-500 rounded-full';
        return (
          <div key={step.id} className="flex items-start gap-2">
            <div className="mt-1 flex-shrink-0">
              <span className={dotClass} />
            </div>
            <span className="text-xs text-gray-600 dark:text-gray-400 leading-relaxed">
              {step.content}
            </span>
          </div>
        );
      })}
    </div>
  );
}

View File

@@ -0,0 +1,136 @@
import { useCallback, type ReactNode } from 'react';
import { Group, Panel, Separator } from 'react-resizable-panels';
import { X, PanelRightOpen, PanelRightClose } from 'lucide-react';
/**
* Resizable dual-panel layout for chat + artifact/detail panel.
*
* Uses react-resizable-panels v4 API:
* - Left panel: Chat area (always visible)
* - Right panel: Artifact/detail viewer (collapsible)
* - Draggable resize handle between panels
* - Persisted panel sizes via localStorage
*/
interface ResizableChatLayoutProps {
chatPanel: ReactNode;
rightPanel?: ReactNode;
rightPanelTitle?: string;
rightPanelOpen?: boolean;
onRightPanelToggle?: (open: boolean) => void;
}
const STORAGE_KEY = 'zclaw-layout-panels';
const LEFT_PANEL_ID = 'chat-panel';
const RIGHT_PANEL_ID = 'detail-panel';
/**
 * Load persisted panel sizes from localStorage.
 *
 * Bug fix: `savePanelSizes` persists the raw numeric layout values, but this
 * function previously returned them unvalidated while its type (and the
 * fallback) promise percent strings — so after one save, `defaultSize` got a
 * number instead of e.g. '65%'. It also used truthiness, rejecting a stored 0.
 * Numbers are now normalized to `${n}%` strings (the layout callback values
 * are assumed to be percentages — confirm against react-resizable-panels docs)
 * and anything malformed falls back to the 65/35 default.
 */
function loadPanelSizes(): { left: string; right: string } {
  try {
    const stored = localStorage.getItem(STORAGE_KEY);
    if (stored) {
      const parsed = JSON.parse(stored);
      // Accept a finite number (persisted layout value) or a non-empty string.
      const normalize = (v: unknown): string | null => {
        if (typeof v === 'number' && Number.isFinite(v)) return `${v}%`;
        if (typeof v === 'string' && v) return v;
        return null;
      };
      const left = normalize(parsed?.left);
      const right = normalize(parsed?.right);
      if (left !== null && right !== null) {
        return { left, right };
      }
    }
  } catch { /* ignore */ }
  // Default split when nothing valid is stored.
  return { left: '65%', right: '35%' };
}
/**
 * Persist the two panel sizes from a layout callback into localStorage.
 * Missing panel ids or storage failures are silently ignored (best-effort).
 */
function savePanelSizes(layout: Record<string, number>) {
  try {
    const { [LEFT_PANEL_ID]: left, [RIGHT_PANEL_ID]: right } = layout;
    if (left === undefined || right === undefined) return;
    localStorage.setItem(STORAGE_KEY, JSON.stringify({ left, right }));
  } catch { /* ignore */ }
}
/**
 * Two-pane layout: chat on the left, optional collapsible detail panel on the
 * right, separated by a draggable handle. Sizes persist via localStorage.
 * When the right panel is closed/absent, renders the chat full-width with a
 * floating "open panel" button instead.
 */
export function ResizableChatLayout({
  chatPanel,
  rightPanel,
  rightPanelTitle = '详情',
  rightPanelOpen = false,
  onRightPanelToggle,
}: ResizableChatLayoutProps) {
  // NOTE(review): read on every render (not memoized) — hits localStorage each
  // time the component re-renders; consider useState(loadPanelSizes) if hot.
  const sizes = loadPanelSizes();
  const handleToggle = useCallback(() => {
    onRightPanelToggle?.(!rightPanelOpen);
  }, [rightPanelOpen, onRightPanelToggle]);
  // Collapsed state: chat fills the width, with a floating open-panel button.
  if (!rightPanelOpen || !rightPanel) {
    return (
      <div className="flex-1 flex flex-col overflow-hidden relative">
        {chatPanel}
        <button
          onClick={handleToggle}
          className="absolute top-3 right-3 z-10 p-1.5 rounded-md bg-white/80 dark:bg-gray-800/80 border border-gray-200 dark:border-gray-700 text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-200 hover:bg-white dark:hover:bg-gray-800 transition-colors shadow-sm"
          title="打开侧面板"
        >
          <PanelRightOpen className="w-4 h-4" />
        </button>
      </div>
    );
  }
  return (
    <div className="flex-1 flex flex-col overflow-hidden">
      {/* NOTE(review): `orientation`, `onLayoutChanged`, and string
          `defaultSize`/`minSize` values are assumed to match the
          react-resizable-panels v4 API — confirm against the library docs. */}
      <Group
        orientation="horizontal"
        onLayoutChanged={(layout) => savePanelSizes(layout)}
      >
        {/* Left panel: Chat */}
        <Panel
          id={LEFT_PANEL_ID}
          defaultSize={sizes.left}
          minSize="40%"
        >
          <div className="h-full flex flex-col relative">
            {chatPanel}
            <button
              onClick={handleToggle}
              className="absolute top-3 right-3 z-10 p-1.5 rounded-md bg-white/80 dark:bg-gray-800/80 border border-gray-200 dark:border-gray-700 text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-200 hover:bg-white dark:hover:bg-gray-800 transition-colors shadow-sm"
              title="关闭侧面板"
            >
              <PanelRightClose className="w-4 h-4" />
            </button>
          </div>
        </Panel>
        {/* Resize handle */}
        <Separator className="w-1.5 flex items-center justify-center group cursor-col-resize hover:bg-orange-100 dark:hover:bg-orange-900/20 transition-colors">
          <div className="w-0.5 h-8 rounded-full bg-gray-300 dark:bg-gray-600 group-hover:bg-orange-400 dark:group-hover:bg-orange-500 transition-colors" />
        </Separator>
        {/* Right panel: Artifact/Detail */}
        <Panel
          id={RIGHT_PANEL_ID}
          defaultSize={sizes.right}
          minSize="25%"
        >
          <div className="h-full flex flex-col bg-gray-50 dark:bg-gray-900 border-l border-gray-200 dark:border-gray-800">
            {/* Panel header */}
            <div className="h-12 flex items-center justify-between px-4 border-b border-gray-200 dark:border-gray-800 flex-shrink-0">
              <span className="text-xs font-medium text-gray-600 dark:text-gray-400 uppercase tracking-wide">
                {rightPanelTitle}
              </span>
              <button
                onClick={handleToggle}
                className="p-1 rounded text-gray-400 hover:text-gray-600 dark:hover:text-gray-200 transition-colors"
                title="关闭面板"
              >
                <X className="w-4 h-4" />
              </button>
            </div>
            {/* Panel content */}
            <div className="flex-1 overflow-y-auto custom-scrollbar">
              {rightPanel}
            </div>
          </div>
        </Panel>
      </Group>
    </div>
  );
}

View File

@@ -0,0 +1,136 @@
import { useMemo, useRef, useEffect, useState } from 'react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
/**
* Streaming text with word-by-word reveal animation.
*
* Inspired by DeerFlow's Streamdown library:
* - Splits streaming text into "words" at whitespace and CJK boundaries
* - Each word gets a CSS fade-in animation
* - Historical messages render statically (no animation overhead)
*
* For non-streaming content, falls back to react-markdown for full
* markdown rendering including GFM tables, strikethrough, etc.
*/
interface StreamingTextProps {
content: string;
isStreaming: boolean;
className?: string;
/** Render as markdown for completed messages */
asMarkdown?: boolean;
}
// Split text into words at whitespace and CJK character boundaries
/**
 * Split text into animation tokens: each CJK character is its own token, and
 * non-CJK runs are split at whitespace (whitespace stays attached to the
 * preceding word, so joining the tokens reproduces the input exactly).
 *
 * Bug fix: the whitespace branch was identical to the default branch
 * (`current += char` in both), so non-CJK text was never split at whitespace
 * and an entire Latin sentence became a single token — defeating the
 * word-by-word reveal. Whitespace now terminates the current token.
 */
function splitIntoTokens(text: string): string[] {
  const tokens: string[] = [];
  let current = '';
  for (const char of text) {
    const code = char.codePointAt(0);
    const isCJK = code && (
      (code >= 0x4E00 && code <= 0x9FFF) || // CJK Unified Ideographs
      (code >= 0x3400 && code <= 0x4DBF) || // CJK Extension A
      (code >= 0x3000 && code <= 0x303F) || // CJK Symbols and Punctuation
      (code >= 0xFF00 && code <= 0xFFEF) || // Fullwidth Forms
      (code >= 0x2E80 && code <= 0x2EFF) || // CJK Radicals Supplement
      (code >= 0xF900 && code <= 0xFAFF)    // CJK Compatibility Ideographs
    );
    const isWhitespace = /\s/.test(char);
    if (isCJK) {
      // Flush any pending run, then emit the CJK char as its own token.
      if (current) {
        tokens.push(current);
        current = '';
      }
      tokens.push(char);
    } else if (isWhitespace) {
      // Whitespace closes the current token (kept as its trailing character
      // so that tokens.join('') === text).
      current += char;
      tokens.push(current);
      current = '';
    } else {
      current += char;
    }
  }
  if (current) {
    tokens.push(current);
  }
  return tokens;
}
/**
 * Text renderer that switches strategy by lifecycle:
 * - finished + asMarkdown → full react-markdown rendering (GFM enabled);
 * - actively streaming with content → animated token-by-token reveal;
 * - otherwise (empty while streaming, or finished without markdown) → nothing.
 */
export function StreamingText({
  content,
  isStreaming,
  className = '',
  asMarkdown = true,
}: StreamingTextProps) {
  const finished = !isStreaming;

  // Completed messages get the full markdown pipeline.
  if (finished && asMarkdown) {
    return (
      <div className={`prose-sm prose-gray dark:prose-invert max-w-none ${className}`}>
        <ReactMarkdown remarkPlugins={[remarkGfm]}>
          {content}
        </ReactMarkdown>
      </div>
    );
  }

  // In-flight messages animate token by token.
  if (!finished && content) {
    return <StreamingTokenText content={content} className={className} />;
  }

  // Nothing to show yet.
  return null;
}
/**
* Token-by-token streaming text with CSS animation.
* Each token (word/CJK char) fades in sequentially.
*/
/**
 * Token-by-token streaming text with a CSS opacity fade per token and a
 * pulsing caret at the end.
 *
 * Bug fix: the original "reset" effect set `visibleCount` to `tokens.length`
 * whenever content changed, which instantly revealed every token and disabled
 * the batched requestAnimationFrame reveal below. The reset now only clamps
 * downward (content replaced by a shorter string); newly appended tokens are
 * revealed in batches of up to 3 per animation frame.
 */
function StreamingTokenText({ content, className }: { content: string; className: string }) {
  const tokens = useMemo(() => splitIntoTokens(content), [content]);
  const containerRef = useRef<HTMLSpanElement>(null);
  const [visibleCount, setVisibleCount] = useState(0);
  // Reveal pending tokens a few at a time, one batch per animation frame.
  useEffect(() => {
    if (visibleCount >= tokens.length) return;
    const remaining = tokens.length - visibleCount;
    const batchSize = Math.min(remaining, 3);
    const frame = requestAnimationFrame(() => {
      setVisibleCount(prev => Math.min(prev + batchSize, tokens.length));
    });
    return () => cancelAnimationFrame(frame);
  }, [tokens.length, visibleCount]);
  // Clamp when the content shrinks (e.g. a brand-new message replaces it).
  useEffect(() => {
    setVisibleCount(prev => Math.min(prev, tokens.length));
  }, [tokens.length]);
  return (
    <span ref={containerRef} className={`whitespace-pre-wrap ${className}`}>
      {tokens.map((token, i) => (
        <span
          key={i}
          className="streaming-token"
          style={{
            opacity: i < visibleCount ? 1 : 0,
            transition: 'opacity 0.15s ease-in',
          }}
        >
          {token}
        </span>
      ))}
      <span className="inline-block w-1.5 h-4 bg-orange-500 animate-pulse ml-0.5 align-text-bottom rounded-sm" />
    </span>
  );
}

View File

@@ -0,0 +1,48 @@
import { motion } from 'framer-motion';
/**
* Follow-up suggestion chips.
*
* Inspired by DeerFlow's suggestion.tsx:
* - Horizontal scrollable chip list
* - Click to fill input
* - Animated entrance
*/
interface SuggestionChipsProps {
  // Suggestion texts, rendered in order as pill buttons.
  suggestions: string[];
  // Called with the chip's full text (the label may be visually truncated).
  onSelect: (text: string) => void;
  className?: string;
}
/**
 * Wrapping row of follow-up suggestion pills with a staggered (50ms/chip)
 * entrance animation. Renders nothing when there are no suggestions.
 */
export function SuggestionChips({ suggestions, onSelect, className = '' }: SuggestionChipsProps) {
  if (suggestions.length === 0) return null;
  return (
    <div className={`flex flex-wrap gap-2 ${className}`}>
      {suggestions.map((text, index) => (
        <motion.button
          key={index}
          initial={{ opacity: 0, y: 8 }}
          animate={{ opacity: 1, y: 0 }}
          transition={{ delay: index * 0.05, duration: 0.2 }}
          onClick={() => onSelect(text)}
          className="
            px-3 py-1.5 text-xs rounded-full
            bg-gray-50 dark:bg-gray-800
            border border-gray-200 dark:border-gray-700
            text-gray-600 dark:text-gray-400
            hover:bg-orange-50 dark:hover:bg-orange-900/20
            hover:text-orange-700 dark:hover:text-orange-300
            hover:border-orange-300 dark:hover:border-orange-600
            transition-colors
            max-w-[280px] truncate
          "
          title={text}
        >
          {text}
        </motion.button>
      ))}
    </div>
  );
}

View File

@@ -0,0 +1,169 @@
import { useState, createContext, useContext, useCallback, type ReactNode } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
import { ChevronRight, CheckCircle2, XCircle, Loader2, Circle } from 'lucide-react';
// ---------------------------------------------------------------------------
// TaskContext — shared task state for sub-agent orchestration
// ---------------------------------------------------------------------------
// Shared task state surface: current subtasks plus a patch-style updater.
interface TaskContextValue {
  tasks: Subtask[];
  updateTask: (id: string, updates: Partial<Subtask>) => void;
}

const TaskContext = createContext<TaskContextValue | null>(null);

/** Read the shared task state; throws when used outside a TaskProvider. */
export function useTaskContext() {
  const ctx = useContext(TaskContext);
  if (ctx === null) {
    throw new Error('useTaskContext must be used within TaskProvider');
  }
  return ctx;
}
/**
 * Owns the subtask list for sub-agent orchestration and exposes it (plus a
 * merge-style per-task updater) through TaskContext.
 */
export function TaskProvider({
  children,
  initialTasks = [],
}: {
  children: ReactNode;
  initialTasks?: Subtask[];
}) {
  const [tasks, setTasks] = useState<Subtask[]>(initialTasks);

  // Shallow-merge `updates` into the task whose id matches; stable identity.
  const updateTask = useCallback((id: string, updates: Partial<Subtask>) => {
    setTasks(current =>
      current.map(task => (task.id === id ? { ...task, ...updates } : task))
    );
  }, []);

  return (
    <TaskContext.Provider value={{ tasks, updateTask }}>
      {children}
    </TaskContext.Provider>
  );
}
/**
* Subtask progress display for sub-agent orchestration.
*
* Inspired by DeerFlow's SubtaskCard + ShineBorder pattern:
* - Shows task status with animated indicators
* - Collapsible details with thinking chain
* - Pulsing border animation for active tasks
* - Status icons: running (pulse), completed (green), failed (red)
*/
export interface Subtask {
id: string;
description: string;
status: 'pending' | 'in_progress' | 'completed' | 'failed';
result?: string;
error?: string;
steps?: Array<{ content: string; status: 'thinking' | 'done' | 'error' }>;
}
interface TaskProgressProps {
tasks: Subtask[];
className?: string;
}
/**
 * Renders one SubtaskCard per task, stacked vertically.
 * Produces no output at all when the task list is empty.
 */
export function TaskProgress({ tasks, className = '' }: TaskProgressProps) {
  if (!tasks.length) return null;
  return (
    <div className={`space-y-2 ${className}`}>
      {tasks.map(t => (
        <SubtaskCard key={t.id} task={t} />
      ))}
    </div>
  );
}
/**
 * Single collapsible task card: status icon + description in the header,
 * with expandable step chain / result / error details below.
 * In-progress tasks get an orange glow border.
 */
function SubtaskCard({ task }: { task: Subtask }) {
  // Auto-expand tasks that are currently running so progress is visible immediately.
  const [expanded, setExpanded] = useState(task.status === 'in_progress');
  const isActive = task.status === 'in_progress';
  return (
    <div
      className={`
        rounded-lg border transition-all overflow-hidden
        ${isActive
          ? 'border-orange-300 dark:border-orange-700 bg-orange-50/50 dark:bg-orange-900/10 shadow-[0_0_15px_-3px_rgba(249,115,22,0.15)] dark:shadow-[0_0_15px_-3px_rgba(249,115,22,0.1)]'
          : task.status === 'completed'
          ? 'border-green-200 dark:border-green-800 bg-green-50/30 dark:bg-green-900/10'
          : task.status === 'failed'
          ? 'border-red-200 dark:border-red-800 bg-red-50/30 dark:bg-red-900/10'
          : 'border-gray-200 dark:border-gray-700 bg-gray-50/50 dark:bg-gray-800/50'
        }
      `}
    >
      {/* Header — clicking anywhere toggles the details */}
      <button
        onClick={() => setExpanded(!expanded)}
        className="w-full flex items-center gap-2 px-3 py-2 text-left"
      >
        <motion.span animate={{ rotate: expanded ? 90 : 0 }} transition={{ duration: 0.15 }}>
          <ChevronRight className="w-3.5 h-3.5 text-gray-400" />
        </motion.span>
        {/* Status icon */}
        {task.status === 'in_progress' ? (
          <Loader2 className="w-4 h-4 text-orange-500 animate-spin" />
        ) : task.status === 'completed' ? (
          <CheckCircle2 className="w-4 h-4 text-green-500" />
        ) : task.status === 'failed' ? (
          <XCircle className="w-4 h-4 text-red-500" />
        ) : (
          <Circle className="w-4 h-4 text-gray-400" />
        )}
        <span className="flex-1 text-xs font-medium text-gray-700 dark:text-gray-300 truncate">
          {task.description}
        </span>
      </button>
      {/* Expanded details — only rendered when there is something to show */}
      <AnimatePresence>
        {expanded && (task.result || task.error || (task.steps && task.steps.length > 0)) && (
          <motion.div
            initial={{ height: 0, opacity: 0 }}
            animate={{ height: 'auto', opacity: 1 }}
            exit={{ height: 0, opacity: 0 }}
            transition={{ duration: 0.2 }}
            className="overflow-hidden"
          >
            <div className="px-3 pb-2 ml-6 border-l-2 border-gray-200 dark:border-gray-700 space-y-1">
              {/* Steps — dot color reflects per-step status */}
              {task.steps?.map((step, i) => (
                <div key={i} className="flex items-start gap-2">
                  {step.status === 'thinking' ? (
                    <span className="w-1.5 h-1.5 mt-1.5 bg-amber-400 rounded-full animate-pulse flex-shrink-0" />
                  ) : step.status === 'done' ? (
                    <span className="w-1.5 h-1.5 mt-1.5 bg-green-500 rounded-full flex-shrink-0" />
                  ) : (
                    <span className="w-1.5 h-1.5 mt-1.5 bg-red-500 rounded-full flex-shrink-0" />
                  )}
                  <span className="text-[11px] text-gray-600 dark:text-gray-400 leading-relaxed">
                    {step.content}
                  </span>
                </div>
              ))}
              {/* Result */}
              {task.result && (
                <div className="text-xs text-gray-700 dark:text-gray-300 mt-1 whitespace-pre-wrap">
                  {task.result}
                </div>
              )}
              {/* Error */}
              {task.error && (
                <div className="text-xs text-red-600 dark:text-red-400 mt-1">
                  {task.error}
                </div>
              )}
            </div>
          </motion.div>
        )}
      </AnimatePresence>
    </div>
  );
}

View File

@@ -0,0 +1,121 @@
import { useState } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
// ---------------------------------------------------------------------------
// TokenMeter — circular SVG gauge showing token usage
// Inspired by DeerFlow's token usage display
// ---------------------------------------------------------------------------
interface TokenMeterProps {
  inputTokens: number; // prompt-side token count for the session
  outputTokens: number; // completion-side token count for the session
  model?: string; // model id shown in the hover detail card
  className?: string; // extra classes for the outer wrapper
}
// Map the usage percentage onto a traffic-light color for the gauge arc:
// green below 50%, yellow from 50% up to (but not including) 80%, red at 80%+.
function getUsageColor(percent: number): string {
  if (percent < 50) return '#22c55e'; // green — plenty of headroom
  if (percent < 80) return '#eab308'; // yellow — approaching the budget
  return '#ef4444'; // red — near or over budget
}
// Compact human-readable token count: 950 -> "950", 1500 -> "1.5K", 2_500_000 -> "2.5M".
function formatTokens(n: number): string {
  const MILLION = 1_000_000;
  const THOUSAND = 1_000;
  if (n >= MILLION) return `${(n / MILLION).toFixed(1)}M`;
  if (n >= THOUSAND) return `${(n / THOUSAND).toFixed(1)}K`;
  return n.toString();
}
/**
 * Circular token-usage gauge. Shows usage as a percentage of an assumed
 * 128K-token context budget; hovering or clicking reveals an
 * input/output/total breakdown card. Renders nothing while total is zero.
 */
export function TokenMeter({ inputTokens, outputTokens, model, className = '' }: TokenMeterProps) {
  const [showDetail, setShowDetail] = useState(false);
  const total = inputTokens + outputTokens;
  // Assume ~128K context window as budget for percentage calculation
  // NOTE(review): hard-coded budget — the real context window depends on `model`; confirm.
  const budget = 128_000;
  const percent = Math.min(100, (total / budget) * 100);
  const color = getUsageColor(percent);
  // SVG circular gauge parameters
  const size = 28;
  const strokeWidth = 3;
  const radius = (size - strokeWidth) / 2;
  const circumference = 2 * Math.PI * radius;
  // Dash offset shrinks as usage grows, revealing more of the colored arc.
  const offset = circumference - (percent / 100) * circumference;
  if (total === 0) return null;
  return (
    <div className={`relative ${className}`}>
      <button
        onClick={() => setShowDetail(!showDetail)}
        onMouseEnter={() => setShowDetail(true)}
        onMouseLeave={() => setShowDetail(false)}
        className="focus:outline-none"
        title="Token 用量"
      >
        {/* -rotate-90 puts the arc's start at 12 o'clock */}
        <svg width={size} height={size} className="transform -rotate-90">
          {/* Background circle */}
          <circle
            cx={size / 2}
            cy={size / 2}
            r={radius}
            fill="none"
            stroke="currentColor"
            strokeWidth={strokeWidth}
            className="text-gray-200 dark:text-gray-700"
          />
          {/* Usage arc */}
          <circle
            cx={size / 2}
            cy={size / 2}
            r={radius}
            fill="none"
            stroke={color}
            strokeWidth={strokeWidth}
            strokeDasharray={circumference}
            strokeDashoffset={offset}
            strokeLinecap="round"
            className="transition-all duration-500"
          />
        </svg>
        {/* Center text */}
        <span className="absolute inset-0 flex items-center justify-center text-[9px] font-medium text-gray-500 dark:text-gray-400">
          {percent >= 1 ? `${Math.round(percent)}` : '<1'}
        </span>
      </button>
      {/* Hover detail card */}
      <AnimatePresence>
        {showDetail && (
          <motion.div
            initial={{ opacity: 0, y: 4, scale: 0.95 }}
            animate={{ opacity: 1, y: 0, scale: 1 }}
            exit={{ opacity: 0, y: 4, scale: 0.95 }}
            transition={{ duration: 0.15 }}
            className="absolute bottom-full right-0 mb-2 w-44 p-3 rounded-lg bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 shadow-lg z-50"
          >
            <div className="space-y-2">
              <div className="flex items-center justify-between">
                <span className="text-[11px] text-gray-500 dark:text-gray-400">Input</span>
                <span className="text-[11px] font-medium text-gray-700 dark:text-gray-200">{formatTokens(inputTokens)}</span>
              </div>
              <div className="flex items-center justify-between">
                <span className="text-[11px] text-gray-500 dark:text-gray-400">Output</span>
                <span className="text-[11px] font-medium text-gray-700 dark:text-gray-200">{formatTokens(outputTokens)}</span>
              </div>
              <div className="border-t border-gray-100 dark:border-gray-700 pt-1.5 flex items-center justify-between">
                <span className="text-[11px] text-gray-500 dark:text-gray-400">Total</span>
                <span className="text-[11px] font-bold text-gray-800 dark:text-gray-100">{formatTokens(total)}</span>
              </div>
              {model && (
                <div className="border-t border-gray-100 dark:border-gray-700 pt-1.5">
                  <span className="text-[10px] text-gray-400 dark:text-gray-500 truncate block">{model}</span>
                </div>
              )}
            </div>
          </motion.div>
        )}
      </AnimatePresence>
    </div>
  );
}

View File

@@ -0,0 +1,255 @@
import { useState } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
import {
Search,
Globe,
Terminal,
FileText,
FilePlus,
FolderOpen,
FileEdit,
HelpCircle,
Code2,
Wrench,
ChevronDown,
Loader2,
CheckCircle2,
XCircle,
} from 'lucide-react';
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
export interface ToolCallStep {
  id: string; // unique per call; used as the React list key
  toolName: string; // raw tool identifier, matched by substring against the icon/label maps
  input?: string; // serialized tool arguments (rendered truncated)
  output?: string; // serialized tool result (rendered truncated)
  status: 'running' | 'completed' | 'error';
  timestamp: Date; // when the call was recorded — not rendered by this component
}
interface ToolCallChainProps {
  steps: ToolCallStep[];
  isStreaming?: boolean; // when true, the last step is highlighted as active
  className?: string; // extra classes for the outer wrapper
}
// ---------------------------------------------------------------------------
// Icon mapping — each tool type gets a distinctive icon
// ---------------------------------------------------------------------------
// Icon per tool. Keys are matched as substrings of the lowercased tool name
// (see getToolIcon), so e.g. "my_web_search_v2" still resolves to Search.
const TOOL_ICONS: Record<string, typeof Search> = {
  web_search: Search,
  web_fetch: Globe,
  bash: Terminal,
  read_file: FileText,
  write_file: FilePlus,
  ls: FolderOpen,
  str_replace: FileEdit,
  ask_clarification: HelpCircle,
  code_execute: Code2,
  // Default fallback
};
// Chinese display label per tool, matched the same substring way;
// unmatched tools fall back to showing the raw tool name.
const TOOL_LABELS: Record<string, string> = {
  web_search: '搜索',
  web_fetch: '获取网页',
  bash: '执行命令',
  read_file: '读取文件',
  write_file: '写入文件',
  ls: '列出目录',
  str_replace: '编辑文件',
  ask_clarification: '澄清问题',
  code_execute: '执行代码',
};
// Pick the icon whose map key appears as a substring of the lowercased tool
// name; the first matching entry wins. Unknown tools get the generic wrench.
function getToolIcon(toolName: string): typeof Search {
  const needle = toolName.toLowerCase();
  const hit = Object.entries(TOOL_ICONS).find(([key]) => needle.includes(key));
  return hit ? hit[1] : Wrench;
}
// Resolve the localized label the same way getToolIcon resolves icons:
// first substring match wins; otherwise fall back to the raw tool name.
function getToolLabel(toolName: string): string {
  const needle = toolName.toLowerCase();
  const hit = Object.entries(TOOL_LABELS).find(([key]) => needle.includes(key));
  return hit ? hit[1] : toolName;
}
// ---------------------------------------------------------------------------
// Truncate helper
// ---------------------------------------------------------------------------
// Flatten newlines into spaces, trim, and cap the string at maxLen
// characters, appending "..." whenever anything was cut off.
function truncate(str: string, maxLen: number): string {
  if (!str) return '';
  const flat = str.replace(/\n/g, ' ').trim();
  if (flat.length <= maxLen) return flat;
  return `${flat.slice(0, maxLen)}...`;
}
// ---------------------------------------------------------------------------
// ToolCallChain — main component
// ---------------------------------------------------------------------------
/**
* Collapsible tool-call step chain.
*
* Inspired by DeerFlow's message-group.tsx convertToSteps():
* - Each tool call shows a type-specific icon + label
* - The latest 2 steps are expanded by default
* - Earlier steps collapse into "查看其他 N 个步骤"
* - Running steps show a spinner; completed show a checkmark
*/
/** Number of trailing (most recent) steps shown while the chain is collapsed. */
const DEFAULT_EXPANDED_COUNT = 2;
export function ToolCallChain({ steps, isStreaming = false, className = '' }: ToolCallChainProps) {
  // Whether the user has opted to reveal every step, not just the tail.
  const [showAll, setShowAll] = useState(false);
  if (steps.length === 0) return null;
  // Collapsed view keeps only the tail of the array — the latest activity.
  const visibleSteps = showAll
    ? steps
    : steps.slice(-DEFAULT_EXPANDED_COUNT);
  const hiddenCount = steps.length - visibleSteps.length;
  // The last step is "active" during streaming
  const activeStepIdx = isStreaming ? steps.length - 1 : -1;
  return (
    <div className={`my-1.5 ${className}`}>
      {/* Collapsed indicator — reveals the hidden earlier steps */}
      {hiddenCount > 0 && !showAll && (
        <button
          onClick={() => setShowAll(true)}
          className="flex items-center gap-1.5 text-xs text-gray-400 dark:text-gray-500 hover:text-orange-500 dark:hover:text-orange-400 transition-colors mb-1.5 ml-0.5 group"
        >
          <ChevronDown className="w-3 h-3 group-hover:text-orange-500 dark:group-hover:text-orange-400 transition-transform" />
          <span> {hiddenCount} </span>
        </button>
      )}
      {/* Steps list */}
      <div className="space-y-0.5">
        {visibleSteps.map((step, idx) => {
          {/* globalIdx maps the visible index back onto the full steps array */}
          const globalIdx = showAll ? idx : hiddenCount + idx;
          const isActive = globalIdx === activeStepIdx;
          const isLast = globalIdx === steps.length - 1;
          return (
            <ToolStepRow
              key={step.id}
              step={step}
              isActive={isActive}
              showConnector={!isLast}
            />
          );
        })}
      </div>
    </div>
  );
}
// ---------------------------------------------------------------------------
// ToolStepRow — a single step in the chain
// ---------------------------------------------------------------------------
interface ToolStepRowProps {
  step: ToolCallStep; // the tool call to render
  isActive: boolean; // true when this is the currently-streaming step
  showConnector: boolean; // draw the vertical line down to the next row
}
/**
 * One row in the tool-call chain: status + tool icon + label, an inline
 * input preview, and an expandable input/output detail section.
 */
function ToolStepRow({ step, isActive, showConnector }: ToolStepRowProps) {
  const [expanded, setExpanded] = useState(false);
  const Icon = getToolIcon(step.toolName);
  const label = getToolLabel(step.toolName);
  const isRunning = step.status === 'running';
  const isError = step.status === 'error';
  return (
    <div>
      <button
        onClick={() => setExpanded(!expanded)}
        className={`
          flex items-center gap-2 w-full text-left px-2 py-1 rounded-md transition-colors
          ${isActive
            ? 'bg-orange-50 dark:bg-orange-900/15'
            : 'hover:bg-gray-50 dark:hover:bg-gray-800/60'
          }
        `}
      >
        {/* Status indicator */}
        {isRunning ? (
          <Loader2 className="w-3.5 h-3.5 text-orange-500 animate-spin flex-shrink-0" />
        ) : isError ? (
          <XCircle className="w-3.5 h-3.5 text-red-400 flex-shrink-0" />
        ) : (
          <CheckCircle2 className="w-3.5 h-3.5 text-green-400 flex-shrink-0" />
        )}
        {/* Tool icon */}
        <Icon className={`w-3.5 h-3.5 flex-shrink-0 ${isActive ? 'text-orange-500' : 'text-gray-400 dark:text-gray-500'}`} />
        {/* Tool label */}
        <span className={`text-xs font-medium ${isActive ? 'text-orange-600 dark:text-orange-400' : 'text-gray-600 dark:text-gray-400'}`}>
          {label}
        </span>
        {/* Input preview — hidden once the row is expanded */}
        {step.input && !expanded && (
          <span className="text-[11px] text-gray-400 dark:text-gray-500 truncate flex-1">
            {truncate(step.input, 60)}
          </span>
        )}
        {/* Expand chevron — only when there are details to show */}
        {(step.input || step.output) && (
          <motion.span
            animate={{ rotate: expanded ? 180 : 0 }}
            transition={{ duration: 0.15 }}
            className="ml-auto flex-shrink-0"
          >
            <ChevronDown className="w-3 h-3 text-gray-400" />
          </motion.span>
        )}
      </button>
      {/* Expanded details */}
      <AnimatePresence>
        {expanded && (step.input || step.output) && (
          <motion.div
            initial={{ height: 0, opacity: 0 }}
            animate={{ height: 'auto', opacity: 1 }}
            exit={{ height: 0, opacity: 0 }}
            transition={{ duration: 0.15 }}
            className="overflow-hidden"
          >
            <div className="ml-9 mr-2 mb-1 space-y-1">
              {step.input && (
                <div className="text-[11px] text-gray-500 dark:text-gray-400 bg-gray-50 dark:bg-gray-800/80 rounded px-2 py-1 font-mono overflow-x-auto">
                  {truncate(step.input, 500)}
                </div>
              )}
              {step.output && (
                <div className={`text-[11px] font-mono rounded px-2 py-1 overflow-x-auto ${isError ? 'text-red-500 bg-red-50 dark:bg-red-900/10' : 'text-green-600 dark:text-green-400 bg-green-50 dark:bg-green-900/10'}`}>
                  {truncate(step.output, 500)}
                </div>
              )}
            </div>
          </motion.div>
        )}
      </AnimatePresence>
      {/* Vertical connector */}
      {showConnector && (
        <div className="ml-[18px] w-px h-1.5 bg-gray-200 dark:bg-gray-700" />
      )}
    </div>
  );
}

View File

@@ -0,0 +1,11 @@
// Barrel file: re-exports the chat UI building blocks (components, their
// prop/config types, and the shared task-state context helpers).
export { Conversation, ConversationProvider, useConversationContext } from './Conversation';
export { ReasoningBlock } from './ReasoningBlock';
export { StreamingText } from './StreamingText';
export { ChatMode, type ChatModeType, type ChatModeConfig, CHAT_MODES } from './ChatMode';
export { ModelSelector } from './ModelSelector';
export { TaskProgress, type Subtask, TaskProvider, useTaskContext } from './TaskProgress';
export { SuggestionChips } from './SuggestionChips';
export { ResizableChatLayout } from './ResizableChatLayout';
export { ToolCallChain, type ToolCallStep } from './ToolCallChain';
export { ArtifactPanel, type ArtifactFile } from './ArtifactPanel';
export { TokenMeter } from './TokenMeter';

View File

@@ -15,3 +15,5 @@ export type {
UseAutomationEventsOptions, UseAutomationEventsOptions,
} from './useAutomationEvents'; } from './useAutomationEvents';
export { useOptimisticMessages } from './useOptimisticMessages';

View File

@@ -0,0 +1,102 @@
import { useCallback, useRef } from 'react';
import { useChatStore, type Message } from '../store/chatStore';
import { createLogger } from '../lib/logger';
const log = createLogger('OptimisticMessages');
/**
* Represents a file attached to an optimistic message,
* tracking its upload lifecycle. Extends MessageFile with a status field.
*/
interface OptimisticFile {
  name: string; // filename taken from the browser File object
  size: number; // byte size taken from the browser File object
  status: 'uploading' | 'uploaded' | 'error'; // upload lifecycle phase
  url?: string; // not set by this hook — presumably filled in by the upload flow; verify against callers
}
/**
* 3-phase optimistic message merging hook (inspired by DeerFlow useThreadStream).
*
* Phase 1: Instant local echo -- creates a synthetic user message with `optimistic: true`
* Phase 2: Server confirmation -- removes optimistic message when real message arrives
* Phase 3: File status transition -- updates file status from uploading -> uploaded | error
*
* This hook provides standalone utilities for components that need fine-grained
* control over optimistic rendering outside the main chat flow.
*/
/**
 * 3-phase optimistic message hook (see module docs above):
 * echo locally, clear on server confirmation, update file upload status.
 * All mutations go straight through useChatStore.setState.
 */
export function useOptimisticMessages() {
  const counter = useRef(0);

  // Monotonic, collision-resistant id for locally-echoed messages.
  const nextId = useCallback(() => {
    counter.current += 1;
    return `opt-user-${Date.now()}-${counter.current}`;
  }, []);

  /**
   * Phase 1: append a synthetic user message to the store immediately.
   * Returns the optimistic id so the caller can correlate it later.
   */
  const addOptimistic = useCallback((content: string, files?: File[]) => {
    const id = nextId();
    const pendingFiles: OptimisticFile[] | undefined = files?.map(f => ({
      name: f.name,
      size: f.size,
      status: 'uploading' as const,
    }));
    const echo: Message = {
      id,
      role: 'user',
      content,
      timestamp: new Date(),
      optimistic: true,
      // OptimisticFile adds a `status` field on top of MessageFile, hence the cast.
      files: pendingFiles as Message['files'],
    };
    log.debug('Adding optimistic message', { id, content: content.slice(0, 50) });
    useChatStore.setState(state => ({
      messages: [...state.messages, echo],
    }));
    return id;
  }, [nextId]);

  /** Phase 2: drop the local echo once the server sends the real message. */
  const clearOnConfirm = useCallback((optimisticId: string) => {
    log.debug('Clearing optimistic message on confirm', { optimisticId });
    useChatStore.setState(state => ({
      messages: state.messages.filter(m => m.id !== optimisticId),
    }));
  }, []);

  /**
   * Phase 3: mark every attachment on the echoed message with the new
   * upload status (all files transition together).
   */
  const updateFileStatus = useCallback((optimisticId: string, status: 'uploaded' | 'error') => {
    log.debug('Updating file status', { optimisticId, status });
    useChatStore.setState(state => ({
      messages: state.messages.map(m =>
        m.id === optimisticId && m.files
          ? { ...m, files: m.files.map(f => ({ ...f, status })) }
          : m
      ),
    }));
  }, []);

  return { addOptimistic, clearOnConfirm, updateFileStatus };
}

View File

@@ -1,5 +1,27 @@
@import "tailwindcss"; @import "tailwindcss";
/* Aurora gradient animation for welcome title (DeerFlow-inspired) */
@keyframes gradient-shift {
0%, 100% { background-position: 0% 50%; }
50% { background-position: 100% 50%; }
}
.aurora-title {
background: linear-gradient(
135deg,
#f97316 0%, /* orange-500 */
#ef4444 25%, /* red-500 */
#f97316 50%, /* orange-500 */
#fb923c 75%, /* orange-400 */
#f97316 100% /* orange-500 */
);
background-size: 200% 200%;
-webkit-background-clip: text;
background-clip: text;
-webkit-text-fill-color: transparent;
animation: gradient-shift 4s ease infinite;
}
:root { :root {
/* Brand Colors - 中性灰色系 */ /* Brand Colors - 中性灰色系 */
--color-primary: #374151; /* gray-700 */ --color-primary: #374151; /* gray-700 */
@@ -18,8 +40,8 @@
/* Neutral Colors */ /* Neutral Colors */
--color-bg: #ffffff; --color-bg: #ffffff;
--color-bg-secondary: #f9fafb; --color-bg-secondary: #faf9f6;
--color-border: #e5e7eb; --color-border: #e8e6e1;
--color-text: #111827; --color-text: #111827;
--color-text-secondary: #6b7280; --color-text-secondary: #6b7280;
--color-text-muted: #9ca3af; --color-text-muted: #9ca3af;
@@ -50,9 +72,9 @@
} }
.dark { .dark {
--color-bg: #0f172a; --color-bg: #0f1117;
--color-bg-secondary: #1e293b; --color-bg-secondary: #1a1b26;
--color-border: #334155; --color-border: #2e303a;
--color-text: #f1f5f9; --color-text: #f1f5f9;
--color-text-secondary: #94a3b8; --color-text-secondary: #94a3b8;
--color-text-muted: #64748b; --color-text-muted: #64748b;
@@ -66,7 +88,7 @@
body { body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
background-color: #f9fafb; background-color: #faf9f6;
} }
.custom-scrollbar::-webkit-scrollbar { .custom-scrollbar::-webkit-scrollbar {
@@ -84,6 +106,14 @@ body {
background: #4b5563; /* gray-600 */ background: #4b5563; /* gray-600 */
} }
/* Sidebar warm background — DeerFlow-style */
.sidebar-bg {
background: #f5f4f1;
}
:root.dark .sidebar-bg {
background: #0f1117;
}
.chat-bubble-assistant { .chat-bubble-assistant {
background: white; background: white;
border: 1px solid #e5e7eb; border: 1px solid #e5e7eb;
@@ -111,3 +141,16 @@ body {
0%, 100% { background-position: 0% 50%; } 0%, 100% { background-position: 0% 50%; }
50% { background-position: 100% 50%; } 50% { background-position: 100% 50%; }
} }
/* Force remove textarea border — WebView2 / Tailwind v4 preflight override */
textarea {
border: none !important;
outline: none !important;
box-shadow: none !important;
}
textarea:focus,
textarea:focus-visible {
border: none !important;
outline: none !important;
box-shadow: none !important;
}

View File

@@ -0,0 +1,304 @@
/**
* gateway-api-types.ts - Return type interfaces for Gateway REST API methods
*
* Provides concrete TypeScript interfaces for every API method
* that previously returned Promise<any>. Grouped by domain:
* - Health / Status
* - Agents (Clones)
* - Stats & Workspace
* - Quick Config
* - Skills
* - Channels
* - Scheduler
* - Config apply
* - Hands (detail)
* - Session (detail)
* - Trigger (detail)
*/
// === Health / Status ===
/** Return shape of the gateway health-check endpoint. */
export interface HealthResponse {
  status: string;
  version?: string;
  uptime?: number;
}
/**
 * Return shape of the gateway status endpoint. The index signature keeps
 * the type open to extra server fields not modeled here.
 */
export interface StatusResponse {
  initialized?: boolean;
  version?: string;
  agents_count?: number;
  sessions_count?: number;
  uptime?: number;
  [key: string]: unknown;
}
// === Agents (Clones) ===
/** A single agent ("clone") record; most fields are optional server metadata. */
export interface AgentClone {
  id: string;
  name?: string;
  state?: string;
  model?: string;
  role?: string;
  emoji?: string;
  personality?: string;
  scenarios?: string[];
  workspace_dir?: string;
  created_at?: string;
  updated_at?: string;
  [key: string]: unknown;
}
/** List payload — the server may use either the `agents` or `clones` key. */
export interface ListClonesResponse {
  agents?: AgentClone[];
  clones?: AgentClone[];
}
/** Create payload — the record may be wrapped as `clone` or `agent`. */
export interface CreateCloneResponse {
  clone?: AgentClone;
  agent?: AgentClone;
}
/** Update payload — the record may be wrapped as `clone` or `agent`. */
export interface UpdateCloneResponse {
  clone?: AgentClone;
  agent?: AgentClone;
}
/** Delete result; extra server fields tolerated via the index signature. */
export interface DeleteCloneResponse {
  status?: string;
  [key: string]: unknown;
}
// === Stats & Workspace ===
/** Aggregate usage counters; keeps legacy field names for compatibility. */
export interface UsageStatsResponse {
  totalMessages?: number;
  totalTokens?: number;
  sessionsCount?: number;
  agentsCount?: number;
  // Fallback compatibility fields
  totalSessions?: number;
  byModel?: Record<string, { messages: number; inputTokens: number; outputTokens: number }>;
}
/** Per-session statistics list. */
export interface SessionStatsResponse {
  sessions?: Array<{
    id: string;
    agent_id?: string;
    message_count?: number;
    created_at?: string;
  }>;
}
/** Workspace directory layout and size info; fields vary by endpoint version. */
export interface WorkspaceInfoResponse {
  rootDir?: string | null;
  skillsDir?: string | null;
  handsDir?: string | null;
  configDir?: string | null;
  path?: string;
  resolvedPath?: string;
  exists?: boolean;
  fileCount?: number;
  totalSize?: number;
}
/** One installed plugin as reported by the gateway. */
export interface PluginEntry {
  id: string;
  name?: string;
  status?: string;
  version?: string;
}
/** Plugin inventory plus loaded/total counts. */
export interface PluginStatusResponse {
  plugins?: PluginEntry[];
  loaded?: number;
  total?: number;
}
// === Quick Config ===
/**
 * Flat bag of quick-setup settings: agent/user identity, gateway connection,
 * UI preferences and feature toggles. Open-ended via the index signature.
 */
export interface QuickConfigData {
  agentName?: string;
  agentRole?: string;
  userName?: string;
  userRole?: string;
  agentNickname?: string;
  scenarios?: string[];
  workspaceDir?: string;
  gatewayUrl?: string;
  gatewayToken?: string;
  defaultModel?: string;
  defaultProvider?: string;
  theme?: 'light' | 'dark';
  autoStart?: boolean;
  showToolCalls?: boolean;
  autoSaveContext?: boolean;
  fileWatching?: boolean;
  privacyOptIn?: boolean;
  skillsExtraDirs?: string[];
  mcpServices?: Array<{ id: string; name: string; enabled: boolean }>;
  restrictFiles?: boolean;
  [key: string]: unknown;
}
/** GET response wrapper for the quick config. */
export interface QuickConfigResponse {
  quickConfig?: QuickConfigData;
}
/** Save response wrapper for the quick config. */
export interface SaveQuickConfigResponse {
  quickConfig?: QuickConfigData;
}
// === Skills ===
/** Metadata for a single skill definition. */
export interface SkillInfo {
  id: string;
  name: string;
  description?: string;
  version?: string;
  capabilities?: string[];
  tags?: string[];
  mode?: string;
  triggers?: Array<{ type: string; pattern?: string }>; // conditions that fire the skill
  actions?: Array<{ type: string; params?: Record<string, unknown> }>; // what the skill executes
  enabled?: boolean;
  category?: string;
}
/** List payload: skills plus any extra search directories. */
export interface ListSkillsResponse {
  skills?: SkillInfo[];
  extraDirs?: string[];
}
/** Single-skill wrapper for the get endpoint. */
export interface GetSkillResponse {
  skill?: SkillInfo;
}
/** Single-skill wrapper for the create endpoint. */
export interface CreateSkillResponse {
  skill?: SkillInfo;
}
/** Single-skill wrapper for the update endpoint. */
export interface UpdateSkillResponse {
  skill?: SkillInfo;
}
/** Delete result. */
export interface DeleteSkillResponse {
  status?: string;
}
// === Channels ===
/** A configured messaging channel (type-specific settings live in `config`). */
export interface ChannelInfo {
  id: string;
  type: string;
  name: string;
  config?: Record<string, unknown>;
  enabled?: boolean;
  label?: string;
  status?: string;
}
/** List payload for channels. */
export interface ListChannelsResponse {
  channels?: ChannelInfo[];
}
/** Single-channel wrapper for the get endpoint. */
export interface GetChannelResponse {
  channel?: ChannelInfo;
}
/** Single-channel wrapper for the create endpoint. */
export interface CreateChannelResponse {
  channel?: ChannelInfo;
}
/** Single-channel wrapper for the update endpoint. */
export interface UpdateChannelResponse {
  channel?: ChannelInfo;
}
/** Delete result. */
export interface DeleteChannelResponse {
  status?: string;
}
/** Feishu integration status summary. */
export interface FeishuStatusResponse {
  configured?: boolean;
  accounts?: number;
  status?: string;
}
// === Scheduler ===
/** One scheduled task entry. */
export interface ScheduledTaskEntry {
  id: string;
  name: string;
  schedule: string; // spec whose format depends on scheduleType (cron / interval / once)
  scheduleType?: 'cron' | 'interval' | 'once';
  status?: string;
  target?: { type: 'agent' | 'hand' | 'workflow'; id: string }; // what the task invokes when it fires
  description?: string;
  enabled?: boolean;
}
/** Task list with optional total count. */
export interface ListScheduledTasksResponse {
  tasks?: ScheduledTaskEntry[];
  total?: number;
}
// === Config Apply ===
/** Result of applying a configuration payload. */
export interface ApplyConfigResponse {
  ok?: boolean;
  applied?: boolean;
  hash?: string; // presumably a content hash of the applied config — confirm with server
  restartScheduled?: boolean;
}
// === Hands (detail) ===
/**
 * Detailed record for a "hand". Requirement entries carry both `met` and
 * `satisfied` variants — the server may use either name; check both.
 */
export interface HandDetail {
  id?: string;
  name?: string;
  description?: string;
  status?: string;
  requirements_met?: boolean;
  category?: string;
  icon?: string;
  provider?: string;
  model?: string;
  requirements?: Array<{
    description?: string;
    name?: string;
    met?: boolean;
    satisfied?: boolean;
    details?: string;
    hint?: string;
  }>;
  tools?: string[];
  metrics?: string[];
  config?: Record<string, unknown>;
  tool_count?: number;
  metric_count?: number;
  [key: string]: unknown;
}
// === Session detail ===
/** Detailed chat-session record; open to extra server fields. */
export interface SessionDetail {
  id: string;
  agent_id: string;
  created_at: string;
  updated_at?: string;
  message_count?: number;
  status?: 'active' | 'archived' | 'expired';
  metadata?: Record<string, unknown>;
  [key: string]: unknown;
}
// === Trigger detail ===
/** A configured trigger (type-specific settings live in `config`). */
export interface TriggerDetail {
  id: string;
  type: string;
  name?: string;
  enabled: boolean;
  config?: Record<string, unknown>;
}

View File

@@ -473,6 +473,9 @@ export class GatewayClient {
opts?: { opts?: {
sessionKey?: string; sessionKey?: string;
agentId?: string; agentId?: string;
thinking_enabled?: boolean;
reasoning_effort?: string;
plan_mode?: boolean;
} }
): Promise<{ runId: string }> { ): Promise<{ runId: string }> {
const agentId = opts?.agentId || this.defaultAgentId; const agentId = opts?.agentId || this.defaultAgentId;
@@ -482,11 +485,16 @@ export class GatewayClient {
// If no agent ID, try to fetch from ZCLAW status (async, but we'll handle it in connectZclawStream) // If no agent ID, try to fetch from ZCLAW status (async, but we'll handle it in connectZclawStream)
if (!agentId) { if (!agentId) {
// Try to get default agent asynchronously // Try to get default agent asynchronously
const chatModeOpts = {
thinking_enabled: opts?.thinking_enabled,
reasoning_effort: opts?.reasoning_effort,
plan_mode: opts?.plan_mode,
};
this.fetchDefaultAgentId().then(() => { this.fetchDefaultAgentId().then(() => {
const resolvedAgentId = this.defaultAgentId; const resolvedAgentId = this.defaultAgentId;
if (resolvedAgentId) { if (resolvedAgentId) {
this.streamCallbacks.set(runId, callbacks); this.streamCallbacks.set(runId, callbacks);
this.connectZclawStream(resolvedAgentId, runId, sessionId, message); this.connectZclawStream(resolvedAgentId, runId, sessionId, message, chatModeOpts);
} else { } else {
callbacks.onError('No agent available. Please ensure ZCLAW has at least one agent.'); callbacks.onError('No agent available. Please ensure ZCLAW has at least one agent.');
callbacks.onComplete(); callbacks.onComplete();
@@ -502,7 +510,11 @@ export class GatewayClient {
this.streamCallbacks.set(runId, callbacks); this.streamCallbacks.set(runId, callbacks);
// Connect to ZCLAW WebSocket if not connected // Connect to ZCLAW WebSocket if not connected
this.connectZclawStream(agentId, runId, sessionId, message); this.connectZclawStream(agentId, runId, sessionId, message, {
thinking_enabled: opts?.thinking_enabled,
reasoning_effort: opts?.reasoning_effort,
plan_mode: opts?.plan_mode,
});
return { runId }; return { runId };
} }
@@ -512,7 +524,12 @@ export class GatewayClient {
agentId: string, agentId: string,
runId: string, runId: string,
sessionId: string, sessionId: string,
message: string message: string,
chatModeOpts?: {
thinking_enabled?: boolean;
reasoning_effort?: string;
plan_mode?: boolean;
}
): void { ): void {
// Close existing connection if any // Close existing connection if any
if (this.zclawWs && this.zclawWs.readyState !== WebSocket.CLOSED) { if (this.zclawWs && this.zclawWs.readyState !== WebSocket.CLOSED) {
@@ -539,11 +556,20 @@ export class GatewayClient {
this.zclawWs.onopen = () => { this.zclawWs.onopen = () => {
this.log('info', 'ZCLAW WebSocket connected'); this.log('info', 'ZCLAW WebSocket connected');
// Send chat message using ZCLAW actual protocol // Send chat message using ZCLAW actual protocol
const chatRequest = { const chatRequest: Record<string, unknown> = {
type: 'message', type: 'message',
content: message, content: message,
session_id: sessionId, session_id: sessionId,
}; };
if (chatModeOpts?.thinking_enabled !== undefined) {
chatRequest.thinking_enabled = chatModeOpts.thinking_enabled;
}
if (chatModeOpts?.reasoning_effort !== undefined) {
chatRequest.reasoning_effort = chatModeOpts.reasoning_effort;
}
if (chatModeOpts?.plan_mode !== undefined) {
chatRequest.plan_mode = chatModeOpts.plan_mode;
}
this.zclawWs?.send(JSON.stringify(chatRequest)); this.zclawWs?.send(JSON.stringify(chatRequest));
}; };
@@ -569,8 +595,13 @@ export class GatewayClient {
this.zclawWs.onclose = (event) => { this.zclawWs.onclose = (event) => {
this.log('info', `ZCLAW WebSocket closed: ${event.code} ${event.reason}`); this.log('info', `ZCLAW WebSocket closed: ${event.code} ${event.reason}`);
const callbacks = this.streamCallbacks.get(runId); const callbacks = this.streamCallbacks.get(runId);
if (callbacks && event.code !== 1000) { if (callbacks) {
callbacks.onError(`Connection closed: ${event.reason || 'unknown'}`); if (event.code !== 1000) {
callbacks.onError(`Connection closed: ${event.reason || 'unknown'}`);
} else {
// Normal closure — ensure stream is completed even if no done event was sent
callbacks.onComplete();
}
} }
this.streamCallbacks.delete(runId); this.streamCallbacks.delete(runId);
this.zclawWs = null; this.zclawWs = null;
@@ -614,8 +645,9 @@ export class GatewayClient {
case 'response': case 'response':
// Final response with tokens info // Final response with tokens info
if (data.content) { if (data.content) {
// If we haven't received any deltas yet, send the full response // Forward the full response content via onDelta
// This handles non-streaming responses // This handles non-streaming responses from the server
callbacks.onDelta(data.content);
} }
// Mark complete if phase done wasn't sent // Mark complete if phase done wasn't sent
callbacks.onComplete(); callbacks.onComplete();

View File

@@ -1,288 +0,0 @@
/**
* gateway-stream.ts - Gateway Stream Methods
*
* Extracted from gateway-client.ts for modularity.
* Installs streaming methods onto GatewayClient.prototype via mixin pattern.
*
* Contains:
* - chatStream (public): Send message with streaming response
* - connectZclawStream (private): Connect to ZCLAW WebSocket for streaming
* - handleZclawStreamEvent (private): Parse and dispatch stream events
* - cancelStream (public): Cancel an ongoing stream
*/
import type { ZclawStreamEvent } from './gateway-types';
import type { GatewayClient } from './gateway-client';
import { createIdempotencyKey } from './gateway-errors';
// === Mixin Installer ===
/**
* Install streaming methods onto GatewayClient.prototype.
*
* These methods access instance properties:
* - this.defaultAgentId: string
* - this.zclawWs: WebSocket | null
* - this.streamCallbacks: Map<string, StreamCallbacks>
* - this.log(level, message): void
* - this.getRestBaseUrl(): string
* - this.fetchDefaultAgentId(): Promise<string | null>
* - this.emitEvent(event, payload): void
*/
/**
 * Install streaming methods onto GatewayClient.prototype.
 *
 * These methods access instance properties:
 * - this.defaultAgentId: string
 * - this.zclawWs: WebSocket | null
 * - this.streamCallbacks: Map<string, StreamCallbacks>
 * - this.log(level, message): void
 * - this.getRestBaseUrl(): string
 * - this.fetchDefaultAgentId(): Promise<string | null>
 * - this.emitEvent(event, payload): void
 */
export function installStreamMethods(ClientClass: { prototype: GatewayClient }): void {
  const proto = ClientClass.prototype as any;

  /**
   * Send message with streaming response (ZCLAW WebSocket).
   * Returns immediately with a runId; deltas, tool events, completion and
   * errors are all delivered through `callbacks`.
   */
  proto.chatStream = async function (
    this: GatewayClient,
    message: string,
    callbacks: {
      onDelta: (delta: string) => void;
      onTool?: (tool: string, input: string, output: string) => void;
      onHand?: (name: string, status: string, result?: unknown) => void;
      onComplete: () => void;
      onError: (error: string) => void;
    },
    opts?: {
      sessionKey?: string;
      agentId?: string;
    }
  ): Promise<{ runId: string }> {
    const self = this as any;
    const agentId = opts?.agentId || self.defaultAgentId;
    const runId = createIdempotencyKey();
    const sessionId = opts?.sessionKey || crypto.randomUUID();
    // If no agent ID, try to fetch from ZCLAW status (async, but we'll handle it in connectZclawStream)
    if (!agentId) {
      // Try to get default agent asynchronously; callbacks always terminate
      // with onComplete so the caller's UI never hangs on this path.
      self.fetchDefaultAgentId().then(() => {
        const resolvedAgentId = self.defaultAgentId;
        if (resolvedAgentId) {
          self.streamCallbacks.set(runId, callbacks);
          self.connectZclawStream(resolvedAgentId, runId, sessionId, message);
        } else {
          callbacks.onError('No agent available. Please ensure ZCLAW has at least one agent.');
          callbacks.onComplete();
        }
      }).catch((err: unknown) => {
        callbacks.onError(`Failed to get agent: ${err}`);
        callbacks.onComplete();
      });
      return { runId };
    }
    // Store callbacks for this run
    self.streamCallbacks.set(runId, callbacks);
    // Connect to ZCLAW WebSocket if not connected
    self.connectZclawStream(agentId, runId, sessionId, message);
    return { runId };
  };

  /**
   * Connect to the ZCLAW streaming WebSocket and send the chat message.
   * Any previous socket is closed first (NOTE(review): callbacks of a
   * still-running previous run are not completed here — confirm intended).
   */
  proto.connectZclawStream = function (
    this: GatewayClient,
    agentId: string,
    runId: string,
    sessionId: string,
    message: string
  ): void {
    const self = this as any;
    // Close existing connection if any
    if (self.zclawWs && self.zclawWs.readyState !== WebSocket.CLOSED) {
      self.zclawWs.close();
    }
    // Build WebSocket URL
    // In dev mode, use Vite proxy; in production, use direct connection
    let wsUrl: string;
    if (typeof window !== 'undefined' && window.location.port === '1420') {
      // Dev mode: use Vite proxy with relative path
      wsUrl = `ws://${window.location.host}/api/agents/${agentId}/ws`;
    } else {
      // Production: extract from stored URL
      const httpUrl = self.getRestBaseUrl();
      wsUrl = httpUrl.replace(/^http/, 'ws') + `/api/agents/${agentId}/ws`;
    }
    self.log('info', `Connecting to ZCLAW stream: ${wsUrl}`);
    try {
      self.zclawWs = new WebSocket(wsUrl);
      self.zclawWs.onopen = () => {
        self.log('info', 'ZCLAW WebSocket connected');
        // Send chat message using ZCLAW actual protocol
        const chatRequest = {
          type: 'message',
          content: message,
          session_id: sessionId,
        };
        self.zclawWs?.send(JSON.stringify(chatRequest));
      };
      self.zclawWs.onmessage = (event: MessageEvent) => {
        try {
          const data = JSON.parse(event.data);
          self.handleZclawStreamEvent(runId, data, sessionId);
        } catch (err: unknown) {
          const errorMessage = err instanceof Error ? err.message : String(err);
          self.log('error', `Failed to parse stream event: ${errorMessage}`);
        }
      };
      self.zclawWs.onerror = (_event: Event) => {
        self.log('error', 'ZCLAW WebSocket error');
        const callbacks = self.streamCallbacks.get(runId);
        if (callbacks) {
          callbacks.onError('WebSocket connection failed');
          self.streamCallbacks.delete(runId);
        }
      };
      self.zclawWs.onclose = (event: CloseEvent) => {
        self.log('info', `ZCLAW WebSocket closed: ${event.code} ${event.reason}`);
        const callbacks = self.streamCallbacks.get(runId);
        if (callbacks) {
          if (event.code !== 1000) {
            callbacks.onError(`Connection closed: ${event.reason || 'unknown'}`);
          } else {
            // FIX: normal closure (1000) must still complete the stream.
            // Previously a server that closed cleanly without sending a
            // 'phase: done' or 'response' event left the UI hanging.
            callbacks.onComplete();
          }
        }
        self.streamCallbacks.delete(runId);
        self.zclawWs = null;
      };
    } catch (err: unknown) {
      const errorMessage = err instanceof Error ? err.message : String(err);
      self.log('error', `Failed to create WebSocket: ${errorMessage}`);
      const callbacks = self.streamCallbacks.get(runId);
      if (callbacks) {
        callbacks.onError(errorMessage);
        self.streamCallbacks.delete(runId);
      }
    }
  };

  /**
   * Parse a ZCLAW stream event and dispatch it to the run's callbacks.
   * Every event is also mirrored to general 'agent' event listeners.
   */
  proto.handleZclawStreamEvent = function (
    this: GatewayClient,
    runId: string,
    data: ZclawStreamEvent,
    sessionId: string
  ): void {
    const self = this as any;
    const callbacks = self.streamCallbacks.get(runId);
    if (!callbacks) return;
    switch (data.type) {
      // ZCLAW actual event types
      case 'text_delta':
        // Stream delta content
        if (data.content) {
          callbacks.onDelta(data.content);
        }
        break;
      case 'phase':
        // Phase change: streaming | done
        if (data.phase === 'done') {
          callbacks.onComplete();
          self.streamCallbacks.delete(runId);
          if (self.zclawWs) {
            self.zclawWs.close(1000, 'Stream complete');
          }
        }
        break;
      case 'response':
        // Final response with tokens info
        if (data.content) {
          // FIX: forward the full response content via onDelta.
          // This handles non-streaming servers that send a single
          // 'response' instead of text_delta events; the original left
          // this branch empty and the content was silently dropped.
          callbacks.onDelta(data.content);
        }
        // Mark complete if phase done wasn't sent
        callbacks.onComplete();
        self.streamCallbacks.delete(runId);
        if (self.zclawWs) {
          self.zclawWs.close(1000, 'Stream complete');
        }
        break;
      case 'typing':
        // Typing indicator: { state: 'start' | 'stop' }
        // Can be used for UI feedback
        break;
      case 'tool_call':
        // Tool call event
        if (callbacks.onTool && data.tool) {
          callbacks.onTool(data.tool, JSON.stringify(data.input || {}), data.output || '');
        }
        break;
      case 'tool_result':
        if (callbacks.onTool && data.tool) {
          callbacks.onTool(data.tool, '', String(data.result || data.output || ''));
        }
        break;
      case 'hand':
        if (callbacks.onHand && data.hand_name) {
          callbacks.onHand(data.hand_name, data.hand_status || 'triggered', data.hand_result);
        }
        break;
      case 'error':
        callbacks.onError(data.message || data.code || data.content || 'Unknown error');
        self.streamCallbacks.delete(runId);
        if (self.zclawWs) {
          self.zclawWs.close(1011, 'Error');
        }
        break;
      case 'connected':
        // Connection established
        self.log('info', `ZCLAW agent connected: ${data.agent_id}`);
        break;
      case 'agents_updated':
        // Agents list updated
        self.log('debug', 'Agents list updated');
        break;
      default:
        // Emit unknown events for debugging
        self.log('debug', `Stream event: ${data.type}`);
    }
    // Also emit to general 'agent' event listeners
    self.emitEvent('agent', {
      stream: data.type === 'text_delta' ? 'assistant' : data.type,
      delta: data.content,
      content: data.content,
      runId,
      sessionId,
      ...data,
    });
  };

  /**
   * Cancel an ongoing stream: report cancellation to its callbacks and
   * close the socket with a normal-closure code.
   */
  proto.cancelStream = function (this: GatewayClient, runId: string): void {
    const self = this as any;
    const callbacks = self.streamCallbacks.get(runId);
    if (callbacks) {
      callbacks.onError('Stream cancelled');
      self.streamCallbacks.delete(runId);
    }
    if (self.zclawWs && self.zclawWs.readyState === WebSocket.OPEN) {
      self.zclawWs.close(1000, 'User cancelled');
    }
  };
}

View File

@@ -0,0 +1,61 @@
/**
* Intelligence Layer - LocalStorage Compactor Fallback
*
* Provides rule-based compaction for browser/dev environment.
*/
import type { CompactableMessage, CompactionResult, CompactionCheck, CompactionConfig } from '../intelligence-backend';
/**
 * Rule-based compaction fallback for browser/dev environments.
 * Token counts are cheap heuristics; compaction keeps only the most
 * recent messages and replaces the rest with a one-line summary marker.
 */
export const fallbackCompactor = {
  async estimateTokens(text: string): Promise<number> {
    // Heuristic: CJK characters cost ~1.5 tokens each, everything else ~4 chars/token.
    const cjkCount = (text.match(/[\u4e00-\u9fff\u3040-\u30ff]/g) ?? []).length;
    const restCount = text.length - cjkCount;
    return Math.ceil(cjkCount * 1.5 + restCount / 4);
  },

  async estimateMessagesTokens(messages: CompactableMessage[]): Promise<number> {
    const perMessage = await Promise.all(
      messages.map(m => fallbackCompactor.estimateTokens(m.content))
    );
    return perMessage.reduce((sum, n) => sum + n, 0);
  },

  /** Compare the estimated token usage against soft/hard thresholds. */
  async checkThreshold(
    messages: CompactableMessage[],
    config?: CompactionConfig
  ): Promise<CompactionCheck> {
    const soft = config?.soft_threshold_tokens ?? 15000;
    const hard = config?.hard_threshold_tokens ?? 20000;
    const used = await fallbackCompactor.estimateMessagesTokens(messages);
    let urgency: CompactionCheck['urgency'];
    if (used >= hard) {
      urgency = 'hard';
    } else if (used >= soft) {
      urgency = 'soft';
    } else {
      urgency = 'none';
    }
    return {
      should_compact: used >= soft,
      current_tokens: used,
      threshold: soft,
      urgency,
    };
  },

  /** Keep the most recent N messages (config.keep_recent_messages, default 10). */
  async compact(
    messages: CompactableMessage[],
    _agentId: string,
    _conversationId?: string,
    config?: CompactionConfig
  ): Promise<CompactionResult> {
    const recentCount = config?.keep_recent_messages ?? 10;
    const kept = messages.slice(-recentCount);
    const [tokensBefore, tokensAfter] = await Promise.all([
      fallbackCompactor.estimateMessagesTokens(messages),
      fallbackCompactor.estimateMessagesTokens(kept),
    ]);
    return {
      compacted_messages: kept,
      summary: `[Compacted ${messages.length - kept.length} earlier messages]`,
      original_count: messages.length,
      retained_count: kept.length,
      flushed_memories: 0,
      tokens_before_compaction: tokensBefore,
      tokens_after_compaction: tokensAfter,
    };
  },
};

View File

@@ -0,0 +1,54 @@
/**
* Intelligence Layer - LocalStorage Heartbeat Fallback
*
* Provides no-op heartbeat for browser/dev environment.
*/
import type { HeartbeatConfig, HeartbeatResult } from '../intelligence-backend';
/**
 * No-op heartbeat fallback for browser/dev environments.
 * Configs live only in memory; there is no background scheduling.
 */
export const fallbackHeartbeat = {
  _configs: new Map<string, HeartbeatConfig>(),

  async init(agentId: string, config?: HeartbeatConfig): Promise<void> {
    if (!config) return;
    fallbackHeartbeat._configs.set(agentId, config);
  },
  async start(_agentId: string): Promise<void> {
    // Intentionally empty: no background tasks in the browser.
  },
  async stop(_agentId: string): Promise<void> {
    // Intentionally empty.
  },
  async tick(_agentId: string): Promise<HeartbeatResult> {
    // A fallback tick always succeeds with nothing to report.
    return {
      status: 'ok',
      alerts: [],
      checked_items: 0,
      timestamp: new Date().toISOString(),
    };
  },
  async getConfig(agentId: string): Promise<HeartbeatConfig> {
    const stored = fallbackHeartbeat._configs.get(agentId);
    if (stored) return stored;
    // Default configuration: heartbeat disabled.
    return {
      enabled: false,
      interval_minutes: 30,
      quiet_hours_start: null,
      quiet_hours_end: null,
      notify_channel: 'ui',
      proactivity_level: 'standard',
      max_alerts_per_tick: 5,
    };
  },
  async updateConfig(agentId: string, config: HeartbeatConfig): Promise<void> {
    fallbackHeartbeat._configs.set(agentId, config);
  },
  async getHistory(_agentId: string, _limit?: number): Promise<HeartbeatResult[]> {
    return [];
  },
};

View File

@@ -0,0 +1,239 @@
/**
* Intelligence Layer - LocalStorage Identity Fallback
*
* Provides localStorage-based identity management for browser/dev environment.
*/
import { createLogger } from '../logger';
import type { IdentityFiles, IdentityChangeProposal, IdentitySnapshot } from '../intelligence-backend';
const logger = createLogger('intelligence-client');
const IDENTITY_STORAGE_KEY = 'zclaw-fallback-identities';
const PROPOSALS_STORAGE_KEY = 'zclaw-fallback-proposals';
const SNAPSHOTS_STORAGE_KEY = 'zclaw-fallback-snapshots';
/** Read the per-agent identity map from localStorage; empty map on any failure. */
function loadIdentitiesFromStorage(): Map<string, IdentityFiles> {
  try {
    const raw = localStorage.getItem(IDENTITY_STORAGE_KEY);
    if (raw) {
      const records = JSON.parse(raw) as Record<string, IdentityFiles>;
      return new Map(Object.entries(records));
    }
  } catch (e) {
    logger.warn('Failed to load identities from localStorage', { error: e });
  }
  return new Map();
}
/** Persist the per-agent identity map to localStorage; failures are logged, not thrown. */
function saveIdentitiesToStorage(identities: Map<string, IdentityFiles>): void {
  try {
    localStorage.setItem(
      IDENTITY_STORAGE_KEY,
      JSON.stringify(Object.fromEntries(identities))
    );
  } catch (e) {
    logger.warn('Failed to save identities to localStorage', { error: e });
  }
}
/** Read the identity-change proposal list from localStorage; empty list on any failure. */
function loadProposalsFromStorage(): IdentityChangeProposal[] {
  try {
    const raw = localStorage.getItem(PROPOSALS_STORAGE_KEY);
    if (raw) return JSON.parse(raw) as IdentityChangeProposal[];
  } catch (e) {
    logger.warn('Failed to load proposals from localStorage', { error: e });
  }
  return [];
}
/** Persist the proposal list to localStorage; failures are logged, not thrown. */
function saveProposalsToStorage(proposals: IdentityChangeProposal[]): void {
  try {
    const payload = JSON.stringify(proposals);
    localStorage.setItem(PROPOSALS_STORAGE_KEY, payload);
  } catch (e) {
    logger.warn('Failed to save proposals to localStorage', { error: e });
  }
}
/** Read the identity snapshot list from localStorage; empty list on any failure. */
function loadSnapshotsFromStorage(): IdentitySnapshot[] {
  try {
    const raw = localStorage.getItem(SNAPSHOTS_STORAGE_KEY);
    if (raw) return JSON.parse(raw) as IdentitySnapshot[];
  } catch (e) {
    logger.warn('Failed to load snapshots from localStorage', { error: e });
  }
  return [];
}
/** Persist the snapshot list to localStorage; failures are logged, not thrown. */
function saveSnapshotsToStorage(snapshots: IdentitySnapshot[]): void {
  try {
    const payload = JSON.stringify(snapshots);
    localStorage.setItem(SNAPSHOTS_STORAGE_KEY, payload);
  } catch (e) {
    logger.warn('Failed to save snapshots to localStorage', { error: e });
  }
}
// Module-level state initialized from localStorage
const fallbackIdentities = loadIdentitiesFromStorage();
const fallbackProposals = loadProposalsFromStorage();
let fallbackSnapshots = loadSnapshotsFromStorage();
/**
 * localStorage-backed identity store for browser/dev environments.
 * Every mutation is persisted immediately through the save* helpers, so
 * state survives page reloads (subject to localStorage quota).
 */
export const fallbackIdentity = {
  /** Get an agent's identity files, creating (and persisting) defaults on first access. */
  async get(agentId: string): Promise<IdentityFiles> {
    if (!fallbackIdentities.has(agentId)) {
      const defaults: IdentityFiles = {
        soul: '# Agent Soul\n\nA helpful AI assistant.',
        instructions: '# Instructions\n\nBe helpful and concise.',
        user_profile: '# User Profile\n\nNo profile yet.',
      };
      fallbackIdentities.set(agentId, defaults);
      saveIdentitiesToStorage(fallbackIdentities);
    }
    return fallbackIdentities.get(agentId)!;
  },

  /** Read one identity file by name; empty string for unknown file names. */
  async getFile(agentId: string, file: string): Promise<string> {
    const files = await fallbackIdentity.get(agentId);
    return files[file as keyof IdentityFiles] ?? '';
  },

  /** Assemble the system prompt from the identity files plus optional memory context. */
  async buildPrompt(agentId: string, memoryContext?: string): Promise<string> {
    const files = await fallbackIdentity.get(agentId);
    let prompt = `${files.soul}\n\n## Instructions\n${files.instructions}\n\n## User Profile\n${files.user_profile}`;
    if (memoryContext) {
      prompt += `\n\n## Memory Context\n${memoryContext}`;
    }
    return prompt;
  },

  /** Replace the user profile file and persist. */
  async updateUserProfile(agentId: string, content: string): Promise<void> {
    const files = await fallbackIdentity.get(agentId);
    files.user_profile = content;
    fallbackIdentities.set(agentId, files);
    saveIdentitiesToStorage(fallbackIdentities);
  },

  /** Append a paragraph to the user profile file and persist. */
  async appendUserProfile(agentId: string, addition: string): Promise<void> {
    const files = await fallbackIdentity.get(agentId);
    files.user_profile += `\n\n${addition}`;
    fallbackIdentities.set(agentId, files);
    saveIdentitiesToStorage(fallbackIdentities);
  },

  /** Record a pending change proposal for soul/instructions; does not apply it. */
  async proposeChange(
    agentId: string,
    file: 'soul' | 'instructions',
    suggestedContent: string,
    reason: string
  ): Promise<IdentityChangeProposal> {
    const files = await fallbackIdentity.get(agentId);
    const proposal: IdentityChangeProposal = {
      id: `prop_${Date.now()}`,
      agent_id: agentId,
      file,
      reason,
      current_content: files[file] ?? '',
      suggested_content: suggestedContent,
      status: 'pending',
      created_at: new Date().toISOString(),
    };
    fallbackProposals.push(proposal);
    saveProposalsToStorage(fallbackProposals);
    return proposal;
  },

  /**
   * Apply a proposal: snapshot the current files first (so the change can be
   * rolled back), cap snapshots at 20 per agent, then write the new content.
   * Throws if the proposal id is unknown.
   */
  async approveProposal(proposalId: string): Promise<IdentityFiles> {
    const proposal = fallbackProposals.find(p => p.id === proposalId);
    if (!proposal) throw new Error('Proposal not found');
    const files = await fallbackIdentity.get(proposal.agent_id);
    // Create snapshot before applying change
    const snapshot: IdentitySnapshot = {
      id: `snap_${Date.now()}`,
      agent_id: proposal.agent_id,
      files: { ...files },
      timestamp: new Date().toISOString(),
      reason: `Before applying: ${proposal.reason}`,
    };
    fallbackSnapshots.unshift(snapshot);
    // Keep only last 20 snapshots per agent.
    // FIX: use a Set for the removal check — `Array.includes` made this
    // O(n^2) for no benefit; identity semantics are unchanged.
    const agentSnapshots = fallbackSnapshots.filter(s => s.agent_id === proposal.agent_id);
    if (agentSnapshots.length > 20) {
      const toRemove = new Set(agentSnapshots.slice(20));
      fallbackSnapshots = fallbackSnapshots.filter(s => !toRemove.has(s));
    }
    saveSnapshotsToStorage(fallbackSnapshots);
    proposal.status = 'approved';
    files[proposal.file] = proposal.suggested_content;
    fallbackIdentities.set(proposal.agent_id, files);
    saveIdentitiesToStorage(fallbackIdentities);
    saveProposalsToStorage(fallbackProposals);
    return files;
  },

  /** Mark a proposal rejected; unknown ids are silently ignored. */
  async rejectProposal(proposalId: string): Promise<void> {
    const proposal = fallbackProposals.find(p => p.id === proposalId);
    if (proposal) {
      proposal.status = 'rejected';
      saveProposalsToStorage(fallbackProposals);
    }
  },

  /** List pending proposals, optionally restricted to one agent. */
  async getPendingProposals(agentId?: string): Promise<IdentityChangeProposal[]> {
    return fallbackProposals.filter(p =>
      p.status === 'pending' && (!agentId || p.agent_id === agentId)
    );
  },

  /** Overwrite a known identity file; unknown file names are ignored. */
  async updateFile(agentId: string, file: string, content: string): Promise<void> {
    const files = await fallbackIdentity.get(agentId);
    // FIX: the original checked `file in files` twice in a row; one guard suffices.
    if (file in files) {
      files[file as keyof IdentityFiles] = content;
      fallbackIdentities.set(agentId, files);
      saveIdentitiesToStorage(fallbackIdentities);
    }
  },

  /** Return an agent's snapshots, newest first (they are unshifted on creation). */
  async getSnapshots(agentId: string, limit?: number): Promise<IdentitySnapshot[]> {
    const agentSnapshots = fallbackSnapshots.filter(s => s.agent_id === agentId);
    return agentSnapshots.slice(0, limit ?? 10);
  },

  /** Restore a snapshot, auto-backing up the current state first. Throws if not found. */
  async restoreSnapshot(agentId: string, snapshotId: string): Promise<void> {
    const snapshot = fallbackSnapshots.find(s => s.id === snapshotId && s.agent_id === agentId);
    if (!snapshot) throw new Error('Snapshot not found');
    // Create a snapshot of current state before restore
    const currentFiles = await fallbackIdentity.get(agentId);
    const beforeRestoreSnapshot: IdentitySnapshot = {
      id: `snap_${Date.now()}`,
      agent_id: agentId,
      files: { ...currentFiles },
      timestamp: new Date().toISOString(),
      reason: 'Auto-backup before restore',
    };
    fallbackSnapshots.unshift(beforeRestoreSnapshot);
    saveSnapshotsToStorage(fallbackSnapshots);
    // Restore the snapshot
    fallbackIdentities.set(agentId, { ...snapshot.files });
    saveIdentitiesToStorage(fallbackIdentities);
  },

  /** All agent ids with identity files. */
  async listAgents(): Promise<string[]> {
    return Array.from(fallbackIdentities.keys());
  },

  /** Remove an agent's identity files. */
  async deleteAgent(agentId: string): Promise<void> {
    fallbackIdentities.delete(agentId);
    // FIX: persist the deletion — previously localStorage was never updated,
    // so the deleted agent reappeared on the next page load.
    saveIdentitiesToStorage(fallbackIdentities);
  },
};

View File

@@ -0,0 +1,165 @@
/**
* Intelligence Layer - LocalStorage Memory Fallback
*
* Provides localStorage-based memory operations for browser/dev environment.
*/
import { createLogger } from '../logger';
import { generateRandomString } from '../crypto-utils';
import type { MemoryEntry, MemorySearchOptions, MemoryStats, MemoryType, MemorySource } from './types';
const logger = createLogger('intelligence-client');
import type { MemoryEntryInput } from '../intelligence-backend';
const FALLBACK_STORAGE_KEY = 'zclaw-intelligence-fallback';
interface FallbackMemoryStore {
memories: MemoryEntry[];
}
/** Parse the fallback memory store from localStorage; empty store on any failure. */
function getFallbackStore(): FallbackMemoryStore {
  try {
    const raw = localStorage.getItem(FALLBACK_STORAGE_KEY);
    if (raw) return JSON.parse(raw) as FallbackMemoryStore;
  } catch (e) {
    logger.debug('Failed to read fallback store from localStorage', { error: e });
  }
  return { memories: [] };
}
/** Persist the fallback memory store to localStorage; failures are logged, not thrown. */
function saveFallbackStore(store: FallbackMemoryStore): void {
  try {
    const payload = JSON.stringify(store);
    localStorage.setItem(FALLBACK_STORAGE_KEY, payload);
  } catch (e) {
    logger.warn('Failed to save fallback store to localStorage', { error: e });
  }
}
/**
 * localStorage-backed memory store for browser/dev environments.
 * Each operation re-reads the whole store and mutations re-write it, so
 * state survives reloads but is not safe for concurrent writers.
 */
export const fallbackMemory = {
  async init(): Promise<void> {
    // No-op for localStorage
  },

  /** Persist a new memory entry; returns the generated id. */
  async store(entry: MemoryEntryInput): Promise<string> {
    const store = getFallbackStore();
    const id = `mem_${Date.now()}_${generateRandomString(6)}`;
    const now = new Date().toISOString();
    const memory: MemoryEntry = {
      id,
      agentId: entry.agent_id,
      content: entry.content,
      type: entry.memory_type as MemoryType,
      importance: entry.importance ?? 5,
      source: (entry.source as MemorySource) ?? 'auto',
      tags: entry.tags ?? [],
      createdAt: now,
      lastAccessedAt: now,
      accessCount: 0,
      conversationId: entry.conversation_id,
    };
    store.memories.push(memory);
    saveFallbackStore(store);
    return id;
  },

  /** Look up a memory by id; null when absent. Does not bump accessCount. */
  async get(id: string): Promise<MemoryEntry | null> {
    const store = getFallbackStore();
    return store.memories.find(m => m.id === id) ?? null;
  },

  /**
   * Filter memories; all provided filters are ANDed together.
   * FIX: the `types` (multi-type) and `tags` filters declared on
   * MemorySearchOptions were previously ignored by this fallback.
   */
  async search(options: MemorySearchOptions): Promise<MemoryEntry[]> {
    const store = getFallbackStore();
    let results = store.memories;
    if (options.agentId) {
      results = results.filter(m => m.agentId === options.agentId);
    }
    if (options.type) {
      results = results.filter(m => m.type === options.type);
    }
    if (options.types && options.types.length > 0) {
      const wantedTypes = new Set(options.types);
      results = results.filter(m => wantedTypes.has(m.type));
    }
    if (options.tags && options.tags.length > 0) {
      // A memory matches when it carries at least one requested tag
      // (any-match; confirm against backend semantics if they diverge).
      const wantedTags = new Set(options.tags);
      results = results.filter(m => m.tags.some(t => wantedTags.has(t)));
    }
    if (options.minImportance !== undefined) {
      results = results.filter(m => m.importance >= options.minImportance!);
    }
    if (options.query) {
      // Case-insensitive substring match against content and tags.
      const queryLower = options.query.toLowerCase();
      results = results.filter(m =>
        m.content.toLowerCase().includes(queryLower) ||
        m.tags.some(t => t.toLowerCase().includes(queryLower))
      );
    }
    if (options.limit) {
      results = results.slice(0, options.limit);
    }
    return results;
  },

  /** Delete one memory by id; missing ids are a no-op. */
  async delete(id: string): Promise<void> {
    const store = getFallbackStore();
    store.memories = store.memories.filter(m => m.id !== id);
    saveFallbackStore(store);
  },

  /** Delete every memory of an agent; returns the number removed. */
  async deleteAll(agentId: string): Promise<number> {
    const store = getFallbackStore();
    const before = store.memories.length;
    store.memories = store.memories.filter(m => m.agentId !== agentId);
    saveFallbackStore(store);
    return before - store.memories.length;
  },

  /** Aggregate counts by type/agent plus oldest/newest timestamps and size. */
  async stats(): Promise<MemoryStats> {
    const store = getFallbackStore();
    const byType: Record<string, number> = {};
    const byAgent: Record<string, number> = {};
    for (const m of store.memories) {
      byType[m.type] = (byType[m.type] ?? 0) + 1;
      byAgent[m.agentId] = (byAgent[m.agentId] ?? 0) + 1;
    }
    const sorted = [...store.memories].sort((a, b) =>
      new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
    );
    // Estimate storage size from serialized data
    let storageSizeBytes = 0;
    try {
      const serialized = JSON.stringify(store.memories);
      storageSizeBytes = new Blob([serialized]).size;
    } catch (e) {
      logger.debug('Failed to estimate storage size', { error: e });
    }
    return {
      totalEntries: store.memories.length,
      byType,
      byAgent,
      oldestEntry: sorted[0]?.createdAt ?? null,
      newestEntry: sorted[sorted.length - 1]?.createdAt ?? null,
      storageSizeBytes,
    };
  },

  async export(): Promise<MemoryEntry[]> {
    const store = getFallbackStore();
    return store.memories;
  },

  /** Append imported memories (no deduplication); returns the count imported. */
  async import(memories: MemoryEntry[]): Promise<number> {
    const store = getFallbackStore();
    store.memories.push(...memories);
    saveFallbackStore(store);
    return memories.length;
  },

  async dbPath(): Promise<string> {
    return 'localStorage://zclaw-intelligence-fallback';
  },
};

View File

@@ -0,0 +1,167 @@
/**
* Intelligence Layer - LocalStorage Reflection Fallback
*
* Provides rule-based reflection for browser/dev environment.
*/
import type {
ReflectionResult,
ReflectionState,
ReflectionConfig,
PatternObservation,
ImprovementSuggestion,
ReflectionIdentityProposal,
MemoryEntryForAnalysis,
} from '../intelligence-backend';
/**
 * Rule-based reflection fallback for browser/dev environments.
 * All state is held in module memory (not persisted): a conversation
 * counter, the last-reflection timestamp, and a bounded result history.
 */
export const fallbackReflection = {
  // Conversations recorded since the last reflect() call.
  _conversationCount: 0,
  // ISO timestamp of the most recent reflect() call, or null if never run.
  _lastReflection: null as string | null,
  // Past reflection results, oldest first (trimmed in reflect()).
  _history: [] as ReflectionResult[],
  async init(_config?: ReflectionConfig): Promise<void> {
    // No-op — NOTE(review): the config is ignored; shouldReflect() below
    // uses a hard-coded threshold of 5. Confirm this is intended.
  },
  async recordConversation(): Promise<void> {
    fallbackReflection._conversationCount++;
  },
  async shouldReflect(): Promise<boolean> {
    // Reflect after every 5 recorded conversations (hard-coded threshold).
    return fallbackReflection._conversationCount >= 5;
  },
  /**
   * Run a rule-based analysis over the given memories: counts memory types,
   * emits PatternObservations / ImprovementSuggestions for notable counts,
   * and proposes an identity change when >= 2 negative patterns are found.
   * Resets the conversation counter and appends the result to _history.
   */
  async reflect(agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> {
    fallbackReflection._conversationCount = 0;
    fallbackReflection._lastReflection = new Date().toISOString();
    // Analyze patterns (simple rule-based implementation)
    const patterns: PatternObservation[] = [];
    const improvements: ImprovementSuggestion[] = [];
    const identityProposals: ReflectionIdentityProposal[] = [];
    // Count memory types
    const typeCounts: Record<string, number> = {};
    for (const m of memories) {
      typeCounts[m.memory_type] = (typeCounts[m.memory_type] || 0) + 1;
    }
    // Pattern: Too many tasks (>= 5 task memories → negative pattern + high-priority suggestion)
    const taskCount = typeCounts['task'] || 0;
    if (taskCount >= 5) {
      // Evidence is capped at the first three matching memories.
      const taskMemories = memories.filter(m => m.memory_type === 'task').slice(0, 3);
      patterns.push({
        observation: `积累了 ${taskCount} 个待办任务,可能存在任务管理不善`,
        frequency: taskCount,
        sentiment: 'negative',
        evidence: taskMemories.map(m => m.content),
      });
      improvements.push({
        area: '任务管理',
        suggestion: '清理已完成的任务记忆,对长期未处理的任务降低重要性',
        priority: 'high',
      });
    }
    // Pattern: Strong preference accumulation (>= 5 preference memories → positive)
    const prefCount = typeCounts['preference'] || 0;
    if (prefCount >= 5) {
      const prefMemories = memories.filter(m => m.memory_type === 'preference').slice(0, 3);
      patterns.push({
        observation: `已记录 ${prefCount} 个用户偏好,对用户习惯有较好理解`,
        frequency: prefCount,
        sentiment: 'positive',
        evidence: prefMemories.map(m => m.content),
      });
    }
    // Pattern: Lessons learned (>= 5 lesson memories → positive)
    const lessonCount = typeCounts['lesson'] || 0;
    if (lessonCount >= 5) {
      patterns.push({
        observation: `积累了 ${lessonCount} 条经验教训,知识库在成长`,
        frequency: lessonCount,
        sentiment: 'positive',
        evidence: memories.filter(m => m.memory_type === 'lesson').slice(0, 3).map(m => m.content),
      });
    }
    // Pattern: High-access important memories (>= 3 memories with access_count >= 5 and importance >= 7)
    const highAccessMemories = memories.filter(m => m.access_count >= 5 && m.importance >= 7);
    if (highAccessMemories.length >= 3) {
      patterns.push({
        observation: `${highAccessMemories.length} 条高频访问的重要记忆,核心知识正在形成`,
        frequency: highAccessMemories.length,
        sentiment: 'positive',
        evidence: highAccessMemories.slice(0, 3).map(m => m.content),
      });
    }
    // Pattern: Low importance memories accumulating (> 20 with importance <= 3 → cleanup suggestion)
    const lowImportanceCount = memories.filter(m => m.importance <= 3).length;
    if (lowImportanceCount > 20) {
      patterns.push({
        observation: `${lowImportanceCount} 条低重要性记忆,建议清理`,
        frequency: lowImportanceCount,
        sentiment: 'neutral',
        evidence: [],
      });
      improvements.push({
        area: '记忆管理',
        suggestion: '执行记忆清理移除30天以上未访问且重要性低于3的记忆',
        priority: 'medium',
      });
    }
    // Generate identity proposal if negative patterns exist (threshold: 2)
    const negativePatterns = patterns.filter(p => p.sentiment === 'negative');
    if (negativePatterns.length >= 2) {
      const additions = negativePatterns.map(p => `- 注意: ${p.observation}`).join('\n');
      identityProposals.push({
        agent_id: agentId,
        field: 'instructions',
        current_value: '...',
        proposed_value: `\n\n## 自我反思改进\n${additions}`,
        reason: `基于 ${negativePatterns.length} 个负面模式观察,建议在指令中增加自我改进提醒`,
      });
    }
    // Suggestion: User profile enrichment (fewer than 3 recorded preferences)
    if (prefCount < 3) {
      improvements.push({
        area: '用户理解',
        suggestion: '主动在对话中了解用户偏好(沟通风格、技术栈、工作习惯),丰富用户画像',
        priority: 'medium',
      });
    }
    const result: ReflectionResult = {
      patterns,
      improvements,
      identity_proposals: identityProposals,
      // new_memories counts frequent patterns (freq >= 3) plus high-priority suggestions.
      new_memories: patterns.filter(p => p.frequency >= 3).length + improvements.filter(i => i.priority === 'high').length,
      timestamp: new Date().toISOString(),
    };
    // Store in history; once it grows past 20 entries, keep only the last 10.
    fallbackReflection._history.push(result);
    if (fallbackReflection._history.length > 20) {
      fallbackReflection._history = fallbackReflection._history.slice(-10);
    }
    return result;
  },
  // Most recent results first; `limit` defaults to 10. The agentId filter is
  // accepted but unused in this fallback (history is not per-agent).
  async getHistory(limit?: number, _agentId?: string): Promise<ReflectionResult[]> {
    const l = limit ?? 10;
    return fallbackReflection._history.slice(-l).reverse();
  },
  async getState(): Promise<ReflectionState> {
    return {
      conversations_since_reflection: fallbackReflection._conversationCount,
      last_reflection_time: fallbackReflection._lastReflection,
      last_reflection_agent_id: null,
    };
  },
};

View File

@@ -0,0 +1,72 @@
/**
* Intelligence Layer - Barrel Re-export
*
* Re-exports everything from sub-modules to maintain backward compatibility.
* Existing imports like `import { intelligenceClient } from './intelligence-client'`
* continue to work unchanged because TypeScript resolves directory imports
* through this index.ts file.
*/
// Types
export type {
MemoryType,
MemorySource,
MemoryEntry,
MemorySearchOptions,
MemoryStats,
BehaviorPattern,
PatternTypeVariant,
PatternContext,
WorkflowRecommendation,
MeshConfig,
MeshAnalysisResult,
ActivityType,
EvolutionChangeType,
InsightCategory,
IdentityFileType,
ProposalStatus,
EvolutionProposal,
ProfileUpdate,
EvolutionInsight,
EvolutionResult,
PersonaEvolverConfig,
PersonaEvolverState,
} from './types';
export {
getPatternTypeString,
} from './types';
// Re-exported types from intelligence-backend
export type {
HeartbeatConfig,
HeartbeatResult,
HeartbeatAlert,
CompactableMessage,
CompactionResult,
CompactionCheck,
CompactionConfig,
PatternObservation,
ImprovementSuggestion,
ReflectionResult,
ReflectionState,
ReflectionConfig,
ReflectionIdentityProposal,
IdentityFiles,
IdentityChangeProposal,
IdentitySnapshot,
MemoryEntryForAnalysis,
} from './types';
// Type conversion utilities
export {
toFrontendMemory,
toBackendMemoryInput,
toBackendSearchOptions,
toFrontendStats,
parseTags,
} from './type-conversions';
// Unified client
export { intelligenceClient } from './unified-client';
export { intelligenceClient as default } from './unified-client';

View File

@@ -0,0 +1,101 @@
/**
* Intelligence Layer - Type Conversion Utilities
*
* Functions for converting between frontend and backend data formats.
*/
import { intelligence } from '../intelligence-backend';
import type {
MemoryEntryInput,
PersistentMemory,
MemorySearchOptions as BackendSearchOptions,
MemoryStats as BackendMemoryStats,
} from '../intelligence-backend';
import { createLogger } from '../logger';
import type { MemoryEntry, MemorySearchOptions, MemoryStats, MemoryType, MemorySource } from './types';
const logger = createLogger('intelligence-client');
// Re-import intelligence for use in conversions (already imported above but
// the `intelligence` binding is needed by unified-client.ts indirectly).
export { intelligence };
export type { MemoryEntryInput, PersistentMemory, BackendSearchOptions, BackendMemoryStats };
/**
* Convert backend PersistentMemory to frontend MemoryEntry format
*/
/**
 * Convert backend PersistentMemory (snake_case fields) to the frontend
 * MemoryEntry shape (camelCase). Tags are normalized via parseTags and a
 * null conversation_id becomes undefined.
 */
export function toFrontendMemory(backend: PersistentMemory): MemoryEntry {
  const entry: MemoryEntry = {
    id: backend.id,
    agentId: backend.agent_id,
    content: backend.content,
    type: backend.memory_type as MemoryType,
    importance: backend.importance,
    source: backend.source as MemorySource,
    tags: parseTags(backend.tags),
    createdAt: backend.created_at,
    lastAccessedAt: backend.last_accessed_at,
    accessCount: backend.access_count,
    conversationId: backend.conversation_id ?? undefined,
  };
  return entry;
}
/**
* Convert frontend MemoryEntry to backend MemoryEntryInput format
*/
/**
 * Convert a frontend MemoryEntry (minus server-managed fields) to the
 * backend MemoryEntryInput shape (snake_case).
 */
export function toBackendMemoryInput(entry: Omit<MemoryEntry, 'id' | 'createdAt' | 'lastAccessedAt' | 'accessCount'>): MemoryEntryInput {
  const { agentId, type, content, importance, source, tags, conversationId } = entry;
  return {
    agent_id: agentId,
    memory_type: type,
    content,
    importance,
    source,
    tags,
    conversation_id: conversationId,
  };
}
/**
* Convert frontend search options to backend format
*/
/** Convert frontend search options (camelCase) to the backend format (snake_case). */
export function toBackendSearchOptions(options: MemorySearchOptions): BackendSearchOptions {
  const { agentId, type, tags, query, limit, minImportance } = options;
  return {
    agent_id: agentId,
    memory_type: type,
    tags,
    query,
    limit,
    min_importance: minImportance,
  };
}
/**
* Convert backend stats to frontend format
*/
/**
 * Convert backend stats (snake_case) to the frontend MemoryStats shape.
 * A missing storage_size_bytes defaults to 0.
 */
export function toFrontendStats(backend: BackendMemoryStats): MemoryStats {
  const { total_entries, by_type, by_agent, oldest_entry, newest_entry, storage_size_bytes } = backend;
  return {
    totalEntries: total_entries,
    byType: by_type,
    byAgent: by_agent,
    oldestEntry: oldest_entry,
    newestEntry: newest_entry,
    storageSizeBytes: storage_size_bytes ?? 0,
  };
}
/**
* Parse tags from backend (JSON string or array)
*/
/**
 * Parse tags coming from the backend, which may be either an array or a
 * JSON-encoded string. Returns an empty array for empty/invalid input.
 */
export function parseTags(tags: string | string[]): string[] {
  if (Array.isArray(tags)) return tags;
  if (!tags) return [];
  try {
    const parsed: unknown = JSON.parse(tags);
    // FIX: JSON.parse can legally yield a non-array (e.g. '123' -> number,
    // '"x"' -> string); the original returned such values as-is, violating
    // the declared string[] contract. Only accept a real array.
    return Array.isArray(parsed) ? parsed : [];
  } catch (e) {
    logger.debug('JSON parse failed for tags, using fallback', { error: e });
    return [];
  }
}

View File

@@ -0,0 +1,199 @@
/**
* Intelligence Layer - Type Definitions
*
* All frontend types, mesh types, persona evolver types,
* and re-exports from intelligence-backend.
*/
// === Re-export types from intelligence-backend ===
export type {
HeartbeatConfig,
HeartbeatResult,
HeartbeatAlert,
CompactableMessage,
CompactionResult,
CompactionCheck,
CompactionConfig,
PatternObservation,
ImprovementSuggestion,
ReflectionResult,
ReflectionState,
ReflectionConfig,
ReflectionIdentityProposal,
IdentityFiles,
IdentityChangeProposal,
IdentitySnapshot,
MemoryEntryForAnalysis,
} from '../intelligence-backend';
// === Frontend Types (for backward compatibility) ===
export type MemoryType = 'fact' | 'preference' | 'lesson' | 'context' | 'task';
export type MemorySource = 'auto' | 'user' | 'reflection' | 'llm-reflection';
/** A long-term memory record in the frontend's camelCase shape. */
export interface MemoryEntry {
  id: string;               // unique id (fallback store uses `mem_<ts>_<rand>`)
  agentId: string;          // owning agent
  content: string;          // memory text
  type: MemoryType;
  importance: number;       // relevance weight; the fallback store defaults to 5
  source: MemorySource;
  tags: string[];
  createdAt: string;        // ISO-8601 timestamp
  lastAccessedAt: string;   // ISO-8601 timestamp
  accessCount: number;
  conversationId?: string;  // set when the memory was captured from a specific conversation
}
/** Filters for memory search; omitted fields place no constraint. */
export interface MemorySearchOptions {
  agentId?: string;
  type?: MemoryType;       // single-type filter
  types?: MemoryType[];    // multi-type filter
  tags?: string[];
  query?: string;          // free-text query (fallback uses substring match on content/tags)
  limit?: number;          // maximum number of results
  minImportance?: number;  // inclusive lower bound on importance
}
/** Aggregate statistics over the memory store (frontend camelCase shape). */
export interface MemoryStats {
  totalEntries: number;
  byType: Record<string, number>;   // entry count keyed by memory type
  byAgent: Record<string, number>;  // entry count keyed by agent id
  oldestEntry: string | null;       // ISO timestamp of the oldest entry, null when empty
  newestEntry: string | null;       // ISO timestamp of the newest entry, null when empty
  storageSizeBytes: number;         // estimated serialized size
}
// === Mesh Types ===
export type PatternTypeVariant =
| { type: 'SkillCombination'; skill_ids: string[] }
| { type: 'TemporalTrigger'; hand_id: string; time_pattern: string }
| { type: 'TaskPipelineMapping'; task_type: string; pipeline_id: string }
| { type: 'InputPattern'; keywords: string[]; intent: string };
export interface BehaviorPattern {
id: string;
pattern_type: PatternTypeVariant;
frequency: number;
last_occurrence: string;
first_occurrence: string;
confidence: number;
context: PatternContext;
}
/**
 * Extract the discriminant tag (e.g. 'SkillCombination') from a pattern-type
 * variant. Although the static type is always a tagged object, a bare string
 * is tolerated at runtime and returned as-is (defensive handling for
 * legacy/serialized data).
 */
export function getPatternTypeString(patternType: PatternTypeVariant): string {
  return typeof patternType === 'string' ? patternType : patternType.type;
}
/** Contextual signals captured alongside a detected pattern. All fields optional. */
export interface PatternContext {
  skill_ids?: string[];
  recent_topics?: string[];
  intent?: string;
  time_of_day?: number; // NOTE(review): presumably an hour 0-23 — confirm
  day_of_week?: number; // NOTE(review): presumably 0-6; starting day not visible here — confirm
}

/** A workflow (pipeline) recommendation derived from matched behavior patterns. */
export interface WorkflowRecommendation {
  id: string;
  pipeline_id: string; // pipeline being recommended
  confidence: number;
  reason: string; // human-readable justification
  suggested_inputs: Record<string, unknown>; // prefilled inputs for the pipeline
  patterns_matched: string[]; // ids of the BehaviorPatterns that triggered this
  timestamp: string;
}

/** Tuning knobs for the mesh analyzer. */
export interface MeshConfig {
  enabled: boolean;
  min_confidence: number; // recommendations below this are dropped
  max_recommendations: number;
  analysis_window_hours: number; // how far back activity is considered
}

/** Output of one mesh analysis run. */
export interface MeshAnalysisResult {
  recommendations: WorkflowRecommendation[];
  patterns_detected: number;
  timestamp: string;
}

/** User-activity events fed into the mesh analyzer (discriminated union). */
export type ActivityType =
  | { type: 'skill_used'; skill_ids: string[] }
  | { type: 'pipeline_executed'; task_type: string; pipeline_id: string }
  | { type: 'input_received'; keywords: string[]; intent: string };

// === Persona Evolver Types ===

/** Kind of change an evolution proposal makes to an identity file. */
export type EvolutionChangeType =
  | 'instruction_addition'
  | 'instruction_refinement'
  | 'trait_addition'
  | 'style_adjustment'
  | 'domain_expansion';

/** Category of an observation produced by the evolver. */
export type InsightCategory =
  | 'communication_style'
  | 'technical_expertise'
  | 'task_efficiency'
  | 'user_preference'
  | 'knowledge_gap';

/** Which identity file a proposal targets. */
export type IdentityFileType = 'soul' | 'instructions';

/** Review status of an evolution proposal. */
export type ProposalStatus = 'pending' | 'approved' | 'rejected';

/** A proposed change to an agent's identity file, awaiting user review. */
export interface EvolutionProposal {
  id: string;
  agent_id: string;
  target_file: IdentityFileType;
  change_type: EvolutionChangeType;
  reason: string;
  current_content: string; // file content before the change
  proposed_content: string; // file content after the change
  confidence: number;
  evidence: string[]; // observations supporting the proposal
  status: ProposalStatus;
  created_at: string;
}

/** A single applied edit to one section of the user profile. */
export interface ProfileUpdate {
  section: string;
  previous: string;
  updated: string;
  source: string; // where the update originated
}

/** An observation + recommendation pair produced during evolution. */
export interface EvolutionInsight {
  category: InsightCategory;
  observation: string;
  recommendation: string;
  confidence: number;
}

/** Output of one persona-evolution run for an agent. */
export interface EvolutionResult {
  agent_id: string;
  timestamp: string;
  profile_updates: ProfileUpdate[]; // updates applied automatically
  proposals: EvolutionProposal[]; // changes requiring review
  insights: EvolutionInsight[];
  evolved: boolean; // true when anything changed or was proposed
}

/** Configuration for the persona evolver. */
export interface PersonaEvolverConfig {
  auto_profile_update: boolean; // apply profile updates without review
  min_preferences_for_update: number;
  min_conversations_for_evolution: number;
  enable_instruction_refinement: boolean;
  enable_soul_evolution: boolean;
  max_proposals_per_cycle: number;
}

/** Persistent state tracked between evolution runs. */
export interface PersonaEvolverState {
  last_evolution: string | null; // null before the first run
  total_evolutions: number;
  pending_proposals: number;
  profile_enrichment_score: number;
}

View File

@@ -0,0 +1,561 @@
/**
* Intelligence Layer Unified Client
*
* Provides a unified API for intelligence operations that:
* - Uses Rust backend (via Tauri commands) when running in Tauri environment
* - Falls back to localStorage-based implementation in browser/dev environment
*
* Degradation strategy:
* - In Tauri mode: if a Tauri invoke fails, the error is logged and re-thrown.
* The caller is responsible for handling the error. We do NOT silently fall
* back to localStorage, because that would give users degraded functionality
* (localStorage instead of SQLite, rule-based instead of LLM-based, no-op
* instead of real execution) without any indication that something is wrong.
* - In browser/dev mode: localStorage fallback is the intended behavior for
* development and testing without a Tauri backend.
*
* This replaces direct usage of:
* - agent-memory.ts
* - heartbeat-engine.ts
* - context-compactor.ts
* - reflection-engine.ts
* - agent-identity.ts
*
* Usage:
* ```typescript
* import { intelligenceClient, toFrontendMemory, toBackendMemoryInput } from './intelligence-client';
*
* // Store memory
* const id = await intelligenceClient.memory.store({
* agent_id: 'agent-1',
* memory_type: 'fact',
* content: 'User prefers concise responses',
* importance: 7,
* });
*
* // Search memories
* const memories = await intelligenceClient.memory.search({
* agent_id: 'agent-1',
* query: 'user preference',
* limit: 10,
* });
*
* // Convert to frontend format if needed
* const frontendMemories = memories.map(toFrontendMemory);
* ```
*/
import { invoke } from '@tauri-apps/api/core';
import { isTauriRuntime } from '../tauri-gateway';
import { intelligence } from './type-conversions';
import type { PersistentMemory } from '../intelligence-backend';
import type {
HeartbeatConfig,
HeartbeatResult,
CompactableMessage,
CompactionResult,
CompactionCheck,
CompactionConfig,
ReflectionConfig,
ReflectionResult,
ReflectionState,
MemoryEntryForAnalysis,
IdentityFiles,
IdentityChangeProposal,
IdentitySnapshot,
} from '../intelligence-backend';
import type { MemoryEntry, MemorySearchOptions, MemoryStats } from './types';
import { toFrontendMemory, toBackendSearchOptions, toFrontendStats } from './type-conversions';
import { fallbackMemory } from './fallback-memory';
import { fallbackCompactor } from './fallback-compactor';
import { fallbackReflection } from './fallback-reflection';
import { fallbackIdentity } from './fallback-identity';
import { fallbackHeartbeat } from './fallback-heartbeat';
/**
 * Wrap a Tauri invoke call so that failures are logged (with the operation
 * label for context) and then re-thrown, instead of being silently swallowed
 * or falling back to localStorage implementations.
 */
async function tauriInvoke<T>(label: string, fn: () => Promise<T>): Promise<T> {
  try {
    return await fn();
  } catch (e) {
    console.warn(`[IntelligenceClient] Tauri invoke failed (${label}):`, e);
    throw e;
  }
}
/**
 * Unified intelligence client that automatically selects backend or fallback.
 *
 * - In Tauri mode: calls Rust backend via invoke(). On failure, logs a warning
 *   and re-throws -- does NOT fall back to localStorage.
 * - In browser/dev mode: uses localStorage-based fallback implementations.
 *
 * Every method checks isTauriRuntime() at call time, so the selection is
 * per-call rather than fixed at module load.
 */
export const intelligenceClient = {
  // --- Memory: persistent agent memory. Tauri -> Rust backend; browser -> localStorage fallback. ---
  memory: {
    init: async (): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('memory.init', () => intelligence.memory.init());
      } else {
        await fallbackMemory.init();
      }
    },
    store: async (entry: import('../intelligence-backend').MemoryEntryInput): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.store', () => intelligence.memory.store(entry));
      }
      return fallbackMemory.store(entry);
    },
    get: async (id: string): Promise<MemoryEntry | null> => {
      if (isTauriRuntime()) {
        // Backend returns a snake_case row; convert to the frontend camelCase shape.
        const result = await tauriInvoke('memory.get', () => intelligence.memory.get(id));
        return result ? toFrontendMemory(result) : null;
      }
      return fallbackMemory.get(id);
    },
    search: async (options: MemorySearchOptions): Promise<MemoryEntry[]> => {
      if (isTauriRuntime()) {
        // Options and results are translated between frontend and backend shapes.
        const results = await tauriInvoke('memory.search', () =>
          intelligence.memory.search(toBackendSearchOptions(options))
        );
        return results.map(toFrontendMemory);
      }
      return fallbackMemory.search(options);
    },
    delete: async (id: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('memory.delete', () => intelligence.memory.delete(id));
      } else {
        await fallbackMemory.delete(id);
      }
    },
    deleteAll: async (agentId: string): Promise<number> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.deleteAll', () => intelligence.memory.deleteAll(agentId));
      }
      return fallbackMemory.deleteAll(agentId);
    },
    stats: async (): Promise<MemoryStats> => {
      if (isTauriRuntime()) {
        const stats = await tauriInvoke('memory.stats', () => intelligence.memory.stats());
        return toFrontendStats(stats);
      }
      return fallbackMemory.stats();
    },
    export: async (): Promise<MemoryEntry[]> => {
      if (isTauriRuntime()) {
        const results = await tauriInvoke('memory.export', () => intelligence.memory.export());
        return results.map(toFrontendMemory);
      }
      return fallbackMemory.export();
    },
    import: async (memories: MemoryEntry[]): Promise<number> => {
      if (isTauriRuntime()) {
        // Map frontend camelCase entries to the backend snake_case row shape.
        // NOTE(review): the `...m` spread keeps the camelCase fields (agentId,
        // lastAccessedAt, type, ...) alongside the snake_case ones — presumably
        // the Rust deserializer ignores unknown fields, but confirm.
        const backendMemories = memories.map(m => ({
          ...m,
          agent_id: m.agentId,
          memory_type: m.type,
          last_accessed_at: m.lastAccessedAt,
          created_at: m.createdAt,
          access_count: m.accessCount,
          conversation_id: m.conversationId ?? null,
          tags: JSON.stringify(m.tags),
          embedding: null,
        }));
        return tauriInvoke('memory.import', () =>
          intelligence.memory.import(backendMemories as PersistentMemory[])
        );
      }
      return fallbackMemory.import(memories);
    },
    dbPath: async (): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.dbPath', () => intelligence.memory.dbPath());
      }
      return fallbackMemory.dbPath();
    },
    buildContext: async (
      agentId: string,
      query: string,
      maxTokens?: number,
    ): Promise<{ systemPromptAddition: string; totalTokens: number; memoriesUsed: number }> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.buildContext', () =>
          intelligence.memory.buildContext(agentId, query, maxTokens ?? null)
        );
      }
      // Browser/dev fallback: use basic search
      const memories = await fallbackMemory.search({
        agentId,
        query,
        limit: 8,
        minImportance: 3,
      });
      const addition = memories.length > 0
        ? `## 相关记忆\n${memories.map(m => `- [${m.type}] ${m.content}`).join('\n')}`
        : '';
      // The fallback does not estimate token usage; totalTokens is reported as 0.
      return { systemPromptAddition: addition, totalTokens: 0, memoriesUsed: memories.length };
    },
  },
  // --- Heartbeat: periodic agent self-check lifecycle and counters. ---
  heartbeat: {
    init: async (agentId: string, config?: HeartbeatConfig): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.init', () => intelligence.heartbeat.init(agentId, config));
      } else {
        await fallbackHeartbeat.init(agentId, config);
      }
    },
    start: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.start', () => intelligence.heartbeat.start(agentId));
      } else {
        await fallbackHeartbeat.start(agentId);
      }
    },
    stop: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.stop', () => intelligence.heartbeat.stop(agentId));
      } else {
        await fallbackHeartbeat.stop(agentId);
      }
    },
    tick: async (agentId: string): Promise<HeartbeatResult> => {
      if (isTauriRuntime()) {
        return tauriInvoke('heartbeat.tick', () => intelligence.heartbeat.tick(agentId));
      }
      return fallbackHeartbeat.tick(agentId);
    },
    getConfig: async (agentId: string): Promise<HeartbeatConfig> => {
      if (isTauriRuntime()) {
        return tauriInvoke('heartbeat.getConfig', () => intelligence.heartbeat.getConfig(agentId));
      }
      return fallbackHeartbeat.getConfig(agentId);
    },
    updateConfig: async (agentId: string, config: HeartbeatConfig): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.updateConfig', () =>
          intelligence.heartbeat.updateConfig(agentId, config)
        );
      } else {
        await fallbackHeartbeat.updateConfig(agentId, config);
      }
    },
    getHistory: async (agentId: string, limit?: number): Promise<HeartbeatResult[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('heartbeat.getHistory', () =>
          intelligence.heartbeat.getHistory(agentId, limit)
        );
      }
      return fallbackHeartbeat.getHistory(agentId, limit);
    },
    // The three methods below have no wrapper in `intelligence.*`, so they call
    // the Tauri command via raw invoke().
    // NOTE(review): the invoke args use snake_case keys; Tauri commands usually
    // expect camelCase unless the Rust side renames — confirm against the
    // command definitions.
    updateMemoryStats: async (
      agentId: string,
      taskCount: number,
      totalEntries: number,
      storageSizeBytes: number
    ): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.updateMemoryStats', () =>
          invoke('heartbeat_update_memory_stats', {
            agent_id: agentId,
            task_count: taskCount,
            total_entries: totalEntries,
            storage_size_bytes: storageSizeBytes,
          })
        );
      } else {
        // Browser/dev fallback only
        const cache = {
          taskCount,
          totalEntries,
          storageSizeBytes,
          lastUpdated: new Date().toISOString(),
        };
        localStorage.setItem(`zclaw-memory-stats-${agentId}`, JSON.stringify(cache));
      }
    },
    recordCorrection: async (agentId: string, correctionType: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.recordCorrection', () =>
          invoke('heartbeat_record_correction', {
            agent_id: agentId,
            correction_type: correctionType,
          })
        );
      } else {
        // Browser/dev fallback only: per-type correction counters in localStorage.
        const key = `zclaw-corrections-${agentId}`;
        const stored = localStorage.getItem(key);
        const counters = stored ? JSON.parse(stored) : {};
        counters[correctionType] = (counters[correctionType] || 0) + 1;
        localStorage.setItem(key, JSON.stringify(counters));
      }
    },
    recordInteraction: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.recordInteraction', () =>
          invoke('heartbeat_record_interaction', {
            agent_id: agentId,
          })
        );
      } else {
        // Browser/dev fallback only: remember the last-interaction timestamp.
        localStorage.setItem(`zclaw-last-interaction-${agentId}`, new Date().toISOString());
      }
    },
  },
  // --- Compactor: token estimation and conversation compaction. ---
  compactor: {
    estimateTokens: async (text: string): Promise<number> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.estimateTokens', () =>
          intelligence.compactor.estimateTokens(text)
        );
      }
      return fallbackCompactor.estimateTokens(text);
    },
    estimateMessagesTokens: async (messages: CompactableMessage[]): Promise<number> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.estimateMessagesTokens', () =>
          intelligence.compactor.estimateMessagesTokens(messages)
        );
      }
      return fallbackCompactor.estimateMessagesTokens(messages);
    },
    checkThreshold: async (
      messages: CompactableMessage[],
      config?: CompactionConfig
    ): Promise<CompactionCheck> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.checkThreshold', () =>
          intelligence.compactor.checkThreshold(messages, config)
        );
      }
      return fallbackCompactor.checkThreshold(messages, config);
    },
    compact: async (
      messages: CompactableMessage[],
      agentId: string,
      conversationId?: string,
      config?: CompactionConfig
    ): Promise<CompactionResult> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.compact', () =>
          intelligence.compactor.compact(messages, agentId, conversationId, config)
        );
      }
      return fallbackCompactor.compact(messages, agentId, conversationId, config);
    },
  },
  // --- Reflection: periodic self-analysis over accumulated memories. ---
  reflection: {
    init: async (config?: ReflectionConfig): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('reflection.init', () => intelligence.reflection.init(config));
      } else {
        await fallbackReflection.init(config);
      }
    },
    recordConversation: async (): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('reflection.recordConversation', () =>
          intelligence.reflection.recordConversation()
        );
      } else {
        await fallbackReflection.recordConversation();
      }
    },
    shouldReflect: async (): Promise<boolean> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.shouldReflect', () =>
          intelligence.reflection.shouldReflect()
        );
      }
      return fallbackReflection.shouldReflect();
    },
    reflect: async (agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.reflect', () =>
          intelligence.reflection.reflect(agentId, memories)
        );
      }
      return fallbackReflection.reflect(agentId, memories);
    },
    getHistory: async (limit?: number, agentId?: string): Promise<ReflectionResult[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.getHistory', () =>
          intelligence.reflection.getHistory(limit, agentId)
        );
      }
      return fallbackReflection.getHistory(limit, agentId);
    },
    getState: async (): Promise<ReflectionState> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.getState', () => intelligence.reflection.getState());
      }
      return fallbackReflection.getState();
    },
  },
  // --- Identity: agent identity files, change proposals, and snapshots. ---
  // Note: getFile/updateFile accept an arbitrary file name string, while
  // proposeChange is restricted to 'soul' | 'instructions'.
  identity: {
    get: async (agentId: string): Promise<IdentityFiles> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.get', () => intelligence.identity.get(agentId));
      }
      return fallbackIdentity.get(agentId);
    },
    getFile: async (agentId: string, file: string): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.getFile', () => intelligence.identity.getFile(agentId, file));
      }
      return fallbackIdentity.getFile(agentId, file);
    },
    buildPrompt: async (agentId: string, memoryContext?: string): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.buildPrompt', () =>
          intelligence.identity.buildPrompt(agentId, memoryContext)
        );
      }
      return fallbackIdentity.buildPrompt(agentId, memoryContext);
    },
    updateUserProfile: async (agentId: string, content: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.updateUserProfile', () =>
          intelligence.identity.updateUserProfile(agentId, content)
        );
      } else {
        await fallbackIdentity.updateUserProfile(agentId, content);
      }
    },
    appendUserProfile: async (agentId: string, addition: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.appendUserProfile', () =>
          intelligence.identity.appendUserProfile(agentId, addition)
        );
      } else {
        await fallbackIdentity.appendUserProfile(agentId, addition);
      }
    },
    proposeChange: async (
      agentId: string,
      file: 'soul' | 'instructions',
      suggestedContent: string,
      reason: string
    ): Promise<IdentityChangeProposal> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.proposeChange', () =>
          intelligence.identity.proposeChange(agentId, file, suggestedContent, reason)
        );
      }
      return fallbackIdentity.proposeChange(agentId, file, suggestedContent, reason);
    },
    approveProposal: async (proposalId: string): Promise<IdentityFiles> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.approveProposal', () =>
          intelligence.identity.approveProposal(proposalId)
        );
      }
      return fallbackIdentity.approveProposal(proposalId);
    },
    rejectProposal: async (proposalId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.rejectProposal', () =>
          intelligence.identity.rejectProposal(proposalId)
        );
      } else {
        await fallbackIdentity.rejectProposal(proposalId);
      }
    },
    getPendingProposals: async (agentId?: string): Promise<IdentityChangeProposal[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.getPendingProposals', () =>
          intelligence.identity.getPendingProposals(agentId)
        );
      }
      return fallbackIdentity.getPendingProposals(agentId);
    },
    updateFile: async (agentId: string, file: string, content: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.updateFile', () =>
          intelligence.identity.updateFile(agentId, file, content)
        );
      } else {
        await fallbackIdentity.updateFile(agentId, file, content);
      }
    },
    getSnapshots: async (agentId: string, limit?: number): Promise<IdentitySnapshot[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.getSnapshots', () =>
          intelligence.identity.getSnapshots(agentId, limit)
        );
      }
      return fallbackIdentity.getSnapshots(agentId, limit);
    },
    restoreSnapshot: async (agentId: string, snapshotId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.restoreSnapshot', () =>
          intelligence.identity.restoreSnapshot(agentId, snapshotId)
        );
      } else {
        await fallbackIdentity.restoreSnapshot(agentId, snapshotId);
      }
    },
    listAgents: async (): Promise<string[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.listAgents', () => intelligence.identity.listAgents());
      }
      return fallbackIdentity.listAgents();
    },
    deleteAgent: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.deleteAgent', () => intelligence.identity.deleteAgent(agentId));
      } else {
        await fallbackIdentity.deleteAgent(agentId);
      }
    },
  },
};
export default intelligenceClient;

View File

@@ -56,6 +56,9 @@ export function installChatMethods(ClientClass: { prototype: KernelClient }): vo
opts?: { opts?: {
sessionKey?: string; sessionKey?: string;
agentId?: string; agentId?: string;
thinking_enabled?: boolean;
reasoning_effort?: string;
plan_mode?: boolean;
} }
): Promise<{ runId: string }> { ): Promise<{ runId: string }> {
const runId = crypto.randomUUID(); const runId = crypto.randomUUID();
@@ -68,6 +71,20 @@ export function installChatMethods(ClientClass: { prototype: KernelClient }): vo
} }
let unlisten: UnlistenFn | null = null; let unlisten: UnlistenFn | null = null;
let completed = false;
// Stream timeout — prevent hanging forever if backend never sends complete/error
const STREAM_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes
const timeoutId = setTimeout(() => {
if (!completed) {
completed = true;
log.warn('Stream timeout — no complete/error event received');
callbacks.onError('响应超时,请重试');
if (unlisten) {
unlisten();
unlisten = null;
}
}
}, STREAM_TIMEOUT_MS);
try { try {
// Set up event listener for stream chunks // Set up event listener for stream chunks
@@ -129,6 +146,8 @@ export function installChatMethods(ClientClass: { prototype: KernelClient }): vo
case 'complete': case 'complete':
log.debug('Stream complete:', streamEvent.inputTokens, streamEvent.outputTokens); log.debug('Stream complete:', streamEvent.inputTokens, streamEvent.outputTokens);
completed = true;
clearTimeout(timeoutId);
callbacks.onComplete(streamEvent.inputTokens, streamEvent.outputTokens); callbacks.onComplete(streamEvent.inputTokens, streamEvent.outputTokens);
// Clean up listener // Clean up listener
if (unlisten) { if (unlisten) {
@@ -139,6 +158,8 @@ export function installChatMethods(ClientClass: { prototype: KernelClient }): vo
case 'error': case 'error':
log.error('Stream error:', streamEvent.message); log.error('Stream error:', streamEvent.message);
completed = true;
clearTimeout(timeoutId);
callbacks.onError(streamEvent.message); callbacks.onError(streamEvent.message);
// Clean up listener // Clean up listener
if (unlisten) { if (unlisten) {
@@ -155,6 +176,9 @@ export function installChatMethods(ClientClass: { prototype: KernelClient }): vo
agentId, agentId,
sessionId, sessionId,
message, message,
thinkingEnabled: opts?.thinking_enabled,
reasoningEffort: opts?.reasoning_effort,
planMode: opts?.plan_mode,
}, },
}); });
} catch (err: unknown) { } catch (err: unknown) {

View File

@@ -403,7 +403,7 @@ export interface KernelClient {
// Chat (kernel-chat.ts) // Chat (kernel-chat.ts)
chat(message: string, opts?: { sessionKey?: string; agentId?: string }): Promise<{ runId: string; sessionId?: string; response?: string }>; chat(message: string, opts?: { sessionKey?: string; agentId?: string }): Promise<{ runId: string; sessionId?: string; response?: string }>;
chatStream(message: string, callbacks: import('./kernel-types').StreamCallbacks, opts?: { sessionKey?: string; agentId?: string }): Promise<{ runId: string }>; chatStream(message: string, callbacks: import('./kernel-types').StreamCallbacks, opts?: { sessionKey?: string; agentId?: string; thinking_enabled?: boolean; reasoning_effort?: string; plan_mode?: boolean }): Promise<{ runId: string }>;
cancelStream(runId: string): void; cancelStream(runId: string): void;
fetchDefaultAgentId(): Promise<string | null>; fetchDefaultAgentId(): Promise<string | null>;
setDefaultAgentId(agentId: string): void; setDefaultAgentId(agentId: string): void;

View File

@@ -10,6 +10,9 @@ import { useConnectionStore } from './connectionStore';
import { createLogger } from '../lib/logger'; import { createLogger } from '../lib/logger';
import { speechSynth } from '../lib/speech-synth'; import { speechSynth } from '../lib/speech-synth';
import { generateRandomString } from '../lib/crypto-utils'; import { generateRandomString } from '../lib/crypto-utils';
import type { ChatModeType, ChatModeConfig, Subtask } from '../components/ai';
import type { ToolCallStep } from '../components/ai';
import { CHAT_MODES } from '../components/ai';
const log = createLogger('ChatStore'); const log = createLogger('ChatStore');
@@ -49,6 +52,12 @@ export interface Message {
// Output files and code blocks // Output files and code blocks
files?: MessageFile[]; files?: MessageFile[];
codeBlocks?: CodeBlock[]; codeBlocks?: CodeBlock[];
// AI Enhancement fields (DeerFlow-inspired)
thinkingContent?: string; // Extended thinking/reasoning content
subtasks?: Subtask[]; // Sub-agent task tracking
toolSteps?: ToolCallStep[]; // Tool call steps chain (DeerFlow-inspired)
// Optimistic message flag (Phase 4: DeerFlow-inspired 3-phase optimistic rendering)
optimistic?: boolean; // true = awaiting server confirmation, false/undefined = confirmed
} }
export interface Conversation { export interface Conversation {
@@ -90,6 +99,14 @@ interface ChatState {
// Token usage tracking // Token usage tracking
totalInputTokens: number; totalInputTokens: number;
totalOutputTokens: number; totalOutputTokens: number;
// Chat mode (DeerFlow-inspired)
chatMode: ChatModeType;
// Follow-up suggestions
suggestions: string[];
// Artifacts (DeerFlow-inspired)
artifacts: import('../components/ai/ArtifactPanel').ArtifactFile[];
selectedArtifactId: string | null;
artifactPanelOpen: boolean;
addMessage: (message: Message) => void; addMessage: (message: Message) => void;
updateMessage: (id: string, updates: Partial<Message>) => void; updateMessage: (id: string, updates: Partial<Message>) => void;
@@ -105,6 +122,17 @@ interface ChatState {
addTokenUsage: (inputTokens: number, outputTokens: number) => void; addTokenUsage: (inputTokens: number, outputTokens: number) => void;
getTotalTokens: () => { input: number; output: number; total: number }; getTotalTokens: () => { input: number; output: number; total: number };
searchSkills: (query: string) => { results: Array<{ id: string; name: string; description: string }>; totalAvailable: number }; searchSkills: (query: string) => { results: Array<{ id: string; name: string; description: string }>; totalAvailable: number };
// Chat mode and suggestions (DeerFlow-inspired)
setChatMode: (mode: ChatModeType) => void;
getChatModeConfig: () => ChatModeConfig;
setSuggestions: (suggestions: string[]) => void;
addSubtask: (messageId: string, task: Subtask) => void;
updateSubtask: (messageId: string, taskId: string, updates: Partial<Subtask>) => void;
// Artifact management (DeerFlow-inspired)
addArtifact: (artifact: import('../components/ai/ArtifactPanel').ArtifactFile) => void;
selectArtifact: (id: string | null) => void;
setArtifactPanelOpen: (open: boolean) => void;
clearArtifacts: () => void;
} }
function generateConvId(): string { function generateConvId(): string {
@@ -189,6 +217,44 @@ function upsertActiveConversation(
return [nextConversation, ...conversations]; return [nextConversation, ...conversations];
} }
/**
 * Generate follow-up suggestions for an assistant response.
 *
 * Matches keyword heuristics against the (lowercased) response text, collects
 * up to three contextually relevant follow-ups, and pads with generic ones so
 * the result normally contains exactly three suggestions.
 */
function generateFollowUpSuggestions(content: string): string[] {
  const lower = content.toLowerCase();
  const patterns: Array<{ keywords: string[]; suggestion: string }> = [
    { keywords: ['代码', 'code', 'function', '函数', '实现'], suggestion: '解释这段代码的工作原理' },
    { keywords: ['错误', 'error', 'bug', '问题'], suggestion: '如何调试这个问题?' },
    { keywords: ['数据', 'data', '分析', '统计'], suggestion: '可视化这些数据' },
    { keywords: ['步骤', 'step', '流程', '方案'], suggestion: '详细说明第一步该怎么做' },
    { keywords: ['可以', '建议', '推荐', '试试'], suggestion: '还有其他方案吗?' },
    { keywords: ['文件', 'file', '保存', '写入'], suggestion: '查看生成的文件内容' },
    { keywords: ['搜索', 'search', '查找', 'research'], suggestion: '搜索更多相关信息' },
  ];

  const picked: string[] = [];
  for (const { keywords, suggestion } of patterns) {
    const matched = keywords.some((kw) => lower.includes(kw));
    if (matched && !picked.includes(suggestion)) {
      picked.push(suggestion);
    }
    if (picked.length >= 3) break;
  }

  // Pad with generic follow-ups (in order) until three suggestions exist.
  const generic = ['继续深入分析', '换个角度看看', '用简单的话解释'];
  for (const g of generic) {
    if (picked.length >= 3) break;
    if (!picked.includes(g)) {
      picked.push(g);
    }
  }
  return picked;
}
export const useChatStore = create<ChatState>()( export const useChatStore = create<ChatState>()(
persist( persist(
(set, get) => ({ (set, get) => ({
@@ -203,6 +269,11 @@ export const useChatStore = create<ChatState>()(
sessionKey: null, sessionKey: null,
totalInputTokens: 0, totalInputTokens: 0,
totalOutputTokens: 0, totalOutputTokens: 0,
chatMode: 'thinking' as ChatModeType,
suggestions: [],
artifacts: [],
selectedArtifactId: null,
artifactPanelOpen: false,
addMessage: (message: Message) => addMessage: (message: Message) =>
set((state) => ({ messages: [...state.messages, message] })), set((state) => ({ messages: [...state.messages, message] })),
@@ -331,6 +402,8 @@ export const useChatStore = create<ChatState>()(
sendMessage: async (content: string) => { sendMessage: async (content: string) => {
const { addMessage, currentAgent, sessionKey } = get(); const { addMessage, currentAgent, sessionKey } = get();
// Clear stale suggestions when user sends a new message
set({ suggestions: [] });
const effectiveSessionKey = sessionKey || crypto.randomUUID(); const effectiveSessionKey = sessionKey || crypto.randomUUID();
const effectiveAgentId = resolveGatewayAgentId(currentAgent); const effectiveAgentId = resolveGatewayAgentId(currentAgent);
const agentId = currentAgent?.id || 'zclaw-main'; const agentId = currentAgent?.id || 'zclaw-main';
@@ -386,11 +459,14 @@ export const useChatStore = create<ChatState>()(
} }
// Add user message (original content for display) // Add user message (original content for display)
// Mark as optimistic -- will be cleared when server confirms via onComplete
const streamStartTime = Date.now();
const userMsg: Message = { const userMsg: Message = {
id: `user_${Date.now()}`, id: `user_${streamStartTime}`,
role: 'user', role: 'user',
content, content,
timestamp: new Date(), timestamp: new Date(streamStartTime),
optimistic: true,
}; };
addMessage(userMsg); addMessage(userMsg);
@@ -421,6 +497,11 @@ export const useChatStore = create<ChatState>()(
// Declare runId before chatStream so callbacks can access it // Declare runId before chatStream so callbacks can access it
let runId = `run_${Date.now()}`; let runId = `run_${Date.now()}`;
// F5: Persist sessionKey before starting stream to survive page reload mid-stream
if (!get().sessionKey) {
set({ sessionKey: effectiveSessionKey });
}
// Try streaming first (ZCLAW WebSocket) // Try streaming first (ZCLAW WebSocket)
const result = await client.chatStream( const result = await client.chatStream(
enhancedContent, enhancedContent,
@@ -436,17 +517,22 @@ export const useChatStore = create<ChatState>()(
})); }));
}, },
onTool: (tool: string, input: string, output: string) => { onTool: (tool: string, input: string, output: string) => {
const toolMsg: Message = { const step: ToolCallStep = {
id: `tool_${Date.now()}_${generateRandomString(4)}`, id: `step_${Date.now()}_${generateRandomString(4)}`,
role: 'tool',
content: output || input,
timestamp: new Date(),
runId,
toolName: tool, toolName: tool,
toolInput: input, input,
toolOutput: output, output,
status: output ? 'completed' : 'running',
timestamp: new Date(),
}; };
set((state) => ({ messages: [...state.messages, toolMsg] })); // Add step to the streaming assistant message's toolSteps
set((s) => ({
messages: s.messages.map((m) =>
m.id === assistantId
? { ...m, toolSteps: [...(m.toolSteps || []), step] }
: m
),
}));
}, },
onHand: (name: string, status: string, result?: unknown) => { onHand: (name: string, status: string, result?: unknown) => {
const handMsg: Message = { const handMsg: Message = {
@@ -492,9 +578,16 @@ export const useChatStore = create<ChatState>()(
isStreaming: false, isStreaming: false,
conversations, conversations,
currentConversationId: currentConvId, currentConversationId: currentConvId,
messages: state.messages.map((m) => messages: state.messages.map((m) => {
m.id === assistantId ? { ...m, streaming: false, runId } : m if (m.id === assistantId) {
), return { ...m, streaming: false, runId };
}
// Clear optimistic flag on user messages (server confirmed)
if (m.optimistic) {
return { ...m, optimistic: false };
}
return m;
}),
}); });
// Track token usage if provided (KernelClient provides these) // Track token usage if provided (KernelClient provides these)
@@ -520,6 +613,16 @@ export const useChatStore = create<ChatState>()(
}); });
} }
}); });
// Generate follow-up suggestions (DeerFlow-inspired)
const assistantMsg = get().messages.find(m => m.id === assistantId);
if (assistantMsg?.content) {
const content = assistantMsg.content;
const suggestions = generateFollowUpSuggestions(content);
if (suggestions.length > 0) {
get().setSuggestions(suggestions);
}
}
}, },
onError: (error: string) => { onError: (error: string) => {
set((state) => ({ set((state) => ({
@@ -527,7 +630,9 @@ export const useChatStore = create<ChatState>()(
messages: state.messages.map((m) => messages: state.messages.map((m) =>
m.id === assistantId m.id === assistantId
? { ...m, content: `⚠️ ${error}`, streaming: false, error } ? { ...m, content: `⚠️ ${error}`, streaming: false, error }
: m : m.role === 'user' && m.optimistic && m.timestamp.getTime() >= streamStartTime
? { ...m, optimistic: false }
: m
), ),
})); }));
}, },
@@ -535,6 +640,9 @@ export const useChatStore = create<ChatState>()(
{ {
sessionKey: effectiveSessionKey, sessionKey: effectiveSessionKey,
agentId: effectiveAgentId, agentId: effectiveAgentId,
thinking_enabled: get().getChatModeConfig().thinking_enabled,
reasoning_effort: get().getChatModeConfig().reasoning_effort,
plan_mode: get().getChatModeConfig().plan_mode,
} }
); );
@@ -566,7 +674,9 @@ export const useChatStore = create<ChatState>()(
streaming: false, streaming: false,
error: errorMessage, error: errorMessage,
} }
: m : m.role === 'user' && m.optimistic && m.timestamp.getTime() >= streamStartTime
? { ...m, optimistic: false }
: m
), ),
})); }));
} }
@@ -592,6 +702,50 @@ export const useChatStore = create<ChatState>()(
}; };
}, },
// Chat mode (DeerFlow-inspired)
setChatMode: (mode: ChatModeType) => set({ chatMode: mode }),
getChatModeConfig: () => CHAT_MODES[get().chatMode].config,
setSuggestions: (suggestions: string[]) => set({ suggestions }),
addSubtask: (messageId: string, task: Subtask) =>
set((state) => ({
messages: state.messages.map((m) =>
m.id === messageId
? { ...m, subtasks: [...(m.subtasks || []), task] }
: m
),
})),
updateSubtask: (messageId: string, taskId: string, updates: Partial<Subtask>) =>
set((state) => ({
messages: state.messages.map((m) =>
m.id === messageId
? {
...m,
subtasks: (m.subtasks || []).map((t) =>
t.id === taskId ? { ...t, ...updates } : t
),
}
: m
),
})),
// Artifact management (DeerFlow-inspired)
addArtifact: (artifact) =>
set((state) => ({
artifacts: [...state.artifacts, artifact],
selectedArtifactId: artifact.id,
artifactPanelOpen: true,
})),
selectArtifact: (id) => set({ selectedArtifactId: id }),
setArtifactPanelOpen: (open) => set({ artifactPanelOpen: open }),
clearArtifacts: () => set({ artifacts: [], selectedArtifactId: null, artifactPanelOpen: false }),
initStreamListener: () => { initStreamListener: () => {
const client = getClient(); const client = getClient();
@@ -629,31 +783,51 @@ export const useChatStore = create<ChatState>()(
), ),
})); }));
} else if (delta.stream === 'tool') { } else if (delta.stream === 'tool') {
const toolMsg: Message = { // Add tool step to the streaming assistant message (DeerFlow-inspired steps chain)
id: `tool_${Date.now()}_${generateRandomString(4)}`, const step: ToolCallStep = {
role: 'tool', id: `step_${Date.now()}_${generateRandomString(4)}`,
content: delta.toolOutput || '', toolName: delta.tool || 'unknown',
input: delta.toolInput,
output: delta.toolOutput,
status: delta.toolOutput ? 'completed' : 'running',
timestamp: new Date(), timestamp: new Date(),
runId: delta.runId,
toolName: delta.tool,
toolInput: delta.toolInput,
toolOutput: delta.toolOutput,
}; };
set((s) => ({ messages: [...s.messages, toolMsg] })); set((s) => ({
messages: s.messages.map((m) =>
m.id === streamingMsg.id
? { ...m, toolSteps: [...(m.toolSteps || []), step] }
: m
),
}));
} else if (delta.stream === 'lifecycle') { } else if (delta.stream === 'lifecycle') {
if (delta.phase === 'end' || delta.phase === 'error') { if (delta.phase === 'end' || delta.phase === 'error') {
set((s) => ({ set((s) => ({
isStreaming: false, isStreaming: false,
messages: s.messages.map((m) => messages: s.messages.map((m) => {
m.id === streamingMsg.id if (m.id === streamingMsg.id) {
? { return {
...m, ...m,
streaming: false, streaming: false,
error: delta.phase === 'error' ? delta.error : undefined, error: delta.phase === 'error' ? delta.error : undefined,
} };
: m }
), // Clear optimistic flag on user messages (server confirmed)
if (m.optimistic) {
return { ...m, optimistic: false };
}
return m;
}),
})); }));
// Generate follow-up suggestions on stream end
if (delta.phase === 'end') {
const completedMsg = get().messages.find(m => m.id === streamingMsg.id);
if (completedMsg?.content) {
const suggestions = generateFollowUpSuggestions(completedMsg.content);
if (suggestions.length > 0) {
get().setSuggestions(suggestions);
}
}
}
} }
} else if (delta.stream === 'hand') { } else if (delta.stream === 'hand') {
// Handle Hand trigger events from ZCLAW // Handle Hand trigger events from ZCLAW
@@ -699,6 +873,7 @@ export const useChatStore = create<ChatState>()(
currentModel: state.currentModel, currentModel: state.currentModel,
currentAgentId: state.currentAgent?.id, currentAgentId: state.currentAgent?.id,
currentConversationId: state.currentConversationId, currentConversationId: state.currentConversationId,
chatMode: state.chatMode,
}), }),
onRehydrateStorage: () => (state) => { onRehydrateStorage: () => (state) => {
// Rehydrate Date objects from JSON strings // Rehydrate Date objects from JSON strings
@@ -709,6 +884,7 @@ export const useChatStore = create<ChatState>()(
for (const msg of conv.messages) { for (const msg of conv.messages) {
msg.timestamp = new Date(msg.timestamp); msg.timestamp = new Date(msg.timestamp);
msg.streaming = false; // Never restore streaming state msg.streaming = false; // Never restore streaming state
msg.optimistic = false; // Never restore optimistic flag (server already confirmed)
} }
} }
} }

View File

@@ -3,7 +3,7 @@
> **分类**: 核心功能 > **分类**: 核心功能
> **优先级**: P0 - 决定性 > **优先级**: P0 - 决定性
> **成熟度**: L4 - 生产 > **成熟度**: L4 - 生产
> **最后更新**: 2026-03-25 > **最后更新**: 2026-04-01
> **验证状态**: ✅ 代码已验证 > **验证状态**: ✅ 代码已验证
--- ---
@@ -42,12 +42,20 @@
| 文件 | 路径 | 用途 | | 文件 | 路径 | 用途 |
|------|------|------| |------|------|------|
| 主组件 | `desktop/src/components/ChatArea.tsx` | 聊天 UI | | 主组件 | `desktop/src/components/ChatArea.tsx` | 聊天 UI (DeerFlow 风格) |
| 状态管理 | `desktop/src/store/chatStore.ts` | 消息和会话状态 | | 状态管理 | `desktop/src/store/chatStore.ts` | 消息和会话状态 |
| 消息渲染 | `desktop/src/components/MessageItem.tsx` | 单条消息 | | 模式选择 | `desktop/src/components/ai/ChatMode.tsx` | 下拉式模式切换 (闪速/思考/Pro/Ultra) |
| Markdown | `desktop/src/components/MarkdownRenderer.tsx` | 轻量 Markdown 渲染 | | 流式渲染 | `desktop/src/components/ai/StreamingText.tsx` | 打字机效果文本渲染 |
| 推理块 | `desktop/src/components/ai/ReasoningBlock.tsx` | 思考过程折叠展示 |
| 工具链 | `desktop/src/components/ai/ToolCallChain.tsx` | 工具调用步骤链 |
| 任务进度 | `desktop/src/components/ai/TaskProgress.tsx` | 子任务追踪 |
| 建议芯片 | `desktop/src/components/ai/SuggestionChips.tsx` | 快捷建议 |
| 模型选择 | `desktop/src/components/ai/ModelSelector.tsx` | 模型下拉选择 |
| 对话容器 | `desktop/src/components/ai/Conversation.tsx` | 消息滚动容器 |
| 全局样式 | `desktop/src/index.css` | 暖灰色系 + DeerFlow CSS |
| Tauri 网关 | `desktop/src/lib/tauri-gateway.ts` | Tauri 原生命令 | | Tauri 网关 | `desktop/src/lib/tauri-gateway.ts` | Tauri 原生命令 |
| 内核客户端 | `desktop/src/lib/kernel-client.ts` | Kernel 通信 | | 内核客户端 | `desktop/src/lib/kernel-client.ts` | Kernel 通信 |
| Gateway 客户端 | `desktop/src/lib/gateway-client.ts` | WebSocket/REST 通信 |
--- ---
@@ -79,6 +87,7 @@
| 项目 | 参考点 | | 项目 | 参考点 |
|------|--------| |------|--------|
| DeerFlow | 卡片式输入框、下拉模式选择器、彩色快捷芯片、极简顶栏、暖灰色系 |
| ChatGPT | 流式响应、Markdown 渲染 | | ChatGPT | 流式响应、Markdown 渲染 |
| Claude | 代码块复制、消息操作 | | Claude | 代码块复制、消息操作 |
| ZCLAW | 历史消息管理 | | ZCLAW | 历史消息管理 |
@@ -254,6 +263,20 @@ case 'done':
- [x] 流式中断控制 (AbortController) - [x] 流式中断控制 (AbortController)
- [x] Agent 切换 - [x] Agent 切换
- [x] 工具调用展示 (tool, hand, workflow 消息类型) - [x] 工具调用展示 (tool, hand, workflow 消息类型)
- [x] DeerFlow 视觉风格复刻 (2026-04-01)
- 卡片式输入框白色圆角卡片textarea 上部 + 操作栏底部
- 下拉式模式选择器闪速/思考/Pro/Ultra带图标+描述+勾选
- 彩色快捷操作芯片小惊喜/写作/研究/收集/学习
- 极简顶栏对话标题 + token 计数 + 导出按钮
- 暖灰色系全局样式#faf9f6 bg, #f5f4f1 sidebar, #e8e6e1 border
- DeerFlow 风格侧边栏Logo + 新对话/对话/智能体 导航
- 对话列表增强hover 操作栏内联重命名Markdown 导出
- 虚拟化消息列表100+ 条消息自动启用 react-window
- Artifact 右侧面板可拖拽分割480px
- 流式 thinking 指示器Thinking... 动画
- 推理过程折叠展示 (ReasoningBlock)
- 工具调用链可视化 (ToolCallChain)
- 子任务进度追踪 (TaskProgress)
### 5.2 测试覆盖 ### 5.2 测试覆盖

View File

@@ -1,9 +1,9 @@
# ZCLAW 功能全景文档 # ZCLAW 功能全景文档
> **版本**: v0.8.1 > **版本**: v0.9.0
> **更新日期**: 2026-03-30 > **更新日期**: 2026-04-01
> **项目状态**: 完整 Rust Workspace 架构10 个核心 Crates70 技能Pipeline DSL + Smart Presentation + Agent Growth System + SaaS 平台 > **项目状态**: 完整 Rust Workspace 架构10 个核心 Crates70 技能Pipeline DSL + Smart Presentation + Agent Growth System + SaaS 平台 + DeerFlow 视觉风格
> **整体完成度**: ~87% (核心功能完整SaaS 平台全面上线Worker + Scheduler 系统上线,记忆闭环接通) > **整体完成度**: ~89% (核心功能完整SaaS 平台全面上线,DeerFlow 前端视觉复刻完成,Worker + Scheduler 系统上线,记忆闭环接通)
--- ---
@@ -167,6 +167,7 @@ zclaw-saas — 独立运行 (Axum + PostgreSQL, 端口 8080) — 97%
| 日期 | 版本 | 变更内容 | | 日期 | 版本 | 变更内容 |
|------|------|---------| |------|------|---------|
| 2026-04-01 | v0.9.0 | DeerFlow 前端视觉复刻:卡片式输入框、下拉模式选择器(闪速/思考/Pro/Ultra、彩色快捷操作芯片、极简顶栏+token计数+导出、暖灰色系全局样式(#faf9f6/#f5f4f1/#e8e6e1、DeerFlow 风格侧边栏、推理/工具链/子任务可视化、Artifact 右侧面板、虚拟化消息列表、Gateway 流式 hang 修复onclose code 1000 → onComplete、WebView2 textarea 边框修复CSS !important |
| 2026-03-30 | v0.8.1 | Sprint 5 "稳定清扫": Axum CLOSE_WAIT 修复 (CancellationToken + TCP keepalive + SO_LINGER)E2E 测试重新启用 (去掉 test.skip)dead code 注解审计 (36→<10) | | 2026-03-30 | v0.8.1 | Sprint 5 "稳定清扫": Axum CLOSE_WAIT 修复 (CancellationToken + TCP keepalive + SO_LINGER)E2E 测试重新启用 (去掉 test.skip)dead code 注解审计 (36→<10) |
| 2026-03-29 | v0.8.0 | SaaS 后端架构重构完成Worker 系统 (5 Worker + mpsc 异步调度)声明式 Scheduler (TOML 配置)SQL 迁移系统 (Schema v6 + TIMESTAMPTZ)多环境配置 (ZCLAW_ENV)连接池优化 (50 max/5 min)速率限制优化 (无锁 AtomicU32)记忆闭环修复extraction_adapter.rs 实现 TauriExtractionDriverBREAK-01 已修复 | | 2026-03-29 | v0.8.0 | SaaS 后端架构重构完成Worker 系统 (5 Worker + mpsc 异步调度)声明式 Scheduler (TOML 配置)SQL 迁移系统 (Schema v6 + TIMESTAMPTZ)多环境配置 (ZCLAW_ENV)连接池优化 (50 max/5 min)速率限制优化 (无锁 AtomicU32)记忆闭环修复extraction_adapter.rs 实现 TauriExtractionDriverBREAK-01 已修复 |
| 2026-03-29 | v0.7.0 | 文档同步SKILL 数量 70, Tauri 命令 130+ ( Browser/Intelligence/Memory/CLI/SecureStorage), Hands 11 (9 启用+2 禁用), 智能层完成度修正 | | 2026-03-29 | v0.7.0 | 文档同步SKILL 数量 70, Tauri 命令 130+ ( Browser/Intelligence/Memory/CLI/SecureStorage), Hands 11 (9 启用+2 禁用), 智能层完成度修正 |