feat: 实现循环防护和安全验证功能
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

refactor(loop_guard): 为LoopGuard添加Clone派生
feat(capabilities): 实现CapabilityManager.validate()安全验证
fix(agentStore): 添加token用量追踪
chore: 删除未实现的Predictor/Lead HAND.toml文件
style(Credits): 移除假数据并标注开发中状态
refactor(Skills): 动态加载技能卡片
perf(configStore): 为定时任务添加localStorage降级
docs: 更新功能文档和版本变更记录
This commit is contained in:
iven
2026-03-27 07:56:53 +08:00
parent 0d4fa96b82
commit eed347e1a6
14 changed files with 724 additions and 476 deletions

View File

@@ -3,13 +3,6 @@ import { useState } from 'react';
export function Credits() {
const [filter, setFilter] = useState<'all' | 'consume' | 'earn'>('all');
const logs = [
{ id: 1, action: 'AutoClaw网页搜索', date: '2026年03月11日 12:02:02', amount: -6 },
{ id: 2, action: 'AutoClaw网页搜索', date: '2026年03月11日 12:01:58', amount: -6 },
{ id: 3, action: 'AutoClaw网页搜索', date: '2026年03月11日 12:01:46', amount: -6 },
{ id: 4, action: 'AutoClaw网页搜索', date: '2026年03月11日 12:01:43', amount: -6 },
];
return (
<div className="max-w-3xl">
<div className="flex justify-between items-center mb-6">
@@ -24,9 +17,10 @@ export function Credits() {
</div>
</div>
<div className="text-center mb-8">
<div className="text-center mb-8 py-12">
<div className="text-xs text-gray-500 mb-1"></div>
<div className="text-3xl font-bold text-gray-900">2268</div>
<div className="text-3xl font-bold text-gray-900">--</div>
<div className="text-xs text-gray-400 mt-2"></div>
</div>
<div className="p-1 mb-6 flex rounded-lg bg-gray-50 border border-gray-100 shadow-sm">
@@ -50,18 +44,9 @@ export function Credits() {
</button>
</div>
<div className="bg-white rounded-xl border border-gray-200 divide-y divide-gray-100 shadow-sm">
{logs.map((log) => (
<div key={log.id} className="flex justify-between items-center p-4">
<div>
<div className="text-sm text-gray-700">{log.action}</div>
<div className="text-xs text-gray-500 mt-1">{log.date}</div>
</div>
<div className={`font-medium ${log.amount < 0 ? 'text-gray-500' : 'text-green-500'}`}>
{log.amount > 0 ? '+' : ''}{log.amount}
</div>
</div>
))}
<div className="bg-white rounded-xl border border-gray-200 p-8 text-center">
<div className="text-sm text-gray-400"></div>
<div className="text-xs text-gray-300 mt-1">使</div>
</div>
</div>
);

View File

@@ -2,67 +2,7 @@ import { useEffect, useState } from 'react';
import { useConnectionStore } from '../../store/connectionStore';
import { useConfigStore } from '../../store/configStore';
import { silentErrorHandler } from '../../lib/error-utils';
import { Wrench, Zap, FileCode, Globe, Mail, Database, Search, MessageSquare } from 'lucide-react';
// ZCLAW 内置系统技能
const SYSTEM_SKILLS = [
{
id: 'code-assistant',
name: '代码助手',
description: '代码编写、调试、重构和优化',
category: '开发',
icon: FileCode,
},
{
id: 'web-search',
name: '网络搜索',
description: '实时搜索互联网信息',
category: '信息',
icon: Search,
},
{
id: 'file-manager',
name: '文件管理',
description: '文件读写、搜索和整理',
category: '系统',
icon: Database,
},
{
id: 'web-browsing',
name: '网页浏览',
description: '访问和解析网页内容',
category: '信息',
icon: Globe,
},
{
id: 'email-handler',
name: '邮件处理',
description: '发送和管理电子邮件',
category: '通讯',
icon: Mail,
},
{
id: 'chat-skill',
name: '对话技能',
description: '自然语言对话和问答',
category: '交互',
icon: MessageSquare,
},
{
id: 'automation',
name: '自动化任务',
description: '自动化工作流程执行',
category: '系统',
icon: Zap,
},
{
id: 'tool-executor',
name: '工具执行器',
description: '执行系统命令和脚本',
category: '系统',
icon: Wrench,
},
];
import { Wrench } from 'lucide-react';
export function Skills() {
const connectionState = useConnectionStore((s) => s.connectionState);
@@ -116,35 +56,52 @@ export function Skills() {
</div>
)}
{/* 系统技能 */}
{/* 系统技能 — 从 skillsCatalog 动态加载,未连接时展示说明 */}
<div className="mb-6">
<h3 className="text-sm font-semibold text-gray-700 mb-3">ZCLAW </h3>
<div className="grid grid-cols-2 gap-3">
{SYSTEM_SKILLS.map((skill) => {
const Icon = skill.icon;
return (
<h3 className="text-sm font-semibold text-gray-700 mb-3">
ZCLAW
<span className="text-[10px] px-1.5 py-0.5 bg-blue-50 text-blue-600 rounded ml-2">
{connected ? `已加载 ${skillsCatalog.filter(s => s.source === 'builtin').length}` : '未连接'}
</span>
</h3>
{connected && skillsCatalog.length > 0 ? (
<div className="grid grid-cols-2 gap-3">
{skillsCatalog.slice(0, 16).map((skill) => (
<div
key={skill.id}
className="bg-white rounded-xl border border-gray-200 p-4 shadow-sm hover:shadow-md transition-shadow"
>
<div className="flex items-start gap-3">
<div className="w-10 h-10 bg-gradient-to-br from-blue-500 to-purple-500 rounded-lg flex items-center justify-center flex-shrink-0">
<Icon className="w-5 h-5 text-white" />
<Wrench className="w-5 h-5 text-white" />
</div>
<div className="flex-1 min-w-0">
<div className="flex items-center gap-2">
<span className="text-sm font-medium text-gray-900">{skill.name}</span>
<span className="text-[10px] px-1.5 py-0.5 bg-purple-50 text-purple-600 rounded">
{skill.category}
</span>
{skill.source && (
<span className="text-[10px] px-1.5 py-0.5 bg-purple-50 text-purple-600 rounded">
{skill.source === 'builtin' ? '内置' : '额外'}
</span>
)}
</div>
<p className="text-xs text-gray-500 mt-1">{skill.description}</p>
<p className="text-xs text-gray-500 mt-1">{skill.description || skill.path || skill.id}</p>
</div>
</div>
</div>
);
})}
</div>
))}
{skillsCatalog.length > 16 && (
<div className="text-xs text-gray-400 text-center col-span-2 py-2">
{skillsCatalog.length - 16}
</div>
)}
</div>
) : (
<div className="bg-white rounded-xl border border-gray-200 p-6 text-center shadow-sm">
<p className="text-sm text-gray-400">
{connected ? '暂无可用技能' : '连接后端后自动加载系统技能列表'}
</p>
</div>
)}
</div>
<div className="bg-white rounded-xl border border-gray-200 p-6 mb-6 shadow-sm">

View File

@@ -3,7 +3,16 @@
*
* Provides a unified API for intelligence operations that:
* - Uses Rust backend (via Tauri commands) when running in Tauri environment
* - Falls back to localStorage-based implementation in browser environment
* - Falls back to localStorage-based implementation in browser/dev environment
*
* Degradation strategy:
* - In Tauri mode: if a Tauri invoke fails, the error is logged and re-thrown.
* The caller is responsible for handling the error. We do NOT silently fall
* back to localStorage, because that would give users degraded functionality
* (localStorage instead of SQLite, rule-based instead of LLM-based, no-op
* instead of real execution) without any indication that something is wrong.
* - In browser/dev mode: localStorage fallback is the intended behavior for
* development and testing without a Tauri backend.
*
* This replaces direct usage of:
* - agent-memory.ts
@@ -38,6 +47,8 @@
import { invoke } from '@tauri-apps/api/core';
import { isTauriRuntime } from './tauri-gateway';
import {
intelligence,
type MemoryEntryInput,
@@ -62,15 +73,6 @@ import {
type IdentitySnapshot,
} from './intelligence-backend';
// === Environment Detection ===
/**
* Check if running in Tauri environment
*/
export function isTauriEnv(): boolean {
return typeof window !== 'undefined' && '__TAURI__' in window;
}
// === Frontend Types (for backward compatibility) ===
export type MemoryType = 'fact' | 'preference' | 'lesson' | 'context' | 'task';
@@ -982,75 +984,91 @@ const fallbackHeartbeat = {
// === Unified Client Export ===
/**
* Unified intelligence client that automatically selects backend or fallback
* Helper: wrap a Tauri invoke call so that failures are logged and re-thrown
* instead of silently falling back to localStorage implementations.
*/
/**
 * Runs a Tauri invoke thunk and surfaces failures loudly.
 *
 * On rejection the error is logged with a label identifying which
 * intelligence operation failed, then re-thrown unchanged so the caller
 * still sees the original failure — there is deliberately NO silent
 * fallback to localStorage here.
 *
 * @param label - Human-readable operation name used in the warning log.
 * @param fn    - Thunk performing the actual Tauri invoke.
 * @returns The promise produced by `fn`, with logging attached to the
 *          rejection path only.
 */
function tauriInvoke<T>(label: string, fn: () => Promise<T>): Promise<T> {
  const pending = fn();
  return pending.catch((err: unknown) => {
    console.warn(`[IntelligenceClient] Tauri invoke failed (${label}):`, err);
    throw err;
  });
}
/**
* Unified intelligence client that automatically selects backend or fallback.
*
* - In Tauri mode: calls Rust backend via invoke(). On failure, logs a warning
* and re-throws -- does NOT fall back to localStorage.
* - In browser/dev mode: uses localStorage-based fallback implementations.
*/
export const intelligenceClient = {
memory: {
init: async (): Promise<void> => {
if (isTauriEnv()) {
await intelligence.memory.init();
if (isTauriRuntime()) {
await tauriInvoke('memory.init', () => intelligence.memory.init());
} else {
await fallbackMemory.init();
}
},
store: async (entry: MemoryEntryInput): Promise<string> => {
if (isTauriEnv()) {
return intelligence.memory.store(entry);
if (isTauriRuntime()) {
return tauriInvoke('memory.store', () => intelligence.memory.store(entry));
}
return fallbackMemory.store(entry);
},
get: async (id: string): Promise<MemoryEntry | null> => {
if (isTauriEnv()) {
const result = await intelligence.memory.get(id);
if (isTauriRuntime()) {
const result = await tauriInvoke('memory.get', () => intelligence.memory.get(id));
return result ? toFrontendMemory(result) : null;
}
return fallbackMemory.get(id);
},
search: async (options: MemorySearchOptions): Promise<MemoryEntry[]> => {
if (isTauriEnv()) {
const results = await intelligence.memory.search(toBackendSearchOptions(options));
if (isTauriRuntime()) {
const results = await tauriInvoke('memory.search', () =>
intelligence.memory.search(toBackendSearchOptions(options))
);
return results.map(toFrontendMemory);
}
return fallbackMemory.search(options);
},
delete: async (id: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.memory.delete(id);
if (isTauriRuntime()) {
await tauriInvoke('memory.delete', () => intelligence.memory.delete(id));
} else {
await fallbackMemory.delete(id);
}
},
deleteAll: async (agentId: string): Promise<number> => {
if (isTauriEnv()) {
return intelligence.memory.deleteAll(agentId);
if (isTauriRuntime()) {
return tauriInvoke('memory.deleteAll', () => intelligence.memory.deleteAll(agentId));
}
return fallbackMemory.deleteAll(agentId);
},
stats: async (): Promise<MemoryStats> => {
if (isTauriEnv()) {
const stats = await intelligence.memory.stats();
if (isTauriRuntime()) {
const stats = await tauriInvoke('memory.stats', () => intelligence.memory.stats());
return toFrontendStats(stats);
}
return fallbackMemory.stats();
},
export: async (): Promise<MemoryEntry[]> => {
if (isTauriEnv()) {
const results = await intelligence.memory.export();
if (isTauriRuntime()) {
const results = await tauriInvoke('memory.export', () => intelligence.memory.export());
return results.map(toFrontendMemory);
}
return fallbackMemory.export();
},
import: async (memories: MemoryEntry[]): Promise<number> => {
if (isTauriEnv()) {
// Convert to backend format
if (isTauriRuntime()) {
const backendMemories = memories.map(m => ({
...m,
agent_id: m.agentId,
@@ -1062,14 +1080,16 @@ export const intelligenceClient = {
tags: JSON.stringify(m.tags),
embedding: null,
}));
return intelligence.memory.import(backendMemories as PersistentMemory[]);
return tauriInvoke('memory.import', () =>
intelligence.memory.import(backendMemories as PersistentMemory[])
);
}
return fallbackMemory.import(memories);
},
dbPath: async (): Promise<string> => {
if (isTauriEnv()) {
return intelligence.memory.dbPath();
if (isTauriRuntime()) {
return tauriInvoke('memory.dbPath', () => intelligence.memory.dbPath());
}
return fallbackMemory.dbPath();
},
@@ -1079,10 +1099,12 @@ export const intelligenceClient = {
query: string,
maxTokens?: number,
): Promise<{ systemPromptAddition: string; totalTokens: number; memoriesUsed: number }> => {
if (isTauriEnv()) {
return intelligence.memory.buildContext(agentId, query, maxTokens ?? null);
if (isTauriRuntime()) {
return tauriInvoke('memory.buildContext', () =>
intelligence.memory.buildContext(agentId, query, maxTokens ?? null)
);
}
// Fallback: use basic search
// Browser/dev fallback: use basic search
const memories = await fallbackMemory.search({
agentId,
query,
@@ -1098,54 +1120,58 @@ export const intelligenceClient = {
heartbeat: {
init: async (agentId: string, config?: HeartbeatConfig): Promise<void> => {
if (isTauriEnv()) {
await intelligence.heartbeat.init(agentId, config);
if (isTauriRuntime()) {
await tauriInvoke('heartbeat.init', () => intelligence.heartbeat.init(agentId, config));
} else {
await fallbackHeartbeat.init(agentId, config);
}
},
start: async (agentId: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.heartbeat.start(agentId);
if (isTauriRuntime()) {
await tauriInvoke('heartbeat.start', () => intelligence.heartbeat.start(agentId));
} else {
await fallbackHeartbeat.start(agentId);
}
},
stop: async (agentId: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.heartbeat.stop(agentId);
if (isTauriRuntime()) {
await tauriInvoke('heartbeat.stop', () => intelligence.heartbeat.stop(agentId));
} else {
await fallbackHeartbeat.stop(agentId);
}
},
tick: async (agentId: string): Promise<HeartbeatResult> => {
if (isTauriEnv()) {
return intelligence.heartbeat.tick(agentId);
if (isTauriRuntime()) {
return tauriInvoke('heartbeat.tick', () => intelligence.heartbeat.tick(agentId));
}
return fallbackHeartbeat.tick(agentId);
},
getConfig: async (agentId: string): Promise<HeartbeatConfig> => {
if (isTauriEnv()) {
return intelligence.heartbeat.getConfig(agentId);
if (isTauriRuntime()) {
return tauriInvoke('heartbeat.getConfig', () => intelligence.heartbeat.getConfig(agentId));
}
return fallbackHeartbeat.getConfig(agentId);
},
updateConfig: async (agentId: string, config: HeartbeatConfig): Promise<void> => {
if (isTauriEnv()) {
await intelligence.heartbeat.updateConfig(agentId, config);
if (isTauriRuntime()) {
await tauriInvoke('heartbeat.updateConfig', () =>
intelligence.heartbeat.updateConfig(agentId, config)
);
} else {
await fallbackHeartbeat.updateConfig(agentId, config);
}
},
getHistory: async (agentId: string, limit?: number): Promise<HeartbeatResult[]> => {
if (isTauriEnv()) {
return intelligence.heartbeat.getHistory(agentId, limit);
if (isTauriRuntime()) {
return tauriInvoke('heartbeat.getHistory', () =>
intelligence.heartbeat.getHistory(agentId, limit)
);
}
return fallbackHeartbeat.getHistory(agentId, limit);
},
@@ -1156,61 +1182,74 @@ export const intelligenceClient = {
totalEntries: number,
storageSizeBytes: number
): Promise<void> => {
if (isTauriEnv()) {
await invoke('heartbeat_update_memory_stats', {
agent_id: agentId,
task_count: taskCount,
total_entries: totalEntries,
storage_size_bytes: storageSizeBytes,
});
if (isTauriRuntime()) {
await tauriInvoke('heartbeat.updateMemoryStats', () =>
invoke('heartbeat_update_memory_stats', {
agent_id: agentId,
task_count: taskCount,
total_entries: totalEntries,
storage_size_bytes: storageSizeBytes,
})
);
} else {
// Browser/dev fallback only
const cache = {
taskCount,
totalEntries,
storageSizeBytes,
lastUpdated: new Date().toISOString(),
};
localStorage.setItem(`zclaw-memory-stats-${agentId}`, JSON.stringify(cache));
}
// Fallback: store in localStorage for non-Tauri environment
const cache = {
taskCount,
totalEntries,
storageSizeBytes,
lastUpdated: new Date().toISOString(),
};
localStorage.setItem(`zclaw-memory-stats-${agentId}`, JSON.stringify(cache));
},
recordCorrection: async (agentId: string, correctionType: string): Promise<void> => {
if (isTauriEnv()) {
await invoke('heartbeat_record_correction', {
agent_id: agentId,
correction_type: correctionType,
});
if (isTauriRuntime()) {
await tauriInvoke('heartbeat.recordCorrection', () =>
invoke('heartbeat_record_correction', {
agent_id: agentId,
correction_type: correctionType,
})
);
} else {
// Browser/dev fallback only
const key = `zclaw-corrections-${agentId}`;
const stored = localStorage.getItem(key);
const counters = stored ? JSON.parse(stored) : {};
counters[correctionType] = (counters[correctionType] || 0) + 1;
localStorage.setItem(key, JSON.stringify(counters));
}
// Fallback: store in localStorage for non-Tauri environment
const key = `zclaw-corrections-${agentId}`;
const stored = localStorage.getItem(key);
const counters = stored ? JSON.parse(stored) : {};
counters[correctionType] = (counters[correctionType] || 0) + 1;
localStorage.setItem(key, JSON.stringify(counters));
},
recordInteraction: async (agentId: string): Promise<void> => {
if (isTauriEnv()) {
await invoke('heartbeat_record_interaction', {
agent_id: agentId,
});
if (isTauriRuntime()) {
await tauriInvoke('heartbeat.recordInteraction', () =>
invoke('heartbeat_record_interaction', {
agent_id: agentId,
})
);
} else {
// Browser/dev fallback only
localStorage.setItem(`zclaw-last-interaction-${agentId}`, new Date().toISOString());
}
// Fallback: store in localStorage for non-Tauri environment
localStorage.setItem(`zclaw-last-interaction-${agentId}`, new Date().toISOString());
},
},
compactor: {
estimateTokens: async (text: string): Promise<number> => {
if (isTauriEnv()) {
return intelligence.compactor.estimateTokens(text);
if (isTauriRuntime()) {
return tauriInvoke('compactor.estimateTokens', () =>
intelligence.compactor.estimateTokens(text)
);
}
return fallbackCompactor.estimateTokens(text);
},
estimateMessagesTokens: async (messages: CompactableMessage[]): Promise<number> => {
if (isTauriEnv()) {
return intelligence.compactor.estimateMessagesTokens(messages);
if (isTauriRuntime()) {
return tauriInvoke('compactor.estimateMessagesTokens', () =>
intelligence.compactor.estimateMessagesTokens(messages)
);
}
return fallbackCompactor.estimateMessagesTokens(messages);
},
@@ -1219,8 +1258,10 @@ export const intelligenceClient = {
messages: CompactableMessage[],
config?: CompactionConfig
): Promise<CompactionCheck> => {
if (isTauriEnv()) {
return intelligence.compactor.checkThreshold(messages, config);
if (isTauriRuntime()) {
return tauriInvoke('compactor.checkThreshold', () =>
intelligence.compactor.checkThreshold(messages, config)
);
}
return fallbackCompactor.checkThreshold(messages, config);
},
@@ -1231,8 +1272,10 @@ export const intelligenceClient = {
conversationId?: string,
config?: CompactionConfig
): Promise<CompactionResult> => {
if (isTauriEnv()) {
return intelligence.compactor.compact(messages, agentId, conversationId, config);
if (isTauriRuntime()) {
return tauriInvoke('compactor.compact', () =>
intelligence.compactor.compact(messages, agentId, conversationId, config)
);
}
return fallbackCompactor.compact(messages, agentId, conversationId, config);
},
@@ -1240,45 +1283,53 @@ export const intelligenceClient = {
reflection: {
init: async (config?: ReflectionConfig): Promise<void> => {
if (isTauriEnv()) {
await intelligence.reflection.init(config);
if (isTauriRuntime()) {
await tauriInvoke('reflection.init', () => intelligence.reflection.init(config));
} else {
await fallbackReflection.init(config);
}
},
recordConversation: async (): Promise<void> => {
if (isTauriEnv()) {
await intelligence.reflection.recordConversation();
if (isTauriRuntime()) {
await tauriInvoke('reflection.recordConversation', () =>
intelligence.reflection.recordConversation()
);
} else {
await fallbackReflection.recordConversation();
}
},
shouldReflect: async (): Promise<boolean> => {
if (isTauriEnv()) {
return intelligence.reflection.shouldReflect();
if (isTauriRuntime()) {
return tauriInvoke('reflection.shouldReflect', () =>
intelligence.reflection.shouldReflect()
);
}
return fallbackReflection.shouldReflect();
},
reflect: async (agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> => {
if (isTauriEnv()) {
return intelligence.reflection.reflect(agentId, memories);
if (isTauriRuntime()) {
return tauriInvoke('reflection.reflect', () =>
intelligence.reflection.reflect(agentId, memories)
);
}
return fallbackReflection.reflect(agentId, memories);
},
getHistory: async (limit?: number): Promise<ReflectionResult[]> => {
if (isTauriEnv()) {
return intelligence.reflection.getHistory(limit);
if (isTauriRuntime()) {
return tauriInvoke('reflection.getHistory', () =>
intelligence.reflection.getHistory(limit)
);
}
return fallbackReflection.getHistory(limit);
},
getState: async (): Promise<ReflectionState> => {
if (isTauriEnv()) {
return intelligence.reflection.getState();
if (isTauriRuntime()) {
return tauriInvoke('reflection.getState', () => intelligence.reflection.getState());
}
return fallbackReflection.getState();
},
@@ -1286,37 +1337,43 @@ export const intelligenceClient = {
identity: {
get: async (agentId: string): Promise<IdentityFiles> => {
if (isTauriEnv()) {
return intelligence.identity.get(agentId);
if (isTauriRuntime()) {
return tauriInvoke('identity.get', () => intelligence.identity.get(agentId));
}
return fallbackIdentity.get(agentId);
},
getFile: async (agentId: string, file: string): Promise<string> => {
if (isTauriEnv()) {
return intelligence.identity.getFile(agentId, file);
if (isTauriRuntime()) {
return tauriInvoke('identity.getFile', () => intelligence.identity.getFile(agentId, file));
}
return fallbackIdentity.getFile(agentId, file);
},
buildPrompt: async (agentId: string, memoryContext?: string): Promise<string> => {
if (isTauriEnv()) {
return intelligence.identity.buildPrompt(agentId, memoryContext);
if (isTauriRuntime()) {
return tauriInvoke('identity.buildPrompt', () =>
intelligence.identity.buildPrompt(agentId, memoryContext)
);
}
return fallbackIdentity.buildPrompt(agentId, memoryContext);
},
updateUserProfile: async (agentId: string, content: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.identity.updateUserProfile(agentId, content);
if (isTauriRuntime()) {
await tauriInvoke('identity.updateUserProfile', () =>
intelligence.identity.updateUserProfile(agentId, content)
);
} else {
await fallbackIdentity.updateUserProfile(agentId, content);
}
},
appendUserProfile: async (agentId: string, addition: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.identity.appendUserProfile(agentId, addition);
if (isTauriRuntime()) {
await tauriInvoke('identity.appendUserProfile', () =>
intelligence.identity.appendUserProfile(agentId, addition)
);
} else {
await fallbackIdentity.appendUserProfile(agentId, addition);
}
@@ -1328,67 +1385,81 @@ export const intelligenceClient = {
suggestedContent: string,
reason: string
): Promise<IdentityChangeProposal> => {
if (isTauriEnv()) {
return intelligence.identity.proposeChange(agentId, file, suggestedContent, reason);
if (isTauriRuntime()) {
return tauriInvoke('identity.proposeChange', () =>
intelligence.identity.proposeChange(agentId, file, suggestedContent, reason)
);
}
return fallbackIdentity.proposeChange(agentId, file, suggestedContent, reason);
},
approveProposal: async (proposalId: string): Promise<IdentityFiles> => {
if (isTauriEnv()) {
return intelligence.identity.approveProposal(proposalId);
if (isTauriRuntime()) {
return tauriInvoke('identity.approveProposal', () =>
intelligence.identity.approveProposal(proposalId)
);
}
return fallbackIdentity.approveProposal(proposalId);
},
rejectProposal: async (proposalId: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.identity.rejectProposal(proposalId);
if (isTauriRuntime()) {
await tauriInvoke('identity.rejectProposal', () =>
intelligence.identity.rejectProposal(proposalId)
);
} else {
await fallbackIdentity.rejectProposal(proposalId);
}
},
getPendingProposals: async (agentId?: string): Promise<IdentityChangeProposal[]> => {
if (isTauriEnv()) {
return intelligence.identity.getPendingProposals(agentId);
if (isTauriRuntime()) {
return tauriInvoke('identity.getPendingProposals', () =>
intelligence.identity.getPendingProposals(agentId)
);
}
return fallbackIdentity.getPendingProposals(agentId);
},
updateFile: async (agentId: string, file: string, content: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.identity.updateFile(agentId, file, content);
if (isTauriRuntime()) {
await tauriInvoke('identity.updateFile', () =>
intelligence.identity.updateFile(agentId, file, content)
);
} else {
await fallbackIdentity.updateFile(agentId, file, content);
}
},
getSnapshots: async (agentId: string, limit?: number): Promise<IdentitySnapshot[]> => {
if (isTauriEnv()) {
return intelligence.identity.getSnapshots(agentId, limit);
if (isTauriRuntime()) {
return tauriInvoke('identity.getSnapshots', () =>
intelligence.identity.getSnapshots(agentId, limit)
);
}
return fallbackIdentity.getSnapshots(agentId, limit);
},
restoreSnapshot: async (agentId: string, snapshotId: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.identity.restoreSnapshot(agentId, snapshotId);
if (isTauriRuntime()) {
await tauriInvoke('identity.restoreSnapshot', () =>
intelligence.identity.restoreSnapshot(agentId, snapshotId)
);
} else {
await fallbackIdentity.restoreSnapshot(agentId, snapshotId);
}
},
listAgents: async (): Promise<string[]> => {
if (isTauriEnv()) {
return intelligence.identity.listAgents();
if (isTauriRuntime()) {
return tauriInvoke('identity.listAgents', () => intelligence.identity.listAgents());
}
return fallbackIdentity.listAgents();
},
deleteAgent: async (agentId: string): Promise<void> => {
if (isTauriEnv()) {
await intelligence.identity.deleteAgent(agentId);
if (isTauriRuntime()) {
await tauriInvoke('identity.deleteAgent', () => intelligence.identity.deleteAgent(agentId));
} else {
await fallbackIdentity.deleteAgent(agentId);
}

View File

@@ -212,6 +212,7 @@ export const useAgentStore = create<AgentStore>((set, get) => ({
loadUsageStats: async () => {
try {
const { conversations } = useChatStore.getState();
const tokenData = useChatStore.getState().getTotalTokens();
let totalMessages = 0;
for (const conversation of conversations) {
@@ -225,7 +226,7 @@ export const useAgentStore = create<AgentStore>((set, get) => ({
const stats: UsageStats = {
totalSessions: conversations.length,
totalMessages,
totalTokens: 0,
totalTokens: tokenData.total,
byModel: {},
};

View File

@@ -85,6 +85,9 @@ interface ChatState {
isLoading: boolean;
currentModel: string;
sessionKey: string | null;
// Token usage tracking
totalInputTokens: number;
totalOutputTokens: number;
addMessage: (message: Message) => void;
updateMessage: (id: string, updates: Partial<Message>) => void;
@@ -97,6 +100,8 @@ interface ChatState {
newConversation: () => void;
switchConversation: (id: string) => void;
deleteConversation: (id: string) => void;
addTokenUsage: (inputTokens: number, outputTokens: number) => void;
getTotalTokens: () => { input: number; output: number; total: number };
searchSkills: (query: string) => { results: Array<{ id: string; name: string; description: string }>; totalAvailable: number };
}
@@ -194,8 +199,10 @@ export const useChatStore = create<ChatState>()(
isLoading: false,
currentModel: 'glm-4-flash',
sessionKey: null,
totalInputTokens: 0,
totalOutputTokens: 0,
addMessage: (message) =>
addMessage: (message: Message) =>
set((state) => ({ messages: [...state.messages, message] })),
updateMessage: (id, updates) =>
@@ -432,7 +439,7 @@ export const useChatStore = create<ChatState>()(
};
set((state) => ({ messages: [...state.messages, handMsg] }));
},
onComplete: () => {
onComplete: (inputTokens?: number, outputTokens?: number) => {
const state = get();
// Save conversation to persist across refresh
@@ -448,6 +455,11 @@ export const useChatStore = create<ChatState>()(
),
});
// Track token usage if provided (KernelClient provides these)
if (inputTokens !== undefined && outputTokens !== undefined) {
get().addTokenUsage(inputTokens, outputTokens);
}
// Async memory extraction after stream completes
const msgs = get().messages
.filter(m => m.role === 'user' || m.role === 'assistant')
@@ -518,6 +530,17 @@ export const useChatStore = create<ChatState>()(
}
},
addTokenUsage: (inputTokens: number, outputTokens: number) =>
set((state) => ({
totalInputTokens: state.totalInputTokens + inputTokens,
totalOutputTokens: state.totalOutputTokens + outputTokens,
})),
getTotalTokens: () => {
const { totalInputTokens, totalOutputTokens } = get();
return { input: totalInputTokens, output: totalOutputTokens, total: totalInputTokens + totalOutputTokens };
},
searchSkills: (query: string) => {
const discovery = getSkillDiscovery();
const result = discovery.searchSkills(query);

View File

@@ -395,9 +395,18 @@ export const useConfigStore = create<ConfigStateSlice & ConfigActionsSlice>((set
try {
const result = await client.listScheduledTasks();
set({ scheduledTasks: result?.tasks || [] });
const tasks = result?.tasks || [];
set({ scheduledTasks: tasks });
// Persist to localStorage as fallback
try { localStorage.setItem('zclaw-scheduled-tasks', JSON.stringify(tasks)); } catch { /* ignore */ }
} catch {
// Ignore if heartbeat.tasks not available
// Fallback: load from localStorage
try {
const stored = localStorage.getItem('zclaw-scheduled-tasks');
if (stored) {
set({ scheduledTasks: JSON.parse(stored) });
}
} catch { /* ignore */ }
}
},
@@ -416,9 +425,11 @@ export const useConfigStore = create<ConfigStateSlice & ConfigActionsSlice>((set
nextRun: result.nextRun,
description: result.description,
};
set((state) => ({
scheduledTasks: [...state.scheduledTasks, newTask],
}));
set((state) => {
const tasks = [...state.scheduledTasks, newTask];
try { localStorage.setItem('zclaw-scheduled-tasks', JSON.stringify(tasks)); } catch { /* ignore */ }
return { scheduledTasks: tasks };
});
return newTask;
} catch (err: unknown) {
const errorMessage = err instanceof Error ? err.message : 'Failed to create scheduled task';
@@ -602,8 +613,23 @@ function createConfigClientFromKernel(client: KernelClient): ConfigStoreClient {
return null;
}
},
getQuickConfig: async () => ({ quickConfig: {} }),
saveQuickConfig: async () => null,
getQuickConfig: async () => {
// Read from localStorage in kernel mode
try {
const stored = localStorage.getItem('zclaw-quick-config');
if (stored) {
return { quickConfig: JSON.parse(stored) };
}
} catch { /* ignore */ }
return { quickConfig: {} };
},
saveQuickConfig: async (config) => {
// Persist to localStorage in kernel mode
try {
localStorage.setItem('zclaw-quick-config', JSON.stringify(config));
} catch { /* ignore */ }
return { quickConfig: config };
},
listSkills: async () => {
try {
const result = await client.listSkills();

View File

@@ -522,13 +522,180 @@ export function createHandClientFromGateway(client: GatewayClient): HandClient {
};
}
// === Kernel Client Adapter ===
import type { KernelClient } from '../lib/kernel-client';
/**
* Helper to create a HandClient adapter from a KernelClient.
* Maps KernelClient methods (Tauri invoke) to the HandClient interface.
*/
function createHandClientFromKernel(client: KernelClient): HandClient {
// Adapter object: each HandClient method delegates to the corresponding
// KernelClient (Tauri invoke) method. Read-style methods swallow errors
// and return a neutral value (null / empty list) so UI callers can treat
// "backend unavailable" the same as "no data"; see NOTE(review) comments
// below for the methods that instead let errors propagate.
return {
listHands: async () => {
try {
const result = await client.listHands();
// KernelClient returns typed objects; cast to Record<string, unknown> for HandClient compatibility
const hands: Array<Record<string, unknown>> = result.hands.map((h) => ({
// Fall back to the hand's name when no explicit id is present.
id: h.id || h.name,
name: h.name,
description: h.description,
status: h.status,
requirements_met: h.requirements_met,
category: h.category,
icon: h.icon,
tool_count: h.tool_count,
tools: h.tools,
metric_count: h.metric_count,
metrics: h.metrics,
}));
return { hands };
} catch {
// Backend unreachable or invoke failed — caller treats null as "no data".
return null;
}
},
getHand: async (name: string) => {
try {
const result = await client.getHand(name);
// `as` binds tighter than `||`: this is (result as Record<...>) || null,
// i.e. a falsy result is normalized to null.
return result as Record<string, unknown> || null;
} catch {
return null;
}
},
listHandRuns: async (name: string, opts) => {
try {
const result = await client.listHandRuns(name, opts);
// NOTE(review): double assertion (`as unknown as`) bypasses type checking
// entirely — presumably the KernelClient run shape is structurally
// compatible with RawHandRun; confirm against kernel-client types.
return result as unknown as { runs?: RawHandRun[] } | null;
} catch {
return null;
}
},
triggerHand: async (name: string, params) => {
try {
const result = await client.triggerHand(name, params);
// Only the fields HandClient consumers use are forwarded.
return { runId: result.runId, status: result.status };
} catch {
return null;
}
},
// NOTE(review): unlike the methods above, approveHand/cancelHand have no
// try/catch — invoke failures propagate to the caller. Confirm intended.
approveHand: async (name: string, runId: string, approved: boolean, reason?: string) => {
return client.approveHand(name, runId, approved, reason);
},
cancelHand: async (name: string, runId: string) => {
return client.cancelHand(name, runId);
},
listTriggers: async () => {
try {
const result = await client.listTriggers();
// Missing or empty payload normalizes to an empty trigger list.
if (!result?.triggers) return { triggers: [] };
// Map KernelClient trigger shape to HandClient Trigger shape
// (KernelClient uses `triggerType`, HandClient uses `type`).
const triggers: Trigger[] = result.triggers.map((t) => ({
id: t.id,
type: t.triggerType,
enabled: t.enabled,
}));
return { triggers };
} catch {
return { triggers: [] };
}
},
getTrigger: async (id: string) => {
try {
const result = await client.getTrigger(id);
if (!result) return null;
// Same triggerType -> type rename as in listTriggers.
return {
id: result.id,
type: result.triggerType,
enabled: result.enabled,
} as Trigger;
} catch {
return null;
}
},
createTrigger: async (trigger) => {
try {
const result = await client.createTrigger({
// Client-side id: trigger type plus creation timestamp.
id: `${trigger.type}_${Date.now()}`,
name: trigger.name || trigger.type,
handId: trigger.handName || '',
triggerType: { type: trigger.type },
enabled: trigger.enabled,
// HandClient carries free-form config; serialized into the
// kernel trigger's description field.
description: trigger.config ? JSON.stringify(trigger.config) : undefined,
});
return result ? { id: result.id } : null;
} catch {
return null;
}
},
// NOTE(review): updateTrigger/deleteTrigger have no try/catch (unlike
// createTrigger above) — errors propagate to the caller. Confirm intended.
updateTrigger: async (id: string, updates) => {
const result = await client.updateTrigger(id, {
name: updates.name,
enabled: updates.enabled,
handId: updates.handName,
// Only the `type` field of config is forwarded as the new triggerType.
triggerType: updates.config ? { type: (updates.config as Record<string, unknown>).type as string } : undefined,
});
return { id: result.id };
},
deleteTrigger: async (id: string) => {
await client.deleteTrigger(id);
// HandClient expects a status object even though the kernel returns nothing.
return { status: 'deleted' };
},
listApprovals: async () => {
try {
const result = await client.listApprovals();
// Map KernelClient approval shape to HandClient RawApproval shape
// (camelCase handId/createdAt -> hand_id/requestedAt).
const approvals: RawApproval[] = (result?.approvals || []).map((a) => ({
id: a.id,
hand_id: a.handId,
status: a.status,
requestedAt: a.createdAt,
}));
return { approvals };
} catch {
return { approvals: [] };
}
},
respondToApproval: async (approvalId: string, approved: boolean, reason?: string) => {
await client.respondToApproval(approvalId, approved, reason);
// Status is synthesized locally from the `approved` flag, not read back
// from the kernel.
return { status: approved ? 'approved' : 'rejected' };
},
};
}
// === Client Injection ===
/**
 * Sets the client for the hand store.
 * Called by the coordinator during initialization.
 * Detects whether the client is a KernelClient (Tauri) or GatewayClient (browser).
 */
export function setHandStoreClient(client: unknown): void {
  // Fix: removed a stray leftover line that eagerly built the gateway adapter
  // AND redeclared `handClient` (duplicate const/let declaration), which both
  // failed to compile and defeated the kernel-vs-gateway detection below.
  let handClient: HandClient;
  // KernelClient exposes a `listHands` method; GatewayClient does not, so
  // its presence identifies the Tauri (kernel) transport.
  if (client && typeof client === 'object' && 'listHands' in client) {
    handClient = createHandClientFromKernel(client as KernelClient);
  } else if (client && typeof client === 'object') {
    // Any other object is assumed to be a GatewayClient (browser transport).
    handClient = createHandClientFromGateway(client as GatewayClient);
  } else {
    // Fallback: a stub client whose calls all no-op gracefully, so the UI
    // renders empty states instead of crashing when no transport exists.
    handClient = {
      listHands: async () => null,
      getHand: async () => null,
      listHandRuns: async () => null,
      triggerHand: async () => null,
      approveHand: async () => ({ status: 'error' }),
      cancelHand: async () => ({ status: 'error' }),
      listTriggers: async () => ({ triggers: [] }),
      getTrigger: async () => null,
      createTrigger: async () => null,
      updateTrigger: async () => ({ id: '' }),
      deleteTrigger: async () => ({ status: 'error' }),
      listApprovals: async () => ({ approvals: [] }),
      respondToApproval: async () => ({ status: 'error' }),
    };
  }
  useHandStore.getState().setHandStoreClient(handClient);
}

View File

@@ -1,5 +1,7 @@
import { create } from 'zustand';
import { invoke } from '@tauri-apps/api/core';
import type { GatewayClient } from '../lib/gateway-client';
import type { KernelClient } from '../lib/kernel-client';
// === Core Types (previously imported from gatewayStore) ===
@@ -327,11 +329,168 @@ function createWorkflowClientFromGateway(client: GatewayClient): WorkflowClient
};
}
// === Pipeline types (from Tauri backend) ===
// Static metadata for one pipeline definition, as returned by the Tauri
// backend's `pipeline_list` / `pipeline_get` commands.
interface PipelineInfo {
  id: string; // stable pipeline identifier
  displayName: string; // human-readable name (falls back to id where used)
  description: string;
  category: string;
  industry: string;
  tags: string[];
  icon: string;
  version: string;
  author: string;
  // Declared input parameters for the pipeline.
  inputs: Array<{
    name: string; // parameter key
    inputType: string; // input kind — presumably a widget/value type; confirm against backend
    required: boolean;
    label: string; // display label for the input
    placeholder?: string;
    default?: unknown; // optional default value
    options: string[]; // choices for select-style inputs
  }>;
}
// Acknowledgement returned by the `pipeline_run` Tauri command.
interface RunPipelineResponse {
  runId: string; // identifier of the newly started run
  pipelineId: string; // pipeline the run belongs to
  status: string; // initial run status reported by the backend
}
// Snapshot of a pipeline run's state, as returned by the `pipeline_runs`
// Tauri command (one entry per run, across all pipelines).
interface PipelineRunResponse {
  runId: string;
  pipelineId: string;
  status: string;
  currentStep?: string; // name of the step currently executing, if any
  percentage: number; // overall progress — presumably 0-100; confirm scale against backend
  message: string; // latest progress message
  outputs?: unknown; // run outputs, presumably set once the run completes
  error?: string; // error description when the run failed
  startedAt: string;
  endedAt?: string; // unset while the run is still in flight
}
/**
 * Builds a WorkflowClient backed by the Tauri pipeline commands.
 *
 * KernelClient has no workflow surface — in Tauri mode a "workflow" is a
 * file-based pipeline — so this adapter calls the pipeline_* commands
 * directly through invoke(). The KernelClient argument only mirrors the
 * gateway factory's shape and is deliberately unused.
 */
function createWorkflowClientFromKernel(_client: KernelClient): WorkflowClient {
  // Create/update/delete cannot work here: pipelines live on disk as YAML.
  const unsupported = (operation: string) => async () => {
    throw new Error(`Workflow ${operation} not supported in KernelClient mode. Pipelines are file-based YAML definitions.`);
  };

  return {
    // Surface every pipeline definition as a workflow summary.
    listWorkflows: async () => {
      try {
        const infos = await invoke<PipelineInfo[]>('pipeline_list', {});
        if (!infos) return null;
        const workflows = infos.map((info) => ({
          id: info.id,
          name: info.displayName || info.id,
          steps: info.inputs.length,
          description: info.description,
          createdAt: undefined,
        }));
        return { workflows };
      } catch {
        return null;
      }
    },
    // Expand one pipeline into the WorkflowDetail shape, presenting each
    // declared input as a pseudo-step.
    getWorkflow: async (id: string) => {
      try {
        const info = await invoke<PipelineInfo>('pipeline_get', { pipelineId: id });
        const steps = info.inputs.map((field) => ({
          handName: field.inputType,
          name: field.label,
          params: field.default ? { default: field.default } : undefined,
        }));
        return {
          id: info.id,
          name: info.displayName || info.id,
          description: info.description,
          steps,
          createdAt: undefined,
        } satisfies WorkflowDetail;
      } catch {
        return null;
      }
    },
    createWorkflow: unsupported('creation'),
    updateWorkflow: unsupported('update'),
    deleteWorkflow: unsupported('deletion'),
    // Kick off a run; null tells the caller the launch failed.
    executeWorkflow: async (id: string, input?: Record<string, unknown>) => {
      try {
        const request = { pipelineId: id, inputs: input || {} };
        const ack = await invoke<RunPipelineResponse>('pipeline_run', { request });
        return { runId: ack.runId, status: ack.status };
      } catch {
        return null;
      }
    },
    // Only the run id matters to the backend; the workflow id is unused.
    cancelWorkflow: async (_workflowId: string, runId: string) => {
      try {
        await invoke('pipeline_cancel', { runId });
        return { status: 'cancelled' };
      } catch {
        return { status: 'error' };
      }
    },
    // The backend reports runs for every pipeline, so narrow to the one
    // requested and rename fields to the snake_case RawWorkflowRun shape.
    listWorkflowRuns: async (workflowId: string) => {
      try {
        const everyRun = await invoke<PipelineRunResponse[]>('pipeline_runs', {});
        const runs: RawWorkflowRun[] = [];
        for (const run of everyRun) {
          if (run.pipelineId !== workflowId) continue;
          runs.push({
            run_id: run.runId,
            workflow_id: run.pipelineId,
            status: run.status,
            started_at: run.startedAt,
            completed_at: run.endedAt,
            // NOTE(review): maps the progress percentage into current_step
            // when a step name is present — looks intentional but odd; verify.
            current_step: run.currentStep ? Math.round(run.percentage) : undefined,
            error: run.error,
            result: run.outputs,
          });
        }
        return { runs };
      } catch {
        return { runs: [] };
      }
    },
  };
}
/**
 * Sets the client for the workflow store.
 * Called by the coordinator during initialization.
 * Detects whether the client is a KernelClient (Tauri) or GatewayClient (browser).
 */
export function setWorkflowStoreClient(client: unknown): void {
  // Fix: removed a stray leftover line that eagerly built the gateway adapter
  // AND redeclared `workflowClient` (duplicate const/let declaration), which
  // both failed to compile and defeated the detection logic below.
  let workflowClient: WorkflowClient;
  // KernelClient exposes `listHands` (GatewayClient does not), so its
  // presence identifies the Tauri (kernel) transport.
  if (client && typeof client === 'object' && 'listHands' in client) {
    workflowClient = createWorkflowClientFromKernel(client as KernelClient);
  } else if (client && typeof client === 'object') {
    // Any other object is assumed to be a GatewayClient (browser transport).
    workflowClient = createWorkflowClientFromGateway(client as GatewayClient);
  } else {
    // Fallback: a stub client whose calls all no-op gracefully, so the UI
    // renders empty states instead of crashing when no transport exists.
    workflowClient = {
      listWorkflows: async () => null,
      getWorkflow: async () => null,
      createWorkflow: async () => null,
      updateWorkflow: async () => null,
      deleteWorkflow: async () => ({ status: 'error' }),
      executeWorkflow: async () => null,
      cancelWorkflow: async () => ({ status: 'error' }),
      listWorkflowRuns: async () => null,
    };
  }
  useWorkflowStore.getState().setWorkflowStoreClient(workflowClient);
}