feat(desktop): DeerFlow visual redesign + stream hang fix + intelligence client
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

DeerFlow frontend visual overhaul:
- Card-style input box (white rounded card, textarea top, actions bottom)
- Dropdown mode selector (闪速/思考/Pro/Ultra with icons+descriptions)
- Colored quick-action chips (小惊喜/写作/研究/收集/学习)
- Minimal top bar (title + token count + export)
- Warm gray color system (#faf9f6 bg, #f5f4f1 sidebar, #e8e6e1 border)
- DeerFlow-style sidebar (新对话/对话/智能体 nav)
- Reasoning block, tool call chain, task progress visualization
- Streaming text, model selector, suggestion chips components
- Resizable artifact panel with drag handle
- Virtualized message list for 100+ messages

Bug fixes:
- Stream hang: GatewayClient onclose code 1000 now calls onComplete
- WebView2 textarea border: CSS !important override for UA styles
- Gateway stream event handling (response/phase/tool_call types)

Intelligence client:
- Unified client with fallback drivers (compactor/heartbeat/identity/memory/reflection)
- Gateway API types and type conversions
This commit is contained in:
iven
2026-04-01 22:03:07 +08:00
parent e3b93ff96d
commit 73ff5e8c5e
43 changed files with 4817 additions and 905 deletions

View File

@@ -0,0 +1,61 @@
/**
* Intelligence Layer - LocalStorage Compactor Fallback
*
* Provides rule-based compaction for browser/dev environment.
*/
import type { CompactableMessage, CompactionResult, CompactionCheck, CompactionConfig } from '../intelligence-backend';
/**
 * Rule-based compaction fallback for browser/dev environments where the
 * Rust backend is unavailable. Token counts are rough character heuristics,
 * and compaction simply retains the most recent messages.
 */
export const fallbackCompactor = {
  /**
   * Estimate the token count of a text: roughly 1.5 tokens per CJK
   * character and 1 token per 4 other characters.
   */
  async estimateTokens(text: string): Promise<number> {
    const cjkCount = (text.match(/[\u4e00-\u9fff\u3040-\u30ff]/g) ?? []).length;
    const latinCount = text.length - cjkCount;
    return Math.ceil(cjkCount * 1.5 + latinCount / 4);
  },
  /** Sum of the estimated token counts across all messages. */
  async estimateMessagesTokens(messages: CompactableMessage[]): Promise<number> {
    const perMessage = await Promise.all(
      messages.map((msg) => fallbackCompactor.estimateTokens(msg.content))
    );
    return perMessage.reduce((sum, tokens) => sum + tokens, 0);
  },
  /**
   * Decide whether the conversation has grown past the compaction
   * thresholds (soft default 15000 tokens, hard default 20000 tokens).
   */
  async checkThreshold(
    messages: CompactableMessage[],
    config?: CompactionConfig
  ): Promise<CompactionCheck> {
    const softLimit = config?.soft_threshold_tokens ?? 15000;
    const hardLimit = config?.hard_threshold_tokens ?? 20000;
    const currentTokens = await fallbackCompactor.estimateMessagesTokens(messages);
    let urgency: CompactionCheck['urgency'] = 'none';
    if (currentTokens >= hardLimit) {
      urgency = 'hard';
    } else if (currentTokens >= softLimit) {
      urgency = 'soft';
    }
    return {
      should_compact: currentTokens >= softLimit,
      current_tokens: currentTokens,
      threshold: softLimit,
      urgency,
    };
  },
  /**
   * Compact by keeping only the most recent messages (default 10) and
   * summarizing how many older messages were dropped.
   */
  async compact(
    messages: CompactableMessage[],
    _agentId: string,
    _conversationId?: string,
    config?: CompactionConfig
  ): Promise<CompactionResult> {
    const recentCount = config?.keep_recent_messages ?? 10;
    const kept = messages.slice(-recentCount);
    const [tokensBefore, tokensAfter] = await Promise.all([
      fallbackCompactor.estimateMessagesTokens(messages),
      fallbackCompactor.estimateMessagesTokens(kept),
    ]);
    return {
      compacted_messages: kept,
      summary: `[Compacted ${messages.length - kept.length} earlier messages]`,
      original_count: messages.length,
      retained_count: kept.length,
      flushed_memories: 0,
      tokens_before_compaction: tokensBefore,
      tokens_after_compaction: tokensAfter,
    };
  },
};

View File

@@ -0,0 +1,54 @@
/**
* Intelligence Layer - LocalStorage Heartbeat Fallback
*
* Provides no-op heartbeat for browser/dev environment.
*/
import type { HeartbeatConfig, HeartbeatResult } from '../intelligence-backend';
/**
 * No-op heartbeat fallback for browser/dev environments. There are no
 * background tasks outside Tauri, so start/stop/tick do nothing useful;
 * per-agent configuration is kept in memory only (not persisted).
 */
export const fallbackHeartbeat = {
  // In-memory per-agent heartbeat configuration.
  _configs: new Map<string, HeartbeatConfig>(),
  /** Remember the initial config for an agent, when one is provided. */
  async init(agentId: string, config?: HeartbeatConfig): Promise<void> {
    if (config) {
      fallbackHeartbeat._configs.set(agentId, config);
    }
  },
  /** Intentionally a no-op: no background scheduling in the browser. */
  async start(_agentId: string): Promise<void> {},
  /** Intentionally a no-op (nothing was ever started). */
  async stop(_agentId: string): Promise<void> {},
  /** A tick always reports a healthy, empty result. */
  async tick(_agentId: string): Promise<HeartbeatResult> {
    return {
      status: 'ok',
      alerts: [],
      checked_items: 0,
      timestamp: new Date().toISOString(),
    };
  },
  /** Stored config for the agent, or conservative defaults when absent. */
  async getConfig(agentId: string): Promise<HeartbeatConfig> {
    const stored = fallbackHeartbeat._configs.get(agentId);
    if (stored) {
      return stored;
    }
    return {
      enabled: false,
      interval_minutes: 30,
      quiet_hours_start: null,
      quiet_hours_end: null,
      notify_channel: 'ui',
      proactivity_level: 'standard',
      max_alerts_per_tick: 5,
    };
  },
  /** Overwrite the in-memory config for an agent. */
  async updateConfig(agentId: string, config: HeartbeatConfig): Promise<void> {
    fallbackHeartbeat._configs.set(agentId, config);
  },
  /** No history is recorded in the fallback. */
  async getHistory(_agentId: string, _limit?: number): Promise<HeartbeatResult[]> {
    return [];
  },
};

View File

@@ -0,0 +1,239 @@
/**
* Intelligence Layer - LocalStorage Identity Fallback
*
* Provides localStorage-based identity management for browser/dev environment.
*/
import { createLogger } from '../logger';
import type { IdentityFiles, IdentityChangeProposal, IdentitySnapshot } from '../intelligence-backend';
const logger = createLogger('intelligence-client');
const IDENTITY_STORAGE_KEY = 'zclaw-fallback-identities';
const PROPOSALS_STORAGE_KEY = 'zclaw-fallback-proposals';
const SNAPSHOTS_STORAGE_KEY = 'zclaw-fallback-snapshots';
/**
 * Read the persisted agent-identity map from localStorage.
 * Returns an empty map when nothing is stored or parsing fails.
 */
function loadIdentitiesFromStorage(): Map<string, IdentityFiles> {
  const identities = new Map<string, IdentityFiles>();
  try {
    const raw = localStorage.getItem(IDENTITY_STORAGE_KEY);
    if (raw) {
      const parsed = JSON.parse(raw) as Record<string, IdentityFiles>;
      for (const [agentId, files] of Object.entries(parsed)) {
        identities.set(agentId, files);
      }
    }
  } catch (e) {
    logger.warn('Failed to load identities from localStorage', { error: e });
  }
  return identities;
}
/** Persist the identity map as a JSON object; failures are logged and swallowed. */
function saveIdentitiesToStorage(identities: Map<string, IdentityFiles>): void {
  try {
    const serialized = JSON.stringify(Object.fromEntries(identities));
    localStorage.setItem(IDENTITY_STORAGE_KEY, serialized);
  } catch (e) {
    logger.warn('Failed to save identities to localStorage', { error: e });
  }
}
/** Read the persisted proposal list; [] when missing or unreadable. */
function loadProposalsFromStorage(): IdentityChangeProposal[] {
  try {
    const raw = localStorage.getItem(PROPOSALS_STORAGE_KEY);
    return raw ? (JSON.parse(raw) as IdentityChangeProposal[]) : [];
  } catch (e) {
    logger.warn('Failed to load proposals from localStorage', { error: e });
    return [];
  }
}
/** Persist the proposal list; failures are logged and swallowed. */
function saveProposalsToStorage(proposals: IdentityChangeProposal[]): void {
  try {
    const serialized = JSON.stringify(proposals);
    localStorage.setItem(PROPOSALS_STORAGE_KEY, serialized);
  } catch (e) {
    logger.warn('Failed to save proposals to localStorage', { error: e });
  }
}
/** Read the persisted snapshot list; [] when missing or unreadable. */
function loadSnapshotsFromStorage(): IdentitySnapshot[] {
  try {
    const raw = localStorage.getItem(SNAPSHOTS_STORAGE_KEY);
    return raw ? (JSON.parse(raw) as IdentitySnapshot[]) : [];
  } catch (e) {
    logger.warn('Failed to load snapshots from localStorage', { error: e });
    return [];
  }
}
/** Persist the snapshot list; failures are logged and swallowed. */
function saveSnapshotsToStorage(snapshots: IdentitySnapshot[]): void {
  try {
    const serialized = JSON.stringify(snapshots);
    localStorage.setItem(SNAPSHOTS_STORAGE_KEY, serialized);
  } catch (e) {
    logger.warn('Failed to save snapshots to localStorage', { error: e });
  }
}
// Module-level state initialized from localStorage
// Shared by all fallbackIdentity methods; mutated in place and re-saved
// after each write so state survives page reloads.
const fallbackIdentities = loadIdentitiesFromStorage();
const fallbackProposals = loadProposalsFromStorage();
// `let` because snapshot pruning replaces the array wholesale.
let fallbackSnapshots = loadSnapshotsFromStorage();
/**
 * LocalStorage-backed identity management fallback for browser/dev
 * environments. All writes go through the save*ToStorage helpers so the
 * state survives page reloads.
 */
export const fallbackIdentity = {
  /**
   * Return the identity files for an agent, creating (and persisting)
   * sensible defaults on first access.
   */
  async get(agentId: string): Promise<IdentityFiles> {
    if (!fallbackIdentities.has(agentId)) {
      const defaults: IdentityFiles = {
        soul: '# Agent Soul\n\nA helpful AI assistant.',
        instructions: '# Instructions\n\nBe helpful and concise.',
        user_profile: '# User Profile\n\nNo profile yet.',
      };
      fallbackIdentities.set(agentId, defaults);
      saveIdentitiesToStorage(fallbackIdentities);
    }
    return fallbackIdentities.get(agentId)!;
  },
  /** Return a single identity file's content, or '' for unknown file names. */
  async getFile(agentId: string, file: string): Promise<string> {
    const files = await fallbackIdentity.get(agentId);
    return files[file as keyof IdentityFiles] ?? '';
  },
  /**
   * Assemble the system prompt from the identity files, appending the
   * optional memory context as a trailing section.
   */
  async buildPrompt(agentId: string, memoryContext?: string): Promise<string> {
    const files = await fallbackIdentity.get(agentId);
    let prompt = `${files.soul}\n\n## Instructions\n${files.instructions}\n\n## User Profile\n${files.user_profile}`;
    if (memoryContext) {
      prompt += `\n\n## Memory Context\n${memoryContext}`;
    }
    return prompt;
  },
  /** Replace the user profile file and persist. */
  async updateUserProfile(agentId: string, content: string): Promise<void> {
    const files = await fallbackIdentity.get(agentId);
    files.user_profile = content;
    fallbackIdentities.set(agentId, files);
    saveIdentitiesToStorage(fallbackIdentities);
  },
  /** Append a paragraph to the user profile file and persist. */
  async appendUserProfile(agentId: string, addition: string): Promise<void> {
    const files = await fallbackIdentity.get(agentId);
    files.user_profile += `\n\n${addition}`;
    fallbackIdentities.set(agentId, files);
    saveIdentitiesToStorage(fallbackIdentities);
  },
  /**
   * Record a pending change proposal against the soul or instructions file.
   * The proposal must be approved before it takes effect.
   */
  async proposeChange(
    agentId: string,
    file: 'soul' | 'instructions',
    suggestedContent: string,
    reason: string
  ): Promise<IdentityChangeProposal> {
    const files = await fallbackIdentity.get(agentId);
    const proposal: IdentityChangeProposal = {
      id: `prop_${Date.now()}`,
      agent_id: agentId,
      file,
      reason,
      current_content: files[file] ?? '',
      suggested_content: suggestedContent,
      status: 'pending',
      created_at: new Date().toISOString(),
    };
    fallbackProposals.push(proposal);
    saveProposalsToStorage(fallbackProposals);
    return proposal;
  },
  /**
   * Apply a pending proposal: snapshot the current files first (bounded to
   * the 20 newest snapshots per agent), then overwrite the target file.
   *
   * @throws Error when the proposal id is unknown.
   */
  async approveProposal(proposalId: string): Promise<IdentityFiles> {
    const proposal = fallbackProposals.find(p => p.id === proposalId);
    if (!proposal) throw new Error('Proposal not found');
    const files = await fallbackIdentity.get(proposal.agent_id);
    // Create snapshot before applying change
    const snapshot: IdentitySnapshot = {
      id: `snap_${Date.now()}`,
      agent_id: proposal.agent_id,
      files: { ...files },
      timestamp: new Date().toISOString(),
      reason: `Before applying: ${proposal.reason}`,
    };
    fallbackSnapshots.unshift(snapshot);
    // Keep only the 20 newest snapshots for this agent (list is newest-first
    // because snapshots are unshifted). Set lookup avoids O(n^2) filtering.
    const agentSnapshots = fallbackSnapshots.filter(s => s.agent_id === proposal.agent_id);
    if (agentSnapshots.length > 20) {
      const toRemove = new Set(agentSnapshots.slice(20));
      fallbackSnapshots = fallbackSnapshots.filter(s => !toRemove.has(s));
    }
    saveSnapshotsToStorage(fallbackSnapshots);
    proposal.status = 'approved';
    files[proposal.file] = proposal.suggested_content;
    fallbackIdentities.set(proposal.agent_id, files);
    saveIdentitiesToStorage(fallbackIdentities);
    saveProposalsToStorage(fallbackProposals);
    return files;
  },
  /** Mark a proposal rejected (silently ignores unknown ids). */
  async rejectProposal(proposalId: string): Promise<void> {
    const proposal = fallbackProposals.find(p => p.id === proposalId);
    if (proposal) {
      proposal.status = 'rejected';
      saveProposalsToStorage(fallbackProposals);
    }
  },
  /** Pending proposals, optionally restricted to one agent. */
  async getPendingProposals(agentId?: string): Promise<IdentityChangeProposal[]> {
    return fallbackProposals.filter(p =>
      p.status === 'pending' && (!agentId || p.agent_id === agentId)
    );
  },
  /**
   * Directly overwrite one identity file; unknown file names are ignored.
   * (The original's redundant second `in` check has been collapsed.)
   */
  async updateFile(agentId: string, file: string, content: string): Promise<void> {
    const files = await fallbackIdentity.get(agentId);
    if (file in files) {
      files[file as keyof IdentityFiles] = content;
      fallbackIdentities.set(agentId, files);
      saveIdentitiesToStorage(fallbackIdentities);
    }
  },
  /** Newest-first snapshots for an agent (default limit 10). */
  async getSnapshots(agentId: string, limit?: number): Promise<IdentitySnapshot[]> {
    const agentSnapshots = fallbackSnapshots.filter(s => s.agent_id === agentId);
    return agentSnapshots.slice(0, limit ?? 10);
  },
  /**
   * Restore an agent's identity files from a snapshot, auto-backing up the
   * current state first so the restore itself can be undone.
   *
   * @throws Error when the snapshot id is unknown for this agent.
   */
  async restoreSnapshot(agentId: string, snapshotId: string): Promise<void> {
    const snapshot = fallbackSnapshots.find(s => s.id === snapshotId && s.agent_id === agentId);
    if (!snapshot) throw new Error('Snapshot not found');
    // Create a snapshot of current state before restore
    const currentFiles = await fallbackIdentity.get(agentId);
    const beforeRestoreSnapshot: IdentitySnapshot = {
      id: `snap_${Date.now()}`,
      agent_id: agentId,
      files: { ...currentFiles },
      timestamp: new Date().toISOString(),
      reason: 'Auto-backup before restore',
    };
    fallbackSnapshots.unshift(beforeRestoreSnapshot);
    saveSnapshotsToStorage(fallbackSnapshots);
    // Restore the snapshot
    fallbackIdentities.set(agentId, { ...snapshot.files });
    saveIdentitiesToStorage(fallbackIdentities);
  },
  /** Ids of all agents with stored identities. */
  async listAgents(): Promise<string[]> {
    return Array.from(fallbackIdentities.keys());
  },
  /**
   * Remove an agent's identity files.
   * Fix: the original deleted from the in-memory map but never persisted,
   * so the agent reappeared after a page reload.
   */
  async deleteAgent(agentId: string): Promise<void> {
    fallbackIdentities.delete(agentId);
    saveIdentitiesToStorage(fallbackIdentities);
  },
};

View File

@@ -0,0 +1,165 @@
/**
* Intelligence Layer - LocalStorage Memory Fallback
*
* Provides localStorage-based memory operations for browser/dev environment.
*/
import { createLogger } from '../logger';
import { generateRandomString } from '../crypto-utils';
import type { MemoryEntry, MemorySearchOptions, MemoryStats, MemoryType, MemorySource } from './types';
const logger = createLogger('intelligence-client');
import type { MemoryEntryInput } from '../intelligence-backend';
const FALLBACK_STORAGE_KEY = 'zclaw-intelligence-fallback';
// Shape of the JSON blob persisted under FALLBACK_STORAGE_KEY.
interface FallbackMemoryStore {
  memories: MemoryEntry[];
}
/**
 * Load the fallback memory store from localStorage.
 * Returns an empty store when nothing is saved or parsing fails.
 */
function getFallbackStore(): FallbackMemoryStore {
  try {
    const raw = localStorage.getItem(FALLBACK_STORAGE_KEY);
    if (raw) {
      return JSON.parse(raw) as FallbackMemoryStore;
    }
  } catch (e) {
    logger.debug('Failed to read fallback store from localStorage', { error: e });
  }
  return { memories: [] };
}
/** Persist the memory store; failures are logged and swallowed. */
function saveFallbackStore(store: FallbackMemoryStore): void {
  try {
    const serialized = JSON.stringify(store);
    localStorage.setItem(FALLBACK_STORAGE_KEY, serialized);
  } catch (e) {
    logger.warn('Failed to save fallback store to localStorage', { error: e });
  }
}
/**
 * LocalStorage-backed memory fallback for browser/dev environments.
 * Mirrors the Rust backend's memory API but persists everything as a
 * single JSON blob under FALLBACK_STORAGE_KEY.
 */
export const fallbackMemory = {
  /** Nothing to set up for localStorage. */
  async init(): Promise<void> {
    // No-op for localStorage
  },
  /**
   * Persist a new memory entry and return its generated id.
   * Defaults: importance 5, source 'auto', empty tag list.
   */
  async store(entry: MemoryEntryInput): Promise<string> {
    const store = getFallbackStore();
    const id = `mem_${Date.now()}_${generateRandomString(6)}`;
    const now = new Date().toISOString();
    const memory: MemoryEntry = {
      id,
      agentId: entry.agent_id,
      content: entry.content,
      type: entry.memory_type as MemoryType,
      importance: entry.importance ?? 5,
      source: (entry.source as MemorySource) ?? 'auto',
      tags: entry.tags ?? [],
      createdAt: now,
      lastAccessedAt: now,
      accessCount: 0,
      conversationId: entry.conversation_id,
    };
    store.memories.push(memory);
    saveFallbackStore(store);
    return id;
  },
  /** Look up a single memory by id, or null when absent. */
  async get(id: string): Promise<MemoryEntry | null> {
    const store = getFallbackStore();
    return store.memories.find(m => m.id === id) ?? null;
  },
  /**
   * Filter stored memories by agent, type(s), tags, minimum importance and a
   * case-insensitive substring query (matched against content and tags).
   *
   * Fix: the `types` and `tags` options declared on MemorySearchOptions were
   * previously ignored by the fallback; they are now honored.
   */
  async search(options: MemorySearchOptions): Promise<MemoryEntry[]> {
    const store = getFallbackStore();
    let results = store.memories;
    if (options.agentId) {
      results = results.filter(m => m.agentId === options.agentId);
    }
    if (options.type) {
      results = results.filter(m => m.type === options.type);
    }
    if (options.types && options.types.length > 0) {
      const wantedTypes = new Set<string>(options.types);
      results = results.filter(m => wantedTypes.has(m.type));
    }
    if (options.tags && options.tags.length > 0) {
      const wantedTags = options.tags;
      results = results.filter(m => wantedTags.some(t => m.tags.includes(t)));
    }
    if (options.minImportance !== undefined) {
      results = results.filter(m => m.importance >= options.minImportance!);
    }
    if (options.query) {
      const queryLower = options.query.toLowerCase();
      results = results.filter(m =>
        m.content.toLowerCase().includes(queryLower) ||
        m.tags.some(t => t.toLowerCase().includes(queryLower))
      );
    }
    if (options.limit) {
      results = results.slice(0, options.limit);
    }
    return results;
  },
  /** Delete one memory by id (silently ignores unknown ids). */
  async delete(id: string): Promise<void> {
    const store = getFallbackStore();
    store.memories = store.memories.filter(m => m.id !== id);
    saveFallbackStore(store);
  },
  /** Delete every memory for one agent; returns how many were removed. */
  async deleteAll(agentId: string): Promise<number> {
    const store = getFallbackStore();
    const before = store.memories.length;
    store.memories = store.memories.filter(m => m.agentId !== agentId);
    saveFallbackStore(store);
    return before - store.memories.length;
  },
  /** Aggregate counts by type/agent plus oldest/newest timestamps and size. */
  async stats(): Promise<MemoryStats> {
    const store = getFallbackStore();
    const byType: Record<string, number> = {};
    const byAgent: Record<string, number> = {};
    for (const m of store.memories) {
      byType[m.type] = (byType[m.type] ?? 0) + 1;
      byAgent[m.agentId] = (byAgent[m.agentId] ?? 0) + 1;
    }
    // Sort ascending by creation time so the endpoints are oldest/newest.
    const sorted = [...store.memories].sort((a, b) =>
      new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
    );
    // Estimate storage size from serialized data
    let storageSizeBytes = 0;
    try {
      const serialized = JSON.stringify(store.memories);
      storageSizeBytes = new Blob([serialized]).size;
    } catch (e) {
      logger.debug('Failed to estimate storage size', { error: e });
    }
    return {
      totalEntries: store.memories.length,
      byType,
      byAgent,
      oldestEntry: sorted[0]?.createdAt ?? null,
      newestEntry: sorted[sorted.length - 1]?.createdAt ?? null,
      storageSizeBytes,
    };
  },
  /** Dump every stored memory (for backup/export). */
  async export(): Promise<MemoryEntry[]> {
    const store = getFallbackStore();
    return store.memories;
  },
  /** Append imported memories as-is; returns how many were added. */
  async import(memories: MemoryEntry[]): Promise<number> {
    const store = getFallbackStore();
    store.memories.push(...memories);
    saveFallbackStore(store);
    return memories.length;
  },
  /** Pseudo-path identifying the localStorage-backed store. */
  async dbPath(): Promise<string> {
    return 'localStorage://zclaw-intelligence-fallback';
  },
};

View File

@@ -0,0 +1,167 @@
/**
* Intelligence Layer - LocalStorage Reflection Fallback
*
* Provides rule-based reflection for browser/dev environment.
*/
import type {
ReflectionResult,
ReflectionState,
ReflectionConfig,
PatternObservation,
ImprovementSuggestion,
ReflectionIdentityProposal,
MemoryEntryForAnalysis,
} from '../intelligence-backend';
/**
 * Rule-based reflection fallback for browser/dev environments.
 * Scans an agent's memories for simple patterns (task backlog, preference
 * coverage, lessons learned, hot memories, low-value clutter) and produces
 * observations, improvement suggestions and optional identity proposals.
 */
export const fallbackReflection = {
  // Conversations recorded since the last reflection run.
  _conversationCount: 0,
  // ISO timestamp of the most recent reflection, or null if never run.
  _lastReflection: null as string | null,
  // Bounded in-memory history of past reflection results.
  _history: [] as ReflectionResult[],
  /** Config is ignored by the fallback -- nothing to initialize. */
  async init(_config?: ReflectionConfig): Promise<void> {},
  /** Bump the counter consulted by shouldReflect(). */
  async recordConversation(): Promise<void> {
    fallbackReflection._conversationCount += 1;
  },
  /** Reflect after every 5 recorded conversations. */
  async shouldReflect(): Promise<boolean> {
    return fallbackReflection._conversationCount >= 5;
  },
  /** Run the rule-based analysis over the supplied memories. */
  async reflect(agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> {
    fallbackReflection._conversationCount = 0;
    fallbackReflection._lastReflection = new Date().toISOString();
    const patterns: PatternObservation[] = [];
    const improvements: ImprovementSuggestion[] = [];
    const identityProposals: ReflectionIdentityProposal[] = [];
    // Tally memories per memory_type; helpers keep the branches terse.
    const typeCounts = memories.reduce<Record<string, number>>((acc, mem) => {
      acc[mem.memory_type] = (acc[mem.memory_type] ?? 0) + 1;
      return acc;
    }, {});
    const countOf = (t: string): number => typeCounts[t] ?? 0;
    const sampleContents = (t: string): string[] =>
      memories.filter((mem) => mem.memory_type === t).slice(0, 3).map((mem) => mem.content);
    // Pattern: task backlog piling up.
    const taskTotal = countOf('task');
    if (taskTotal >= 5) {
      patterns.push({
        observation: `积累了 ${taskTotal} 个待办任务,可能存在任务管理不善`,
        frequency: taskTotal,
        sentiment: 'negative',
        evidence: sampleContents('task'),
      });
      improvements.push({
        area: '任务管理',
        suggestion: '清理已完成的任务记忆,对长期未处理的任务降低重要性',
        priority: 'high',
      });
    }
    // Pattern: a healthy base of recorded user preferences.
    const prefTotal = countOf('preference');
    if (prefTotal >= 5) {
      patterns.push({
        observation: `已记录 ${prefTotal} 个用户偏好,对用户习惯有较好理解`,
        frequency: prefTotal,
        sentiment: 'positive',
        evidence: sampleContents('preference'),
      });
    }
    // Pattern: a growing body of lessons learned.
    const lessonTotal = countOf('lesson');
    if (lessonTotal >= 5) {
      patterns.push({
        observation: `积累了 ${lessonTotal} 条经验教训,知识库在成长`,
        frequency: lessonTotal,
        sentiment: 'positive',
        evidence: sampleContents('lesson'),
      });
    }
    // Pattern: frequently-accessed important memories forming core knowledge.
    const hotMemories = memories.filter((mem) => mem.access_count >= 5 && mem.importance >= 7);
    if (hotMemories.length >= 3) {
      patterns.push({
        observation: `${hotMemories.length} 条高频访问的重要记忆,核心知识正在形成`,
        frequency: hotMemories.length,
        sentiment: 'positive',
        evidence: hotMemories.slice(0, 3).map((mem) => mem.content),
      });
    }
    // Pattern: low-importance clutter accumulating.
    const clutterTotal = memories.filter((mem) => mem.importance <= 3).length;
    if (clutterTotal > 20) {
      patterns.push({
        observation: `${clutterTotal} 条低重要性记忆,建议清理`,
        frequency: clutterTotal,
        sentiment: 'neutral',
        evidence: [],
      });
      improvements.push({
        area: '记忆管理',
        suggestion: '执行记忆清理移除30天以上未访问且重要性低于3的记忆',
        priority: 'medium',
      });
    }
    // Two or more negative patterns -> propose a self-improvement note
    // for the agent's instructions file.
    const negatives = patterns.filter((p) => p.sentiment === 'negative');
    if (negatives.length >= 2) {
      const reminderLines = negatives.map((p) => `- 注意: ${p.observation}`).join('\n');
      identityProposals.push({
        agent_id: agentId,
        field: 'instructions',
        current_value: '...',
        proposed_value: `\n\n## 自我反思改进\n${reminderLines}`,
        reason: `基于 ${negatives.length} 个负面模式观察,建议在指令中增加自我改进提醒`,
      });
    }
    // Too few recorded preferences -> suggest enriching the user profile.
    if (prefTotal < 3) {
      improvements.push({
        area: '用户理解',
        suggestion: '主动在对话中了解用户偏好(沟通风格、技术栈、工作习惯),丰富用户画像',
        priority: 'medium',
      });
    }
    const result: ReflectionResult = {
      patterns,
      improvements,
      identity_proposals: identityProposals,
      new_memories:
        patterns.filter((p) => p.frequency >= 3).length +
        improvements.filter((i) => i.priority === 'high').length,
      timestamp: new Date().toISOString(),
    };
    // Keep a bounded history: once it exceeds 20 entries, trim down to 10.
    fallbackReflection._history.push(result);
    if (fallbackReflection._history.length > 20) {
      fallbackReflection._history = fallbackReflection._history.slice(-10);
    }
    return result;
  },
  /** Most recent reflections first, up to `limit` (default 10). */
  async getHistory(limit?: number, _agentId?: string): Promise<ReflectionResult[]> {
    return fallbackReflection._history.slice(-(limit ?? 10)).reverse();
  },
  /** Snapshot of the counters that drive shouldReflect(). */
  async getState(): Promise<ReflectionState> {
    return {
      conversations_since_reflection: fallbackReflection._conversationCount,
      last_reflection_time: fallbackReflection._lastReflection,
      last_reflection_agent_id: null,
    };
  },
};

View File

@@ -0,0 +1,72 @@
/**
* Intelligence Layer - Barrel Re-export
*
* Re-exports everything from sub-modules to maintain backward compatibility.
* Existing imports like `import { intelligenceClient } from './intelligence-client'`
* continue to work unchanged because TypeScript resolves directory imports
* through this index.ts file.
*/
// ---------------------------------------------------------------------------
// Type re-exports: frontend memory/mesh/persona types plus the backend types
// surfaced through ./types, consolidated into a single export list (the two
// separate `export type ... from './types'` statements were redundant).
// ---------------------------------------------------------------------------
export type {
  // Frontend memory types
  MemoryType,
  MemorySource,
  MemoryEntry,
  MemorySearchOptions,
  MemoryStats,
  // Mesh types
  BehaviorPattern,
  PatternTypeVariant,
  PatternContext,
  WorkflowRecommendation,
  MeshConfig,
  MeshAnalysisResult,
  ActivityType,
  // Persona evolver types
  EvolutionChangeType,
  InsightCategory,
  IdentityFileType,
  ProposalStatus,
  EvolutionProposal,
  ProfileUpdate,
  EvolutionInsight,
  EvolutionResult,
  PersonaEvolverConfig,
  PersonaEvolverState,
  // Types re-exported from intelligence-backend
  HeartbeatConfig,
  HeartbeatResult,
  HeartbeatAlert,
  CompactableMessage,
  CompactionResult,
  CompactionCheck,
  CompactionConfig,
  PatternObservation,
  ImprovementSuggestion,
  ReflectionResult,
  ReflectionState,
  ReflectionConfig,
  ReflectionIdentityProposal,
  IdentityFiles,
  IdentityChangeProposal,
  IdentitySnapshot,
  MemoryEntryForAnalysis,
} from './types';
// Runtime helper for reading the mesh pattern discriminant.
export { getPatternTypeString } from './types';
// Frontend <-> backend type conversion utilities.
export {
  toFrontendMemory,
  toBackendMemoryInput,
  toBackendSearchOptions,
  toFrontendStats,
  parseTags,
} from './type-conversions';
// Unified client (named and default export for backward compatibility).
export { intelligenceClient } from './unified-client';
export { intelligenceClient as default } from './unified-client';

View File

@@ -0,0 +1,101 @@
/**
* Intelligence Layer - Type Conversion Utilities
*
* Functions for converting between frontend and backend data formats.
*/
import { intelligence } from '../intelligence-backend';
import type {
MemoryEntryInput,
PersistentMemory,
MemorySearchOptions as BackendSearchOptions,
MemoryStats as BackendMemoryStats,
} from '../intelligence-backend';
import { createLogger } from '../logger';
import type { MemoryEntry, MemorySearchOptions, MemoryStats, MemoryType, MemorySource } from './types';
const logger = createLogger('intelligence-client');
// Re-import intelligence for use in conversions (already imported above but
// the `intelligence` binding is needed by unified-client.ts indirectly).
export { intelligence };
export type { MemoryEntryInput, PersistentMemory, BackendSearchOptions, BackendMemoryStats };
/**
 * Map a backend PersistentMemory (snake_case) onto the frontend MemoryEntry
 * shape (camelCase), normalizing tags and nullable conversation ids.
 */
export function toFrontendMemory(record: PersistentMemory): MemoryEntry {
  return {
    id: record.id,
    agentId: record.agent_id,
    content: record.content,
    type: record.memory_type as MemoryType,
    importance: record.importance,
    source: record.source as MemorySource,
    // Backend tags may arrive as a JSON string; normalize to string[].
    tags: parseTags(record.tags),
    createdAt: record.created_at,
    lastAccessedAt: record.last_accessed_at,
    accessCount: record.access_count,
    conversationId: record.conversation_id ?? undefined,
  };
}
/**
 * Map a frontend MemoryEntry (minus the server-generated fields) onto the
 * snake_case MemoryEntryInput shape the backend expects.
 */
export function toBackendMemoryInput(entry: Omit<MemoryEntry, 'id' | 'createdAt' | 'lastAccessedAt' | 'accessCount'>): MemoryEntryInput {
  const { agentId, type, content, importance, source, tags, conversationId } = entry;
  return {
    agent_id: agentId,
    memory_type: type,
    content,
    importance,
    source,
    tags,
    conversation_id: conversationId,
  };
}
/**
 * Map frontend (camelCase) search options onto the backend's snake_case
 * search-option shape. Absent fields pass through as undefined.
 */
export function toBackendSearchOptions(options: MemorySearchOptions): BackendSearchOptions {
  const { agentId, type, tags, query, limit, minImportance } = options;
  return {
    agent_id: agentId,
    memory_type: type,
    tags,
    query,
    limit,
    min_importance: minImportance,
  };
}
/**
 * Map backend (snake_case) memory statistics onto the frontend shape.
 * A missing storage size is reported as 0 bytes.
 */
export function toFrontendStats(stats: BackendMemoryStats): MemoryStats {
  return {
    totalEntries: stats.total_entries,
    byType: stats.by_type,
    byAgent: stats.by_agent,
    oldestEntry: stats.oldest_entry,
    newestEntry: stats.newest_entry,
    storageSizeBytes: stats.storage_size_bytes ?? 0,
  };
}
/**
 * Parse tags delivered by the backend, which may be either an already-parsed
 * string array or a JSON-encoded string.
 *
 * @param tags - Raw tags value (array, JSON string, or empty).
 * @returns A string array; [] for empty, invalid, or non-array input.
 */
export function parseTags(tags: string | string[]): string[] {
  if (Array.isArray(tags)) return tags;
  if (!tags) return [];
  try {
    const parsed: unknown = JSON.parse(tags);
    // Fix: valid JSON that is not an array (e.g. '"solo"' or '{}') previously
    // leaked through with the wrong runtime type; treat it as no tags.
    return Array.isArray(parsed) ? parsed : [];
  } catch (e) {
    logger.debug('JSON parse failed for tags, using fallback', { error: e });
    return [];
  }
}

View File

@@ -0,0 +1,199 @@
/**
* Intelligence Layer - Type Definitions
*
* All frontend types, mesh types, persona evolver types,
* and re-exports from intelligence-backend.
*/
// === Re-export types from intelligence-backend ===
export type {
HeartbeatConfig,
HeartbeatResult,
HeartbeatAlert,
CompactableMessage,
CompactionResult,
CompactionCheck,
CompactionConfig,
PatternObservation,
ImprovementSuggestion,
ReflectionResult,
ReflectionState,
ReflectionConfig,
ReflectionIdentityProposal,
IdentityFiles,
IdentityChangeProposal,
IdentitySnapshot,
MemoryEntryForAnalysis,
} from '../intelligence-backend';
// === Frontend Types (for backward compatibility) ===
/** Category of a stored memory. */
export type MemoryType = 'fact' | 'preference' | 'lesson' | 'context' | 'task';
/** How a memory entered the store (user-created, automatic, or reflection). */
export type MemorySource = 'auto' | 'user' | 'reflection' | 'llm-reflection';
/** A single memory record in frontend (camelCase) shape. */
export interface MemoryEntry {
  id: string;
  agentId: string;
  content: string;
  type: MemoryType;
  // importance scale appears to be 1-10 based on usage elsewhere in this
  // file (defaults of 5, thresholds at 3 and 7) -- confirm with backend
  importance: number;
  source: MemorySource;
  tags: string[];
  // ISO 8601 timestamp strings
  createdAt: string;
  lastAccessedAt: string;
  accessCount: number;
  conversationId?: string;
}
/** Filters accepted by memory search; every field is optional. */
export interface MemorySearchOptions {
  agentId?: string;
  // single-type filter; `types` allows matching any of several types
  type?: MemoryType;
  types?: MemoryType[];
  tags?: string[];
  // case-insensitive substring match against content and tags
  query?: string;
  limit?: number;
  minImportance?: number;
}
/** Aggregate statistics over the memory store. */
export interface MemoryStats {
  totalEntries: number;
  byType: Record<string, number>;
  byAgent: Record<string, number>;
  // ISO timestamps of the extremes, or null when the store is empty
  oldestEntry: string | null;
  newestEntry: string | null;
  storageSizeBytes: number;
}
// === Mesh Types ===
/**
 * Discriminated union describing the kind of behavior pattern detected by
 * the mesh analyzer; `type` is the discriminant tag.
 */
export type PatternTypeVariant =
  | { type: 'SkillCombination'; skill_ids: string[] }
  | { type: 'TemporalTrigger'; hand_id: string; time_pattern: string }
  | { type: 'TaskPipelineMapping'; task_type: string; pipeline_id: string }
  | { type: 'InputPattern'; keywords: string[]; intent: string };
/** A recurring behavior detected over time, with occurrence bookkeeping. */
export interface BehaviorPattern {
  id: string;
  pattern_type: PatternTypeVariant;
  // number of times this pattern has been observed
  frequency: number;
  // ISO timestamps bracketing the observation window
  last_occurrence: string;
  first_occurrence: string;
  confidence: number;
  context: PatternContext;
}
/**
 * Extract the discriminant string from a PatternTypeVariant.
 * Tolerates plain-string values at runtime for backward compatibility
 * with data serialized before the tagged-object form was introduced.
 */
export function getPatternTypeString(patternType: PatternTypeVariant): string {
  return typeof patternType === 'string' ? patternType : patternType.type;
}
/** Contextual evidence captured alongside a detected pattern. */
export interface PatternContext {
  skill_ids?: string[];
  recent_topics?: string[];
  intent?: string;
  // presumably hour of day (0-23) and day of week -- confirm with producer
  time_of_day?: number;
  day_of_week?: number;
}
/** A suggested workflow/pipeline derived from matched behavior patterns. */
export interface WorkflowRecommendation {
  id: string;
  pipeline_id: string;
  confidence: number;
  // human-readable justification for surfacing this recommendation
  reason: string;
  suggested_inputs: Record<string, unknown>;
  // ids of the BehaviorPatterns that triggered this recommendation
  patterns_matched: string[];
  timestamp: string;
}
/** Tuning knobs for the mesh analyzer. */
export interface MeshConfig {
  enabled: boolean;
  min_confidence: number;
  max_recommendations: number;
  analysis_window_hours: number;
}
/** Output of one mesh analysis pass. */
export interface MeshAnalysisResult {
  recommendations: WorkflowRecommendation[];
  patterns_detected: number;
  timestamp: string;
}
/** Discriminated union of user/agent activities fed into the analyzer. */
export type ActivityType =
  | { type: 'skill_used'; skill_ids: string[] }
  | { type: 'pipeline_executed'; task_type: string; pipeline_id: string }
  | { type: 'input_received'; keywords: string[]; intent: string };
// === Persona Evolver Types ===
/** Kind of change an evolution proposal makes to an identity file. */
export type EvolutionChangeType =
  | 'instruction_addition'
  | 'instruction_refinement'
  | 'trait_addition'
  | 'style_adjustment'
  | 'domain_expansion';
/** Area of agent behavior an evolution insight concerns. */
export type InsightCategory =
  | 'communication_style'
  | 'technical_expertise'
  | 'task_efficiency'
  | 'user_preference'
  | 'knowledge_gap';
/** Identity files that can be targeted by evolution proposals. */
export type IdentityFileType = 'soul' | 'instructions';
/** Lifecycle state of a proposal awaiting user review. */
export type ProposalStatus = 'pending' | 'approved' | 'rejected';
/** A proposed change to an agent's identity file, pending user approval. */
export interface EvolutionProposal {
  id: string;
  agent_id: string;
  target_file: IdentityFileType;
  change_type: EvolutionChangeType;
  // why this change is being suggested
  reason: string;
  current_content: string;
  proposed_content: string;
  confidence: number;
  // supporting memory/observation excerpts
  evidence: string[];
  status: ProposalStatus;
  created_at: string;
}
/** A before/after record of one user-profile section update. */
export interface ProfileUpdate {
  section: string;
  previous: string;
  updated: string;
  // what triggered the update (e.g. which memory or rule)
  source: string;
}
/** A single observation produced by an evolution analysis pass. */
export interface EvolutionInsight {
  category: InsightCategory;
  observation: string;
  recommendation: string;
  confidence: number;
}
/** Output of one persona evolution run for an agent. */
export interface EvolutionResult {
  agent_id: string;
  timestamp: string;
  profile_updates: ProfileUpdate[];
  proposals: EvolutionProposal[];
  insights: EvolutionInsight[];
  // true when at least one update or proposal was produced
  evolved: boolean;
}
/** Tuning knobs for the persona evolver. */
export interface PersonaEvolverConfig {
  auto_profile_update: boolean;
  min_preferences_for_update: number;
  min_conversations_for_evolution: number;
  enable_instruction_refinement: boolean;
  enable_soul_evolution: boolean;
  max_proposals_per_cycle: number;
}
/** Bookkeeping state persisted between evolution runs. */
export interface PersonaEvolverState {
  last_evolution: string | null;
  total_evolutions: number;
  pending_proposals: number;
  profile_enrichment_score: number;
}

View File

@@ -0,0 +1,561 @@
/**
* Intelligence Layer Unified Client
*
* Provides a unified API for intelligence operations that:
* - Uses Rust backend (via Tauri commands) when running in Tauri environment
* - Falls back to localStorage-based implementation in browser/dev environment
*
* Degradation strategy:
* - In Tauri mode: if a Tauri invoke fails, the error is logged and re-thrown.
* The caller is responsible for handling the error. We do NOT silently fall
* back to localStorage, because that would give users degraded functionality
* (localStorage instead of SQLite, rule-based instead of LLM-based, no-op
* instead of real execution) without any indication that something is wrong.
* - In browser/dev mode: localStorage fallback is the intended behavior for
* development and testing without a Tauri backend.
*
* This replaces direct usage of:
* - agent-memory.ts
* - heartbeat-engine.ts
* - context-compactor.ts
* - reflection-engine.ts
* - agent-identity.ts
*
* Usage:
* ```typescript
* import { intelligenceClient, toFrontendMemory, toBackendMemoryInput } from './intelligence-client';
*
* // Store memory
* const id = await intelligenceClient.memory.store({
* agent_id: 'agent-1',
* memory_type: 'fact',
* content: 'User prefers concise responses',
* importance: 7,
* });
*
* // Search memories
* const memories = await intelligenceClient.memory.search({
* agent_id: 'agent-1',
* query: 'user preference',
* limit: 10,
* });
*
* // Convert to frontend format if needed
* const frontendMemories = memories.map(toFrontendMemory);
* ```
*/
import { invoke } from '@tauri-apps/api/core';
import { isTauriRuntime } from '../tauri-gateway';
import { intelligence } from './type-conversions';
import type { PersistentMemory } from '../intelligence-backend';
import type {
HeartbeatConfig,
HeartbeatResult,
CompactableMessage,
CompactionResult,
CompactionCheck,
CompactionConfig,
ReflectionConfig,
ReflectionResult,
ReflectionState,
MemoryEntryForAnalysis,
IdentityFiles,
IdentityChangeProposal,
IdentitySnapshot,
} from '../intelligence-backend';
import type { MemoryEntry, MemorySearchOptions, MemoryStats } from './types';
import { toFrontendMemory, toBackendSearchOptions, toFrontendStats } from './type-conversions';
import { fallbackMemory } from './fallback-memory';
import { fallbackCompactor } from './fallback-compactor';
import { fallbackReflection } from './fallback-reflection';
import { fallbackIdentity } from './fallback-identity';
import { fallbackHeartbeat } from './fallback-heartbeat';
/**
 * Wrap a Tauri invoke call so that failures are logged with a stable,
 * human-readable label and then re-thrown to the caller, rather than
 * silently falling back to the localStorage implementations.
 *
 * @param label - Operation name used in the console warning (e.g. 'memory.store').
 * @param fn - Thunk that performs the actual Tauri invoke.
 * @returns The promise produced by `fn`, unchanged on success.
 */
function tauriInvoke<T>(label: string, fn: () => Promise<T>): Promise<T> {
  const pending = fn();
  return pending.catch((err: unknown) => {
    console.warn(`[IntelligenceClient] Tauri invoke failed (${label}):`, err);
    throw err;
  });
}
/**
* Unified intelligence client that automatically selects backend or fallback.
*
* - In Tauri mode: calls Rust backend via invoke(). On failure, logs a warning
* and re-throws -- does NOT fall back to localStorage.
* - In browser/dev mode: uses localStorage-based fallback implementations.
*/
export const intelligenceClient = {
  // --- Persistent agent memory: CRUD, search, stats, import/export. ---
  // Tauri path converts between backend snake_case rows and frontend
  // camelCase entries via toFrontendMemory/toBackendSearchOptions.
  memory: {
    init: async (): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('memory.init', () => intelligence.memory.init());
      } else {
        await fallbackMemory.init();
      }
    },
    // Store a new memory entry; resolves to the new entry's id.
    store: async (entry: import('../intelligence-backend').MemoryEntryInput): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.store', () => intelligence.memory.store(entry));
      }
      return fallbackMemory.store(entry);
    },
    // Fetch a single entry by id; null when not found (both paths).
    get: async (id: string): Promise<MemoryEntry | null> => {
      if (isTauriRuntime()) {
        const result = await tauriInvoke('memory.get', () => intelligence.memory.get(id));
        return result ? toFrontendMemory(result) : null;
      }
      return fallbackMemory.get(id);
    },
    search: async (options: MemorySearchOptions): Promise<MemoryEntry[]> => {
      if (isTauriRuntime()) {
        const results = await tauriInvoke('memory.search', () =>
          intelligence.memory.search(toBackendSearchOptions(options))
        );
        return results.map(toFrontendMemory);
      }
      return fallbackMemory.search(options);
    },
    delete: async (id: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('memory.delete', () => intelligence.memory.delete(id));
      } else {
        await fallbackMemory.delete(id);
      }
    },
    // Delete all memories for one agent; resolves to the number deleted.
    deleteAll: async (agentId: string): Promise<number> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.deleteAll', () => intelligence.memory.deleteAll(agentId));
      }
      return fallbackMemory.deleteAll(agentId);
    },
    stats: async (): Promise<MemoryStats> => {
      if (isTauriRuntime()) {
        const stats = await tauriInvoke('memory.stats', () => intelligence.memory.stats());
        return toFrontendStats(stats);
      }
      return fallbackMemory.stats();
    },
    export: async (): Promise<MemoryEntry[]> => {
      if (isTauriRuntime()) {
        const results = await tauriInvoke('memory.export', () => intelligence.memory.export());
        return results.map(toFrontendMemory);
      }
      return fallbackMemory.export();
    },
    // Bulk-import frontend-shaped entries; resolves to the count imported.
    import: async (memories: MemoryEntry[]): Promise<number> => {
      if (isTauriRuntime()) {
        // NOTE(review): the spread keeps the frontend camelCase keys
        // (agentId, type, createdAt, ...) alongside the snake_case keys
        // added below, and the `as PersistentMemory[]` cast silences the
        // mismatch. Presumably the backend deserializer ignores unknown
        // fields — TODO confirm, or build the row object explicitly.
        // Also note tags is serialized to a JSON string here.
        const backendMemories = memories.map(m => ({
          ...m,
          agent_id: m.agentId,
          memory_type: m.type,
          last_accessed_at: m.lastAccessedAt,
          created_at: m.createdAt,
          access_count: m.accessCount,
          conversation_id: m.conversationId ?? null,
          tags: JSON.stringify(m.tags),
          embedding: null,
        }));
        return tauriInvoke('memory.import', () =>
          intelligence.memory.import(backendMemories as PersistentMemory[])
        );
      }
      return fallbackMemory.import(memories);
    },
    // Absolute path of the backing database (fallback returns its own marker).
    dbPath: async (): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.dbPath', () => intelligence.memory.dbPath());
      }
      return fallbackMemory.dbPath();
    },
    // Build a system-prompt addition from memories relevant to `query`.
    buildContext: async (
      agentId: string,
      query: string,
      maxTokens?: number,
    ): Promise<{ systemPromptAddition: string; totalTokens: number; memoriesUsed: number }> => {
      if (isTauriRuntime()) {
        return tauriInvoke('memory.buildContext', () =>
          intelligence.memory.buildContext(agentId, query, maxTokens ?? null)
        );
      }
      // Browser/dev fallback: use basic search
      // (no token accounting — totalTokens is always reported as 0 here,
      // and maxTokens is ignored).
      const memories = await fallbackMemory.search({
        agentId,
        query,
        limit: 8,
        minImportance: 3,
      });
      const addition = memories.length > 0
        ? `## 相关记忆\n${memories.map(m => `- [${m.type}] ${m.content}`).join('\n')}`
        : '';
      return { systemPromptAddition: addition, totalTokens: 0, memoriesUsed: memories.length };
    },
  },
  // --- Heartbeat engine: periodic agent self-maintenance ticks. ---
  heartbeat: {
    init: async (agentId: string, config?: HeartbeatConfig): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.init', () => intelligence.heartbeat.init(agentId, config));
      } else {
        await fallbackHeartbeat.init(agentId, config);
      }
    },
    start: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.start', () => intelligence.heartbeat.start(agentId));
      } else {
        await fallbackHeartbeat.start(agentId);
      }
    },
    stop: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.stop', () => intelligence.heartbeat.stop(agentId));
      } else {
        await fallbackHeartbeat.stop(agentId);
      }
    },
    // Run a single heartbeat cycle immediately and return its result.
    tick: async (agentId: string): Promise<HeartbeatResult> => {
      if (isTauriRuntime()) {
        return tauriInvoke('heartbeat.tick', () => intelligence.heartbeat.tick(agentId));
      }
      return fallbackHeartbeat.tick(agentId);
    },
    getConfig: async (agentId: string): Promise<HeartbeatConfig> => {
      if (isTauriRuntime()) {
        return tauriInvoke('heartbeat.getConfig', () => intelligence.heartbeat.getConfig(agentId));
      }
      return fallbackHeartbeat.getConfig(agentId);
    },
    updateConfig: async (agentId: string, config: HeartbeatConfig): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('heartbeat.updateConfig', () =>
          intelligence.heartbeat.updateConfig(agentId, config)
        );
      } else {
        await fallbackHeartbeat.updateConfig(agentId, config);
      }
    },
    getHistory: async (agentId: string, limit?: number): Promise<HeartbeatResult[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('heartbeat.getHistory', () =>
          intelligence.heartbeat.getHistory(agentId, limit)
        );
      }
      return fallbackHeartbeat.getHistory(agentId, limit);
    },
    // Push current memory-store statistics to the heartbeat engine.
    updateMemoryStats: async (
      agentId: string,
      taskCount: number,
      totalEntries: number,
      storageSizeBytes: number
    ): Promise<void> => {
      if (isTauriRuntime()) {
        // NOTE(review): this (and the two methods below) calls invoke()
        // directly with snake_case argument keys, unlike every other method
        // here which goes through the typed intelligence.* wrappers. Tauri's
        // JS invoke conventionally expects camelCase argument keys — confirm
        // the Rust commands are declared to accept snake_case keys.
        await tauriInvoke('heartbeat.updateMemoryStats', () =>
          invoke('heartbeat_update_memory_stats', {
            agent_id: agentId,
            task_count: taskCount,
            total_entries: totalEntries,
            storage_size_bytes: storageSizeBytes,
          })
        );
      } else {
        // Browser/dev fallback only
        const cache = {
          taskCount,
          totalEntries,
          storageSizeBytes,
          lastUpdated: new Date().toISOString(),
        };
        localStorage.setItem(`zclaw-memory-stats-${agentId}`, JSON.stringify(cache));
      }
    },
    // Record that the user corrected the agent (per-type counter).
    recordCorrection: async (agentId: string, correctionType: string): Promise<void> => {
      if (isTauriRuntime()) {
        // NOTE(review): direct invoke with snake_case keys — see
        // updateMemoryStats above.
        await tauriInvoke('heartbeat.recordCorrection', () =>
          invoke('heartbeat_record_correction', {
            agent_id: agentId,
            correction_type: correctionType,
          })
        );
      } else {
        // Browser/dev fallback only
        const key = `zclaw-corrections-${agentId}`;
        const stored = localStorage.getItem(key);
        const counters = stored ? JSON.parse(stored) : {};
        counters[correctionType] = (counters[correctionType] || 0) + 1;
        localStorage.setItem(key, JSON.stringify(counters));
      }
    },
    // Record a user interaction timestamp (used for idle detection).
    recordInteraction: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        // NOTE(review): direct invoke with snake_case keys — see
        // updateMemoryStats above.
        await tauriInvoke('heartbeat.recordInteraction', () =>
          invoke('heartbeat_record_interaction', {
            agent_id: agentId,
          })
        );
      } else {
        // Browser/dev fallback only
        localStorage.setItem(`zclaw-last-interaction-${agentId}`, new Date().toISOString());
      }
    },
  },
  // --- Context compactor: token estimation and conversation compaction. ---
  compactor: {
    estimateTokens: async (text: string): Promise<number> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.estimateTokens', () =>
          intelligence.compactor.estimateTokens(text)
        );
      }
      return fallbackCompactor.estimateTokens(text);
    },
    estimateMessagesTokens: async (messages: CompactableMessage[]): Promise<number> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.estimateMessagesTokens', () =>
          intelligence.compactor.estimateMessagesTokens(messages)
        );
      }
      return fallbackCompactor.estimateMessagesTokens(messages);
    },
    // Check whether the messages exceed the compaction threshold.
    checkThreshold: async (
      messages: CompactableMessage[],
      config?: CompactionConfig
    ): Promise<CompactionCheck> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.checkThreshold', () =>
          intelligence.compactor.checkThreshold(messages, config)
        );
      }
      return fallbackCompactor.checkThreshold(messages, config);
    },
    // Compact the message list (LLM-based in Tauri, rule-based in fallback).
    compact: async (
      messages: CompactableMessage[],
      agentId: string,
      conversationId?: string,
      config?: CompactionConfig
    ): Promise<CompactionResult> => {
      if (isTauriRuntime()) {
        return tauriInvoke('compactor.compact', () =>
          intelligence.compactor.compact(messages, agentId, conversationId, config)
        );
      }
      return fallbackCompactor.compact(messages, agentId, conversationId, config);
    },
  },
  // --- Reflection engine: periodic self-analysis over stored memories. ---
  reflection: {
    init: async (config?: ReflectionConfig): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('reflection.init', () => intelligence.reflection.init(config));
      } else {
        await fallbackReflection.init(config);
      }
    },
    recordConversation: async (): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('reflection.recordConversation', () =>
          intelligence.reflection.recordConversation()
        );
      } else {
        await fallbackReflection.recordConversation();
      }
    },
    shouldReflect: async (): Promise<boolean> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.shouldReflect', () =>
          intelligence.reflection.shouldReflect()
        );
      }
      return fallbackReflection.shouldReflect();
    },
    reflect: async (agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.reflect', () =>
          intelligence.reflection.reflect(agentId, memories)
        );
      }
      return fallbackReflection.reflect(agentId, memories);
    },
    getHistory: async (limit?: number, agentId?: string): Promise<ReflectionResult[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.getHistory', () =>
          intelligence.reflection.getHistory(limit, agentId)
        );
      }
      return fallbackReflection.getHistory(limit, agentId);
    },
    getState: async (): Promise<ReflectionState> => {
      if (isTauriRuntime()) {
        return tauriInvoke('reflection.getState', () => intelligence.reflection.getState());
      }
      return fallbackReflection.getState();
    },
  },
  // --- Agent identity: soul/instructions files, proposals, snapshots. ---
  identity: {
    get: async (agentId: string): Promise<IdentityFiles> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.get', () => intelligence.identity.get(agentId));
      }
      return fallbackIdentity.get(agentId);
    },
    getFile: async (agentId: string, file: string): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.getFile', () => intelligence.identity.getFile(agentId, file));
      }
      return fallbackIdentity.getFile(agentId, file);
    },
    // Build the agent's system prompt, optionally merging memory context.
    buildPrompt: async (agentId: string, memoryContext?: string): Promise<string> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.buildPrompt', () =>
          intelligence.identity.buildPrompt(agentId, memoryContext)
        );
      }
      return fallbackIdentity.buildPrompt(agentId, memoryContext);
    },
    updateUserProfile: async (agentId: string, content: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.updateUserProfile', () =>
          intelligence.identity.updateUserProfile(agentId, content)
        );
      } else {
        await fallbackIdentity.updateUserProfile(agentId, content);
      }
    },
    appendUserProfile: async (agentId: string, addition: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.appendUserProfile', () =>
          intelligence.identity.appendUserProfile(agentId, addition)
        );
      } else {
        await fallbackIdentity.appendUserProfile(agentId, addition);
      }
    },
    // Propose a change to a protected identity file (requires approval).
    proposeChange: async (
      agentId: string,
      file: 'soul' | 'instructions',
      suggestedContent: string,
      reason: string
    ): Promise<IdentityChangeProposal> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.proposeChange', () =>
          intelligence.identity.proposeChange(agentId, file, suggestedContent, reason)
        );
      }
      return fallbackIdentity.proposeChange(agentId, file, suggestedContent, reason);
    },
    // Apply a pending proposal; resolves to the updated identity files.
    approveProposal: async (proposalId: string): Promise<IdentityFiles> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.approveProposal', () =>
          intelligence.identity.approveProposal(proposalId)
        );
      }
      return fallbackIdentity.approveProposal(proposalId);
    },
    rejectProposal: async (proposalId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.rejectProposal', () =>
          intelligence.identity.rejectProposal(proposalId)
        );
      } else {
        await fallbackIdentity.rejectProposal(proposalId);
      }
    },
    getPendingProposals: async (agentId?: string): Promise<IdentityChangeProposal[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.getPendingProposals', () =>
          intelligence.identity.getPendingProposals(agentId)
        );
      }
      return fallbackIdentity.getPendingProposals(agentId);
    },
    // Direct file write, bypassing the proposal flow.
    updateFile: async (agentId: string, file: string, content: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.updateFile', () =>
          intelligence.identity.updateFile(agentId, file, content)
        );
      } else {
        await fallbackIdentity.updateFile(agentId, file, content);
      }
    },
    getSnapshots: async (agentId: string, limit?: number): Promise<IdentitySnapshot[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.getSnapshots', () =>
          intelligence.identity.getSnapshots(agentId, limit)
        );
      }
      return fallbackIdentity.getSnapshots(agentId, limit);
    },
    restoreSnapshot: async (agentId: string, snapshotId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.restoreSnapshot', () =>
          intelligence.identity.restoreSnapshot(agentId, snapshotId)
        );
      } else {
        await fallbackIdentity.restoreSnapshot(agentId, snapshotId);
      }
    },
    listAgents: async (): Promise<string[]> => {
      if (isTauriRuntime()) {
        return tauriInvoke('identity.listAgents', () => intelligence.identity.listAgents());
      }
      return fallbackIdentity.listAgents();
    },
    deleteAgent: async (agentId: string): Promise<void> => {
      if (isTauriRuntime()) {
        await tauriInvoke('identity.deleteAgent', () => intelligence.identity.deleteAgent(agentId));
      } else {
        await fallbackIdentity.deleteAgent(agentId);
      }
    },
  },
};
export default intelligenceClient;