feat(intelligence): complete migration to Rust backend
- Unify all intelligence modules to use intelligenceClient
- Delete legacy TS implementations (agent-memory, reflection-engine, heartbeat-engine, context-compactor, agent-identity, memory-index)
- Update all consumers to use snake_case backend types
- Remove deprecated llm-integration.test.ts

This eliminates code duplication between frontend and backend, resolves localStorage limitations, and enables persistent intelligence features.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
955
desktop/src/lib/intelligence-client.ts
Normal file
955
desktop/src/lib/intelligence-client.ts
Normal file
@@ -0,0 +1,955 @@
|
||||
/**
|
||||
* Intelligence Layer Unified Client
|
||||
*
|
||||
* Provides a unified API for intelligence operations that:
|
||||
* - Uses Rust backend (via Tauri commands) when running in Tauri environment
|
||||
* - Falls back to localStorage-based implementation in browser environment
|
||||
*
|
||||
* This replaces direct usage of:
|
||||
* - agent-memory.ts
|
||||
* - heartbeat-engine.ts
|
||||
* - context-compactor.ts
|
||||
* - reflection-engine.ts
|
||||
* - agent-identity.ts
|
||||
*
|
||||
* Usage:
|
||||
* ```typescript
|
||||
* import { intelligenceClient, toFrontendMemory, toBackendMemoryInput } from './intelligence-client';
|
||||
*
|
||||
* // Store memory
|
||||
* const id = await intelligenceClient.memory.store({
|
||||
* agent_id: 'agent-1',
|
||||
* memory_type: 'fact',
|
||||
* content: 'User prefers concise responses',
|
||||
* importance: 7,
|
||||
* });
|
||||
*
|
||||
* // Search memories
|
||||
* const memories = await intelligenceClient.memory.search({
|
||||
* agent_id: 'agent-1',
|
||||
* query: 'user preference',
|
||||
* limit: 10,
|
||||
* });
|
||||
*
|
||||
* // Convert to frontend format if needed
|
||||
* const frontendMemories = memories.map(toFrontendMemory);
|
||||
* ```
|
||||
*/
|
||||
|
||||
import {
|
||||
intelligence,
|
||||
type MemoryEntryInput,
|
||||
type PersistentMemory,
|
||||
type MemorySearchOptions as BackendSearchOptions,
|
||||
type MemoryStats as BackendMemoryStats,
|
||||
type HeartbeatConfig,
|
||||
type HeartbeatResult,
|
||||
type CompactableMessage,
|
||||
type CompactionResult,
|
||||
type CompactionCheck,
|
||||
type CompactionConfig,
|
||||
type MemoryEntryForAnalysis,
|
||||
type ReflectionResult,
|
||||
type ReflectionState,
|
||||
type ReflectionConfig,
|
||||
type IdentityFiles,
|
||||
type IdentityChangeProposal,
|
||||
type IdentitySnapshot,
|
||||
} from './intelligence-backend';
|
||||
|
||||
// === Environment Detection ===
|
||||
|
||||
/**
|
||||
* Check if running in Tauri environment
|
||||
*/
|
||||
export function isTauriEnv(): boolean {
|
||||
return typeof window !== 'undefined' && '__TAURI__' in window;
|
||||
}
|
||||
|
||||
// === Frontend Types (for backward compatibility) ===
|
||||
|
||||
/** Categories a memory entry can belong to (corresponds to the backend's memory_type). */
export type MemoryType = 'fact' | 'preference' | 'lesson' | 'context' | 'task';

/** How a memory came to exist; 'auto' is the default applied by the fallback store. */
export type MemorySource = 'auto' | 'user' | 'reflection' | 'llm-reflection';
|
||||
|
||||
/**
 * Frontend (camelCase) view of a stored memory. Produced from the backend's
 * snake_case PersistentMemory by toFrontendMemory.
 */
export interface MemoryEntry {
  id: string;
  /** Owning agent (backend: agent_id). */
  agentId: string;
  /** The remembered text itself. */
  content: string;
  /** Category of the memory (backend: memory_type). */
  type: MemoryType;
  /** Importance score; the fallback store defaults new entries to 5. */
  importance: number;
  /** How the memory was created (backend: source). */
  source: MemorySource;
  tags: string[];
  /** ISO-8601 creation timestamp (backend: created_at). */
  createdAt: string;
  /** ISO-8601 last-read timestamp (backend: last_accessed_at). */
  lastAccessedAt: string;
  /** Read counter (backend: access_count). */
  accessCount: number;
  /** Conversation the memory came from, when known (backend: conversation_id). */
  conversationId?: string;
}
|
||||
|
||||
/**
 * Frontend search filters; converted to the backend's snake_case shape by
 * toBackendSearchOptions.
 */
export interface MemorySearchOptions {
  /** Restrict results to one agent. */
  agentId?: string;
  /** Single memory-type filter (maps to backend memory_type). */
  type?: MemoryType;
  /** Multi-type filter. NOTE(review): toBackendSearchOptions does not forward this field to the backend — confirm whether the Rust side supports it. */
  types?: MemoryType[];
  tags?: string[];
  /** Free-text query; the fallback matches it against content and tags. */
  query?: string;
  /** Maximum number of results. */
  limit?: number;
  /** Minimum importance score, inclusive. */
  minImportance?: number;
}
|
||||
|
||||
/**
 * Frontend (camelCase) aggregate statistics, converted from the backend's
 * snake_case stats shape by toFrontendStats.
 */
export interface MemoryStats {
  /** Total number of stored memories (backend: total_memories). */
  totalEntries: number;
  /** Memory count per memory type (backend: by_type). */
  byType: Record<string, number>;
  /** Memory count per agent id (backend: by_agent). */
  byAgent: Record<string, number>;
  /** Creation timestamp of the oldest memory, or null when the store is empty (backend: oldest_memory). */
  oldestEntry: string | null;
  /** Creation timestamp of the newest memory, or null when the store is empty (backend: newest_memory). */
  newestEntry: string | null;
}
|
||||
|
||||
// === Re-export types from intelligence-backend ===
|
||||
|
||||
export type {
|
||||
HeartbeatConfig,
|
||||
HeartbeatResult,
|
||||
HeartbeatAlert,
|
||||
CompactableMessage,
|
||||
CompactionResult,
|
||||
CompactionCheck,
|
||||
CompactionConfig,
|
||||
PatternObservation,
|
||||
ImprovementSuggestion,
|
||||
ReflectionResult,
|
||||
ReflectionState,
|
||||
ReflectionConfig,
|
||||
IdentityFiles,
|
||||
IdentityChangeProposal,
|
||||
IdentitySnapshot,
|
||||
} from './intelligence-backend';
|
||||
|
||||
// === Type Conversion Utilities ===
|
||||
|
||||
/**
|
||||
* Convert backend PersistentMemory to frontend MemoryEntry format
|
||||
*/
|
||||
export function toFrontendMemory(backend: PersistentMemory): MemoryEntry {
|
||||
return {
|
||||
id: backend.id,
|
||||
agentId: backend.agent_id,
|
||||
content: backend.content,
|
||||
type: backend.memory_type as MemoryType,
|
||||
importance: backend.importance,
|
||||
source: backend.source as MemorySource,
|
||||
tags: parseTags(backend.tags),
|
||||
createdAt: backend.created_at,
|
||||
lastAccessedAt: backend.last_accessed_at,
|
||||
accessCount: backend.access_count,
|
||||
conversationId: backend.conversation_id ?? undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert frontend MemoryEntry to backend MemoryEntryInput format
|
||||
*/
|
||||
export function toBackendMemoryInput(entry: Omit<MemoryEntry, 'id' | 'createdAt' | 'lastAccessedAt' | 'accessCount'>): MemoryEntryInput {
|
||||
return {
|
||||
agent_id: entry.agentId,
|
||||
memory_type: entry.type,
|
||||
content: entry.content,
|
||||
importance: entry.importance,
|
||||
source: entry.source,
|
||||
tags: entry.tags,
|
||||
conversation_id: entry.conversationId,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert frontend search options to backend format
|
||||
*/
|
||||
export function toBackendSearchOptions(options: MemorySearchOptions): BackendSearchOptions {
|
||||
return {
|
||||
agent_id: options.agentId,
|
||||
memory_type: options.type,
|
||||
tags: options.tags,
|
||||
query: options.query,
|
||||
limit: options.limit,
|
||||
min_importance: options.minImportance,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert backend stats to frontend format
|
||||
*/
|
||||
export function toFrontendStats(backend: BackendMemoryStats): MemoryStats {
|
||||
return {
|
||||
totalEntries: backend.total_memories,
|
||||
byType: backend.by_type,
|
||||
byAgent: backend.by_agent,
|
||||
oldestEntry: backend.oldest_memory,
|
||||
newestEntry: backend.newest_memory,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse tags from backend (JSON string or array)
|
||||
*/
|
||||
function parseTags(tags: string | string[]): string[] {
|
||||
if (Array.isArray(tags)) return tags;
|
||||
if (!tags) return [];
|
||||
try {
|
||||
return JSON.parse(tags);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// === LocalStorage Fallback Implementation ===
|
||||
|
||||
const FALLBACK_STORAGE_KEY = 'zclaw-intelligence-fallback';
|
||||
|
||||
interface FallbackMemoryStore {
|
||||
memories: MemoryEntry[];
|
||||
}
|
||||
|
||||
function getFallbackStore(): FallbackMemoryStore {
|
||||
try {
|
||||
const stored = localStorage.getItem(FALLBACK_STORAGE_KEY);
|
||||
if (stored) {
|
||||
return JSON.parse(stored);
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
return { memories: [] };
|
||||
}
|
||||
|
||||
function saveFallbackStore(store: FallbackMemoryStore): void {
|
||||
try {
|
||||
localStorage.setItem(FALLBACK_STORAGE_KEY, JSON.stringify(store));
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to save to localStorage');
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback Memory API
|
||||
const fallbackMemory = {
|
||||
async init(): Promise<void> {
|
||||
// No-op for localStorage
|
||||
},
|
||||
|
||||
async store(entry: MemoryEntryInput): Promise<string> {
|
||||
const store = getFallbackStore();
|
||||
const id = `mem_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
|
||||
const now = new Date().toISOString();
|
||||
|
||||
const memory: MemoryEntry = {
|
||||
id,
|
||||
agentId: entry.agent_id,
|
||||
content: entry.content,
|
||||
type: entry.memory_type as MemoryType,
|
||||
importance: entry.importance ?? 5,
|
||||
source: (entry.source as MemorySource) ?? 'auto',
|
||||
tags: entry.tags ?? [],
|
||||
createdAt: now,
|
||||
lastAccessedAt: now,
|
||||
accessCount: 0,
|
||||
conversationId: entry.conversation_id,
|
||||
};
|
||||
|
||||
store.memories.push(memory);
|
||||
saveFallbackStore(store);
|
||||
return id;
|
||||
},
|
||||
|
||||
async get(id: string): Promise<MemoryEntry | null> {
|
||||
const store = getFallbackStore();
|
||||
return store.memories.find(m => m.id === id) ?? null;
|
||||
},
|
||||
|
||||
async search(options: MemorySearchOptions): Promise<MemoryEntry[]> {
|
||||
const store = getFallbackStore();
|
||||
let results = store.memories;
|
||||
|
||||
if (options.agentId) {
|
||||
results = results.filter(m => m.agentId === options.agentId);
|
||||
}
|
||||
if (options.type) {
|
||||
results = results.filter(m => m.type === options.type);
|
||||
}
|
||||
if (options.minImportance !== undefined) {
|
||||
results = results.filter(m => m.importance >= options.minImportance!);
|
||||
}
|
||||
if (options.query) {
|
||||
const queryLower = options.query.toLowerCase();
|
||||
results = results.filter(m =>
|
||||
m.content.toLowerCase().includes(queryLower) ||
|
||||
m.tags.some(t => t.toLowerCase().includes(queryLower))
|
||||
);
|
||||
}
|
||||
if (options.limit) {
|
||||
results = results.slice(0, options.limit);
|
||||
}
|
||||
|
||||
return results;
|
||||
},
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
const store = getFallbackStore();
|
||||
store.memories = store.memories.filter(m => m.id !== id);
|
||||
saveFallbackStore(store);
|
||||
},
|
||||
|
||||
async deleteAll(agentId: string): Promise<number> {
|
||||
const store = getFallbackStore();
|
||||
const before = store.memories.length;
|
||||
store.memories = store.memories.filter(m => m.agentId !== agentId);
|
||||
saveFallbackStore(store);
|
||||
return before - store.memories.length;
|
||||
},
|
||||
|
||||
async stats(): Promise<MemoryStats> {
|
||||
const store = getFallbackStore();
|
||||
const byType: Record<string, number> = {};
|
||||
const byAgent: Record<string, number> = {};
|
||||
|
||||
for (const m of store.memories) {
|
||||
byType[m.type] = (byType[m.type] ?? 0) + 1;
|
||||
byAgent[m.agentId] = (byAgent[m.agentId] ?? 0) + 1;
|
||||
}
|
||||
|
||||
const sorted = [...store.memories].sort((a, b) =>
|
||||
new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
|
||||
);
|
||||
|
||||
return {
|
||||
totalEntries: store.memories.length,
|
||||
byType,
|
||||
byAgent,
|
||||
oldestEntry: sorted[0]?.createdAt ?? null,
|
||||
newestEntry: sorted[sorted.length - 1]?.createdAt ?? null,
|
||||
};
|
||||
},
|
||||
|
||||
async export(): Promise<MemoryEntry[]> {
|
||||
const store = getFallbackStore();
|
||||
return store.memories;
|
||||
},
|
||||
|
||||
async import(memories: MemoryEntry[]): Promise<number> {
|
||||
const store = getFallbackStore();
|
||||
store.memories.push(...memories);
|
||||
saveFallbackStore(store);
|
||||
return memories.length;
|
||||
},
|
||||
|
||||
async dbPath(): Promise<string> {
|
||||
return 'localStorage://zclaw-intelligence-fallback';
|
||||
},
|
||||
};
|
||||
|
||||
// Fallback Compactor API
|
||||
const fallbackCompactor = {
|
||||
async estimateTokens(text: string): Promise<number> {
|
||||
// Simple heuristic: ~4 chars per token for English, ~1.5 for CJK
|
||||
const cjkChars = (text.match(/[\u4e00-\u9fff\u3040-\u30ff]/g) ?? []).length;
|
||||
const otherChars = text.length - cjkChars;
|
||||
return Math.ceil(cjkChars * 1.5 + otherChars / 4);
|
||||
},
|
||||
|
||||
async estimateMessagesTokens(messages: CompactableMessage[]): Promise<number> {
|
||||
let total = 0;
|
||||
for (const m of messages) {
|
||||
total += await fallbackCompactor.estimateTokens(m.content);
|
||||
}
|
||||
return total;
|
||||
},
|
||||
|
||||
async checkThreshold(
|
||||
messages: CompactableMessage[],
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionCheck> {
|
||||
const threshold = config?.soft_threshold_tokens ?? 15000;
|
||||
const currentTokens = await fallbackCompactor.estimateMessagesTokens(messages);
|
||||
|
||||
return {
|
||||
should_compact: currentTokens >= threshold,
|
||||
current_tokens: currentTokens,
|
||||
threshold,
|
||||
urgency: currentTokens >= (config?.hard_threshold_tokens ?? 20000) ? 'hard' :
|
||||
currentTokens >= threshold ? 'soft' : 'none',
|
||||
};
|
||||
},
|
||||
|
||||
async compact(
|
||||
messages: CompactableMessage[],
|
||||
_agentId: string,
|
||||
_conversationId?: string,
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionResult> {
|
||||
// Simple rule-based compaction: keep last N messages
|
||||
const keepRecent = config?.keep_recent_messages ?? 10;
|
||||
const retained = messages.slice(-keepRecent);
|
||||
|
||||
return {
|
||||
compacted_messages: retained,
|
||||
summary: `[Compacted ${messages.length - retained.length} earlier messages]`,
|
||||
original_count: messages.length,
|
||||
retained_count: retained.length,
|
||||
flushed_memories: 0,
|
||||
tokens_before_compaction: await fallbackCompactor.estimateMessagesTokens(messages),
|
||||
tokens_after_compaction: await fallbackCompactor.estimateMessagesTokens(retained),
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
// Fallback Reflection API
|
||||
const fallbackReflection = {
|
||||
_conversationCount: 0,
|
||||
_lastReflection: null as string | null,
|
||||
|
||||
async init(_config?: ReflectionConfig): Promise<void> {
|
||||
// No-op
|
||||
},
|
||||
|
||||
async recordConversation(): Promise<void> {
|
||||
fallbackReflection._conversationCount++;
|
||||
},
|
||||
|
||||
async shouldReflect(): Promise<boolean> {
|
||||
return fallbackReflection._conversationCount >= 5;
|
||||
},
|
||||
|
||||
async reflect(_agentId: string, _memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> {
|
||||
fallbackReflection._conversationCount = 0;
|
||||
fallbackReflection._lastReflection = new Date().toISOString();
|
||||
|
||||
return {
|
||||
patterns: [],
|
||||
improvements: [],
|
||||
identity_proposals: [],
|
||||
new_memories: 0,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
},
|
||||
|
||||
async getHistory(_limit?: number): Promise<ReflectionResult[]> {
|
||||
return [];
|
||||
},
|
||||
|
||||
async getState(): Promise<ReflectionState> {
|
||||
return {
|
||||
conversations_since_reflection: fallbackReflection._conversationCount,
|
||||
last_reflection_time: fallbackReflection._lastReflection,
|
||||
last_reflection_agent_id: null,
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
// Fallback Identity API
|
||||
const fallbackIdentities = new Map<string, IdentityFiles>();
|
||||
const fallbackProposals: IdentityChangeProposal[] = [];
|
||||
|
||||
const fallbackIdentity = {
|
||||
async get(agentId: string): Promise<IdentityFiles> {
|
||||
if (!fallbackIdentities.has(agentId)) {
|
||||
fallbackIdentities.set(agentId, {
|
||||
soul: '# Agent Soul\n\nA helpful AI assistant.',
|
||||
instructions: '# Instructions\n\nBe helpful and concise.',
|
||||
user_profile: '# User Profile\n\nNo profile yet.',
|
||||
});
|
||||
}
|
||||
return fallbackIdentities.get(agentId)!;
|
||||
},
|
||||
|
||||
async getFile(agentId: string, file: string): Promise<string> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
return files[file as keyof IdentityFiles] ?? '';
|
||||
},
|
||||
|
||||
async buildPrompt(agentId: string, memoryContext?: string): Promise<string> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
let prompt = `${files.soul}\n\n## Instructions\n${files.instructions}\n\n## User Profile\n${files.user_profile}`;
|
||||
if (memoryContext) {
|
||||
prompt += `\n\n## Memory Context\n${memoryContext}`;
|
||||
}
|
||||
return prompt;
|
||||
},
|
||||
|
||||
async updateUserProfile(agentId: string, content: string): Promise<void> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
files.user_profile = content;
|
||||
fallbackIdentities.set(agentId, files);
|
||||
},
|
||||
|
||||
async appendUserProfile(agentId: string, addition: string): Promise<void> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
files.user_profile += `\n\n${addition}`;
|
||||
fallbackIdentities.set(agentId, files);
|
||||
},
|
||||
|
||||
async proposeChange(
|
||||
agentId: string,
|
||||
file: 'soul' | 'instructions',
|
||||
suggestedContent: string,
|
||||
reason: string
|
||||
): Promise<IdentityChangeProposal> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
const proposal: IdentityChangeProposal = {
|
||||
id: `prop_${Date.now()}`,
|
||||
agent_id: agentId,
|
||||
file,
|
||||
reason,
|
||||
current_content: files[file] ?? '',
|
||||
suggested_content: suggestedContent,
|
||||
status: 'pending',
|
||||
created_at: new Date().toISOString(),
|
||||
};
|
||||
fallbackProposals.push(proposal);
|
||||
return proposal;
|
||||
},
|
||||
|
||||
async approveProposal(proposalId: string): Promise<IdentityFiles> {
|
||||
const proposal = fallbackProposals.find(p => p.id === proposalId);
|
||||
if (!proposal) throw new Error('Proposal not found');
|
||||
|
||||
proposal.status = 'approved';
|
||||
const files = await fallbackIdentity.get(proposal.agent_id);
|
||||
files[proposal.file] = proposal.suggested_content;
|
||||
fallbackIdentities.set(proposal.agent_id, files);
|
||||
return files;
|
||||
},
|
||||
|
||||
async rejectProposal(proposalId: string): Promise<void> {
|
||||
const proposal = fallbackProposals.find(p => p.id === proposalId);
|
||||
if (proposal) {
|
||||
proposal.status = 'rejected';
|
||||
}
|
||||
},
|
||||
|
||||
async getPendingProposals(agentId?: string): Promise<IdentityChangeProposal[]> {
|
||||
return fallbackProposals.filter(p =>
|
||||
p.status === 'pending' && (!agentId || p.agent_id === agentId)
|
||||
);
|
||||
},
|
||||
|
||||
async updateFile(agentId: string, file: string, content: string): Promise<void> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
if (file in files) {
|
||||
// IdentityFiles has known properties, update safely
|
||||
const key = file as keyof IdentityFiles;
|
||||
if (key in files) {
|
||||
files[key] = content;
|
||||
fallbackIdentities.set(agentId, files);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
async getSnapshots(_agentId: string, _limit?: number): Promise<IdentitySnapshot[]> {
|
||||
return [];
|
||||
},
|
||||
|
||||
async restoreSnapshot(_agentId: string, _snapshotId: string): Promise<void> {
|
||||
// No-op for fallback
|
||||
},
|
||||
|
||||
async listAgents(): Promise<string[]> {
|
||||
return Array.from(fallbackIdentities.keys());
|
||||
},
|
||||
|
||||
async deleteAgent(agentId: string): Promise<void> {
|
||||
fallbackIdentities.delete(agentId);
|
||||
},
|
||||
};
|
||||
|
||||
// Fallback Heartbeat API
|
||||
const fallbackHeartbeat = {
|
||||
_configs: new Map<string, HeartbeatConfig>(),
|
||||
|
||||
async init(agentId: string, config?: HeartbeatConfig): Promise<void> {
|
||||
if (config) {
|
||||
fallbackHeartbeat._configs.set(agentId, config);
|
||||
}
|
||||
},
|
||||
|
||||
async start(_agentId: string): Promise<void> {
|
||||
// No-op for fallback (no background tasks in browser)
|
||||
},
|
||||
|
||||
async stop(_agentId: string): Promise<void> {
|
||||
// No-op
|
||||
},
|
||||
|
||||
async tick(_agentId: string): Promise<HeartbeatResult> {
|
||||
return {
|
||||
status: 'ok',
|
||||
alerts: [],
|
||||
checked_items: 0,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
},
|
||||
|
||||
async getConfig(agentId: string): Promise<HeartbeatConfig> {
|
||||
return fallbackHeartbeat._configs.get(agentId) ?? {
|
||||
enabled: false,
|
||||
interval_minutes: 30,
|
||||
quiet_hours_start: null,
|
||||
quiet_hours_end: null,
|
||||
notify_channel: 'ui',
|
||||
proactivity_level: 'standard',
|
||||
max_alerts_per_tick: 5,
|
||||
};
|
||||
},
|
||||
|
||||
async updateConfig(agentId: string, config: HeartbeatConfig): Promise<void> {
|
||||
fallbackHeartbeat._configs.set(agentId, config);
|
||||
},
|
||||
|
||||
async getHistory(_agentId: string, _limit?: number): Promise<HeartbeatResult[]> {
|
||||
return [];
|
||||
},
|
||||
};
|
||||
|
||||
// === Unified Client Export ===
|
||||
|
||||
/**
|
||||
* Unified intelligence client that automatically selects backend or fallback
|
||||
*/
|
||||
export const intelligenceClient = {
|
||||
memory: {
|
||||
init: async (): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.memory.init();
|
||||
} else {
|
||||
await fallbackMemory.init();
|
||||
}
|
||||
},
|
||||
|
||||
store: async (entry: MemoryEntryInput): Promise<string> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.memory.store(entry);
|
||||
}
|
||||
return fallbackMemory.store(entry);
|
||||
},
|
||||
|
||||
get: async (id: string): Promise<MemoryEntry | null> => {
|
||||
if (isTauriEnv()) {
|
||||
const result = await intelligence.memory.get(id);
|
||||
return result ? toFrontendMemory(result) : null;
|
||||
}
|
||||
return fallbackMemory.get(id);
|
||||
},
|
||||
|
||||
search: async (options: MemorySearchOptions): Promise<MemoryEntry[]> => {
|
||||
if (isTauriEnv()) {
|
||||
const results = await intelligence.memory.search(toBackendSearchOptions(options));
|
||||
return results.map(toFrontendMemory);
|
||||
}
|
||||
return fallbackMemory.search(options);
|
||||
},
|
||||
|
||||
delete: async (id: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.memory.delete(id);
|
||||
} else {
|
||||
await fallbackMemory.delete(id);
|
||||
}
|
||||
},
|
||||
|
||||
deleteAll: async (agentId: string): Promise<number> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.memory.deleteAll(agentId);
|
||||
}
|
||||
return fallbackMemory.deleteAll(agentId);
|
||||
},
|
||||
|
||||
stats: async (): Promise<MemoryStats> => {
|
||||
if (isTauriEnv()) {
|
||||
const stats = await intelligence.memory.stats();
|
||||
return toFrontendStats(stats);
|
||||
}
|
||||
return fallbackMemory.stats();
|
||||
},
|
||||
|
||||
export: async (): Promise<MemoryEntry[]> => {
|
||||
if (isTauriEnv()) {
|
||||
const results = await intelligence.memory.export();
|
||||
return results.map(toFrontendMemory);
|
||||
}
|
||||
return fallbackMemory.export();
|
||||
},
|
||||
|
||||
import: async (memories: MemoryEntry[]): Promise<number> => {
|
||||
if (isTauriEnv()) {
|
||||
// Convert to backend format
|
||||
const backendMemories = memories.map(m => ({
|
||||
...m,
|
||||
agent_id: m.agentId,
|
||||
memory_type: m.type,
|
||||
last_accessed_at: m.lastAccessedAt,
|
||||
created_at: m.createdAt,
|
||||
access_count: m.accessCount,
|
||||
conversation_id: m.conversationId ?? null,
|
||||
tags: JSON.stringify(m.tags),
|
||||
embedding: null,
|
||||
}));
|
||||
return intelligence.memory.import(backendMemories as PersistentMemory[]);
|
||||
}
|
||||
return fallbackMemory.import(memories);
|
||||
},
|
||||
|
||||
dbPath: async (): Promise<string> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.memory.dbPath();
|
||||
}
|
||||
return fallbackMemory.dbPath();
|
||||
},
|
||||
},
|
||||
|
||||
heartbeat: {
|
||||
init: async (agentId: string, config?: HeartbeatConfig): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.heartbeat.init(agentId, config);
|
||||
} else {
|
||||
await fallbackHeartbeat.init(agentId, config);
|
||||
}
|
||||
},
|
||||
|
||||
start: async (agentId: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.heartbeat.start(agentId);
|
||||
} else {
|
||||
await fallbackHeartbeat.start(agentId);
|
||||
}
|
||||
},
|
||||
|
||||
stop: async (agentId: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.heartbeat.stop(agentId);
|
||||
} else {
|
||||
await fallbackHeartbeat.stop(agentId);
|
||||
}
|
||||
},
|
||||
|
||||
tick: async (agentId: string): Promise<HeartbeatResult> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.heartbeat.tick(agentId);
|
||||
}
|
||||
return fallbackHeartbeat.tick(agentId);
|
||||
},
|
||||
|
||||
getConfig: async (agentId: string): Promise<HeartbeatConfig> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.heartbeat.getConfig(agentId);
|
||||
}
|
||||
return fallbackHeartbeat.getConfig(agentId);
|
||||
},
|
||||
|
||||
updateConfig: async (agentId: string, config: HeartbeatConfig): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.heartbeat.updateConfig(agentId, config);
|
||||
} else {
|
||||
await fallbackHeartbeat.updateConfig(agentId, config);
|
||||
}
|
||||
},
|
||||
|
||||
getHistory: async (agentId: string, limit?: number): Promise<HeartbeatResult[]> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.heartbeat.getHistory(agentId, limit);
|
||||
}
|
||||
return fallbackHeartbeat.getHistory(agentId, limit);
|
||||
},
|
||||
},
|
||||
|
||||
compactor: {
|
||||
estimateTokens: async (text: string): Promise<number> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.compactor.estimateTokens(text);
|
||||
}
|
||||
return fallbackCompactor.estimateTokens(text);
|
||||
},
|
||||
|
||||
estimateMessagesTokens: async (messages: CompactableMessage[]): Promise<number> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.compactor.estimateMessagesTokens(messages);
|
||||
}
|
||||
return fallbackCompactor.estimateMessagesTokens(messages);
|
||||
},
|
||||
|
||||
checkThreshold: async (
|
||||
messages: CompactableMessage[],
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionCheck> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.compactor.checkThreshold(messages, config);
|
||||
}
|
||||
return fallbackCompactor.checkThreshold(messages, config);
|
||||
},
|
||||
|
||||
compact: async (
|
||||
messages: CompactableMessage[],
|
||||
agentId: string,
|
||||
conversationId?: string,
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionResult> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.compactor.compact(messages, agentId, conversationId, config);
|
||||
}
|
||||
return fallbackCompactor.compact(messages, agentId, conversationId, config);
|
||||
},
|
||||
},
|
||||
|
||||
reflection: {
|
||||
init: async (config?: ReflectionConfig): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.reflection.init(config);
|
||||
} else {
|
||||
await fallbackReflection.init(config);
|
||||
}
|
||||
},
|
||||
|
||||
recordConversation: async (): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.reflection.recordConversation();
|
||||
} else {
|
||||
await fallbackReflection.recordConversation();
|
||||
}
|
||||
},
|
||||
|
||||
shouldReflect: async (): Promise<boolean> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.reflection.shouldReflect();
|
||||
}
|
||||
return fallbackReflection.shouldReflect();
|
||||
},
|
||||
|
||||
reflect: async (agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.reflection.reflect(agentId, memories);
|
||||
}
|
||||
return fallbackReflection.reflect(agentId, memories);
|
||||
},
|
||||
|
||||
getHistory: async (limit?: number): Promise<ReflectionResult[]> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.reflection.getHistory(limit);
|
||||
}
|
||||
return fallbackReflection.getHistory(limit);
|
||||
},
|
||||
|
||||
getState: async (): Promise<ReflectionState> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.reflection.getState();
|
||||
}
|
||||
return fallbackReflection.getState();
|
||||
},
|
||||
},
|
||||
|
||||
identity: {
|
||||
get: async (agentId: string): Promise<IdentityFiles> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.get(agentId);
|
||||
}
|
||||
return fallbackIdentity.get(agentId);
|
||||
},
|
||||
|
||||
getFile: async (agentId: string, file: string): Promise<string> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.getFile(agentId, file);
|
||||
}
|
||||
return fallbackIdentity.getFile(agentId, file);
|
||||
},
|
||||
|
||||
buildPrompt: async (agentId: string, memoryContext?: string): Promise<string> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.buildPrompt(agentId, memoryContext);
|
||||
}
|
||||
return fallbackIdentity.buildPrompt(agentId, memoryContext);
|
||||
},
|
||||
|
||||
updateUserProfile: async (agentId: string, content: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.identity.updateUserProfile(agentId, content);
|
||||
} else {
|
||||
await fallbackIdentity.updateUserProfile(agentId, content);
|
||||
}
|
||||
},
|
||||
|
||||
appendUserProfile: async (agentId: string, addition: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.identity.appendUserProfile(agentId, addition);
|
||||
} else {
|
||||
await fallbackIdentity.appendUserProfile(agentId, addition);
|
||||
}
|
||||
},
|
||||
|
||||
proposeChange: async (
|
||||
agentId: string,
|
||||
file: 'soul' | 'instructions',
|
||||
suggestedContent: string,
|
||||
reason: string
|
||||
): Promise<IdentityChangeProposal> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.proposeChange(agentId, file, suggestedContent, reason);
|
||||
}
|
||||
return fallbackIdentity.proposeChange(agentId, file, suggestedContent, reason);
|
||||
},
|
||||
|
||||
approveProposal: async (proposalId: string): Promise<IdentityFiles> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.approveProposal(proposalId);
|
||||
}
|
||||
return fallbackIdentity.approveProposal(proposalId);
|
||||
},
|
||||
|
||||
rejectProposal: async (proposalId: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.identity.rejectProposal(proposalId);
|
||||
} else {
|
||||
await fallbackIdentity.rejectProposal(proposalId);
|
||||
}
|
||||
},
|
||||
|
||||
getPendingProposals: async (agentId?: string): Promise<IdentityChangeProposal[]> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.getPendingProposals(agentId);
|
||||
}
|
||||
return fallbackIdentity.getPendingProposals(agentId);
|
||||
},
|
||||
|
||||
updateFile: async (agentId: string, file: string, content: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.identity.updateFile(agentId, file, content);
|
||||
} else {
|
||||
await fallbackIdentity.updateFile(agentId, file, content);
|
||||
}
|
||||
},
|
||||
|
||||
getSnapshots: async (agentId: string, limit?: number): Promise<IdentitySnapshot[]> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.getSnapshots(agentId, limit);
|
||||
}
|
||||
return fallbackIdentity.getSnapshots(agentId, limit);
|
||||
},
|
||||
|
||||
restoreSnapshot: async (agentId: string, snapshotId: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.identity.restoreSnapshot(agentId, snapshotId);
|
||||
} else {
|
||||
await fallbackIdentity.restoreSnapshot(agentId, snapshotId);
|
||||
}
|
||||
},
|
||||
|
||||
listAgents: async (): Promise<string[]> => {
|
||||
if (isTauriEnv()) {
|
||||
return intelligence.identity.listAgents();
|
||||
}
|
||||
return fallbackIdentity.listAgents();
|
||||
},
|
||||
|
||||
deleteAgent: async (agentId: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await intelligence.identity.deleteAgent(agentId);
|
||||
} else {
|
||||
await fallbackIdentity.deleteAgent(agentId);
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export default intelligenceClient;
|
||||
Reference in New Issue
Block a user