fix(v13): FIX-06 PersistentMemoryStore 全量移除 — 665行死代码清理
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

- persistent.rs 611→57行: 移除 PersistentMemoryStore struct + 全部方法 + 死 embedding global
- memory_commands.rs: MemoryStoreState→Arc<Mutex<()>>, memory_init→no-op, 移除 2 @reserved 命令
- viking_commands.rs: 移除冗余 PersistentMemoryStore embedding 配置段
- lib.rs: Tauri 命令 191→189 (移除 memory_configure_embedding + memory_is_embedding_configured)
- TRUTH.md + wiki/log.md 数字同步

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-04-13 20:58:54 +08:00
parent fd3e7fd2cb
commit 0903a0d652
7 changed files with 41 additions and 665 deletions

View File

@@ -124,8 +124,8 @@ pub fn run() {
// Initialize browser state
let browser_state = browser::commands::BrowserState::new();
// Initialize memory store state
let memory_state: memory_commands::MemoryStoreState = std::sync::Arc::new(tokio::sync::Mutex::new(None));
// Initialize memory store state (vestigial — PersistentMemoryStore removed in V13)
let memory_state: memory_commands::MemoryStoreState = std::sync::Arc::new(tokio::sync::Mutex::new(()));
// Initialize intelligence layer state
let heartbeat_state: intelligence::HeartbeatEngineState = std::sync::Arc::new(tokio::sync::Mutex::new(std::collections::HashMap::new()));
@@ -373,8 +373,6 @@ pub fn run() {
memory_commands::memory_export,
memory_commands::memory_import,
memory_commands::memory_db_path,
memory_commands::memory_configure_embedding,
memory_commands::memory_is_embedding_configured,
memory_commands::memory_build_context,
// Intelligence Layer commands (Phase 2-3)
// Heartbeat Engine

View File

@@ -12,9 +12,5 @@ pub mod context_builder;
pub mod persistent;
pub mod crypto;
// Re-export main types for convenience
pub use persistent::{
PersistentMemory, PersistentMemoryStore, MemoryStats,
configure_embedding_client, is_embedding_configured,
EmbedFn,
};
// Re-export frontend API types for convenience
pub use persistent::{PersistentMemory, MemoryStats};

View File

@@ -1,80 +1,17 @@
//! Persistent Memory Storage - SQLite-backed memory for ZCLAW
//! Frontend API types for memory system
//!
//! This module provides persistent storage for agent memories,
//! enabling cross-session memory retention and multi-device synchronization.
//!
//! Phase 1 of Intelligence Layer Migration:
//! - Replaces localStorage with SQLite
//! - Provides memory persistence API
//! - Enables data migration from frontend
//! PersistentMemoryStore was removed in V13 audit (FIX-06):
//! all data operations now go through VikingStorage (SqliteStorage).
//! This module retains only the frontend-facing types for Tauri command responses.
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::{Mutex, OnceCell};
use uuid::Uuid;
use tauri::Manager;
use sqlx::{SqliteConnection, Connection, Row, sqlite::SqliteRow};
use chrono::Utc;
use sqlx::{Row, sqlite::SqliteRow};
/// Embedding function type: text -> vector of f32
///
/// A boxed async closure so providers with different client types can plug in
/// behind one signature. `Send + Sync` is required because the closure is
/// stored in a process-wide static and invoked from the async runtime.
pub type EmbedFn = Arc<dyn Fn(&str) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<Vec<f32>, String>> + Send>> + Send + Sync>;

/// Global embedding function for PersistentMemoryStore
static EMBEDDING_FN: OnceCell<EmbedFn> = OnceCell::const_new();

/// Configure the global embedding function for memory search
///
/// OnceCell semantics: only the FIRST call takes effect; the Err from a
/// repeated `set` is discarded. Note the info log fires unconditionally,
/// even when a client was already configured.
pub fn configure_embedding_client(fn_impl: EmbedFn) {
    let _ = EMBEDDING_FN.set(fn_impl);
    tracing::info!("[PersistentMemoryStore] Embedding client configured");
}

/// Check if embedding is available
pub fn is_embedding_configured() -> bool {
    EMBEDDING_FN.get().is_some()
}
/// Generate embedding for text using the configured client.
///
/// Errors with a fixed message when no client has been registered via
/// `configure_embedding_client`; otherwise forwards the provider's result.
async fn embed_text(text: &str) -> Result<Vec<f32>, String> {
    match EMBEDDING_FN.get() {
        Some(client) => client(text).await,
        None => Err("Embedding client not configured".to_string()),
    }
}
/// Deserialize f32 vector from BLOB (4 bytes per f32, little-endian).
///
/// Trailing bytes that do not form a complete 4-byte group are ignored
/// (`chunks_exact` drops the remainder).
fn deserialize_embedding(blob: &[u8]) -> Vec<f32> {
    let mut values = Vec::with_capacity(blob.len() / 4);
    for chunk in blob.chunks_exact(4) {
        values.push(f32::from_le_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]));
    }
    values
}
/// Serialize f32 vector to BLOB (4 bytes per f32, little-endian).
///
/// Inverse of `deserialize_embedding`; output length is always `vec.len() * 4`.
fn serialize_embedding(vec: &[f32]) -> Vec<u8> {
    vec.iter().flat_map(|value| value.to_le_bytes()).collect()
}
/// Compute cosine similarity between two vectors.
///
/// Returns 0.0 for empty or length-mismatched inputs and when either vector
/// has zero magnitude. The result is clamped to [0.0, 1.0], so anti-parallel
/// vectors also score 0.0. Accumulation order matches a plain index loop,
/// keeping results bit-identical to the naive formulation.
fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {
    if a.is_empty() || b.is_empty() || a.len() != b.len() {
        return 0.0;
    }
    let (mut dot, mut norm_a, mut norm_b) = (0.0f32, 0.0f32, 0.0f32);
    for (x, y) in a.iter().zip(b.iter()) {
        dot += x * y;
        norm_a += x * x;
        norm_b += y * y;
    }
    let denom = (norm_a * norm_b).sqrt();
    if denom == 0.0 { 0.0 } else { (dot / denom).clamp(0.0, 1.0) }
}
/// Memory entry stored in SQLite
/// Memory entry type for frontend API compatibility.
///
/// All Tauri memory commands return this type. The actual storage backend
/// is VikingStorage (SqliteStorage); values are converted via `to_persistent()`
/// in memory_commands.rs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PersistentMemory {
pub id: String,
@@ -113,20 +50,7 @@ impl<'r> sqlx::FromRow<'r, SqliteRow> for PersistentMemory {
}
}
/// Memory search options
///
/// Every field is optional; `None` means "no constraint on this axis".
/// Consumed by `PersistentMemoryStore::search` / `like_search`.
#[derive(Debug, Clone, Default)]
pub struct MemorySearchQuery {
    /// Filter: only memories owned by this agent.
    pub agent_id: Option<String>,
    /// Filter: only memories of this type (exact string match in SQL).
    pub memory_type: Option<String>,
    #[allow(dead_code)] // Reserved for future tag-based filtering
    pub tags: Option<Vec<String>>,
    /// Free-text query; drives FTS5 MATCH / LIKE fallback / embedding re-rank.
    pub query: Option<String>,
    /// Filter: minimum `importance` value, inclusive (`importance >= ?`).
    pub min_importance: Option<i32>,
    /// Maximum result count (defaults are applied inside `search`).
    pub limit: Option<usize>,
    /// Pagination offset.
    /// NOTE(review): never applied in `search`/`like_search` SQL — verify callers.
    pub offset: Option<usize>,
}
/// Memory statistics
/// Memory statistics returned by `memory_stats` command.
#[derive(Debug, Clone, Serialize)]
pub struct MemoryStats {
pub total_entries: i64,
@@ -136,475 +60,3 @@ pub struct MemoryStats {
pub newest_entry: Option<String>,
pub storage_size_bytes: i64,
}
/// Persistent memory store backed by SQLite
pub struct PersistentMemoryStore {
    /// Filesystem path of the backing `memories.db` file.
    path: PathBuf,
    /// Single shared connection; a tokio Mutex so the guard may be held
    /// across `.await` points inside the methods.
    conn: Arc<Mutex<SqliteConnection>>,
}
#[allow(dead_code)]
// Migration status (V13 audit FIX-06):
// - ACTIVE: new(), configure_embedding_client() — embedding config path for chat memory search
// - LEGACY: store(), get(), search(), delete(), stats(), export_all(), import_batch() — data ops moved to VikingStorage
// - Full removal requires migrating embedding config to VikingStorage (~3h, tracked in AUDIT_TRACKER)
impl PersistentMemoryStore {
    /// Create a new persistent memory store
    ///
    /// Resolves the platform app-data directory, ensures `<app_data>/memory/`
    /// exists, then opens (or creates) `memories.db` inside it.
    pub async fn new(app_handle: &tauri::AppHandle) -> Result<Self, String> {
        let app_dir = app_handle
            .path()
            .app_data_dir()
            .map_err(|e| format!("Failed to get app data dir: {}", e))?;
        let memory_dir = app_dir.join("memory");
        std::fs::create_dir_all(&memory_dir)
            .map_err(|e| format!("Failed to create memory dir: {}", e))?;
        let db_path = memory_dir.join("memories.db");
        Self::open(db_path).await
    }

    /// Open an existing memory store
    ///
    /// `mode=rwc` creates the database file when it is missing. Schema
    /// initialization runs on every open; all DDL below is idempotent.
    pub async fn open(path: PathBuf) -> Result<Self, String> {
        let db_url = format!("sqlite:{}?mode=rwc", path.display());
        let conn = SqliteConnection::connect(&db_url)
            .await
            .map_err(|e| format!("Failed to open database: {}", e))?;
        let conn = Arc::new(Mutex::new(conn));
        let store = Self { path, conn };
        // Initialize database schema
        store.init_schema().await?;
        Ok(store)
    }

    /// Initialize the database schema
    ///
    /// Creates the `memories` table and its indexes, a best-effort FTS5
    /// mirror table, and applies the additive `overview` column migration.
    /// FTS5 creation and the ALTER deliberately ignore errors (FTS5 may be
    /// unavailable in the linked SQLite; the column may already exist).
    async fn init_schema(&self) -> Result<(), String> {
        let mut conn = self.conn.lock().await;
        sqlx::query(
            r#"
            CREATE TABLE IF NOT EXISTS memories (
                id TEXT PRIMARY KEY,
                agent_id TEXT NOT NULL,
                memory_type TEXT NOT NULL,
                content TEXT NOT NULL,
                importance INTEGER DEFAULT 5,
                source TEXT DEFAULT 'auto',
                tags TEXT DEFAULT '[]',
                conversation_id TEXT,
                created_at TEXT NOT NULL,
                last_accessed_at TEXT NOT NULL,
                access_count INTEGER DEFAULT 0,
                embedding BLOB
            );
            CREATE INDEX IF NOT EXISTS idx_agent_id ON memories(agent_id);
            CREATE INDEX IF NOT EXISTS idx_memory_type ON memories(memory_type);
            CREATE INDEX IF NOT EXISTS idx_created_at ON memories(created_at);
            CREATE INDEX IF NOT EXISTS idx_importance ON memories(importance);
            "#,
        )
        .execute(&mut *conn)
        .await
        .map_err(|e| format!("Failed to create schema: {}", e))?;
        // Create FTS5 virtual table for full-text search
        // (best-effort: error discarded so non-FTS5 builds still work)
        let _ = sqlx::query(
            r#"
            CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
                id,
                content,
                tokenize='unicode61'
            )
            "#,
        )
        .execute(&mut *conn)
        .await;
        // Migration: add overview column (L1 summary); fails harmlessly when
        // the column already exists.
        let _ = sqlx::query("ALTER TABLE memories ADD COLUMN overview TEXT")
            .execute(&mut *conn)
            .await;
        Ok(())
    }

    /// Store a new memory
    ///
    /// If the memory carries no embedding and a client is configured, one is
    /// generated here; generation failure is non-fatal (row stored without a
    /// vector). After the INSERT the FTS5 index is synced best-effort.
    pub async fn store(&self, memory: &PersistentMemory) -> Result<(), String> {
        // Generate embedding if client is configured and memory doesn't have one
        let embedding = if memory.embedding.is_some() {
            memory.embedding.clone()
        } else if is_embedding_configured() {
            match embed_text(&memory.content).await {
                Ok(vec) => {
                    tracing::debug!("[PersistentMemoryStore] Generated embedding for {} ({} dims)", memory.id, vec.len());
                    Some(serialize_embedding(&vec))
                }
                Err(e) => {
                    tracing::debug!("[PersistentMemoryStore] Embedding generation failed: {}", e);
                    None
                }
            }
        } else {
            None
        };
        let mut conn = self.conn.lock().await;
        sqlx::query(
            r#"
            INSERT INTO memories (
                id, agent_id, memory_type, content, importance, source,
                tags, conversation_id, created_at, last_accessed_at,
                access_count, embedding, overview
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            "#,
        )
        .bind(&memory.id)
        .bind(&memory.agent_id)
        .bind(&memory.memory_type)
        .bind(&memory.content)
        .bind(memory.importance)
        .bind(&memory.source)
        .bind(&memory.tags)
        .bind(&memory.conversation_id)
        .bind(&memory.created_at)
        .bind(&memory.last_accessed_at)
        .bind(memory.access_count)
        .bind(&embedding)
        .bind(&memory.overview)
        .execute(&mut *conn)
        .await
        .map_err(|e| format!("Failed to store memory: {}", e))?;
        // Sync FTS5 index (best-effort — the virtual table may not exist)
        let _ = sqlx::query(
            "INSERT OR REPLACE INTO memories_fts (id, content) VALUES (?, ?)"
        )
        .bind(&memory.id)
        .bind(&memory.content)
        .execute(&mut *conn)
        .await;
        Ok(())
    }

    /// Get a memory by ID
    ///
    /// Side effect: on a hit, bumps `access_count` and refreshes
    /// `last_accessed_at`; errors from that bookkeeping UPDATE are ignored.
    pub async fn get(&self, id: &str) -> Result<Option<PersistentMemory>, String> {
        let mut conn = self.conn.lock().await;
        let result: Option<PersistentMemory> = sqlx::query_as(
            "SELECT * FROM memories WHERE id = ?",
        )
        .bind(id)
        .fetch_optional(&mut *conn)
        .await
        .map_err(|e| format!("Failed to get memory: {}", e))?;
        // Update access stats if found
        if result.is_some() {
            let now = Utc::now().to_rfc3339();
            sqlx::query(
                "UPDATE memories SET last_accessed_at = ?, access_count = access_count + 1 WHERE id = ?",
            )
            .bind(&now)
            .bind(id)
            .execute(&mut *conn)
            .await
            .ok();
        }
        Ok(result)
    }

    /// Search memories with FTS5-first strategy and semantic ranking
    ///
    /// Pipeline:
    /// 1. Text query present → sanitized FTS5 MATCH with optional filters.
    /// 2. FTS5 miss on CJK text → LIKE fallback (unicode61 can't tokenize CJK).
    /// 3. No usable FTS5 terms, or no text query → LIKE / filtered scan.
    /// 4. Embedding configured → re-rank candidates by cosine similarity.
    ///
    /// NOTE(review): `query.offset` is never applied here — confirm callers
    /// do not rely on pagination.
    pub async fn search(&self, query: MemorySearchQuery) -> Result<Vec<PersistentMemory>, String> {
        let mut conn = self.conn.lock().await;
        // When using embedding ranking, fetch more candidates
        let effective_limit = if query.query.is_some() && is_embedding_configured() {
            query.limit.unwrap_or(50).max(20)
        } else {
            query.limit.unwrap_or(50)
        };
        let results = if let Some(query_text) = &query.query {
            // FTS5-first search strategy
            let sanitized = sanitize_fts_query(query_text);
            if !sanitized.is_empty() {
                // Try FTS5 MATCH first
                let mut sql = String::from(
                    "SELECT m.* FROM memories m \
                    INNER JOIN memories_fts f ON m.id = f.id \
                    WHERE f.memories_fts MATCH ?"
                );
                let mut params: Vec<String> = vec![sanitized];
                if let Some(agent_id) = &query.agent_id {
                    sql.push_str(" AND m.agent_id = ?");
                    params.push(agent_id.clone());
                }
                if let Some(memory_type) = &query.memory_type {
                    sql.push_str(" AND m.memory_type = ?");
                    params.push(memory_type.clone());
                }
                if let Some(min_importance) = query.min_importance {
                    sql.push_str(" AND m.importance >= ?");
                    params.push(min_importance.to_string());
                }
                sql.push_str(&format!(" ORDER BY f.rank LIMIT {}", effective_limit));
                let mut query_builder = sqlx::query_as::<_, PersistentMemory>(&sql);
                for param in params {
                    query_builder = query_builder.bind(param);
                }
                let fts_results = query_builder
                    .fetch_all(&mut *conn)
                    .await
                    .unwrap_or_default();
                if !fts_results.is_empty() {
                    fts_results
                } else {
                    // FTS5 miss — CJK LIKE fallback (unicode61 doesn't handle CJK)
                    let has_cjk = query_text.chars().any(|c| {
                        matches!(c, '\u{4E00}'..='\u{9FFF}' | '\u{3400}'..='\u{4DBF}' | '\u{F900}'..='\u{FAFF}')
                    });
                    if has_cjk {
                        Self::like_search(&mut conn, &query, effective_limit).await
                    } else {
                        Vec::new()
                    }
                }
            } else {
                // No meaningful FTS5 terms, use LIKE fallback
                Self::like_search(&mut conn, &query, effective_limit).await
            }
        } else {
            // No text query — plain filtered scan
            Self::like_search(&mut conn, &query, effective_limit).await
        };
        // Apply semantic ranking if query and embedding are available
        let mut final_results = results;
        if let Some(query_text) = &query.query {
            if is_embedding_configured() {
                if let Ok(query_embedding) = embed_text(query_text).await {
                    let mut scored: Vec<(f32, PersistentMemory)> = final_results
                        .into_iter()
                        .map(|mem| {
                            // Rows without a stored embedding score 0.0 and sink to the bottom.
                            let score = mem.embedding.as_ref()
                                .map(|blob| {
                                    let vec = deserialize_embedding(blob);
                                    cosine_similarity(&query_embedding, &vec)
                                })
                                .unwrap_or(0.0);
                            (score, mem)
                        })
                        .collect();
                    // Sort descending by similarity; Ordering::Equal guards NaN.
                    scored.sort_by(|a, b| b.0.partial_cmp(&a.0).unwrap_or(std::cmp::Ordering::Equal));
                    final_results = scored.into_iter()
                        .take(query.limit.unwrap_or(20))
                        .map(|(_, mem)| mem)
                        .collect();
                }
            }
        }
        Ok(final_results)
    }

    /// LIKE-based search fallback (used for CJK queries and non-text queries)
    ///
    /// Builds a WHERE clause from the optional filters; a text query becomes
    /// a substring `LIKE %…%` match on `content`. All errors are swallowed:
    /// any failure yields an empty Vec.
    async fn like_search(
        conn: &mut sqlx::SqliteConnection,
        query: &MemorySearchQuery,
        limit: usize,
    ) -> Vec<PersistentMemory> {
        let mut sql = String::from("SELECT * FROM memories WHERE 1=1");
        let mut params: Vec<String> = Vec::new();
        if let Some(agent_id) = &query.agent_id {
            sql.push_str(" AND agent_id = ?");
            params.push(agent_id.clone());
        }
        if let Some(memory_type) = &query.memory_type {
            sql.push_str(" AND memory_type = ?");
            params.push(memory_type.clone());
        }
        if let Some(min_importance) = query.min_importance {
            sql.push_str(" AND importance >= ?");
            params.push(min_importance.to_string());
        }
        if let Some(query_text) = &query.query {
            sql.push_str(" AND content LIKE ?");
            params.push(format!("%{}%", query_text));
        }
        sql.push_str(&format!(" LIMIT {}", limit));
        let mut query_builder = sqlx::query_as::<_, PersistentMemory>(&sql);
        for param in params {
            query_builder = query_builder.bind(param);
        }
        query_builder.fetch_all(conn).await.unwrap_or_default()
    }

    /// Delete a memory by ID
    ///
    /// Returns true when a row was actually removed.
    /// NOTE(review): the memories_fts mirror row is NOT deleted here, so a
    /// stale FTS entry remains; the INNER JOIN in search() hides it — verify.
    pub async fn delete(&self, id: &str) -> Result<bool, String> {
        let mut conn = self.conn.lock().await;
        let result = sqlx::query("DELETE FROM memories WHERE id = ?")
            .bind(id)
            .execute(&mut *conn)
            .await
            .map_err(|e| format!("Failed to delete memory: {}", e))?;
        Ok(result.rows_affected() > 0)
    }

    /// Delete all memories for an agent
    ///
    /// Returns the number of rows removed (FTS5 mirror rows are untouched —
    /// same caveat as `delete`).
    pub async fn delete_by_agent(&self, agent_id: &str) -> Result<usize, String> {
        let mut conn = self.conn.lock().await;
        let result = sqlx::query("DELETE FROM memories WHERE agent_id = ?")
            .bind(agent_id)
            .execute(&mut *conn)
            .await
            .map_err(|e| format!("Failed to delete agent memories: {}", e))?;
        Ok(result.rows_affected() as usize)
    }

    /// Get memory statistics
    ///
    /// Aggregates are best-effort: individual query failures degrade to
    /// zero/empty values instead of propagating an error.
    pub async fn stats(&self) -> Result<MemoryStats, String> {
        let mut conn = self.conn.lock().await;
        let total: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM memories")
            .fetch_one(&mut *conn)
            .await
            .unwrap_or(0);
        let by_type: std::collections::HashMap<String, i64> = sqlx::query_as(
            "SELECT memory_type, COUNT(*) as count FROM memories GROUP BY memory_type",
        )
        .fetch_all(&mut *conn)
        .await
        .unwrap_or_default()
        .into_iter()
        .map(|row: (String, i64)| row) // identity map pins the tuple type for collect
        .collect();
        let by_agent: std::collections::HashMap<String, i64> = sqlx::query_as(
            "SELECT agent_id, COUNT(*) as count FROM memories GROUP BY agent_id",
        )
        .fetch_all(&mut *conn)
        .await
        .unwrap_or_default()
        .into_iter()
        .map(|row: (String, i64)| row)
        .collect();
        let oldest: Option<String> = sqlx::query_scalar(
            "SELECT MIN(created_at) FROM memories",
        )
        .fetch_optional(&mut *conn)
        .await
        .unwrap_or_default();
        let newest: Option<String> = sqlx::query_scalar(
            "SELECT MAX(created_at) FROM memories",
        )
        .fetch_optional(&mut *conn)
        .await
        .unwrap_or_default();
        // SUM(...) is NULL on an empty table — hence the nested Option unwraps.
        let storage_size: i64 = sqlx::query_scalar(
            "SELECT SUM(LENGTH(content) + LENGTH(tags) + COALESCE(LENGTH(embedding), 0)) FROM memories",
        )
        .fetch_optional(&mut *conn)
        .await
        .unwrap_or(Some(0))
        .unwrap_or(0);
        Ok(MemoryStats {
            total_entries: total,
            by_type,
            by_agent,
            oldest_entry: oldest,
            newest_entry: newest,
            storage_size_bytes: storage_size,
        })
    }

    /// Export memories for backup
    ///
    /// Returns every row, oldest first.
    pub async fn export_all(&self) -> Result<Vec<PersistentMemory>, String> {
        let mut conn = self.conn.lock().await;
        let memories = sqlx::query_as::<_, PersistentMemory>(
            "SELECT * FROM memories ORDER BY created_at ASC",
        )
        .fetch_all(&mut *conn)
        .await
        .map_err(|e| format!("Failed to export memories: {}", e))?;
        Ok(memories)
    }

    /// Import memories from backup
    ///
    /// Inserts row by row with no surrounding transaction: the first failure
    /// aborts and leaves earlier rows in place. Returns how many rows were
    /// imported before stopping.
    pub async fn import_batch(&self, memories: &[PersistentMemory]) -> Result<usize, String> {
        let mut imported = 0;
        for memory in memories {
            self.store(memory).await?;
            imported += 1;
        }
        Ok(imported)
    }

    /// Get the database path
    pub fn path(&self) -> &PathBuf {
        &self.path
    }
}
/// Sanitize a user query for FTS5 MATCH syntax.
///
/// Lowercases the input, splits on every non-alphanumeric character (which
/// strips FTS5 operators such as `"`, `*`, `-` and column-filter punctuation),
/// drops tokens whose byte length is <= 1 (single ASCII characters; multi-byte
/// CJK characters survive because one CJK char is 3+ bytes), and joins the
/// survivors with `OR` so any term can match.
///
/// Returns an empty string when no usable term remains; callers treat that as
/// "skip FTS5 and use the LIKE fallback".
fn sanitize_fts_query(query: &str) -> String {
    let terms: Vec<String> = query
        .to_lowercase()
        .split(|c: char| !c.is_alphanumeric())
        // `s.len() > 1` already implies non-empty, so no separate is_empty check.
        .filter(|s| s.len() > 1)
        .map(str::to_string)
        .collect();
    // join() on an empty Vec yields "", matching the old early-return.
    terms.join(" OR ")
}
/// Generate a unique memory ID of the form `mem_<unix_ts>_<8-hex-chars>`.
#[allow(dead_code)] // Legacy: VikingStorage generates its own URIs
pub fn generate_memory_id() -> String {
    // Hyphen-free UUID hex; the first 8 chars are enough entropy alongside
    // the timestamp. Slicing by byte is safe: the string is pure ASCII hex.
    let hex = Uuid::new_v4().to_string().replace('-', "");
    format!("mem_{}_{}", Utc::now().timestamp(), &hex[..8])
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Generated IDs must carry the `mem_` prefix the rest of the system expects.
    #[test]
    fn test_generate_memory_id() {
        let id = generate_memory_id();
        assert!(id.starts_with("mem_"));
    }
}

View File

@@ -1,21 +1,21 @@
//! Memory Commands - Tauri commands for persistent memory operations
//!
//! Unified storage: All operations delegate to VikingStorage (SqliteStorage),
//! All operations delegate to VikingStorage (SqliteStorage),
//! which provides FTS5 full-text search, TF-IDF scoring, and optional embedding.
//!
//! The previous dual-write to PersistentMemoryStore has been removed.
//! PersistentMemory type is retained for frontend API compatibility.
//! PersistentMemoryStore was removed in V13 audit (FIX-06): all data ops
//! go through VikingStorage; the old embedding global was never read.
use crate::memory::{PersistentMemory, PersistentMemoryStore, MemoryStats, configure_embedding_client, is_embedding_configured, EmbedFn};
use crate::memory::{PersistentMemory, MemoryStats};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tauri::{AppHandle, State};
use tauri::State;
use tokio::sync::Mutex;
/// Shared memory store state
/// NOTE: PersistentMemoryStore is kept only for embedding configuration.
/// All actual storage goes through VikingStorage (SqliteStorage).
pub type MemoryStoreState = Arc<Mutex<Option<PersistentMemoryStore>>>;
/// Vestigial state — PersistentMemoryStore removed, all ops via VikingStorage.
/// Kept as `Arc<Mutex<()>>` to preserve Tauri state injection without dead types.
pub type MemoryStoreState = Arc<Mutex<()>>;
/// Memory entry for frontend API
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -43,17 +43,14 @@ pub struct MemorySearchOptions {
/// Initialize the memory store
///
/// Now a no-op for storage (VikingStorage initializes itself in viking_commands).
/// Only initializes PersistentMemoryStore for backward-compatible embedding config.
/// Vestigial — VikingStorage initializes itself in viking_commands.
/// Kept for frontend API compatibility (intelligence-backend.ts calls this).
// @connected
#[tauri::command]
pub async fn memory_init(
app_handle: AppHandle,
state: State<'_, MemoryStoreState>,
_state: State<'_, MemoryStoreState>,
) -> Result<(), String> {
let store = PersistentMemoryStore::new(&app_handle).await?;
let mut state_guard = state.lock().await;
*state_guard = Some(store);
// VikingStorage auto-initializes in viking_commands::init_viking_storage()
Ok(())
}
@@ -370,49 +367,6 @@ pub async fn memory_db_path(
Ok(db_path.to_string_lossy().to_string())
}
/// @reserved — no frontend UI yet
/// Configure embedding for PersistentMemoryStore (chat memory search)
/// This is called alongside viking_configure_embedding to enable vector search in chat flow
///
/// Builds an EmbeddingClient from the caller-supplied provider settings and
/// installs it as the process-wide embedding hook. Always returns Ok(true);
/// note the underlying OnceCell means only the first configuration sticks.
#[tauri::command]
pub async fn memory_configure_embedding(
    provider: String,
    api_key: String,
    model: Option<String>,
    endpoint: Option<String>,
) -> Result<bool, String> {
    let config = crate::llm::EmbeddingConfig {
        provider,
        api_key,
        endpoint,
        model,
    };
    let client = std::sync::Arc::new(crate::llm::EmbeddingClient::new(config));
    // Adapt EmbeddingClient::embed into the boxed-future EmbedFn shape the
    // global hook expects; the client Arc is captured and cloned per call.
    let embed_fn: EmbedFn = {
        let client = client.clone();
        Arc::new(move |text: &str| {
            let client = client.clone();
            let text = text.to_string();
            Box::pin(async move {
                let response = client.embed(&text).await?;
                Ok(response.embedding)
            })
        })
    };
    configure_embedding_client(embed_fn);
    tracing::info!("[MemoryCommands] Embedding configured");
    Ok(true)
}
/// @reserved — no frontend UI yet
/// Check if embedding is configured for PersistentMemoryStore
///
/// Thin synchronous wrapper so the frontend can probe the global hook.
#[tauri::command]
pub fn memory_is_embedding_configured() -> bool {
    is_embedding_configured()
}
/// Build layered memory context for chat prompt injection
///
/// Uses VikingStorage (SqliteStorage) with FTS5 + TF-IDF + optional Embedding.

View File

@@ -561,7 +561,7 @@ fn parse_uri(uri: &str) -> Result<(String, MemoryType, String), String> {
}
/// Configure embedding for semantic memory search
/// Configures both SqliteStorage (VikingPanel) and PersistentMemoryStore (chat flow)
/// Configures SqliteStorage (VikingStorage) embedding for FTS5 + semantic search.
// @connected
#[tauri::command]
pub async fn viking_configure_embedding(
@@ -572,12 +572,11 @@ pub async fn viking_configure_embedding(
) -> Result<EmbeddingConfigResult, String> {
let storage = get_storage().await?;
// 1. Configure SqliteStorage (VikingPanel / VikingCommands)
let config_viking = crate::llm::EmbeddingConfig {
provider: provider.clone(),
api_key: api_key.clone(),
endpoint: endpoint.clone(),
model: model.clone(),
api_key,
endpoint,
model,
};
let client_viking = crate::llm::EmbeddingClient::new(config_viking);
@@ -588,30 +587,7 @@ pub async fn viking_configure_embedding(
.await
.map_err(|e| format!("Failed to configure embedding: {}", e))?;
// 2. Configure PersistentMemoryStore (chat flow)
let config_memory = crate::llm::EmbeddingConfig {
provider: provider.clone(),
api_key,
endpoint,
model,
};
let client_memory = std::sync::Arc::new(crate::llm::EmbeddingClient::new(config_memory));
let embed_fn: crate::memory::EmbedFn = {
let client_arc = client_memory.clone();
std::sync::Arc::new(move |text: &str| {
let client = client_arc.clone();
let text = text.to_string();
Box::pin(async move {
let response = client.embed(&text).await?;
Ok(response.embedding)
})
})
};
crate::memory::configure_embedding_client(embed_fn);
tracing::info!("[VikingCommands] Embedding configured with provider: {} (both storage systems)", provider);
tracing::info!("[VikingCommands] Embedding configured with provider: {}", provider);
Ok(EmbeddingConfigResult {
provider,