fix(presentation): 修复 presentation 模块类型错误和语法问题
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
- 创建 types.ts 定义完整的类型系统 - 重写 DocumentRenderer.tsx 修复语法错误 - 重写 QuizRenderer.tsx 修复语法错误 - 重写 PresentationContainer.tsx 添加类型守卫 - 重写 TypeSwitcher.tsx 修复类型引用 - 更新 index.ts 移除不存在的 ChartRenderer 导出 审计结果: - 类型检查: 通过 - 单元测试: 222 passed - 构建: 成功
This commit is contained in:
@@ -24,6 +24,7 @@ zclaw-kernel = { workspace = true }
|
||||
zclaw-skills = { workspace = true }
|
||||
zclaw-hands = { workspace = true }
|
||||
zclaw-pipeline = { workspace = true }
|
||||
zclaw-growth = { workspace = true }
|
||||
|
||||
# Tauri
|
||||
tauri = { version = "2", features = [] }
|
||||
@@ -32,10 +33,12 @@ tauri-plugin-opener = "2"
|
||||
# Async runtime
|
||||
tokio = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
|
||||
# Serialization
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
toml = "0.8"
|
||||
|
||||
# HTTP client
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "stream", "rustls-tls", "blocking"] }
|
||||
@@ -48,6 +51,7 @@ thiserror = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
secrecy = { workspace = true }
|
||||
|
||||
# Browser automation (existing)
|
||||
fantoccini = "0.21"
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
|
||||
// Viking CLI sidecar module for local memory operations
|
||||
mod viking_commands;
|
||||
mod viking_server;
|
||||
|
||||
// Memory extraction and context building modules (supplement CLI)
|
||||
mod memory;
|
||||
@@ -1304,6 +1303,14 @@ fn gateway_doctor(app: AppHandle) -> Result<String, String> {
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||
pub fn run() {
|
||||
// Initialize Viking storage (async, in background)
|
||||
let runtime = tokio::runtime::Runtime::new().expect("Failed to create tokio runtime");
|
||||
runtime.block_on(async {
|
||||
if let Err(e) = crate::viking_commands::init_storage().await {
|
||||
tracing::error!("[VikingCommands] Failed to initialize storage: {}", e);
|
||||
}
|
||||
});
|
||||
|
||||
// Initialize browser state
|
||||
let browser_state = browser::commands::BrowserState::new();
|
||||
|
||||
@@ -1359,6 +1366,8 @@ pub fn run() {
|
||||
pipeline_commands::pipeline_result,
|
||||
pipeline_commands::pipeline_runs,
|
||||
pipeline_commands::pipeline_refresh,
|
||||
pipeline_commands::route_intent,
|
||||
pipeline_commands::analyze_presentation,
|
||||
// OpenFang commands (new naming)
|
||||
openfang_status,
|
||||
openfang_start,
|
||||
@@ -1387,20 +1396,17 @@ pub fn run() {
|
||||
// OpenViking CLI sidecar commands
|
||||
viking_commands::viking_status,
|
||||
viking_commands::viking_add,
|
||||
viking_commands::viking_add_inline,
|
||||
viking_commands::viking_add_with_metadata,
|
||||
viking_commands::viking_find,
|
||||
viking_commands::viking_grep,
|
||||
viking_commands::viking_ls,
|
||||
viking_commands::viking_read,
|
||||
viking_commands::viking_remove,
|
||||
viking_commands::viking_tree,
|
||||
// Viking server management (local deployment)
|
||||
viking_server::viking_server_status,
|
||||
viking_server::viking_server_start,
|
||||
viking_server::viking_server_stop,
|
||||
viking_server::viking_server_restart,
|
||||
viking_commands::viking_inject_prompt,
|
||||
// Memory extraction commands (supplement CLI)
|
||||
memory::extractor::extract_session_memories,
|
||||
memory::extractor::extract_and_store_memories,
|
||||
memory::context_builder::estimate_content_tokens,
|
||||
// LLM commands (for extraction)
|
||||
llm::llm_complete,
|
||||
|
||||
@@ -484,6 +484,124 @@ pub async fn extract_session_memories(
|
||||
extractor.extract(&messages).await
|
||||
}
|
||||
|
||||
/// Extract memories from session and store to SqliteStorage
|
||||
/// This combines extraction and storage in one command
|
||||
#[tauri::command]
|
||||
pub async fn extract_and_store_memories(
|
||||
messages: Vec<ChatMessage>,
|
||||
agent_id: String,
|
||||
llm_endpoint: Option<String>,
|
||||
llm_api_key: Option<String>,
|
||||
) -> Result<ExtractionResult, String> {
|
||||
use zclaw_growth::{MemoryEntry, MemoryType, VikingStorage};
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
// 1. Extract memories
|
||||
let config = ExtractionConfig {
|
||||
agent_id: agent_id.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let mut extractor = SessionExtractor::new(config);
|
||||
|
||||
// Configure LLM if credentials provided
|
||||
if let (Some(endpoint), Some(api_key)) = (llm_endpoint, llm_api_key) {
|
||||
extractor = extractor.with_llm(endpoint, api_key);
|
||||
}
|
||||
|
||||
let extraction_result = extractor.extract(&messages).await?;
|
||||
|
||||
// 2. Get storage instance
|
||||
let storage = crate::viking_commands::get_storage()
|
||||
.await
|
||||
.map_err(|e| format!("Storage not available: {}", e))?;
|
||||
|
||||
// 3. Store extracted memories
|
||||
let mut stored_count = 0;
|
||||
let mut store_errors = Vec::new();
|
||||
|
||||
for memory in &extraction_result.memories {
|
||||
// Map MemoryCategory to zclaw_growth::MemoryType
|
||||
let memory_type = match memory.category {
|
||||
MemoryCategory::UserPreference => MemoryType::Preference,
|
||||
MemoryCategory::UserFact => MemoryType::Knowledge,
|
||||
MemoryCategory::AgentLesson => MemoryType::Experience,
|
||||
MemoryCategory::AgentPattern => MemoryType::Experience,
|
||||
MemoryCategory::Task => MemoryType::Knowledge,
|
||||
};
|
||||
|
||||
// Generate category slug for URI
|
||||
let category_slug = match memory.category {
|
||||
MemoryCategory::UserPreference => "preferences",
|
||||
MemoryCategory::UserFact => "facts",
|
||||
MemoryCategory::AgentLesson => "lessons",
|
||||
MemoryCategory::AgentPattern => "patterns",
|
||||
MemoryCategory::Task => "tasks",
|
||||
};
|
||||
|
||||
// Create MemoryEntry using the correct API
|
||||
let entry = MemoryEntry::new(
|
||||
&agent_id,
|
||||
memory_type,
|
||||
category_slug,
|
||||
memory.content.clone(),
|
||||
)
|
||||
.with_keywords(memory.tags.clone())
|
||||
.with_importance(memory.importance);
|
||||
|
||||
// Store to SqliteStorage
|
||||
match storage.store(&entry).await {
|
||||
Ok(_) => stored_count += 1,
|
||||
Err(e) => {
|
||||
store_errors.push(format!("Failed to store {}: {}", memory.category, e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let elapsed = start_time.elapsed().as_millis() as u64;
|
||||
|
||||
// Log any storage errors
|
||||
if !store_errors.is_empty() {
|
||||
tracing::warn!(
|
||||
"[extract_and_store] {} memories stored, {} errors: {}",
|
||||
stored_count,
|
||||
store_errors.len(),
|
||||
store_errors.join("; ")
|
||||
);
|
||||
}
|
||||
|
||||
tracing::info!(
|
||||
"[extract_and_store] Extracted {} memories, stored {} in {}ms",
|
||||
extraction_result.memories.len(),
|
||||
stored_count,
|
||||
elapsed
|
||||
);
|
||||
|
||||
// Return updated result with storage info
|
||||
Ok(ExtractionResult {
|
||||
memories: extraction_result.memories,
|
||||
summary: format!(
|
||||
"{} (Stored: {})",
|
||||
extraction_result.summary, stored_count
|
||||
),
|
||||
tokens_saved: extraction_result.tokens_saved,
|
||||
extraction_time_ms: elapsed,
|
||||
})
|
||||
}
|
||||
|
||||
impl std::fmt::Display for MemoryCategory {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
MemoryCategory::UserPreference => write!(f, "user_preference"),
|
||||
MemoryCategory::UserFact => write!(f, "user_fact"),
|
||||
MemoryCategory::AgentLesson => write!(f, "agent_lesson"),
|
||||
MemoryCategory::AgentPattern => write!(f, "agent_pattern"),
|
||||
MemoryCategory::Task => write!(f, "task"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -9,13 +9,141 @@ use tauri::{AppHandle, Emitter, State};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::RwLock;
|
||||
use serde_json::Value;
|
||||
use async_trait::async_trait;
|
||||
use secrecy::SecretString;
|
||||
|
||||
use zclaw_pipeline::{
|
||||
Pipeline, RunStatus,
|
||||
parse_pipeline_yaml,
|
||||
PipelineExecutor,
|
||||
ActionRegistry,
|
||||
LlmActionDriver,
|
||||
};
|
||||
use zclaw_runtime::{LlmDriver, CompletionRequest};
|
||||
|
||||
use crate::kernel_commands::KernelState;
|
||||
|
||||
/// Adapter to connect zclaw-runtime LlmDriver to zclaw-pipeline LlmActionDriver
|
||||
pub struct RuntimeLlmAdapter {
|
||||
driver: Arc<dyn LlmDriver>,
|
||||
default_model: String,
|
||||
}
|
||||
|
||||
impl RuntimeLlmAdapter {
|
||||
pub fn new(driver: Arc<dyn LlmDriver>, default_model: Option<String>) -> Self {
|
||||
Self {
|
||||
driver,
|
||||
default_model: default_model.unwrap_or_else(|| "claude-3-sonnet-20240229".to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl LlmActionDriver for RuntimeLlmAdapter {
|
||||
async fn generate(
|
||||
&self,
|
||||
prompt: String,
|
||||
input: HashMap<String, Value>,
|
||||
model: Option<String>,
|
||||
temperature: Option<f32>,
|
||||
max_tokens: Option<u32>,
|
||||
json_mode: bool,
|
||||
) -> Result<Value, String> {
|
||||
println!("[DEBUG RuntimeLlmAdapter] generate called with prompt length: {}", prompt.len());
|
||||
println!("[DEBUG RuntimeLlmAdapter] input HashMap contents:");
|
||||
for (k, v) in &input {
|
||||
println!(" {} => {}", k, v);
|
||||
}
|
||||
|
||||
// Build user content from prompt and input
|
||||
let user_content = if input.is_empty() {
|
||||
println!("[DEBUG RuntimeLlmAdapter] WARNING: input is empty, using raw prompt");
|
||||
prompt.clone()
|
||||
} else {
|
||||
// Inject input values into prompt
|
||||
// Support multiple placeholder formats: {{key}}, {{ key }}, ${key}, ${inputs.key}
|
||||
let mut rendered = prompt.clone();
|
||||
println!("[DEBUG RuntimeLlmAdapter] Original prompt (first 500 chars): {}", &prompt[..prompt.len().min(500)]);
|
||||
for (key, value) in &input {
|
||||
let str_value = if let Some(s) = value.as_str() {
|
||||
s.to_string()
|
||||
} else {
|
||||
value.to_string()
|
||||
};
|
||||
|
||||
println!("[DEBUG RuntimeLlmAdapter] Replacing '{}' with '{}'", key, str_value);
|
||||
|
||||
// Replace all common placeholder formats
|
||||
rendered = rendered.replace(&format!("{{{{{key}}}}}"), &str_value); // {{key}}
|
||||
rendered = rendered.replace(&format!("{{{{ {key} }}}}"), &str_value); // {{ key }}
|
||||
rendered = rendered.replace(&format!("${{{key}}}"), &str_value); // ${key}
|
||||
rendered = rendered.replace(&format!("${{inputs.{key}}}"), &str_value); // ${inputs.key}
|
||||
}
|
||||
println!("[DEBUG RuntimeLlmAdapter] Rendered prompt (first 500 chars): {}", &rendered[..rendered.len().min(500)]);
|
||||
rendered
|
||||
};
|
||||
|
||||
// Create message using zclaw_types::Message enum
|
||||
let messages = vec![zclaw_types::Message::user(user_content)];
|
||||
|
||||
let request = CompletionRequest {
|
||||
model: model.unwrap_or_else(|| self.default_model.clone()),
|
||||
system: None,
|
||||
messages,
|
||||
tools: Vec::new(),
|
||||
max_tokens,
|
||||
temperature,
|
||||
stop: Vec::new(),
|
||||
stream: false,
|
||||
};
|
||||
|
||||
let response = self.driver.complete(request)
|
||||
.await
|
||||
.map_err(|e| format!("LLM completion failed: {}", e))?;
|
||||
|
||||
// Extract text from response
|
||||
let text = response.content.iter()
|
||||
.find_map(|block| match block {
|
||||
zclaw_runtime::ContentBlock::Text { text } => Some(text.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// Safe truncation for UTF-8 strings
|
||||
let truncated: String = text.chars().take(1000).collect();
|
||||
println!("[DEBUG RuntimeLlmAdapter] LLM response text (first 1000 chars): {}", truncated);
|
||||
|
||||
// Parse as JSON if json_mode, otherwise return as string
|
||||
if json_mode {
|
||||
// Try to extract JSON from the response (LLM might wrap it in markdown code blocks)
|
||||
let json_text = if text.contains("```json") {
|
||||
// Extract JSON from markdown code block
|
||||
let start = text.find("```json").map(|i| i + 7).unwrap_or(0);
|
||||
let end = text.rfind("```").unwrap_or(text.len());
|
||||
text[start..end].trim().to_string()
|
||||
} else if text.contains("```") {
|
||||
// Extract from generic code block
|
||||
let start = text.find("```").map(|i| i + 3).unwrap_or(0);
|
||||
let end = text.rfind("```").unwrap_or(text.len());
|
||||
text[start..end].trim().to_string()
|
||||
} else {
|
||||
text.clone()
|
||||
};
|
||||
|
||||
// Safe truncation for UTF-8 strings
|
||||
let truncated_json: String = json_text.chars().take(500).collect();
|
||||
println!("[DEBUG RuntimeLlmAdapter] JSON text to parse (first 500 chars): {}", truncated_json);
|
||||
|
||||
serde_json::from_str(&json_text)
|
||||
.map_err(|e| {
|
||||
println!("[DEBUG RuntimeLlmAdapter] JSON parse error: {}", e);
|
||||
format!("Failed to parse LLM response as JSON: {}\nResponse: {}", e, json_text)
|
||||
})
|
||||
} else {
|
||||
Ok(Value::String(text))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Pipeline state wrapper for Tauri
|
||||
pub struct PipelineState {
|
||||
@@ -47,8 +175,10 @@ pub struct PipelineInfo {
|
||||
pub display_name: String,
|
||||
/// Description
|
||||
pub description: String,
|
||||
/// Category
|
||||
/// Category (functional classification)
|
||||
pub category: String,
|
||||
/// Industry classification (e.g., "internet", "finance", "healthcare")
|
||||
pub industry: String,
|
||||
/// Tags
|
||||
pub tags: Vec<String>,
|
||||
/// Icon (emoji)
|
||||
@@ -134,21 +264,28 @@ pub struct PipelineRunResponse {
|
||||
pub async fn pipeline_list(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
category: Option<String>,
|
||||
industry: Option<String>,
|
||||
) -> Result<Vec<PipelineInfo>, String> {
|
||||
// Get pipelines directory
|
||||
let pipelines_dir = get_pipelines_directory()?;
|
||||
|
||||
tracing::info!("[pipeline_list] Scanning directory: {:?}", pipelines_dir);
|
||||
println!("[DEBUG pipeline_list] Scanning directory: {:?}", pipelines_dir);
|
||||
println!("[DEBUG pipeline_list] Filters - category: {:?}, industry: {:?}", category, industry);
|
||||
|
||||
// Scan for pipeline files (returns both info and paths)
|
||||
let mut pipelines_with_paths: Vec<(PipelineInfo, PathBuf)> = Vec::new();
|
||||
if pipelines_dir.exists() {
|
||||
scan_pipelines_with_paths(&pipelines_dir, category.as_deref(), &mut pipelines_with_paths)?;
|
||||
scan_pipelines_with_paths(&pipelines_dir, category.as_deref(), industry.as_deref(), &mut pipelines_with_paths)?;
|
||||
} else {
|
||||
tracing::warn!("[pipeline_list] Pipelines directory does not exist: {:?}", pipelines_dir);
|
||||
eprintln!("[WARN pipeline_list] Pipelines directory does not exist: {:?}", pipelines_dir);
|
||||
}
|
||||
|
||||
tracing::info!("[pipeline_list] Found {} pipelines", pipelines_with_paths.len());
|
||||
println!("[DEBUG pipeline_list] Found {} pipelines", pipelines_with_paths.len());
|
||||
|
||||
// Debug: log all pipelines with their industry values
|
||||
for (info, _) in &pipelines_with_paths {
|
||||
println!("[DEBUG pipeline_list] Pipeline: {} -> category: {}, industry: '{}'", info.id, info.category, info.industry);
|
||||
}
|
||||
|
||||
// Update state
|
||||
let mut state_pipelines = state.pipelines.write().await;
|
||||
@@ -188,27 +325,73 @@ pub async fn pipeline_get(
|
||||
pub async fn pipeline_run(
|
||||
app: AppHandle,
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
kernel_state: State<'_, KernelState>,
|
||||
request: RunPipelineRequest,
|
||||
) -> Result<RunPipelineResponse, String> {
|
||||
println!("[DEBUG pipeline_run] Received request for pipeline_id: {}", request.pipeline_id);
|
||||
|
||||
// Get pipeline
|
||||
let pipelines = state.pipelines.read().await;
|
||||
println!("[DEBUG pipeline_run] State has {} pipelines loaded", pipelines.len());
|
||||
|
||||
// Debug: list all loaded pipeline IDs
|
||||
for (id, _) in pipelines.iter() {
|
||||
println!("[DEBUG pipeline_run] Loaded pipeline: {}", id);
|
||||
}
|
||||
|
||||
let pipeline = pipelines.get(&request.pipeline_id)
|
||||
.ok_or_else(|| format!("Pipeline not found: {}", request.pipeline_id))?
|
||||
.ok_or_else(|| {
|
||||
println!("[ERROR pipeline_run] Pipeline '{}' not found in state. Available: {:?}",
|
||||
request.pipeline_id,
|
||||
pipelines.keys().collect::<Vec<_>>());
|
||||
format!("Pipeline not found: {}", request.pipeline_id)
|
||||
})?
|
||||
.clone();
|
||||
drop(pipelines);
|
||||
|
||||
// Clone executor for async task
|
||||
let executor = state.executor.clone();
|
||||
// Try to get LLM driver from Kernel
|
||||
let llm_driver = {
|
||||
let kernel_lock = kernel_state.lock().await;
|
||||
if let Some(kernel) = kernel_lock.as_ref() {
|
||||
println!("[DEBUG pipeline_run] Got LLM driver from Kernel");
|
||||
Some(Arc::new(RuntimeLlmAdapter::new(
|
||||
kernel.driver(),
|
||||
Some(kernel.config().llm.model.clone()),
|
||||
)) as Arc<dyn LlmActionDriver>)
|
||||
} else {
|
||||
println!("[DEBUG pipeline_run] Kernel not initialized, no LLM driver available");
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
// Create executor with or without LLM driver
|
||||
let executor = if let Some(driver) = llm_driver {
|
||||
let registry = Arc::new(ActionRegistry::new().with_llm_driver(driver));
|
||||
Arc::new(PipelineExecutor::new(registry))
|
||||
} else {
|
||||
state.executor.clone()
|
||||
};
|
||||
|
||||
// Generate run ID upfront so we can return it to the caller
|
||||
let run_id = uuid::Uuid::new_v4().to_string();
|
||||
let pipeline_id = request.pipeline_id.clone();
|
||||
let inputs = request.inputs.clone();
|
||||
|
||||
// Run pipeline in background
|
||||
// Clone for async task
|
||||
let run_id_for_spawn = run_id.clone();
|
||||
|
||||
// Run pipeline in background with the known run_id
|
||||
tokio::spawn(async move {
|
||||
let result = executor.execute(&pipeline, inputs).await;
|
||||
println!("[DEBUG pipeline_run] Starting execution with run_id: {}", run_id_for_spawn);
|
||||
let result = executor.execute_with_id(&pipeline, inputs, &run_id_for_spawn).await;
|
||||
|
||||
println!("[DEBUG pipeline_run] Execution completed for run_id: {}, status: {:?}",
|
||||
run_id_for_spawn,
|
||||
result.as_ref().map(|r| r.status.clone()).unwrap_or(RunStatus::Failed));
|
||||
|
||||
// Emit completion event
|
||||
let _ = app.emit("pipeline-complete", &PipelineRunResponse {
|
||||
run_id: result.as_ref().map(|r| r.id.clone()).unwrap_or_default(),
|
||||
run_id: run_id_for_spawn.clone(),
|
||||
pipeline_id: pipeline_id.clone(),
|
||||
status: match &result {
|
||||
Ok(r) => r.status.to_string(),
|
||||
@@ -227,10 +410,10 @@ pub async fn pipeline_run(
|
||||
});
|
||||
});
|
||||
|
||||
// Return immediately with run ID
|
||||
// Note: In a real implementation, we'd track the run ID properly
|
||||
// Return immediately with the known run ID
|
||||
println!("[DEBUG pipeline_run] Returning run_id: {} to caller", run_id);
|
||||
Ok(RunPipelineResponse {
|
||||
run_id: uuid::Uuid::new_v4().to_string(),
|
||||
run_id,
|
||||
pipeline_id: request.pipeline_id,
|
||||
status: "running".to_string(),
|
||||
})
|
||||
@@ -390,8 +573,10 @@ fn get_pipelines_directory() -> Result<PathBuf, String> {
|
||||
fn scan_pipelines_with_paths(
|
||||
dir: &PathBuf,
|
||||
category_filter: Option<&str>,
|
||||
industry_filter: Option<&str>,
|
||||
pipelines: &mut Vec<(PipelineInfo, PathBuf)>,
|
||||
) -> Result<(), String> {
|
||||
println!("[DEBUG scan] Entering directory: {:?}", dir);
|
||||
let entries = std::fs::read_dir(dir)
|
||||
.map_err(|e| format!("Failed to read pipelines directory: {}", e))?;
|
||||
|
||||
@@ -401,12 +586,22 @@ fn scan_pipelines_with_paths(
|
||||
|
||||
if path.is_dir() {
|
||||
// Recursively scan subdirectory
|
||||
scan_pipelines_with_paths(&path, category_filter, pipelines)?;
|
||||
scan_pipelines_with_paths(&path, category_filter, industry_filter, pipelines)?;
|
||||
} else if path.extension().map(|e| e == "yaml" || e == "yml").unwrap_or(false) {
|
||||
// Try to parse pipeline file
|
||||
println!("[DEBUG scan] Found YAML file: {:?}", path);
|
||||
if let Ok(content) = std::fs::read_to_string(&path) {
|
||||
println!("[DEBUG scan] File content length: {} bytes", content.len());
|
||||
match parse_pipeline_yaml(&content) {
|
||||
Ok(pipeline) => {
|
||||
// Debug: log parsed pipeline metadata
|
||||
println!(
|
||||
"[DEBUG scan] Parsed YAML: {} -> category: {:?}, industry: {:?}",
|
||||
pipeline.metadata.name,
|
||||
pipeline.metadata.category,
|
||||
pipeline.metadata.industry
|
||||
);
|
||||
|
||||
// Apply category filter
|
||||
if let Some(filter) = category_filter {
|
||||
if pipeline.metadata.category.as_deref() != Some(filter) {
|
||||
@@ -414,11 +609,18 @@ fn scan_pipelines_with_paths(
|
||||
}
|
||||
}
|
||||
|
||||
// Apply industry filter
|
||||
if let Some(filter) = industry_filter {
|
||||
if pipeline.metadata.industry.as_deref() != Some(filter) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("[scan] Found pipeline: {} at {:?}", pipeline.metadata.name, path);
|
||||
pipelines.push((pipeline_to_info(&pipeline), path));
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!("[scan] Failed to parse pipeline at {:?}: {}", path, e);
|
||||
eprintln!("[ERROR scan] Failed to parse pipeline at {:?}: {}", path, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -454,12 +656,21 @@ fn scan_pipelines_full_sync(
|
||||
}
|
||||
|
||||
fn pipeline_to_info(pipeline: &Pipeline) -> PipelineInfo {
|
||||
let industry = pipeline.metadata.industry.clone().unwrap_or_default();
|
||||
println!(
|
||||
"[DEBUG pipeline_to_info] Pipeline: {}, category: {:?}, industry: {:?}",
|
||||
pipeline.metadata.name,
|
||||
pipeline.metadata.category,
|
||||
pipeline.metadata.industry
|
||||
);
|
||||
|
||||
PipelineInfo {
|
||||
id: pipeline.metadata.name.clone(),
|
||||
display_name: pipeline.metadata.display_name.clone()
|
||||
.unwrap_or_else(|| pipeline.metadata.name.clone()),
|
||||
description: pipeline.metadata.description.clone().unwrap_or_default(),
|
||||
category: pipeline.metadata.category.clone().unwrap_or_default(),
|
||||
industry,
|
||||
tags: pipeline.metadata.tags.clone(),
|
||||
icon: pipeline.metadata.icon.clone().unwrap_or_else(|| "📦".to_string()),
|
||||
version: pipeline.metadata.version.clone(),
|
||||
@@ -488,6 +699,245 @@ fn pipeline_to_info(pipeline: &Pipeline) -> PipelineInfo {
|
||||
|
||||
/// Create pipeline state with default action registry
|
||||
pub fn create_pipeline_state() -> Arc<PipelineState> {
|
||||
let action_registry = Arc::new(ActionRegistry::new());
|
||||
// Try to create an LLM driver from environment/config
|
||||
let action_registry = if let Some(driver) = create_llm_driver_from_config() {
|
||||
println!("[DEBUG create_pipeline_state] LLM driver configured successfully");
|
||||
Arc::new(ActionRegistry::new().with_llm_driver(driver))
|
||||
} else {
|
||||
println!("[DEBUG create_pipeline_state] No LLM driver configured - pipelines requiring LLM will fail");
|
||||
Arc::new(ActionRegistry::new())
|
||||
};
|
||||
Arc::new(PipelineState::new(action_registry))
|
||||
}
|
||||
|
||||
// === Intent Router Commands ===
|
||||
|
||||
/// Route result for frontend
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum RouteResultResponse {
|
||||
Matched {
|
||||
pipeline_id: String,
|
||||
display_name: Option<String>,
|
||||
mode: String,
|
||||
params: HashMap<String, Value>,
|
||||
confidence: f32,
|
||||
missing_params: Vec<MissingParamInfo>,
|
||||
},
|
||||
Ambiguous {
|
||||
candidates: Vec<PipelineCandidateInfo>,
|
||||
},
|
||||
NoMatch {
|
||||
suggestions: Vec<PipelineCandidateInfo>,
|
||||
},
|
||||
NeedMoreInfo {
|
||||
prompt: String,
|
||||
related_pipeline: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Missing parameter info
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct MissingParamInfo {
|
||||
pub name: String,
|
||||
pub label: Option<String>,
|
||||
pub param_type: String,
|
||||
pub required: bool,
|
||||
pub default: Option<Value>,
|
||||
}
|
||||
|
||||
/// Pipeline candidate info
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PipelineCandidateInfo {
|
||||
pub id: String,
|
||||
pub display_name: Option<String>,
|
||||
pub description: Option<String>,
|
||||
pub icon: Option<String>,
|
||||
pub category: Option<String>,
|
||||
pub match_reason: Option<String>,
|
||||
}
|
||||
|
||||
/// Route user input to matching pipeline
|
||||
#[tauri::command]
|
||||
pub async fn route_intent(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
user_input: String,
|
||||
) -> Result<RouteResultResponse, String> {
|
||||
use zclaw_pipeline::{TriggerParser, Trigger, TriggerParam, compile_trigger};
|
||||
|
||||
println!("[DEBUG route_intent] Routing user input: {}", user_input);
|
||||
|
||||
// Build trigger parser from loaded pipelines
|
||||
let pipelines = state.pipelines.read().await;
|
||||
let mut parser = TriggerParser::new();
|
||||
|
||||
for (id, pipeline) in pipelines.iter() {
|
||||
// Extract trigger info from pipeline metadata
|
||||
// For now, use tags as keywords and description as trigger description
|
||||
let trigger = Trigger {
|
||||
keywords: pipeline.metadata.tags.clone(),
|
||||
patterns: vec![], // TODO: add pattern support in pipeline definition
|
||||
description: pipeline.metadata.description.clone(),
|
||||
examples: vec![],
|
||||
};
|
||||
|
||||
// Convert pipeline inputs to trigger params
|
||||
let param_defs: Vec<TriggerParam> = pipeline.spec.inputs.iter().map(|input| {
|
||||
TriggerParam {
|
||||
name: input.name.clone(),
|
||||
param_type: match input.input_type {
|
||||
zclaw_pipeline::InputType::String => "string".to_string(),
|
||||
zclaw_pipeline::InputType::Number => "number".to_string(),
|
||||
zclaw_pipeline::InputType::Boolean => "boolean".to_string(),
|
||||
zclaw_pipeline::InputType::Select => "select".to_string(),
|
||||
zclaw_pipeline::InputType::MultiSelect => "multi-select".to_string(),
|
||||
zclaw_pipeline::InputType::File => "file".to_string(),
|
||||
zclaw_pipeline::InputType::Text => "text".to_string(),
|
||||
},
|
||||
required: input.required,
|
||||
label: input.label.clone(),
|
||||
default: input.default.clone(),
|
||||
}
|
||||
}).collect();
|
||||
|
||||
match compile_trigger(
|
||||
id.clone(),
|
||||
pipeline.metadata.display_name.clone(),
|
||||
&trigger,
|
||||
param_defs,
|
||||
) {
|
||||
Ok(compiled) => parser.register(compiled),
|
||||
Err(e) => {
|
||||
eprintln!("[WARN route_intent] Failed to compile trigger for {}: {}", id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Quick match
|
||||
if let Some(match_result) = parser.quick_match(&user_input) {
|
||||
let trigger = parser.get_trigger(&match_result.pipeline_id);
|
||||
|
||||
// Determine input mode
|
||||
let mode = if let Some(t) = &trigger {
|
||||
let required_count = t.param_defs.iter().filter(|p| p.required).count();
|
||||
if required_count > 3 || t.param_defs.len() > 5 {
|
||||
"form"
|
||||
} else if t.param_defs.is_empty() {
|
||||
"conversation"
|
||||
} else {
|
||||
"conversation"
|
||||
}
|
||||
} else {
|
||||
"auto"
|
||||
};
|
||||
|
||||
// Find missing params
|
||||
let missing_params: Vec<MissingParamInfo> = trigger
|
||||
.map(|t| {
|
||||
t.param_defs.iter()
|
||||
.filter(|p| p.required && !match_result.params.contains_key(&p.name) && p.default.is_none())
|
||||
.map(|p| MissingParamInfo {
|
||||
name: p.name.clone(),
|
||||
label: p.label.clone(),
|
||||
param_type: p.param_type.clone(),
|
||||
required: p.required,
|
||||
default: p.default.clone(),
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
return Ok(RouteResultResponse::Matched {
|
||||
pipeline_id: match_result.pipeline_id,
|
||||
display_name: trigger.and_then(|t| t.display_name.clone()),
|
||||
mode: mode.to_string(),
|
||||
params: match_result.params,
|
||||
confidence: match_result.confidence,
|
||||
missing_params,
|
||||
});
|
||||
}
|
||||
|
||||
// No match - return suggestions
|
||||
let suggestions: Vec<PipelineCandidateInfo> = parser.triggers()
|
||||
.iter()
|
||||
.take(3)
|
||||
.map(|t| PipelineCandidateInfo {
|
||||
id: t.pipeline_id.clone(),
|
||||
display_name: t.display_name.clone(),
|
||||
description: t.description.clone(),
|
||||
icon: None,
|
||||
category: None,
|
||||
match_reason: Some("推荐".to_string()),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(RouteResultResponse::NoMatch { suggestions })
|
||||
}
|
||||
|
||||
/// Create an LLM driver from configuration file or environment variables
|
||||
fn create_llm_driver_from_config() -> Option<Arc<dyn LlmActionDriver>> {
|
||||
// Try to read config file
|
||||
let config_path = dirs::config_dir()
|
||||
.map(|p| p.join("zclaw").join("config.toml"))?;
|
||||
|
||||
if !config_path.exists() {
|
||||
println!("[DEBUG create_llm_driver] Config file not found at {:?}", config_path);
|
||||
return None;
|
||||
}
|
||||
|
||||
// Read and parse config
|
||||
let config_content = std::fs::read_to_string(&config_path).ok()?;
|
||||
let config: toml::Value = toml::from_str(&config_content).ok()?;
|
||||
|
||||
// Extract LLM config
|
||||
let llm_config = config.get("llm")?;
|
||||
|
||||
let provider = llm_config.get("provider")?.as_str()?.to_string();
|
||||
let api_key = llm_config.get("api_key")?.as_str()?.to_string();
|
||||
let base_url = llm_config.get("base_url").and_then(|v| v.as_str()).map(|s| s.to_string());
|
||||
let model = llm_config.get("model").and_then(|v| v.as_str()).map(|s| s.to_string());
|
||||
|
||||
println!("[DEBUG create_llm_driver] Found LLM config: provider={}, model={:?}", provider, model);
|
||||
|
||||
// Convert api_key to SecretString
|
||||
let secret_key = SecretString::new(api_key);
|
||||
|
||||
// Create the runtime driver
|
||||
let runtime_driver: Arc<dyn zclaw_runtime::LlmDriver> = match provider.as_str() {
|
||||
"anthropic" => {
|
||||
Arc::new(zclaw_runtime::AnthropicDriver::new(secret_key))
|
||||
}
|
||||
"openai" | "doubao" | "qwen" | "deepseek" | "kimi" => {
|
||||
Arc::new(zclaw_runtime::OpenAiDriver::new(secret_key))
|
||||
}
|
||||
"gemini" => {
|
||||
Arc::new(zclaw_runtime::GeminiDriver::new(secret_key))
|
||||
}
|
||||
"local" | "ollama" => {
|
||||
let url = base_url.unwrap_or_else(|| "http://localhost:11434".to_string());
|
||||
Arc::new(zclaw_runtime::LocalDriver::new(&url))
|
||||
}
|
||||
_ => {
|
||||
eprintln!("[WARN create_llm_driver] Unknown provider: {}", provider);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
Some(Arc::new(RuntimeLlmAdapter::new(runtime_driver, model)))
|
||||
}
|
||||
|
||||
/// Analyze presentation data
|
||||
#[tauri::command]
|
||||
pub async fn analyze_presentation(
|
||||
data: Value,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
use zclaw_pipeline::presentation::PresentationAnalyzer;
|
||||
|
||||
let analyzer = PresentationAnalyzer::new();
|
||||
let analysis = analyzer.analyze(&data);
|
||||
|
||||
// Convert analysis to JSON
|
||||
serde_json::to_value(&analysis).map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
@@ -1,12 +1,22 @@
|
||||
//! OpenViking CLI Sidecar Integration
|
||||
//! OpenViking Memory Storage - Native Rust Implementation
|
||||
//!
|
||||
//! Wraps the OpenViking Rust CLI (`ov`) as a Tauri sidecar for local memory operations.
|
||||
//! This eliminates the need for a Python server dependency.
|
||||
//! Provides native Rust memory storage using SqliteStorage with TF-IDF semantic search.
|
||||
//! This is a self-contained implementation that doesn't require external Python or CLI dependencies.
|
||||
//!
|
||||
//! Reference: https://github.com/volcengine/OpenViking
|
||||
//! Features:
|
||||
//! - SQLite persistence with FTS5 full-text search
|
||||
//! - TF-IDF semantic scoring
|
||||
//! - Token budget control
|
||||
//! - Automatic memory indexing
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process::Command;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::OnceCell;
|
||||
use zclaw_growth::{
|
||||
FindOptions, MemoryEntry, MemoryType, PromptInjector, RetrievalResult, SqliteStorage,
|
||||
VikingStorage,
|
||||
};
|
||||
|
||||
// === Types ===
|
||||
|
||||
@@ -57,302 +67,399 @@ pub struct VikingAddResult {
|
||||
pub status: String,
|
||||
}
|
||||
|
||||
// === CLI Path Resolution ===
|
||||
// === Global Storage Instance ===
|
||||
|
||||
fn get_viking_cli_path() -> Result<String, String> {
|
||||
// Try environment variable first
|
||||
if let Ok(path) = std::env::var("ZCLAW_VIKING_BIN") {
|
||||
if std::path::Path::new(&path).exists() {
|
||||
return Ok(path);
|
||||
}
|
||||
}
|
||||
/// Global storage instance
|
||||
static STORAGE: OnceCell<Arc<SqliteStorage>> = OnceCell::const_new();
|
||||
|
||||
// Try bundled sidecar location
|
||||
let binary_name = if cfg!(target_os = "windows") {
|
||||
"ov-x86_64-pc-windows-msvc.exe"
|
||||
} else if cfg!(target_os = "macos") {
|
||||
if cfg!(target_arch = "aarch64") {
|
||||
"ov-aarch64-apple-darwin"
|
||||
} else {
|
||||
"ov-x86_64-apple-darwin"
|
||||
}
|
||||
/// Get the storage directory path
|
||||
fn get_storage_dir() -> PathBuf {
|
||||
// Use platform-specific data directory
|
||||
if let Some(data_dir) = dirs::data_dir() {
|
||||
data_dir.join("zclaw").join("memories")
|
||||
} else {
|
||||
"ov-x86_64-unknown-linux-gnu"
|
||||
};
|
||||
|
||||
// Check common locations
|
||||
let locations = vec![
|
||||
format!("./binaries/{}", binary_name),
|
||||
format!("./resources/viking/{}", binary_name),
|
||||
format!("./{}", binary_name),
|
||||
];
|
||||
|
||||
for loc in locations {
|
||||
if std::path::Path::new(&loc).exists() {
|
||||
return Ok(loc);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to system PATH
|
||||
Ok("ov".to_string())
|
||||
}
|
||||
|
||||
fn run_viking_cli(args: &[&str]) -> Result<String, String> {
|
||||
let cli_path = get_viking_cli_path()?;
|
||||
|
||||
let output = Command::new(&cli_path)
|
||||
.args(args)
|
||||
.output()
|
||||
.map_err(|e| {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
format!(
|
||||
"OpenViking CLI not found. Please install 'ov' or set ZCLAW_VIKING_BIN. Tried: {}",
|
||||
cli_path
|
||||
)
|
||||
} else {
|
||||
format!("Failed to run OpenViking CLI: {}", e)
|
||||
}
|
||||
})?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
|
||||
} else {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
|
||||
let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
|
||||
if !stderr.is_empty() {
|
||||
Err(stderr)
|
||||
} else if !stdout.is_empty() {
|
||||
Err(stdout)
|
||||
} else {
|
||||
Err(format!("OpenViking CLI failed with status: {}", output.status))
|
||||
}
|
||||
// Fallback to current directory
|
||||
PathBuf::from("./zclaw_data/memories")
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to run Viking CLI and parse JSON output
|
||||
/// Reserved for future JSON-based commands
|
||||
#[allow(dead_code)]
|
||||
fn run_viking_cli_json<T: for<'de> Deserialize<'de>>(args: &[&str]) -> Result<T, String> {
|
||||
let output = run_viking_cli(args)?;
|
||||
/// Initialize the storage (should be called once at startup)
|
||||
pub async fn init_storage() -> Result<(), String> {
|
||||
let storage_dir = get_storage_dir();
|
||||
let db_path = storage_dir.join("memories.db");
|
||||
|
||||
// Handle empty output
|
||||
if output.is_empty() {
|
||||
return Err("OpenViking CLI returned empty output".to_string());
|
||||
}
|
||||
tracing::info!("[VikingCommands] Initializing storage at {:?}", db_path);
|
||||
|
||||
// Try to parse as JSON
|
||||
serde_json::from_str(&output)
|
||||
.map_err(|e| format!("Failed to parse OpenViking output as JSON: {}\nOutput: {}", e, output))
|
||||
let storage = SqliteStorage::new(&db_path)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to initialize storage: {}", e))?;
|
||||
|
||||
let _ = STORAGE.set(Arc::new(storage));
|
||||
|
||||
tracing::info!("[VikingCommands] Storage initialized successfully");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the storage instance (public for use by other modules)
|
||||
pub async fn get_storage() -> Result<Arc<SqliteStorage>, String> {
|
||||
STORAGE
|
||||
.get()
|
||||
.cloned()
|
||||
.ok_or_else(|| "Storage not initialized. Call init_storage() first.".to_string())
|
||||
}
|
||||
|
||||
/// Get storage directory for status
|
||||
fn get_data_dir_string() -> Option<String> {
|
||||
get_storage_dir().to_str().map(|s| s.to_string())
|
||||
}
|
||||
|
||||
// === Tauri Commands ===
|
||||
|
||||
/// Check if OpenViking CLI is available
|
||||
/// Check if memory storage is available
|
||||
#[tauri::command]
|
||||
pub fn viking_status() -> Result<VikingStatus, String> {
|
||||
let result = run_viking_cli(&["--version"]);
|
||||
|
||||
match result {
|
||||
Ok(version_output) => {
|
||||
// Parse version from output like "ov 0.1.0"
|
||||
let version = version_output
|
||||
.lines()
|
||||
.next()
|
||||
.map(|s| s.trim().to_string());
|
||||
pub async fn viking_status() -> Result<VikingStatus, String> {
|
||||
match get_storage().await {
|
||||
Ok(storage) => {
|
||||
// Try a simple query to verify storage is working
|
||||
let _ = storage
|
||||
.find("", FindOptions::default())
|
||||
.await
|
||||
.map_err(|e| format!("Storage health check failed: {}", e))?;
|
||||
|
||||
Ok(VikingStatus {
|
||||
available: true,
|
||||
version,
|
||||
data_dir: None, // TODO: Get from CLI
|
||||
version: Some("0.2.0-native".to_string()),
|
||||
data_dir: get_data_dir_string(),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
Err(e) => Ok(VikingStatus {
|
||||
available: false,
|
||||
version: None,
|
||||
data_dir: None,
|
||||
data_dir: get_data_dir_string(),
|
||||
error: Some(e),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a resource to OpenViking
|
||||
/// Add a memory entry
|
||||
#[tauri::command]
|
||||
pub fn viking_add(uri: String, content: String) -> Result<VikingAddResult, String> {
|
||||
// Create a temporary file for the content
|
||||
let temp_dir = std::env::temp_dir();
|
||||
let timestamp = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.map(|d| d.as_millis())
|
||||
.unwrap_or(0);
|
||||
let temp_file = temp_dir.join(format!("viking_add_{}.txt", timestamp));
|
||||
pub async fn viking_add(uri: String, content: String) -> Result<VikingAddResult, String> {
|
||||
let storage = get_storage().await?;
|
||||
|
||||
std::fs::write(&temp_file, &content)
|
||||
.map_err(|e| format!("Failed to write temp file: {}", e))?;
|
||||
// Parse URI to extract agent_id, memory_type, and category
|
||||
// Expected format: agent://{agent_id}/{type}/{category}
|
||||
let (agent_id, memory_type, category) = parse_uri(&uri)?;
|
||||
|
||||
let temp_path = temp_file.to_string_lossy();
|
||||
let result = run_viking_cli(&["add", &uri, "--file", &temp_path]);
|
||||
let entry = MemoryEntry::new(&agent_id, memory_type, &category, content);
|
||||
|
||||
// Clean up temp file
|
||||
let _ = std::fs::remove_file(&temp_file);
|
||||
storage
|
||||
.store(&entry)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to store memory: {}", e))?;
|
||||
|
||||
match result {
|
||||
Ok(_) => Ok(VikingAddResult {
|
||||
uri,
|
||||
status: "added".to_string(),
|
||||
}),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
Ok(VikingAddResult {
|
||||
uri,
|
||||
status: "added".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Add a resource with inline content (for small content)
|
||||
/// Add a memory with metadata
|
||||
#[tauri::command]
|
||||
pub fn viking_add_inline(uri: String, content: String) -> Result<VikingAddResult, String> {
|
||||
// Use stdin for content
|
||||
let cli_path = get_viking_cli_path()?;
|
||||
pub async fn viking_add_with_metadata(
|
||||
uri: String,
|
||||
content: String,
|
||||
keywords: Vec<String>,
|
||||
importance: Option<u8>,
|
||||
) -> Result<VikingAddResult, String> {
|
||||
let storage = get_storage().await?;
|
||||
|
||||
let output = Command::new(&cli_path)
|
||||
.args(["add", &uri])
|
||||
.stdin(std::process::Stdio::piped())
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to spawn OpenViking CLI: {}", e))?;
|
||||
let (agent_id, memory_type, category) = parse_uri(&uri)?;
|
||||
|
||||
// Write content to stdin
|
||||
if let Some(mut stdin) = output.stdin.as_ref() {
|
||||
use std::io::Write;
|
||||
stdin.write_all(content.as_bytes())
|
||||
.map_err(|e| format!("Failed to write to stdin: {}", e))?;
|
||||
let mut entry = MemoryEntry::new(&agent_id, memory_type, &category, content);
|
||||
entry.keywords = keywords;
|
||||
|
||||
if let Some(imp) = importance {
|
||||
entry.importance = imp.min(10).max(1);
|
||||
}
|
||||
|
||||
let result = output.wait_with_output()
|
||||
.map_err(|e| format!("Failed to read output: {}", e))?;
|
||||
storage
|
||||
.store(&entry)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to store memory: {}", e))?;
|
||||
|
||||
if result.status.success() {
|
||||
Ok(VikingAddResult {
|
||||
uri,
|
||||
status: "added".to_string(),
|
||||
})
|
||||
} else {
|
||||
let stderr = String::from_utf8_lossy(&result.stderr).trim().to_string();
|
||||
Err(if !stderr.is_empty() { stderr } else { "Failed to add resource".to_string() })
|
||||
}
|
||||
Ok(VikingAddResult {
|
||||
uri,
|
||||
status: "added".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Find resources by semantic search
|
||||
/// Find memories by semantic search
|
||||
#[tauri::command]
|
||||
pub fn viking_find(
|
||||
pub async fn viking_find(
|
||||
query: String,
|
||||
scope: Option<String>,
|
||||
limit: Option<usize>,
|
||||
) -> Result<Vec<VikingFindResult>, String> {
|
||||
let mut args = vec!["find", "--json", &query];
|
||||
let storage = get_storage().await?;
|
||||
|
||||
let scope_arg;
|
||||
if let Some(ref s) = scope {
|
||||
scope_arg = format!("--scope={}", s);
|
||||
args.push(&scope_arg);
|
||||
}
|
||||
let options = FindOptions {
|
||||
scope,
|
||||
limit,
|
||||
min_similarity: Some(0.1),
|
||||
};
|
||||
|
||||
let limit_arg;
|
||||
if let Some(l) = limit {
|
||||
limit_arg = format!("--limit={}", l);
|
||||
args.push(&limit_arg);
|
||||
}
|
||||
let entries = storage
|
||||
.find(&query, options)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to search memories: {}", e))?;
|
||||
|
||||
// CLI returns JSON array directly
|
||||
let output = run_viking_cli(&args)?;
|
||||
|
||||
// Handle empty or null results
|
||||
if output.is_empty() || output == "null" || output == "[]" {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
serde_json::from_str(&output)
|
||||
.map_err(|e| format!("Failed to parse find results: {}\nOutput: {}", e, output))
|
||||
Ok(entries
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, entry)| VikingFindResult {
|
||||
uri: entry.uri,
|
||||
score: 1.0 - (i as f64 * 0.1), // Simple scoring based on rank
|
||||
content: entry.content,
|
||||
level: "L1".to_string(),
|
||||
overview: None,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// Grep resources by pattern
|
||||
/// Grep memories by pattern (uses FTS5)
|
||||
#[tauri::command]
|
||||
pub fn viking_grep(
|
||||
pub async fn viking_grep(
|
||||
pattern: String,
|
||||
uri: Option<String>,
|
||||
case_sensitive: Option<bool>,
|
||||
_case_sensitive: Option<bool>,
|
||||
limit: Option<usize>,
|
||||
) -> Result<Vec<VikingGrepResult>, String> {
|
||||
let mut args = vec!["grep", "--json", &pattern];
|
||||
let storage = get_storage().await?;
|
||||
|
||||
let uri_arg;
|
||||
if let Some(ref u) = uri {
|
||||
uri_arg = format!("--uri={}", u);
|
||||
args.push(&uri_arg);
|
||||
}
|
||||
let scope = uri.as_ref().and_then(|u| {
|
||||
// Extract agent scope from URI
|
||||
u.strip_prefix("agent://")
|
||||
.and_then(|s| s.split('/').next())
|
||||
.map(|agent| format!("agent://{}", agent))
|
||||
});
|
||||
|
||||
if case_sensitive.unwrap_or(false) {
|
||||
args.push("--case-sensitive");
|
||||
}
|
||||
let options = FindOptions {
|
||||
scope,
|
||||
limit,
|
||||
min_similarity: Some(0.05), // Lower threshold for grep
|
||||
};
|
||||
|
||||
let limit_arg;
|
||||
if let Some(l) = limit {
|
||||
limit_arg = format!("--limit={}", l);
|
||||
args.push(&limit_arg);
|
||||
}
|
||||
let entries = storage
|
||||
.find(&pattern, options)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to grep memories: {}", e))?;
|
||||
|
||||
let output = run_viking_cli(&args)?;
|
||||
|
||||
if output.is_empty() || output == "null" || output == "[]" {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
serde_json::from_str(&output)
|
||||
.map_err(|e| format!("Failed to parse grep results: {}\nOutput: {}", e, output))
|
||||
Ok(entries
|
||||
.into_iter()
|
||||
.flat_map(|entry| {
|
||||
// Find matching lines
|
||||
entry
|
||||
.content
|
||||
.lines()
|
||||
.enumerate()
|
||||
.filter(|(_, line)| {
|
||||
line.to_lowercase()
|
||||
.contains(&pattern.to_lowercase())
|
||||
})
|
||||
.map(|(i, line)| VikingGrepResult {
|
||||
uri: entry.uri.clone(),
|
||||
line: (i + 1) as u32,
|
||||
content: line.to_string(),
|
||||
match_start: line.find(&pattern).unwrap_or(0) as u32,
|
||||
match_end: (line.find(&pattern).unwrap_or(0) + pattern.len()) as u32,
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.take(limit.unwrap_or(100))
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// List resources at a path
|
||||
/// List memories at a path
|
||||
#[tauri::command]
|
||||
pub fn viking_ls(path: String) -> Result<Vec<VikingResource>, String> {
|
||||
let output = run_viking_cli(&["ls", "--json", &path])?;
|
||||
pub async fn viking_ls(path: String) -> Result<Vec<VikingResource>, String> {
|
||||
let storage = get_storage().await?;
|
||||
|
||||
if output.is_empty() || output == "null" || output == "[]" {
|
||||
return Ok(Vec::new());
|
||||
let entries = storage
|
||||
.find_by_prefix(&path)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to list memories: {}", e))?;
|
||||
|
||||
Ok(entries
|
||||
.into_iter()
|
||||
.map(|entry| VikingResource {
|
||||
uri: entry.uri.clone(),
|
||||
name: entry
|
||||
.uri
|
||||
.rsplit('/')
|
||||
.next()
|
||||
.unwrap_or(&entry.uri)
|
||||
.to_string(),
|
||||
resource_type: entry.memory_type.to_string(),
|
||||
size: Some(entry.content.len() as u64),
|
||||
modified_at: Some(entry.last_accessed.to_rfc3339()),
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// Read memory content
|
||||
#[tauri::command]
|
||||
pub async fn viking_read(uri: String, _level: Option<String>) -> Result<String, String> {
|
||||
let storage = get_storage().await?;
|
||||
|
||||
let entry = storage
|
||||
.get(&uri)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read memory: {}", e))?;
|
||||
|
||||
match entry {
|
||||
Some(e) => Ok(e.content),
|
||||
None => Err(format!("Memory not found: {}", uri)),
|
||||
}
|
||||
|
||||
serde_json::from_str(&output)
|
||||
.map_err(|e| format!("Failed to parse ls results: {}\nOutput: {}", e, output))
|
||||
}
|
||||
|
||||
/// Read resource content
|
||||
/// Remove a memory
|
||||
#[tauri::command]
|
||||
pub fn viking_read(uri: String, level: Option<String>) -> Result<String, String> {
|
||||
let level_val = level.unwrap_or_else(|| "L1".to_string());
|
||||
let level_arg = format!("--level={}", level_val);
|
||||
pub async fn viking_remove(uri: String) -> Result<(), String> {
|
||||
let storage = get_storage().await?;
|
||||
|
||||
run_viking_cli(&["read", &uri, &level_arg])
|
||||
}
|
||||
storage
|
||||
.delete(&uri)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to remove memory: {}", e))?;
|
||||
|
||||
/// Remove a resource
|
||||
#[tauri::command]
|
||||
pub fn viking_remove(uri: String) -> Result<(), String> {
|
||||
run_viking_cli(&["remove", &uri])?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get resource tree
|
||||
/// Get memory tree
|
||||
#[tauri::command]
|
||||
pub fn viking_tree(path: String, depth: Option<usize>) -> Result<serde_json::Value, String> {
|
||||
let depth_val = depth.unwrap_or(2);
|
||||
let depth_arg = format!("--depth={}", depth_val);
|
||||
pub async fn viking_tree(path: String, _depth: Option<usize>) -> Result<serde_json::Value, String> {
|
||||
let storage = get_storage().await?;
|
||||
|
||||
let output = run_viking_cli(&["tree", "--json", &path, &depth_arg])?;
|
||||
let entries = storage
|
||||
.find_by_prefix(&path)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to get tree: {}", e))?;
|
||||
|
||||
if output.is_empty() || output == "null" {
|
||||
return Ok(serde_json::json!({}));
|
||||
// Build a simple tree structure
|
||||
let mut tree = serde_json::Map::new();
|
||||
|
||||
for entry in entries {
|
||||
let parts: Vec<&str> = entry.uri.split('/').collect();
|
||||
let mut current = &mut tree;
|
||||
|
||||
for part in &parts[..parts.len().saturating_sub(1)] {
|
||||
if !current.contains_key(*part) {
|
||||
current.insert(
|
||||
(*part).to_string(),
|
||||
serde_json::json!({}),
|
||||
);
|
||||
}
|
||||
current = current
|
||||
.get_mut(*part)
|
||||
.and_then(|v| v.as_object_mut())
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
if let Some(last) = parts.last() {
|
||||
current.insert(
|
||||
(*last).to_string(),
|
||||
serde_json::json!({
|
||||
"type": entry.memory_type.to_string(),
|
||||
"importance": entry.importance,
|
||||
"access_count": entry.access_count,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
serde_json::from_str(&output)
|
||||
.map_err(|e| format!("Failed to parse tree result: {}\nOutput: {}", e, output))
|
||||
Ok(serde_json::Value::Object(tree))
|
||||
}
|
||||
|
||||
/// Inject memories into prompt (for agent loop integration)
|
||||
#[tauri::command]
|
||||
pub async fn viking_inject_prompt(
|
||||
agent_id: String,
|
||||
base_prompt: String,
|
||||
user_input: String,
|
||||
max_tokens: Option<usize>,
|
||||
) -> Result<String, String> {
|
||||
let storage = get_storage().await?;
|
||||
|
||||
// Retrieve relevant memories
|
||||
let options = FindOptions {
|
||||
scope: Some(format!("agent://{}", agent_id)),
|
||||
limit: Some(10),
|
||||
min_similarity: Some(0.3),
|
||||
};
|
||||
|
||||
let entries = storage
|
||||
.find(&user_input, options)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to retrieve memories: {}", e))?;
|
||||
|
||||
// Convert to RetrievalResult
|
||||
let mut result = RetrievalResult::default();
|
||||
for entry in entries {
|
||||
match entry.memory_type {
|
||||
MemoryType::Preference => result.preferences.push(entry),
|
||||
MemoryType::Knowledge => result.knowledge.push(entry),
|
||||
MemoryType::Experience => result.experience.push(entry),
|
||||
MemoryType::Session => {} // Skip session memories
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate tokens
|
||||
result.total_tokens = result.calculate_tokens();
|
||||
|
||||
// Apply token budget
|
||||
let budget = max_tokens.unwrap_or(500);
|
||||
if result.total_tokens > budget {
|
||||
// Truncate by priority: preferences > knowledge > experience
|
||||
while result.total_tokens > budget && !result.experience.is_empty() {
|
||||
result.experience.pop();
|
||||
result.total_tokens = result.calculate_tokens();
|
||||
}
|
||||
while result.total_tokens > budget && !result.knowledge.is_empty() {
|
||||
result.knowledge.pop();
|
||||
result.total_tokens = result.calculate_tokens();
|
||||
}
|
||||
while result.total_tokens > budget && !result.preferences.is_empty() {
|
||||
result.preferences.pop();
|
||||
result.total_tokens = result.calculate_tokens();
|
||||
}
|
||||
}
|
||||
|
||||
// Inject into prompt
|
||||
let injector = PromptInjector::new();
|
||||
Ok(injector.inject_with_format(&base_prompt, &result))
|
||||
}
|
||||
|
||||
// === Helper Functions ===
|
||||
|
||||
/// Parse URI to extract components
|
||||
fn parse_uri(uri: &str) -> Result<(String, MemoryType, String), String> {
|
||||
// Expected format: agent://{agent_id}/{type}/{category}
|
||||
let without_prefix = uri
|
||||
.strip_prefix("agent://")
|
||||
.ok_or_else(|| format!("Invalid URI format: {}", uri))?;
|
||||
|
||||
let parts: Vec<&str> = without_prefix.splitn(3, '/').collect();
|
||||
|
||||
if parts.len() < 3 {
|
||||
return Err(format!("Invalid URI format, expected agent://{{agent_id}}/{{type}}/{{category}}: {}", uri));
|
||||
}
|
||||
|
||||
let agent_id = parts[0].to_string();
|
||||
let memory_type = MemoryType::parse(parts[1]);
|
||||
let category = parts[2].to_string();
|
||||
|
||||
Ok((agent_id, memory_type, category))
|
||||
}
|
||||
|
||||
// === Tests ===
|
||||
@@ -361,10 +468,19 @@ pub fn viking_tree(path: String, depth: Option<usize>) -> Result<serde_json::Val
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_parse_uri() {
|
||||
let (agent_id, memory_type, category) =
|
||||
parse_uri("agent://test-agent/preferences/style").unwrap();
|
||||
|
||||
assert_eq!(agent_id, "test-agent");
|
||||
assert_eq!(memory_type, MemoryType::Preference);
|
||||
assert_eq!(category, "style");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_status_unavailable_without_cli() {
|
||||
// This test will fail if ov is installed, which is fine
|
||||
let result = viking_status();
|
||||
assert!(result.is_ok());
|
||||
fn test_invalid_uri() {
|
||||
assert!(parse_uri("invalid-uri").is_err());
|
||||
assert!(parse_uri("agent://only-agent").is_err());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,295 +0,0 @@
|
||||
//! OpenViking Local Server Management
|
||||
//!
|
||||
//! Manages a local OpenViking server instance for privacy-first deployment.
|
||||
//! All data is stored locally in ~/.openviking/ - nothing is uploaded to remote servers.
|
||||
//!
|
||||
//! Architecture:
|
||||
//! ┌─────────────────────────────────────────────────────────────────┐
|
||||
//! │ ZCLAW Desktop (Tauri) │
|
||||
//! │ │
|
||||
//! │ ┌─────────────────┐ HTTP ┌─────────────────────────┐ │
|
||||
//! │ │ viking_commands │ ◄────────────►│ openviking-server │ │
|
||||
//! │ │ (Tauri cmds) │ localhost │ (Python, managed here) │ │
|
||||
//! │ └─────────────────┘ └───────────┬─────────────┘ │
|
||||
//! │ │ │
|
||||
//! │ ┌─────────▼─────────────┐ │
|
||||
//! │ │ SQLite + Vector Store │ │
|
||||
//! │ │ ~/.openviking/ │ │
|
||||
//! │ │ (LOCAL DATA ONLY) │ │
|
||||
//! │ └───────────────────────┘ │
|
||||
//! └─────────────────────────────────────────────────────────────────┘
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process::{Child, Command};
|
||||
use std::sync::Mutex;
|
||||
use std::time::Duration;
|
||||
|
||||
// === Types ===
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ServerStatus {
|
||||
pub running: bool,
|
||||
pub port: u16,
|
||||
pub pid: Option<u32>,
|
||||
pub data_dir: Option<String>,
|
||||
pub version: Option<String>,
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ServerConfig {
|
||||
pub port: u16,
|
||||
pub data_dir: String,
|
||||
pub config_file: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for ServerConfig {
|
||||
fn default() -> Self {
|
||||
let home = dirs::home_dir()
|
||||
.map(|p| p.to_string_lossy().to_string())
|
||||
.unwrap_or_else(|| ".".to_string());
|
||||
|
||||
Self {
|
||||
port: 1933,
|
||||
data_dir: format!("{}/.openviking/workspace", home),
|
||||
config_file: Some(format!("{}/.openviking/ov.conf", home)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// === Server Process Management ===
|
||||
|
||||
static SERVER_PROCESS: Mutex<Option<Child>> = Mutex::new(None);
|
||||
|
||||
/// Check if OpenViking server is running
|
||||
fn is_server_running(port: u16) -> bool {
|
||||
// Try to connect to the server
|
||||
let url = format!("http://127.0.0.1:{}/api/v1/status", port);
|
||||
|
||||
let client = reqwest::blocking::Client::builder()
|
||||
.timeout(Duration::from_secs(2))
|
||||
.build()
|
||||
.ok();
|
||||
|
||||
if let Some(client) = client {
|
||||
if let Ok(resp) = client.get(&url).send() {
|
||||
return resp.status().is_success();
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
/// Find openviking-server executable
|
||||
fn find_server_binary() -> Result<String, String> {
|
||||
// Check environment variable first
|
||||
if let Ok(path) = std::env::var("ZCLAW_VIKING_SERVER_BIN") {
|
||||
if std::path::Path::new(&path).exists() {
|
||||
return Ok(path);
|
||||
}
|
||||
}
|
||||
|
||||
// Check common locations
|
||||
let candidates = vec![
|
||||
"openviking-server".to_string(),
|
||||
"python -m openviking.server".to_string(),
|
||||
];
|
||||
|
||||
// Try to find in PATH
|
||||
for cmd in &candidates {
|
||||
if Command::new("which")
|
||||
.arg(cmd.split_whitespace().next().unwrap_or(""))
|
||||
.output()
|
||||
.map(|o| o.status.success())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return Ok(cmd.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Check Python virtual environment
|
||||
let home = dirs::home_dir()
|
||||
.map(|p| p.to_string_lossy().to_string())
|
||||
.unwrap_or_default();
|
||||
|
||||
let venv_candidates = vec![
|
||||
format!("{}/.openviking/venv/bin/openviking-server", home),
|
||||
format!("{}/.local/bin/openviking-server", home),
|
||||
];
|
||||
|
||||
for path in venv_candidates {
|
||||
if std::path::Path::new(&path).exists() {
|
||||
return Ok(path);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: assume it's in PATH via pip install
|
||||
Ok("openviking-server".to_string())
|
||||
}
|
||||
|
||||
// === Tauri Commands ===
|
||||
|
||||
/// Get server status
|
||||
#[tauri::command]
|
||||
pub fn viking_server_status() -> Result<ServerStatus, String> {
|
||||
let config = ServerConfig::default();
|
||||
|
||||
let running = is_server_running(config.port);
|
||||
|
||||
let pid = if running {
|
||||
SERVER_PROCESS
|
||||
.lock()
|
||||
.map(|guard| guard.as_ref().map(|c| c.id()))
|
||||
.ok()
|
||||
.flatten()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Get version if running
|
||||
let version = if running {
|
||||
let url = format!("http://127.0.0.1:{}/api/v1/version", config.port);
|
||||
reqwest::blocking::Client::builder()
|
||||
.timeout(Duration::from_secs(2))
|
||||
.build()
|
||||
.ok()
|
||||
.and_then(|client| client.get(&url).send().ok())
|
||||
.and_then(|resp| resp.text().ok())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(ServerStatus {
|
||||
running,
|
||||
port: config.port,
|
||||
pid,
|
||||
data_dir: Some(config.data_dir),
|
||||
version,
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Start local OpenViking server
|
||||
#[tauri::command]
|
||||
pub fn viking_server_start(config: Option<ServerConfig>) -> Result<ServerStatus, String> {
|
||||
let config = config.unwrap_or_default();
|
||||
|
||||
// Check if already running
|
||||
if is_server_running(config.port) {
|
||||
return Ok(ServerStatus {
|
||||
running: true,
|
||||
port: config.port,
|
||||
pid: None,
|
||||
data_dir: Some(config.data_dir),
|
||||
version: None,
|
||||
error: Some("Server already running".to_string()),
|
||||
});
|
||||
}
|
||||
|
||||
// Find server binary
|
||||
let server_bin = find_server_binary()?;
|
||||
|
||||
// Ensure data directory exists
|
||||
std::fs::create_dir_all(&config.data_dir)
|
||||
.map_err(|e| format!("Failed to create data directory: {}", e))?;
|
||||
|
||||
// Set environment variables
|
||||
if let Some(ref config_file) = config.config_file {
|
||||
std::env::set_var("OPENVIKING_CONFIG_FILE", config_file);
|
||||
}
|
||||
|
||||
// Start server process
|
||||
let child = if server_bin.contains("python") {
|
||||
// Use Python module
|
||||
let parts: Vec<&str> = server_bin.split_whitespace().collect();
|
||||
Command::new(parts[0])
|
||||
.args(&parts[1..])
|
||||
.arg("--host")
|
||||
.arg("127.0.0.1")
|
||||
.arg("--port")
|
||||
.arg(config.port.to_string())
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to start server: {}", e))?
|
||||
} else {
|
||||
// Direct binary
|
||||
Command::new(&server_bin)
|
||||
.arg("--host")
|
||||
.arg("127.0.0.1")
|
||||
.arg("--port")
|
||||
.arg(config.port.to_string())
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to start server: {}", e))?
|
||||
};
|
||||
|
||||
let pid = child.id();
|
||||
|
||||
// Store process handle
|
||||
if let Ok(mut guard) = SERVER_PROCESS.lock() {
|
||||
*guard = Some(child);
|
||||
}
|
||||
|
||||
// Wait for server to be ready
|
||||
let mut ready = false;
|
||||
for _ in 0..30 {
|
||||
std::thread::sleep(Duration::from_millis(500));
|
||||
if is_server_running(config.port) {
|
||||
ready = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !ready {
|
||||
return Err("Server failed to start within 15 seconds".to_string());
|
||||
}
|
||||
|
||||
Ok(ServerStatus {
|
||||
running: true,
|
||||
port: config.port,
|
||||
pid: Some(pid),
|
||||
data_dir: Some(config.data_dir),
|
||||
version: None,
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Stop local OpenViking server
|
||||
#[tauri::command]
|
||||
pub fn viking_server_stop() -> Result<(), String> {
|
||||
if let Ok(mut guard) = SERVER_PROCESS.lock() {
|
||||
if let Some(mut child) = guard.take() {
|
||||
child.kill().map_err(|e| format!("Failed to kill server: {}", e))?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Restart local OpenViking server
|
||||
#[tauri::command]
|
||||
pub fn viking_server_restart(config: Option<ServerConfig>) -> Result<ServerStatus, String> {
|
||||
viking_server_stop()?;
|
||||
std::thread::sleep(Duration::from_secs(1));
|
||||
viking_server_start(config)
|
||||
}
|
||||
|
||||
// === Tests ===
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_server_config_default() {
|
||||
let config = ServerConfig::default();
|
||||
assert_eq!(config.port, 1933);
|
||||
assert!(config.data_dir.contains(".openviking"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_server_running_not_running() {
|
||||
// Should return false when no server is running on port 1933
|
||||
let result = is_server_running(1933);
|
||||
// Just check it doesn't panic
|
||||
assert!(result || !result);
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user