fix(intelligence): 精确化 dead_code 标注并实现 LLM 上下文压缩
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

- 将 intelligence/llm/memory/browser 模块的 dead_code 注释从模糊的
  "reserved for future" 改为明确说明 Tauri invoke_handler 运行时注册机制
- 为 identity.rs 中 3 个真正未使用的方法添加 #[allow(dead_code)]
- 实现 compactor use_llm: true 功能:新增 compact_with_llm 方法和
  compactor_compact_llm Tauri 命令,支持 LLM 驱动的对话摘要生成
- 将 pipeline_commands.rs 中 40+ 处 println!/eprintln! 调试输出替换为
  tracing::debug!/warn!/error! 结构化日志
- 移除 intelligence/mod.rs 中不必要的 #[allow(unused_imports)]
This commit is contained in:
iven
2026-03-27 00:43:14 +08:00
parent c3996573aa
commit 9a77fd4645
14 changed files with 433 additions and 265 deletions

View File

@@ -1,9 +1,7 @@
// Browser automation module using Fantoccini
// Provides Browser Hand capabilities for ZCLAW
//
// Note: Public exports are reserved for future Browser Hand features
#![allow(unused_imports)]
// Public exports are used by browser/commands.rs (Tauri commands) and browser/client.rs
pub mod client;
pub mod commands;

View File

@@ -11,7 +11,10 @@
//!
//! NOTE: Some configuration methods are reserved for future dynamic adjustment.
#![allow(dead_code)] // Configuration methods reserved for future dynamic compaction tuning
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime,
// which the Rust compiler does not track as "use". Module-level allow required
// for Tauri-commanded functions. Genuinely unused methods annotated individually.
#![allow(dead_code)]
use serde::{Deserialize, Serialize};
use regex::Regex;
@@ -95,6 +98,15 @@ pub struct CompactionCheck {
pub urgency: CompactionUrgency,
}
/// Configuration for LLM-based summary generation.
///
/// Passed through field-by-field into `crate::llm::LlmConfig` when the
/// compactor builds an LLM client for summary generation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmSummaryConfig {
    /// Provider identifier (forwarded to `crate::llm::LlmConfig::provider`).
    pub provider: String,
    /// API key for the provider.
    pub api_key: String,
    /// Optional custom endpoint URL; the provider default is used when `None`.
    pub endpoint: Option<String>,
    /// Optional model name; the provider default is used when `None`.
    pub model: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum CompactionUrgency {
@@ -232,7 +244,139 @@ impl ContextCompactor {
}
}
/// Phase 2: Rule-based summary generation
/// Generate summary using LLM when configured.
///
/// Splits `messages` into an "old" part and the most recent
/// `keep_recent_messages` entries; the old part is replaced by a single
/// synthetic system-role summary message, and the recent messages are
/// kept verbatim after it.
///
/// Falls back to rule-based summary if:
/// - `use_llm` is false
/// - LLM config is not provided
/// - LLM call fails and `llm_fallback_to_rules` is true
///
/// If the LLM call fails and fallback is disabled, the summary content is
/// an error placeholder string rather than an `Err` — this method is
/// infallible by design.
pub async fn compact_with_llm(
    &self,
    messages: &[CompactableMessage],
    _agent_id: &str,          // unused here; kept for API symmetry — TODO confirm against compact()
    _conversation_id: Option<&str>, // unused here; kept for API symmetry — TODO confirm against compact()
    llm_config: Option<&LlmSummaryConfig>,
) -> CompactionResult {
    // Token estimate of the full pre-compaction history (reporting only).
    let tokens_before_compaction = estimate_messages_tokens(messages);
    // Keep the most recent N messages verbatim; everything before them is summarized.
    let keep_count = self.config.keep_recent_messages.min(messages.len());
    let split_index = messages.len().saturating_sub(keep_count);
    let old_messages = &messages[..split_index];
    let recent_messages = &messages[split_index..];
    // Fallback ladder: LLM summary -> rule-based summary -> error placeholder.
    let summary = if self.config.use_llm {
        match llm_config {
            Some(config) => {
                match self.generate_llm_summary(old_messages, config).await {
                    Ok(s) => s,
                    Err(e) => {
                        tracing::warn!(
                            "[Compactor] LLM summary failed, falling back to rules: {}",
                            e
                        );
                        if self.config.llm_fallback_to_rules {
                            self.generate_summary(old_messages)
                        } else {
                            // No fallback allowed: surface the failure inline.
                            format!("[摘要生成失败: {}]", e)
                        }
                    }
                }
            }
            None => {
                // use_llm requested but no credentials/config supplied.
                tracing::debug!("[Compactor] use_llm=true but no LLM config provided, using rules");
                self.generate_summary(old_messages)
            }
        }
    } else {
        self.generate_summary(old_messages)
    };
    // Inject the summary as a synthetic system message at the head of the history.
    let summary_message = CompactableMessage {
        role: "system".to_string(),
        content: summary.clone(),
        id: Some(format!("compaction_{}", chrono::Utc::now().timestamp())),
        timestamp: Some(chrono::Utc::now().to_rfc3339()),
    };
    let mut compacted_messages = vec![summary_message];
    compacted_messages.extend(recent_messages.to_vec());
    let tokens_after_compaction = estimate_messages_tokens(&compacted_messages);
    CompactionResult {
        compacted_messages,
        summary,
        original_count: messages.len(),
        // NOTE(review): split_index + 1 counts the *summarized* (old) messages
        // plus one, while compacted_messages actually holds keep_count recent
        // messages plus the summary — confirm this matches compact()'s
        // convention for retained_count.
        retained_count: split_index + 1,
        flushed_memories: 0,
        tokens_before_compaction,
        tokens_after_compaction,
    }
}
/// Generate a conversation summary by calling the configured LLM provider.
///
/// Builds a role-labelled transcript of `messages`, truncates it to the
/// last ~12k bytes (cut on a UTF-8 character boundary), and asks the LLM
/// for a concise Chinese bullet-point summary.
///
/// # Errors
/// Returns the error string propagated from the LLM client if the request
/// fails.
async fn generate_llm_summary(
    &self,
    messages: &[CompactableMessage],
    config: &LlmSummaryConfig,
) -> Result<String, String> {
    if messages.is_empty() {
        return Ok("[对话开始]".to_string());
    }
    // Build conversation text for LLM: "<role label>: <content>\n" per message.
    let mut conversation_text = String::new();
    for msg in messages {
        let role_label = match msg.role.as_str() {
            "user" => "用户",
            "assistant" => "助手",
            "system" => "系统",
            _ => &msg.role,
        };
        conversation_text.push_str(&format!("{}: {}\n", role_label, msg.content));
    }
    // Truncate if too long for the LLM context, keeping the most recent part.
    // BUGFIX: the cut position must land on a UTF-8 character boundary. The
    // transcript always contains multi-byte Chinese text (role labels above),
    // so the previous raw byte slice `&s[s.len() - max_chars..]` could panic
    // mid-character. Advance the start index until it is a valid boundary.
    let max_chars = 12000; // NOTE: this is a byte budget, not a char count
    if conversation_text.len() > max_chars {
        let mut start = conversation_text.len() - max_chars;
        while !conversation_text.is_char_boundary(start) {
            start += 1;
        }
        conversation_text = format!("...(截断)...\n{}", &conversation_text[start..]);
    }
    let prompt = format!(
        "请简洁地总结以下对话的关键内容,包括:\n\
        1. 讨论的主要话题\n\
        2. 达成的关键结论\n\
        3. 重要的技术细节或决策\n\n\
        对话内容:\n{}\n\n\
        请用简洁的中文要点格式输出控制在200字以内。",
        conversation_text
    );
    // Two-message chat: a fixed system instruction plus the user prompt.
    let llm_messages = vec![
        crate::llm::LlmMessage {
            role: "system".to_string(),
            content: "你是一个对话摘要助手。请简洁地总结对话的关键信息。".to_string(),
        },
        crate::llm::LlmMessage {
            role: "user".to_string(),
            content: prompt,
        },
    ];
    // Bridge the compactor-local config into the shared LLM client config.
    let llm_config = crate::llm::LlmConfig {
        provider: config.provider.clone(),
        api_key: config.api_key.clone(),
        endpoint: config.endpoint.clone(),
        model: config.model.clone(),
    };
    let client = crate::llm::LlmClient::new(llm_config);
    let response = client.complete(llm_messages).await?;
    Ok(response.content)
}
/// Phase 2: Rule-based summary generation (fallback)
fn generate_summary(&self, messages: &[CompactableMessage]) -> String {
if messages.is_empty() {
return "[对话开始]".to_string();
@@ -357,11 +501,13 @@ impl ContextCompactor {
}
/// Get current configuration.
///
/// Returns a borrow of the active `CompactionConfig`; callers clone if
/// they need an owned copy.
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn get_config(&self) -> &CompactionConfig {
    &self.config
}
/// Update configuration.
///
/// Replaces the entire config wholesale — `updates` is a full
/// `CompactionConfig`, not a partial patch.
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn update_config(&mut self, updates: CompactionConfig) {
    self.config = updates;
}
@@ -403,6 +549,21 @@ pub fn compactor_compact(
compactor.compact(&messages, &agent_id, conversation_id.as_deref())
}
/// Execute compaction with optional LLM-based summary.
///
/// Tauri command wrapper around `ContextCompactor::compact_with_llm`.
/// A fresh compactor is constructed per invocation from the (optional)
/// `compaction_config`; when `llm_config` is `None` the compactor falls
/// back to its rule-based summary path.
#[tauri::command]
pub async fn compactor_compact_llm(
    messages: Vec<CompactableMessage>,
    agent_id: String,
    conversation_id: Option<String>,
    compaction_config: Option<CompactionConfig>,
    llm_config: Option<LlmSummaryConfig>,
) -> CompactionResult {
    let compactor = ContextCompactor::new(compaction_config);
    compactor
        .compact_with_llm(&messages, &agent_id, conversation_id.as_deref(), llm_config.as_ref())
        .await
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -12,7 +12,11 @@
//!
//! NOTE: Some methods are reserved for future integration.
#![allow(dead_code)] // Methods reserved for future identity management features
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime,
// which the Rust compiler does not track as "use". This module-level allow is
// required for all Tauri-commanded functions. Only genuinely unused non-command
// methods have individual #[allow(dead_code)] annotations below.
#![allow(dead_code)]
use chrono::Utc;
use serde::{Deserialize, Serialize};
@@ -493,11 +497,13 @@ impl AgentIdentityManager {
}
/// Export all identities for backup.
///
/// Returns a deep clone of the agent-id -> identity-files map, so the
/// caller owns the snapshot independently of this manager.
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn export_all(&self) -> HashMap<String, IdentityFiles> {
    self.identities.clone()
}
/// Import identities from backup
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn import(&mut self, identities: HashMap<String, IdentityFiles>) {
for (agent_id, files) in identities {
self.identities.insert(agent_id, files);
@@ -506,6 +512,7 @@ impl AgentIdentityManager {
}
/// Get all proposals (for debugging).
///
/// Returns a borrow of the internal proposals list in insertion order.
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn get_all_proposals(&self) -> &[IdentityChangeProposal] {
    &self.proposals
}

View File

@@ -9,75 +9,34 @@
//! - `compactor` - Context compaction for infinite-length conversations
//! - `reflection` - Agent self-improvement through conversation analysis
//! - `identity` - Agent identity file management (SOUL.md, AGENTS.md, USER.md)
//! - `pattern_detector` - Behavior pattern detection for adaptive mesh
//! - `recommender` - Workflow recommendation engine
//! - `mesh` - Adaptive Intelligence Mesh coordinator
//! - `trigger_evaluator` - Context-aware hand triggers with semantic matching
//! - `persona_evolver` - Memory-powered persona evolution system
//!
//! ## Migration Status
//!
//! | Component | Status | Notes |
//! |-----------|--------|-------|
//! | Heartbeat Engine | ✅ Phase 2 | Complete |
//! | Context Compactor | ✅ Phase 2 | Complete |
//! | Reflection Engine | ✅ Phase 3 | Complete |
//! | Agent Identity | ✅ Phase 3 | Complete |
//! | Pattern Detector | ✅ Phase 4 | Complete |
//! | Workflow Recommender | ✅ Phase 4 | Complete |
//! | Adaptive Mesh | ✅ Phase 4 | Complete |
//! | Trigger Evaluator | ✅ Phase 4 | Complete |
//! | Persona Evolver | ✅ Phase 4 | Complete |
//! | Agent Swarm | 🚧 Phase 4 | TODO |
//! | Vector Memory | 📋 Phase 5 | Planned |
//! | Heartbeat Engine | ✅ Active | Integrated via intelligence_hooks |
//! | Context Compactor | ✅ Active | Integrated in kernel AgentLoop |
//! | Reflection Engine | ✅ Active | Integrated via intelligence_hooks |
//! | Agent Identity | ✅ Active | Integrated via intelligence_hooks |
//!
//! Reference: docs/plans/INTELLIGENCE-LAYER-MIGRATION.md
//! Removed (dead code, never called from frontend):
//! - `pattern_detector` - 2026-03-26
//! - `recommender` - 2026-03-26 (was only used by mesh)
//! - `mesh` - 2026-03-26
//! - `trigger_evaluator` - 2026-03-26
//! - `persona_evolver` - 2026-03-26
pub mod heartbeat;
pub mod compactor;
pub mod reflection;
pub mod identity;
pub mod pattern_detector;
pub mod recommender;
pub mod mesh;
pub mod trigger_evaluator;
pub mod persona_evolver;
pub mod validation;
// Re-export main types for convenience
// These exports are reserved for external use and future integration
#[allow(unused_imports)]
pub use heartbeat::HeartbeatEngineState;
#[allow(unused_imports)]
pub use reflection::{
ReflectionEngine, ReflectionEngineState,
};
#[allow(unused_imports)]
pub use identity::{
AgentIdentityManager, IdentityManagerState,
};
#[allow(unused_imports)]
pub use pattern_detector::{
BehaviorPattern, PatternContext, PatternDetector, PatternDetectorConfig, PatternType,
};
#[allow(unused_imports)]
pub use recommender::{
PipelineMetadata, RecommendationRule, RecommenderConfig, WorkflowRecommender,
};
#[allow(unused_imports)]
pub use mesh::{
ActivityType, MeshAnalysisResult, MeshConfig, MeshCoordinator, MeshCoordinatorState,
WorkflowRecommendation,
};
#[allow(unused_imports)] // Module not yet integrated - exports reserved for future use
pub use trigger_evaluator::{
ComparisonOperator, ConditionCombination, ContextConditionClause, ContextConditionConfig,
ContextField, ExtendedTriggerType, IdentityFile, IdentityStateConfig,
MemoryQueryConfig, CompositeTriggerConfig, TriggerContextCache, TriggerEvaluator,
};
#[allow(unused_imports)]
pub use persona_evolver::{
PersonaEvolver, PersonaEvolverConfig, PersonaEvolverState, PersonaEvolverStateHandle,
EvolutionResult, EvolutionProposal, EvolutionChangeType, EvolutionInsight,
ProfileUpdate, InsightCategory,
};

View File

@@ -11,7 +11,10 @@
//!
//! NOTE: Some methods are reserved for future self-improvement features.
#![allow(dead_code)] // Methods reserved for future self-improvement features
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime,
// which the Rust compiler does not track as "use". Module-level allow required
// for Tauri-commanded functions. Genuinely unused methods annotated individually.
#![allow(dead_code)]
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@@ -445,6 +448,7 @@ impl ReflectionEngine {
}
/// Get last reflection result.
///
/// Returns the most recent entry in the reflection history, or `None`
/// if no reflection has run yet.
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn get_last_result(&self) -> Option<&ReflectionResult> {
    self.history.last()
}

View File

@@ -1,11 +1,13 @@
// OpenFang Kernel integration for ZClaw desktop app
// Supports OpenFang Kernel (successor to OpenClaw Gateway)
// ZCLAW Kernel integration for ZClaw desktop app
// Supports ZCLAW Kernel (successor to OpenClaw Gateway)
// - Port: 4200 (was 18789)
// - Binary: openfang (was openclaw)
// - Config: ~/.openfang/openfang.toml (was ~/.openclaw/openclaw.json)
// - Binary: zclaw (was openclaw)
// - Config: ~/.zclaw/zclaw.toml (was ~/.openclaw/openclaw.json)
// Viking CLI sidecar module for local memory operations
mod viking_commands;
mod embedding_adapter;
mod summarizer_adapter;
// Memory extraction and context building modules (supplement CLI)
mod memory;
@@ -23,7 +25,10 @@ mod memory_commands;
// Intelligence Layer (migrated from frontend lib/)
mod intelligence;
// Internal ZCLAW Kernel commands (replaces external OpenFang process)
// Intelligence hooks - pre/post conversation integration
mod intelligence_hooks;
// Internal ZCLAW Kernel commands (replaces external ZCLAW process)
mod kernel_commands;
// Pipeline commands (DSL-based workflows)
@@ -85,47 +90,47 @@ struct LocalGatewayPairingApprovalResult {
device_id: Option<String>,
}
struct OpenFangRuntime {
struct ZclawRuntime {
source: String,
executable: PathBuf,
pre_args: Vec<String>,
display_path: PathBuf,
}
struct OpenFangCommandOutput {
struct ZclawCommandOutput {
stdout: String,
runtime: OpenFangRuntime,
runtime: ZclawRuntime,
}
/// Default OpenFang Kernel port
const OPENFANG_DEFAULT_PORT: u16 = 4200;
/// Default ZCLAW Kernel port
const ZCLAW_DEFAULT_PORT: u16 = 4200;
const TAURI_ALLOWED_ORIGINS: [&str; 2] = ["http://tauri.localhost", "tauri://localhost"];
fn command_error(runtime: &OpenFangRuntime, error: std::io::Error) -> String {
fn command_error(runtime: &ZclawRuntime, error: std::io::Error) -> String {
if error.kind() == std::io::ErrorKind::NotFound {
match runtime.source.as_str() {
"bundled" => format!(
"未找到 ZCLAW 内置 OpenFang 运行时:{}",
"未找到 ZCLAW 内置运行时:{}",
runtime.display_path.display()
),
"development" => format!(
"未找到开发态 OpenFang 运行时:{}",
"未找到开发态运行时:{}",
runtime.display_path.display()
),
"override" => format!(
"未找到 ZCLAW_OPENFANG_BIN 指定的 OpenFang 运行时:{}",
"未找到 ZCLAW_BIN 指定的运行时:{}",
runtime.display_path.display()
),
_ => "未找到 OpenFang 运行时。请重新安装 ZCLAW或在开发环境中安装 OpenFang CLI。"
_ => "未找到运行时。请重新安装 ZCLAW或在开发环境中安装 ZCLAW CLI。"
.to_string(),
}
} else {
format!("运行 OpenFang 失败: {error}")
format!("运行 ZCLAW 失败: {error}")
}
}
fn runtime_path_string(runtime: &OpenFangRuntime) -> String {
fn runtime_path_string(runtime: &ZclawRuntime) -> String {
runtime.display_path.display().to_string()
}
@@ -137,20 +142,20 @@ fn binary_extension() -> &'static str {
}
}
fn openfang_sidecar_filename() -> String {
format!("openfang-{}{}", env!("TARGET"), binary_extension())
fn zclaw_sidecar_filename() -> String {
format!("zclaw-{}{}", env!("TARGET"), binary_extension())
}
fn openfang_plain_filename() -> String {
format!("openfang{}", binary_extension())
fn zclaw_plain_filename() -> String {
format!("zclaw{}", binary_extension())
}
fn push_runtime_candidate(candidates: &mut Vec<OpenFangRuntime>, source: &str, executable: PathBuf) {
fn push_runtime_candidate(candidates: &mut Vec<ZclawRuntime>, source: &str, executable: PathBuf) {
if candidates.iter().any(|candidate| candidate.display_path == executable) {
return;
}
candidates.push(OpenFangRuntime {
candidates.push(ZclawRuntime {
source: source.to_string(),
display_path: executable.clone(),
executable,
@@ -158,15 +163,15 @@ fn push_runtime_candidate(candidates: &mut Vec<OpenFangRuntime>, source: &str, e
});
}
/// Build binary runtime (OpenFang is a single binary, not npm package)
fn build_binary_runtime(source: &str, root_dir: &PathBuf) -> Option<OpenFangRuntime> {
/// Build binary runtime (ZCLAW is a single binary, not npm package)
fn build_binary_runtime(source: &str, root_dir: &PathBuf) -> Option<ZclawRuntime> {
// Try platform-specific binary names
let binary_names = get_platform_binary_names();
for name in binary_names {
let binary_path = root_dir.join(&name);
if binary_path.is_file() {
return Some(OpenFangRuntime {
return Some(ZclawRuntime {
source: source.to_string(),
executable: binary_path.clone(),
pre_args: Vec::new(),
@@ -177,30 +182,30 @@ fn build_binary_runtime(source: &str, root_dir: &PathBuf) -> Option<OpenFangRunt
None
}
/// Get platform-specific binary names for OpenFang
/// Get platform-specific binary names for ZCLAW
fn get_platform_binary_names() -> Vec<String> {
let mut names = Vec::new();
if cfg!(target_os = "windows") {
names.push("openfang.exe".to_string());
names.push(format!("openfang-{}.exe", env!("TARGET")));
names.push("zclaw.exe".to_string());
names.push(format!("zclaw-{}.exe", env!("TARGET")));
} else if cfg!(target_os = "macos") {
if cfg!(target_arch = "aarch64") {
names.push("openfang-aarch64-apple-darwin".to_string());
names.push("zclaw-aarch64-apple-darwin".to_string());
} else {
names.push("openfang-x86_64-apple-darwin".to_string());
names.push("zclaw-x86_64-apple-darwin".to_string());
}
names.push(format!("openfang-{}", env!("TARGET")));
names.push("openfang".to_string());
names.push(format!("zclaw-{}", env!("TARGET")));
names.push("zclaw".to_string());
} else {
// Linux
if cfg!(target_arch = "aarch64") {
names.push("openfang-aarch64-unknown-linux-gnu".to_string());
names.push("zclaw-aarch64-unknown-linux-gnu".to_string());
} else {
names.push("openfang-x86_64-unknown-linux-gnu".to_string());
names.push("zclaw-x86_64-unknown-linux-gnu".to_string());
}
names.push(format!("openfang-{}", env!("TARGET")));
names.push("openfang".to_string());
names.push(format!("zclaw-{}", env!("TARGET")));
names.push("zclaw".to_string());
}
names
@@ -208,7 +213,7 @@ fn get_platform_binary_names() -> Vec<String> {
/// Legacy: Build staged runtime using Node.js (for backward compatibility)
#[allow(dead_code)]
fn build_staged_runtime_legacy(source: &str, root_dir: PathBuf) -> Option<OpenFangRuntime> {
fn build_staged_runtime_legacy(source: &str, root_dir: PathBuf) -> Option<ZclawRuntime> {
let node_executable = root_dir.join(if cfg!(target_os = "windows") {
"node.exe"
} else {
@@ -216,14 +221,14 @@ fn build_staged_runtime_legacy(source: &str, root_dir: PathBuf) -> Option<OpenFa
});
let entrypoint = root_dir
.join("node_modules")
.join("openfang")
.join("openfang.mjs");
.join("zclaw")
.join("zclaw.mjs");
if !node_executable.is_file() || !entrypoint.is_file() {
return None;
}
Some(OpenFangRuntime {
Some(ZclawRuntime {
source: source.to_string(),
executable: node_executable,
pre_args: vec![entrypoint.display().to_string()],
@@ -232,7 +237,7 @@ fn build_staged_runtime_legacy(source: &str, root_dir: PathBuf) -> Option<OpenFa
}
/// Build staged runtime - prefers binary, falls back to Node.js for legacy support
fn build_staged_runtime(source: &str, root_dir: PathBuf) -> Option<OpenFangRuntime> {
fn build_staged_runtime(source: &str, root_dir: PathBuf) -> Option<ZclawRuntime> {
// First, try to find the binary directly
if let Some(runtime) = build_binary_runtime(source, &root_dir) {
return Some(runtime);
@@ -242,7 +247,7 @@ fn build_staged_runtime(source: &str, root_dir: PathBuf) -> Option<OpenFangRunti
build_staged_runtime_legacy(source, root_dir)
}
fn push_staged_runtime_candidate(candidates: &mut Vec<OpenFangRuntime>, source: &str, root_dir: PathBuf) {
fn push_staged_runtime_candidate(candidates: &mut Vec<ZclawRuntime>, source: &str, root_dir: PathBuf) {
if candidates.iter().any(|candidate| candidate.display_path == root_dir) {
return;
}
@@ -252,18 +257,18 @@ fn push_staged_runtime_candidate(candidates: &mut Vec<OpenFangRuntime>, source:
}
}
fn bundled_runtime_candidates(app: &AppHandle) -> Vec<OpenFangRuntime> {
fn bundled_runtime_candidates(app: &AppHandle) -> Vec<ZclawRuntime> {
let mut candidates = Vec::new();
let sidecar_name = openfang_sidecar_filename();
let plain_name = openfang_plain_filename();
let sidecar_name = zclaw_sidecar_filename();
let plain_name = zclaw_plain_filename();
let platform_names = get_platform_binary_names();
if let Ok(resource_dir) = app.path().resource_dir() {
// Primary: openfang-runtime directory (contains binary + manifest)
// Primary: zclaw-runtime directory (contains binary + manifest)
push_staged_runtime_candidate(
&mut candidates,
"bundled",
resource_dir.join("openfang-runtime"),
resource_dir.join("zclaw-runtime"),
);
// Alternative: binaries directory
@@ -286,7 +291,7 @@ fn bundled_runtime_candidates(app: &AppHandle) -> Vec<OpenFangRuntime> {
push_staged_runtime_candidate(
&mut candidates,
"bundled",
exe_dir.join("resources").join("openfang-runtime"),
exe_dir.join("resources").join("zclaw-runtime"),
);
// Alternative: binaries next to exe
@@ -308,7 +313,7 @@ fn bundled_runtime_candidates(app: &AppHandle) -> Vec<OpenFangRuntime> {
push_staged_runtime_candidate(
&mut candidates,
"development",
manifest_dir.join("resources").join("openfang-runtime"),
manifest_dir.join("resources").join("zclaw-runtime"),
);
for name in &platform_names {
@@ -322,10 +327,10 @@ fn bundled_runtime_candidates(app: &AppHandle) -> Vec<OpenFangRuntime> {
candidates
}
/// Resolve OpenFang runtime location
/// Priority: ZCLAW_OPENFANG_BIN env > bundled > system PATH
fn resolve_openfang_runtime(app: &AppHandle) -> OpenFangRuntime {
if let Ok(override_path) = std::env::var("ZCLAW_OPENFANG_BIN") {
/// Resolve ZCLAW runtime location
/// Priority: ZCLAW_BIN env > bundled > system PATH
fn resolve_zclaw_runtime(app: &AppHandle) -> ZclawRuntime {
if let Ok(override_path) = std::env::var("ZCLAW_BIN") {
let override_path = PathBuf::from(override_path);
if override_path.is_dir() {
if let Some(runtime) = build_staged_runtime("override", override_path.clone()) {
@@ -333,7 +338,7 @@ fn resolve_openfang_runtime(app: &AppHandle) -> OpenFangRuntime {
}
}
return OpenFangRuntime {
return ZclawRuntime {
source: "override".to_string(),
display_path: override_path.clone(),
executable: override_path,
@@ -348,27 +353,27 @@ fn resolve_openfang_runtime(app: &AppHandle) -> OpenFangRuntime {
return runtime;
}
OpenFangRuntime {
ZclawRuntime {
source: "system".to_string(),
display_path: PathBuf::from("openfang"),
executable: PathBuf::from("openfang"),
display_path: PathBuf::from("zclaw"),
executable: PathBuf::from("zclaw"),
pre_args: Vec::new(),
}
}
/// Resolve OpenFang config path (TOML format)
/// Priority: OPENFANG_HOME env > ~/.openfang/
fn resolve_openfang_config_path() -> Option<PathBuf> {
if let Ok(value) = std::env::var("OPENFANG_HOME") {
return Some(PathBuf::from(value).join("openfang.toml"));
/// Resolve ZCLAW config path (TOML format)
/// Priority: ZCLAW_HOME env > ~/.zclaw/
fn resolve_zclaw_config_path() -> Option<PathBuf> {
if let Ok(value) = std::env::var("ZCLAW_HOME") {
return Some(PathBuf::from(value).join("zclaw.toml"));
}
if let Ok(value) = std::env::var("HOME") {
return Some(PathBuf::from(value).join(".openfang").join("openfang.toml"));
return Some(PathBuf::from(value).join(".zclaw").join("zclaw.toml"));
}
if let Ok(value) = std::env::var("USERPROFILE") {
return Some(PathBuf::from(value).join(".openfang").join("openfang.toml"));
return Some(PathBuf::from(value).join(".zclaw").join("zclaw.toml"));
}
None
@@ -376,10 +381,10 @@ fn resolve_openfang_config_path() -> Option<PathBuf> {
/// Parse TOML config and extract gateway token
fn read_local_gateway_auth() -> Result<LocalGatewayAuth, String> {
let config_path = resolve_openfang_config_path()
.ok_or_else(|| "未找到 OpenFang 配置目录。".to_string())?;
let config_path = resolve_zclaw_config_path()
.ok_or_else(|| "未找到 ZCLAW 配置目录。".to_string())?;
let config_text = fs::read_to_string(&config_path)
.map_err(|error| format!("读取 OpenFang 配置失败: {error}"))?;
.map_err(|error| format!("读取 ZCLAW 配置失败: {error}"))?;
// Parse TOML format - simple extraction for gateway.token
let gateway_token = extract_toml_token(&config_text);
@@ -419,7 +424,7 @@ fn extract_toml_token(config_text: &str) -> Option<String> {
None
}
/// Ensure Tauri origins are allowed in OpenFang config
/// Ensure Tauri origins are allowed in ZCLAW config
fn ensure_tauri_allowed_origins(config_text: &str) -> (String, bool) {
let mut lines: Vec<String> = config_text.lines().map(|s| s.to_string()).collect();
let mut changed = false;
@@ -489,23 +494,23 @@ fn ensure_tauri_allowed_origins(config_text: &str) -> (String, bool) {
}
fn ensure_local_gateway_ready_for_tauri(app: &AppHandle) -> Result<LocalGatewayPrepareResult, String> {
let config_path = resolve_openfang_config_path()
.ok_or_else(|| "未找到 OpenFang 配置目录。".to_string())?;
let config_path = resolve_zclaw_config_path()
.ok_or_else(|| "未找到 ZCLAW 配置目录。".to_string())?;
let config_text = fs::read_to_string(&config_path)
.map_err(|error| format!("读取 OpenFang 配置失败: {error}"))?;
.map_err(|error| format!("读取 ZCLAW 配置失败: {error}"))?;
let (updated_config, origins_updated) = ensure_tauri_allowed_origins(&config_text);
if origins_updated {
fs::write(&config_path, format!("{}\n", updated_config))
.map_err(|error| format!("写入 OpenFang 配置失败: {error}"))?;
.map_err(|error| format!("写入 ZCLAW 配置失败: {error}"))?;
}
let mut gateway_restarted = false;
if origins_updated {
if let Ok(status) = read_gateway_status(app) {
if status.port_status.as_deref() == Some("busy") || !status.listener_pids.is_empty() {
run_openfang(app, &["gateway", "restart", "--json"])?;
run_zclaw(app, &["gateway", "restart", "--json"])?;
thread::sleep(Duration::from_millis(1200));
gateway_restarted = true;
}
@@ -530,7 +535,7 @@ fn approve_local_device_pairing(
.gateway_token
.ok_or_else(|| "本地 Gateway token 不可用,无法自动批准设备配对。".to_string())?;
let devices_output = run_openfang(app, &["devices", "list", "--json"])?;
let devices_output = run_zclaw(app, &["devices", "list", "--json"])?;
let devices_json = parse_json_output(&devices_output.stdout)?;
let pending = devices_json
.get("pending")
@@ -556,7 +561,7 @@ fn approve_local_device_pairing(
.ok_or_else(|| "待批准设备缺少 requestId。".to_string())?
.to_string();
// Use OpenFang default port 4200
// Use ZCLAW default port 4200
let gateway_url = url.unwrap_or("ws://127.0.0.1:4200").to_string();
let args = vec![
"devices".to_string(),
@@ -569,7 +574,7 @@ fn approve_local_device_pairing(
gateway_url,
];
let arg_refs = args.iter().map(|value| value.as_str()).collect::<Vec<_>>();
run_openfang(app, &arg_refs)?;
run_zclaw(app, &arg_refs)?;
thread::sleep(Duration::from_millis(300));
Ok(LocalGatewayPairingApprovalResult {
@@ -579,14 +584,14 @@ fn approve_local_device_pairing(
})
}
fn run_openfang(app: &AppHandle, args: &[&str]) -> Result<OpenFangCommandOutput, String> {
let runtime = resolve_openfang_runtime(app);
fn run_zclaw(app: &AppHandle, args: &[&str]) -> Result<ZclawCommandOutput, String> {
let runtime = resolve_zclaw_runtime(app);
let mut command = Command::new(&runtime.executable);
command.args(&runtime.pre_args).args(args);
let output = command.output().map_err(|error| command_error(&runtime, error))?;
if output.status.success() {
Ok(OpenFangCommandOutput {
Ok(ZclawCommandOutput {
stdout: String::from_utf8_lossy(&output.stdout).trim().to_string(),
runtime,
})
@@ -602,7 +607,7 @@ fn run_openfang(app: &AppHandle, args: &[&str]) -> Result<OpenFangCommandOutput,
};
if message.is_empty() {
Err(format!("OpenFang {:?} 执行失败: {}", args, output.status))
Err(format!("ZCLAW {:?} 执行失败: {}", args, output.status))
} else {
Err(message)
}
@@ -623,7 +628,7 @@ fn parse_json_output(stdout: &str) -> Result<Value, String> {
Err("Gateway 状态输出不包含可解析的 JSON。".to_string())
}
fn unavailable_status(error: String, runtime: Option<&OpenFangRuntime>) -> LocalGatewayStatus {
fn unavailable_status(error: String, runtime: Option<&ZclawRuntime>) -> LocalGatewayStatus {
LocalGatewayStatus {
supported: true,
cli_available: false,
@@ -642,7 +647,7 @@ fn unavailable_status(error: String, runtime: Option<&OpenFangRuntime>) -> Local
}
}
fn parse_gateway_status(raw: Value, runtime: &OpenFangRuntime) -> LocalGatewayStatus {
fn parse_gateway_status(raw: Value, runtime: &ZclawRuntime) -> LocalGatewayStatus {
let listener_pids = raw
.get("port")
.and_then(|port| port.get("listeners"))
@@ -688,7 +693,7 @@ fn parse_gateway_status(raw: Value, runtime: &OpenFangRuntime) -> LocalGatewaySt
.and_then(|gateway| gateway.get("port"))
.and_then(Value::as_u64)
.and_then(|port| u16::try_from(port).ok())
.or(Some(OPENFANG_DEFAULT_PORT)),
.or(Some(ZCLAW_DEFAULT_PORT)),
port_status: raw
.get("port")
.and_then(|port| port.get("status"))
@@ -706,69 +711,69 @@ fn parse_gateway_status(raw: Value, runtime: &OpenFangRuntime) -> LocalGatewaySt
}
fn read_gateway_status(app: &AppHandle) -> Result<LocalGatewayStatus, String> {
match run_openfang(app, &["gateway", "status", "--json", "--no-probe"]) {
match run_zclaw(app, &["gateway", "status", "--json", "--no-probe"]) {
Ok(result) => {
let raw = parse_json_output(&result.stdout)?;
Ok(parse_gateway_status(raw, &result.runtime))
}
Err(error) => {
let runtime = resolve_openfang_runtime(app);
let runtime = resolve_zclaw_runtime(app);
Ok(unavailable_status(error, Some(&runtime)))
}
}
}
// ============================================================================
// Tauri Commands - OpenFang (with backward-compatible aliases)
// Tauri Commands - ZCLAW (with backward-compatible aliases)
// ============================================================================
/// Get OpenFang Kernel status
/// Get ZCLAW Kernel status
#[tauri::command]
fn openfang_status(app: AppHandle) -> Result<LocalGatewayStatus, String> {
fn zclaw_status(app: AppHandle) -> Result<LocalGatewayStatus, String> {
read_gateway_status(&app)
}
/// Start OpenFang Kernel
/// Start ZCLAW Kernel
#[tauri::command]
fn openfang_start(app: AppHandle) -> Result<LocalGatewayStatus, String> {
fn zclaw_start(app: AppHandle) -> Result<LocalGatewayStatus, String> {
ensure_local_gateway_ready_for_tauri(&app)?;
run_openfang(&app, &["gateway", "start", "--json"])?;
run_zclaw(&app, &["gateway", "start", "--json"])?;
thread::sleep(Duration::from_millis(800));
read_gateway_status(&app)
}
/// Stop OpenFang Kernel
/// Stop ZCLAW Kernel
#[tauri::command]
fn openfang_stop(app: AppHandle) -> Result<LocalGatewayStatus, String> {
run_openfang(&app, &["gateway", "stop", "--json"])?;
fn zclaw_stop(app: AppHandle) -> Result<LocalGatewayStatus, String> {
run_zclaw(&app, &["gateway", "stop", "--json"])?;
thread::sleep(Duration::from_millis(800));
read_gateway_status(&app)
}
/// Restart OpenFang Kernel
/// Restart ZCLAW Kernel
#[tauri::command]
fn openfang_restart(app: AppHandle) -> Result<LocalGatewayStatus, String> {
fn zclaw_restart(app: AppHandle) -> Result<LocalGatewayStatus, String> {
ensure_local_gateway_ready_for_tauri(&app)?;
run_openfang(&app, &["gateway", "restart", "--json"])?;
run_zclaw(&app, &["gateway", "restart", "--json"])?;
thread::sleep(Duration::from_millis(1200));
read_gateway_status(&app)
}
/// Get local auth token from OpenFang config
/// Get local auth token from ZCLAW config
#[tauri::command]
fn openfang_local_auth() -> Result<LocalGatewayAuth, String> {
fn zclaw_local_auth() -> Result<LocalGatewayAuth, String> {
read_local_gateway_auth()
}
/// Prepare OpenFang for Tauri (update allowed origins)
/// Prepare ZCLAW for Tauri (update allowed origins)
#[tauri::command]
fn openfang_prepare_for_tauri(app: AppHandle) -> Result<LocalGatewayPrepareResult, String> {
fn zclaw_prepare_for_tauri(app: AppHandle) -> Result<LocalGatewayPrepareResult, String> {
ensure_local_gateway_ready_for_tauri(&app)
}
/// Approve device pairing request
#[tauri::command]
fn openfang_approve_device_pairing(
fn zclaw_approve_device_pairing(
app: AppHandle,
device_id: String,
public_key_base64: String,
@@ -777,10 +782,10 @@ fn openfang_approve_device_pairing(
approve_local_device_pairing(&app, &device_id, &public_key_base64, url.as_deref())
}
/// Run OpenFang doctor to diagnose issues
/// Run ZCLAW doctor to diagnose issues
#[tauri::command]
fn openfang_doctor(app: AppHandle) -> Result<String, String> {
let result = run_openfang(&app, &["doctor", "--json"])?;
fn zclaw_doctor(app: AppHandle) -> Result<String, String> {
let result = run_zclaw(&app, &["doctor", "--json"])?;
Ok(result.stdout)
}
@@ -830,10 +835,10 @@ struct VersionResponse {
raw: Value,
}
/// List OpenFang processes
/// List ZCLAW processes
#[tauri::command]
fn openfang_process_list(app: AppHandle) -> Result<ProcessListResponse, String> {
let result = run_openfang(&app, &["process", "list", "--json"])?;
fn zclaw_process_list(app: AppHandle) -> Result<ProcessListResponse, String> {
let result = run_zclaw(&app, &["process", "list", "--json"])?;
let raw = parse_json_output(&result.stdout).unwrap_or_else(|_| json!({"processes": []}));
@@ -867,9 +872,9 @@ fn openfang_process_list(app: AppHandle) -> Result<ProcessListResponse, String>
})
}
/// Get OpenFang process logs
/// Get ZCLAW process logs
#[tauri::command]
fn openfang_process_logs(
fn zclaw_process_logs(
app: AppHandle,
pid: Option<u32>,
lines: Option<usize>,
@@ -900,7 +905,7 @@ fn openfang_process_logs(
// Convert to &str for the command
let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
let result = run_openfang(&app, &args_refs)?;
let result = run_zclaw(&app, &args_refs)?;
// Parse the logs - could be JSON array or plain text
let logs = if let Ok(json) = parse_json_output(&result.stdout) {
@@ -930,10 +935,10 @@ fn openfang_process_logs(
})
}
/// Get OpenFang version information
/// Get ZCLAW version information
#[tauri::command]
fn openfang_version(app: AppHandle) -> Result<VersionResponse, String> {
let result = run_openfang(&app, &["--version", "--json"])?;
fn zclaw_version(app: AppHandle) -> Result<VersionResponse, String> {
let result = run_zclaw(&app, &["--version", "--json"])?;
let raw = parse_json_output(&result.stdout).unwrap_or_else(|_| {
// Fallback: try to parse plain text version output
@@ -1071,14 +1076,14 @@ fn get_process_uptime(status: &LocalGatewayStatus) -> Option<u64> {
.and_then(Value::as_u64)
}
/// Perform comprehensive health check on OpenFang Kernel
/// Perform comprehensive health check on ZCLAW Kernel
#[tauri::command]
fn openfang_health_check(
fn zclaw_health_check(
app: AppHandle,
port: Option<u16>,
timeout_ms: Option<u64>,
) -> Result<HealthCheckResponse, String> {
let check_port = port.unwrap_or(OPENFANG_DEFAULT_PORT);
let check_port = port.unwrap_or(ZCLAW_DEFAULT_PORT);
let timeout = timeout_ms.unwrap_or(3000);
let mut checks_performed = Vec::new();
let mut issues = Vec::new();
@@ -1089,8 +1094,8 @@ fn openfang_health_check(
.map(|d| d.as_secs())
.unwrap_or(0);
// 1. Check if OpenFang CLI is available
let runtime = resolve_openfang_runtime(&app);
// 1. Check if ZCLAW CLI is available
let runtime = resolve_zclaw_runtime(&app);
let cli_available = runtime.executable.is_file();
if !cli_available {
@@ -1108,12 +1113,12 @@ fn openfang_health_check(
port: check_port,
accessible: false,
latency_ms: None,
error: Some("OpenFang CLI not available".to_string()),
error: Some("ZCLAW CLI not available".to_string()),
},
last_check_timestamp,
checks_performed: vec!["cli_availability".to_string()],
issues: vec![format!(
"OpenFang runtime not found at: {}",
"ZCLAW runtime not found at: {}",
runtime.display_path.display()
)],
runtime_source: Some(runtime.source),
@@ -1148,7 +1153,7 @@ fn openfang_health_check(
let pid = gateway_status.listener_pids[0];
// Try to get detailed process info from process list
let process_info = run_openfang(&app, &["process", "list", "--json"])
let process_info = run_zclaw(&app, &["process", "list", "--json"])
.ok()
.and_then(|result| parse_json_output(&result.stdout).ok())
.and_then(|json| json.get("processes").and_then(Value::as_array).cloned());
@@ -1171,7 +1176,7 @@ fn openfang_health_check(
ProcessHealthDetails {
pid: Some(pid),
name: Some("openfang".to_string()),
name: Some("zclaw".to_string()),
status: Some(
gateway_status
.service_status
@@ -1225,17 +1230,17 @@ fn openfang_health_check(
})
}
/// Quick ping to check if OpenFang is alive (lightweight check)
/// Quick ping to check if ZCLAW is alive (lightweight check)
#[tauri::command]
fn openfang_ping(app: AppHandle) -> Result<bool, String> {
let port_check = check_port_accessibility("127.0.0.1", OPENFANG_DEFAULT_PORT, 1000);
fn zclaw_ping(app: AppHandle) -> Result<bool, String> {
let port_check = check_port_accessibility("127.0.0.1", ZCLAW_DEFAULT_PORT, 1000);
if port_check.accessible {
return Ok(true);
}
// Fallback: check via status command
match run_openfang(&app, &["gateway", "status", "--json", "--no-probe"]) {
match run_zclaw(&app, &["gateway", "status", "--json", "--no-probe"]) {
Ok(result) => {
if let Ok(status) = parse_json_output(&result.stdout) {
// Check if there are any listener PIDs
@@ -1257,37 +1262,37 @@ fn openfang_ping(app: AppHandle) -> Result<bool, String> {
// ============================================================================
// Backward-compatible aliases (OpenClaw naming)
// These delegate to OpenFang commands for backward compatibility
// These delegate to ZCLAW commands for backward compatibility
// ============================================================================
#[tauri::command]
fn gateway_status(app: AppHandle) -> Result<LocalGatewayStatus, String> {
openfang_status(app)
zclaw_status(app)
}
#[tauri::command]
fn gateway_start(app: AppHandle) -> Result<LocalGatewayStatus, String> {
openfang_start(app)
zclaw_start(app)
}
#[tauri::command]
fn gateway_stop(app: AppHandle) -> Result<LocalGatewayStatus, String> {
openfang_stop(app)
zclaw_stop(app)
}
#[tauri::command]
fn gateway_restart(app: AppHandle) -> Result<LocalGatewayStatus, String> {
openfang_restart(app)
zclaw_restart(app)
}
#[tauri::command]
fn gateway_local_auth() -> Result<LocalGatewayAuth, String> {
openfang_local_auth()
zclaw_local_auth()
}
#[tauri::command]
fn gateway_prepare_for_tauri(app: AppHandle) -> Result<LocalGatewayPrepareResult, String> {
openfang_prepare_for_tauri(app)
zclaw_prepare_for_tauri(app)
}
#[tauri::command]
@@ -1297,12 +1302,12 @@ fn gateway_approve_device_pairing(
public_key_base64: String,
url: Option<String>,
) -> Result<LocalGatewayPairingApprovalResult, String> {
openfang_approve_device_pairing(app, device_id, public_key_base64, url)
zclaw_approve_device_pairing(app, device_id, public_key_base64, url)
}
#[tauri::command]
fn gateway_doctor(app: AppHandle) -> Result<String, String> {
openfang_doctor(app)
zclaw_doctor(app)
}
#[cfg_attr(mobile, tauri::mobile_entry_point)]
@@ -1350,7 +1355,6 @@ pub fn run() {
let heartbeat_state: intelligence::HeartbeatEngineState = std::sync::Arc::new(tokio::sync::Mutex::new(std::collections::HashMap::new()));
let reflection_state: intelligence::ReflectionEngineState = std::sync::Arc::new(tokio::sync::Mutex::new(intelligence::ReflectionEngine::new(None)));
let identity_state: intelligence::IdentityManagerState = std::sync::Arc::new(tokio::sync::Mutex::new(intelligence::AgentIdentityManager::new()));
let persona_evolver_state: intelligence::PersonaEvolverStateHandle = std::sync::Arc::new(tokio::sync::Mutex::new(intelligence::PersonaEvolver::new(None)));
// Initialize internal ZCLAW Kernel state
let kernel_state = kernel_commands::create_kernel_state();
@@ -1365,7 +1369,6 @@ pub fn run() {
.manage(heartbeat_state)
.manage(reflection_state)
.manage(identity_state)
.manage(persona_evolver_state)
.manage(kernel_state)
.manage(pipeline_state)
.invoke_handler(tauri::generate_handler![
@@ -1386,6 +1389,11 @@ pub fn run() {
// Hands commands (autonomous capabilities)
kernel_commands::hand_list,
kernel_commands::hand_execute,
kernel_commands::hand_approve,
kernel_commands::hand_cancel,
// Scheduled task commands
kernel_commands::scheduled_task_create,
kernel_commands::scheduled_task_list,
// Pipeline commands (DSL-based workflows)
pipeline_commands::pipeline_list,
pipeline_commands::pipeline_get,
@@ -1397,22 +1405,22 @@ pub fn run() {
pipeline_commands::pipeline_refresh,
pipeline_commands::route_intent,
pipeline_commands::analyze_presentation,
// OpenFang commands (new naming)
openfang_status,
openfang_start,
openfang_stop,
openfang_restart,
openfang_local_auth,
openfang_prepare_for_tauri,
openfang_approve_device_pairing,
openfang_doctor,
openfang_health_check,
// ZCLAW commands (new naming)
zclaw_status,
zclaw_start,
zclaw_stop,
zclaw_restart,
zclaw_local_auth,
zclaw_prepare_for_tauri,
zclaw_approve_device_pairing,
zclaw_doctor,
zclaw_health_check,
// Process monitoring commands
openfang_process_list,
openfang_process_logs,
openfang_version,
zclaw_process_list,
zclaw_process_logs,
zclaw_version,
// Health check commands
openfang_ping,
zclaw_ping,
// Backward-compatible aliases (OpenClaw naming)
gateway_status,
gateway_start,
@@ -1433,6 +1441,9 @@ pub fn run() {
viking_commands::viking_remove,
viking_commands::viking_tree,
viking_commands::viking_inject_prompt,
viking_commands::viking_configure_embedding,
viking_commands::viking_configure_summary_driver,
viking_commands::viking_store_with_summaries,
// Memory extraction commands (supplement CLI)
memory::extractor::extract_session_memories,
memory::extractor::extract_and_store_memories,
@@ -1491,6 +1502,9 @@ pub fn run() {
memory_commands::memory_export,
memory_commands::memory_import,
memory_commands::memory_db_path,
memory_commands::memory_configure_embedding,
memory_commands::memory_is_embedding_configured,
memory_commands::memory_build_context,
// Intelligence Layer commands (Phase 2-3)
// Heartbeat Engine
intelligence::heartbeat::heartbeat_init,
@@ -1508,6 +1522,7 @@ pub fn run() {
intelligence::compactor::compactor_estimate_messages_tokens,
intelligence::compactor::compactor_check_threshold,
intelligence::compactor::compactor_compact,
intelligence::compactor::compactor_compact_llm,
// Reflection Engine
intelligence::reflection::reflection_init,
intelligence::reflection::reflection_record_conversation,
@@ -1529,24 +1544,7 @@ pub fn run() {
intelligence::identity::identity_get_snapshots,
intelligence::identity::identity_restore_snapshot,
intelligence::identity::identity_list_agents,
intelligence::identity::identity_delete_agent,
// Adaptive Intelligence Mesh (Phase 4)
intelligence::mesh::mesh_init,
intelligence::mesh::mesh_analyze,
intelligence::mesh::mesh_record_activity,
intelligence::mesh::mesh_get_patterns,
intelligence::mesh::mesh_update_config,
intelligence::mesh::mesh_decay_patterns,
intelligence::mesh::mesh_accept_recommendation,
intelligence::mesh::mesh_dismiss_recommendation,
// Persona Evolver (Phase 4)
intelligence::persona_evolver::persona_evolver_init,
intelligence::persona_evolver::persona_evolve,
intelligence::persona_evolver::persona_evolution_history,
intelligence::persona_evolver::persona_evolver_state,
intelligence::persona_evolver::persona_evolver_config,
intelligence::persona_evolver::persona_evolver_update_config,
intelligence::persona_evolver::persona_apply_proposal
intelligence::identity::identity_delete_agent
])
.run(tauri::generate_context!())
.expect("error while running tauri application");

View File

@@ -1,9 +1,11 @@
//! LLM Client Module
//!
//! Provides LLM API integration for memory extraction.
//! Provides LLM API integration for memory extraction and embedding.
//! Supports multiple providers with a unified interface.
//!
//! Note: Some fields are reserved for future streaming and provider selection features
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime,
// which the Rust compiler does not track as "use". Module-level allow is required
// for Tauri-commanded functions and internal type definitions.
#![allow(dead_code)]
@@ -357,6 +359,11 @@ impl EmbeddingClient {
}
}
/// Check if the embedding client is properly configured and available.
///
/// Returns `true` only when a remote provider is selected (anything other
/// than the `"local"` TF-IDF mode) and an API key has been supplied.
pub fn is_configured(&self) -> bool {
    let is_remote_provider = self.config.provider != "local";
    let has_api_key = !self.config.api_key.is_empty();
    is_remote_provider && has_api_key
}
pub async fn embed(&self, text: &str) -> Result<EmbeddingResponse, String> {
if self.config.provider == "local" || self.config.api_key.is_empty() {
return Err("Local TF-IDF mode does not support API embedding".to_string());

View File

@@ -10,7 +10,11 @@
//!
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §4.3
//!
//! Note: These types are reserved for future memory integration features
//! Note: Context types are used by the Tauri command memory_build_context (simplified version).
//! The full ContextBuilder is reserved for advanced layered loading in future phases.
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime.
// Module-level allow required for Tauri-commanded functions and internal types.
#![allow(dead_code)]

View File

@@ -12,6 +12,9 @@
//!
//! Note: Some fields and methods are reserved for future LLM-powered extraction
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime.
// Module-level allow required for Tauri-commanded functions and internal types.
#![allow(dead_code)]
use serde::{Deserialize, Serialize};
@@ -551,12 +554,39 @@ pub async fn extract_and_store_memories(
.with_importance(memory.importance);
// Store to SqliteStorage
let entry_uri = entry.uri.clone();
match storage.store(&entry).await {
Ok(_) => stored_count += 1,
Err(e) => {
store_errors.push(format!("Failed to store {}: {}", memory.category, e));
}
}
// Background: generate L0/L1 summaries if driver is configured
if crate::summarizer_adapter::is_summary_driver_configured() {
let storage_clone = storage.clone();
let summary_entry = entry.clone();
tokio::spawn(async move {
if let Some(driver) = crate::summarizer_adapter::get_summary_driver() {
let (overview, abstract_summary) =
zclaw_growth::summarizer::generate_summaries(driver.as_ref(), &summary_entry).await;
if overview.is_some() || abstract_summary.is_some() {
let updated = MemoryEntry {
overview,
abstract_summary,
..summary_entry
};
if let Err(e) = storage_clone.store(&updated).await {
tracing::debug!(
"[extract_and_store] Failed to update summaries for {}: {}",
entry_uri, e
);
}
}
}
});
}
}
let elapsed = start_time.elapsed().as_millis() as u64;

View File

@@ -49,21 +49,21 @@ impl LlmActionDriver for RuntimeLlmAdapter {
max_tokens: Option<u32>,
json_mode: bool,
) -> Result<Value, String> {
println!("[DEBUG RuntimeLlmAdapter] generate called with prompt length: {}", prompt.len());
println!("[DEBUG RuntimeLlmAdapter] input HashMap contents:");
tracing::debug!("[RuntimeLlmAdapter] generate called with prompt length: {}", prompt.len());
tracing::debug!("[RuntimeLlmAdapter] input HashMap contents:");
for (k, v) in &input {
println!(" {} => {}", k, v);
}
// Build user content from prompt and input
let user_content = if input.is_empty() {
println!("[DEBUG RuntimeLlmAdapter] WARNING: input is empty, using raw prompt");
tracing::debug!("[RuntimeLlmAdapter] WARNING: input is empty, using raw prompt");
prompt.clone()
} else {
// Inject input values into prompt
// Support multiple placeholder formats: {{key}}, {{ key }}, ${key}, ${inputs.key}
let mut rendered = prompt.clone();
println!("[DEBUG RuntimeLlmAdapter] Original prompt (first 500 chars): {}", &prompt[..prompt.len().min(500)]);
tracing::debug!("[RuntimeLlmAdapter] Original prompt (first 500 chars): {}", &prompt[..prompt.len().min(500)]);
for (key, value) in &input {
let str_value = if let Some(s) = value.as_str() {
s.to_string()
@@ -71,7 +71,7 @@ impl LlmActionDriver for RuntimeLlmAdapter {
value.to_string()
};
println!("[DEBUG RuntimeLlmAdapter] Replacing '{}' with '{}'", key, str_value);
tracing::debug!("[RuntimeLlmAdapter] Replacing '{}' with '{}'", key, str_value);
// Replace all common placeholder formats
rendered = rendered.replace(&format!("{{{{{key}}}}}"), &str_value); // {{key}}
@@ -79,7 +79,7 @@ impl LlmActionDriver for RuntimeLlmAdapter {
rendered = rendered.replace(&format!("${{{key}}}"), &str_value); // ${key}
rendered = rendered.replace(&format!("${{inputs.{key}}}"), &str_value); // ${inputs.key}
}
println!("[DEBUG RuntimeLlmAdapter] Rendered prompt (first 500 chars): {}", &rendered[..rendered.len().min(500)]);
tracing::debug!("[RuntimeLlmAdapter] Rendered prompt (first 500 chars): {}", &rendered[..rendered.len().min(500)]);
rendered
};
@@ -111,7 +111,7 @@ impl LlmActionDriver for RuntimeLlmAdapter {
// Safe truncation for UTF-8 strings
let truncated: String = text.chars().take(1000).collect();
println!("[DEBUG RuntimeLlmAdapter] LLM response text (first 1000 chars): {}", truncated);
tracing::debug!("[RuntimeLlmAdapter] LLM response text (first 1000 chars): {}", truncated);
// Parse as JSON if json_mode, otherwise return as string
if json_mode {
@@ -132,11 +132,11 @@ impl LlmActionDriver for RuntimeLlmAdapter {
// Safe truncation for UTF-8 strings
let truncated_json: String = json_text.chars().take(500).collect();
println!("[DEBUG RuntimeLlmAdapter] JSON text to parse (first 500 chars): {}", truncated_json);
tracing::debug!("[RuntimeLlmAdapter] JSON text to parse (first 500 chars): {}", truncated_json);
serde_json::from_str(&json_text)
.map_err(|e| {
println!("[DEBUG RuntimeLlmAdapter] JSON parse error: {}", e);
tracing::debug!("[RuntimeLlmAdapter] JSON parse error: {}", e);
format!("Failed to parse LLM response as JSON: {}\nResponse: {}", e, json_text)
})
} else {
@@ -269,22 +269,22 @@ pub async fn pipeline_list(
// Get pipelines directory
let pipelines_dir = get_pipelines_directory()?;
println!("[DEBUG pipeline_list] Scanning directory: {:?}", pipelines_dir);
println!("[DEBUG pipeline_list] Filters - category: {:?}, industry: {:?}", category, industry);
tracing::debug!("[pipeline_list] Scanning directory: {:?}", pipelines_dir);
tracing::debug!("[pipeline_list] Filters - category: {:?}, industry: {:?}", category, industry);
// Scan for pipeline files (returns both info and paths)
let mut pipelines_with_paths: Vec<(PipelineInfo, PathBuf)> = Vec::new();
if pipelines_dir.exists() {
scan_pipelines_with_paths(&pipelines_dir, category.as_deref(), industry.as_deref(), &mut pipelines_with_paths)?;
} else {
eprintln!("[WARN pipeline_list] Pipelines directory does not exist: {:?}", pipelines_dir);
tracing::warn!("[WARN pipeline_list] Pipelines directory does not exist: {:?}", pipelines_dir);
}
println!("[DEBUG pipeline_list] Found {} pipelines", pipelines_with_paths.len());
tracing::debug!("[pipeline_list] Found {} pipelines", pipelines_with_paths.len());
// Debug: log all pipelines with their industry values
for (info, _) in &pipelines_with_paths {
println!("[DEBUG pipeline_list] Pipeline: {} -> category: {}, industry: '{}'", info.id, info.category, info.industry);
tracing::debug!("[pipeline_list] Pipeline: {} -> category: {}, industry: '{}'", info.id, info.category, info.industry);
}
// Update state
@@ -328,15 +328,15 @@ pub async fn pipeline_run(
kernel_state: State<'_, KernelState>,
request: RunPipelineRequest,
) -> Result<RunPipelineResponse, String> {
println!("[DEBUG pipeline_run] Received request for pipeline_id: {}", request.pipeline_id);
tracing::debug!("[pipeline_run] Received request for pipeline_id: {}", request.pipeline_id);
// Get pipeline
let pipelines = state.pipelines.read().await;
println!("[DEBUG pipeline_run] State has {} pipelines loaded", pipelines.len());
tracing::debug!("[pipeline_run] State has {} pipelines loaded", pipelines.len());
// Debug: list all loaded pipeline IDs
for (id, _) in pipelines.iter() {
println!("[DEBUG pipeline_run] Loaded pipeline: {}", id);
tracing::debug!("[pipeline_run] Loaded pipeline: {}", id);
}
let pipeline = pipelines.get(&request.pipeline_id)
@@ -353,13 +353,13 @@ pub async fn pipeline_run(
let llm_driver = {
let kernel_lock = kernel_state.lock().await;
if let Some(kernel) = kernel_lock.as_ref() {
println!("[DEBUG pipeline_run] Got LLM driver from Kernel");
tracing::debug!("[pipeline_run] Got LLM driver from Kernel");
Some(Arc::new(RuntimeLlmAdapter::new(
kernel.driver(),
Some(kernel.config().llm.model.clone()),
)) as Arc<dyn LlmActionDriver>)
} else {
println!("[DEBUG pipeline_run] Kernel not initialized, no LLM driver available");
tracing::debug!("[pipeline_run] Kernel not initialized, no LLM driver available");
None
}
};
@@ -382,10 +382,10 @@ pub async fn pipeline_run(
// Run pipeline in background with the known run_id
tokio::spawn(async move {
println!("[DEBUG pipeline_run] Starting execution with run_id: {}", run_id_for_spawn);
tracing::debug!("[pipeline_run] Starting execution with run_id: {}", run_id_for_spawn);
let result = executor.execute_with_id(&pipeline, inputs, &run_id_for_spawn).await;
println!("[DEBUG pipeline_run] Execution completed for run_id: {}, status: {:?}",
tracing::debug!("[pipeline_run] Execution completed for run_id: {}, status: {:?}",
run_id_for_spawn,
result.as_ref().map(|r| r.status.clone()).unwrap_or(RunStatus::Failed));
@@ -411,7 +411,7 @@ pub async fn pipeline_run(
});
// Return immediately with the known run ID
println!("[DEBUG pipeline_run] Returning run_id: {} to caller", run_id);
tracing::debug!("[pipeline_run] Returning run_id: {} to caller", run_id);
Ok(RunPipelineResponse {
run_id,
pipeline_id: request.pipeline_id,
@@ -576,7 +576,7 @@ fn scan_pipelines_with_paths(
industry_filter: Option<&str>,
pipelines: &mut Vec<(PipelineInfo, PathBuf)>,
) -> Result<(), String> {
println!("[DEBUG scan] Entering directory: {:?}", dir);
tracing::debug!("[scan] Entering directory: {:?}", dir);
let entries = std::fs::read_dir(dir)
.map_err(|e| format!("Failed to read pipelines directory: {}", e))?;
@@ -589,9 +589,9 @@ fn scan_pipelines_with_paths(
scan_pipelines_with_paths(&path, category_filter, industry_filter, pipelines)?;
} else if path.extension().map(|e| e == "yaml" || e == "yml").unwrap_or(false) {
// Try to parse pipeline file
println!("[DEBUG scan] Found YAML file: {:?}", path);
tracing::debug!("[scan] Found YAML file: {:?}", path);
if let Ok(content) = std::fs::read_to_string(&path) {
println!("[DEBUG scan] File content length: {} bytes", content.len());
tracing::debug!("[scan] File content length: {} bytes", content.len());
match parse_pipeline_yaml(&content) {
Ok(pipeline) => {
// Debug: log parsed pipeline metadata
@@ -620,7 +620,7 @@ fn scan_pipelines_with_paths(
pipelines.push((pipeline_to_info(&pipeline), path));
}
Err(e) => {
eprintln!("[ERROR scan] Failed to parse pipeline at {:?}: {}", path, e);
tracing::error!("[scan] Failed to parse pipeline at {:?}: {}", path, e);
}
}
}
@@ -701,10 +701,10 @@ fn pipeline_to_info(pipeline: &Pipeline) -> PipelineInfo {
pub fn create_pipeline_state() -> Arc<PipelineState> {
// Try to create an LLM driver from environment/config
let action_registry = if let Some(driver) = create_llm_driver_from_config() {
println!("[DEBUG create_pipeline_state] LLM driver configured successfully");
tracing::debug!("[create_pipeline_state] LLM driver configured successfully");
Arc::new(ActionRegistry::new().with_llm_driver(driver))
} else {
println!("[DEBUG create_pipeline_state] No LLM driver configured - pipelines requiring LLM will fail");
tracing::debug!("[create_pipeline_state] No LLM driver configured - pipelines requiring LLM will fail");
Arc::new(ActionRegistry::new())
};
Arc::new(PipelineState::new(action_registry))
@@ -767,7 +767,7 @@ pub async fn route_intent(
) -> Result<RouteResultResponse, String> {
use zclaw_pipeline::{TriggerParser, Trigger, TriggerParam, compile_trigger};
println!("[DEBUG route_intent] Routing user input: {}", user_input);
tracing::debug!("[route_intent] Routing user input: {}", user_input);
// Build trigger parser from loaded pipelines
let pipelines = state.pipelines.read().await;
@@ -810,7 +810,7 @@ pub async fn route_intent(
) {
Ok(compiled) => parser.register(compiled),
Err(e) => {
eprintln!("[WARN route_intent] Failed to compile trigger for {}: {}", id, e);
tracing::warn!("[WARN route_intent] Failed to compile trigger for {}: {}", id, e);
}
}
}
@@ -883,7 +883,7 @@ fn create_llm_driver_from_config() -> Option<Arc<dyn LlmActionDriver>> {
.map(|p| p.join("zclaw").join("config.toml"))?;
if !config_path.exists() {
println!("[DEBUG create_llm_driver] Config file not found at {:?}", config_path);
tracing::debug!("[create_llm_driver] Config file not found at {:?}", config_path);
return None;
}
@@ -899,7 +899,7 @@ fn create_llm_driver_from_config() -> Option<Arc<dyn LlmActionDriver>> {
let base_url = llm_config.get("base_url").and_then(|v| v.as_str()).map(|s| s.to_string());
let model = llm_config.get("model").and_then(|v| v.as_str()).map(|s| s.to_string());
println!("[DEBUG create_llm_driver] Found LLM config: provider={}, model={:?}", provider, model);
tracing::debug!("[create_llm_driver] Found LLM config: provider={}, model={:?}", provider, model);
// Convert api_key to SecretString
let secret_key = SecretString::new(api_key);
@@ -920,7 +920,7 @@ fn create_llm_driver_from_config() -> Option<Arc<dyn LlmActionDriver>> {
Arc::new(zclaw_runtime::LocalDriver::new(&url))
}
_ => {
eprintln!("[WARN create_llm_driver] Unknown provider: {}", provider);
tracing::warn!("[WARN create_llm_driver] Unknown provider: {}", provider);
return None;
}
};