refactor: 清理未使用代码并添加未来功能标记
Some checks failed
CI / Rust Check (push) Has been cancelled
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
Some checks failed
CI / Rust Check (push) Has been cancelled
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
style: 统一代码格式和注释风格 docs: 更新多个功能文档的完整度和状态 feat(runtime): 添加路径验证工具支持 fix(pipeline): 改进条件判断和变量解析逻辑 test(types): 为ID类型添加全面测试用例 chore: 更新依赖项和Cargo.lock文件 perf(mcp): 优化MCP协议传输和错误处理
This commit is contained in:
@@ -8,6 +8,10 @@
|
||||
//!
|
||||
//! Phase 2 of Intelligence Layer Migration.
|
||||
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.3.1
|
||||
//!
|
||||
//! NOTE: Some configuration methods are reserved for future dynamic adjustment.
|
||||
|
||||
#![allow(dead_code)] // Configuration methods reserved for future dynamic compaction tuning
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use regex::Regex;
|
||||
|
||||
@@ -6,6 +6,10 @@
|
||||
//!
|
||||
//! Phase 2 of Intelligence Layer Migration.
|
||||
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.4.1
|
||||
//!
|
||||
//! NOTE: Some methods are reserved for future proactive features.
|
||||
|
||||
#![allow(dead_code)] // Methods reserved for future proactive features
|
||||
|
||||
use chrono::{Local, Timelike};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -9,13 +9,17 @@
|
||||
//!
|
||||
//! Phase 3 of Intelligence Layer Migration.
|
||||
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.2.3
|
||||
//!
|
||||
//! NOTE: Some methods are reserved for future integration.
|
||||
|
||||
#![allow(dead_code)] // Methods reserved for future identity management features
|
||||
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use tracing::{error, warn};
|
||||
use tracing::{debug, error, warn};
|
||||
|
||||
// === Types ===
|
||||
|
||||
@@ -169,11 +173,11 @@ impl AgentIdentityManager {
|
||||
self.proposals = store.proposals;
|
||||
self.snapshots = store.snapshots;
|
||||
self.snapshot_counter = store.snapshot_counter;
|
||||
eprintln!(
|
||||
"[IdentityManager] Loaded {} identities, {} proposals, {} snapshots",
|
||||
self.identities.len(),
|
||||
self.proposals.len(),
|
||||
self.snapshots.len()
|
||||
debug!(
|
||||
identities_count = self.identities.len(),
|
||||
proposals_count = self.proposals.len(),
|
||||
snapshots_count = self.snapshots.len(),
|
||||
"[IdentityManager] Loaded identity data from disk"
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
|
||||
397
desktop/src-tauri/src/intelligence/mesh.rs
Normal file
397
desktop/src-tauri/src/intelligence/mesh.rs
Normal file
@@ -0,0 +1,397 @@
|
||||
//! Adaptive Intelligence Mesh - Coordinates Memory, Pipeline, and Heartbeat
|
||||
//!
|
||||
//! This module provides proactive workflow recommendations based on user behavior patterns.
|
||||
//! It integrates with:
|
||||
//! - PatternDetector for behavior pattern detection
|
||||
//! - WorkflowRecommender for generating recommendations
|
||||
//! - HeartbeatEngine for periodic checks
|
||||
//! - PersistentMemoryStore for historical data
|
||||
//! - PipelineExecutor for workflow execution
|
||||
//!
|
||||
//! NOTE: Some methods are reserved for future integration with the UI.
|
||||
|
||||
#![allow(dead_code)] // Methods reserved for future UI integration
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{broadcast, Mutex};
|
||||
|
||||
use super::pattern_detector::{BehaviorPattern, PatternContext, PatternDetector};
|
||||
use super::recommender::WorkflowRecommender;
|
||||
|
||||
// === Types ===
|
||||
|
||||
/// Workflow recommendation generated by the mesh
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct WorkflowRecommendation {
|
||||
/// Unique recommendation identifier
|
||||
pub id: String,
|
||||
/// Pipeline ID to recommend
|
||||
pub pipeline_id: String,
|
||||
/// Confidence score (0.0-1.0)
|
||||
pub confidence: f32,
|
||||
/// Human-readable reason for recommendation
|
||||
pub reason: String,
|
||||
/// Suggested input values
|
||||
pub suggested_inputs: HashMap<String, serde_json::Value>,
|
||||
/// Pattern IDs that matched
|
||||
pub patterns_matched: Vec<String>,
|
||||
/// When this recommendation was generated
|
||||
pub timestamp: DateTime<Utc>,
|
||||
}
|
||||
|
||||
/// Mesh coordinator configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct MeshConfig {
|
||||
/// Enable mesh recommendations
|
||||
pub enabled: bool,
|
||||
/// Minimum confidence threshold for recommendations
|
||||
pub min_confidence: f32,
|
||||
/// Maximum recommendations to generate per analysis
|
||||
pub max_recommendations: usize,
|
||||
/// Hours to look back for pattern analysis
|
||||
pub analysis_window_hours: u64,
|
||||
}
|
||||
|
||||
impl Default for MeshConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: true,
|
||||
min_confidence: 0.6,
|
||||
max_recommendations: 5,
|
||||
analysis_window_hours: 24,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Analysis result from mesh coordinator
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct MeshAnalysisResult {
|
||||
/// Generated recommendations
|
||||
pub recommendations: Vec<WorkflowRecommendation>,
|
||||
/// Patterns detected
|
||||
pub patterns_detected: usize,
|
||||
/// Analysis timestamp
|
||||
pub timestamp: DateTime<Utc>,
|
||||
}
|
||||
|
||||
// === Mesh Coordinator ===
|
||||
|
||||
/// Main mesh coordinator that integrates pattern detection and recommendations
|
||||
pub struct MeshCoordinator {
|
||||
/// Agent ID
|
||||
#[allow(dead_code)] // Reserved for multi-agent scenarios
|
||||
agent_id: String,
|
||||
/// Configuration
|
||||
config: Arc<Mutex<MeshConfig>>,
|
||||
/// Pattern detector
|
||||
pattern_detector: Arc<Mutex<PatternDetector>>,
|
||||
/// Workflow recommender
|
||||
recommender: Arc<Mutex<WorkflowRecommender>>,
|
||||
/// Recommendation sender
|
||||
#[allow(dead_code)] // Reserved for real-time recommendation streaming
|
||||
recommendation_sender: broadcast::Sender<WorkflowRecommendation>,
|
||||
/// Last analysis timestamp
|
||||
last_analysis: Arc<Mutex<Option<DateTime<Utc>>>>,
|
||||
}
|
||||
|
||||
impl MeshCoordinator {
|
||||
/// Create a new mesh coordinator
|
||||
pub fn new(agent_id: String, config: Option<MeshConfig>) -> Self {
|
||||
let (sender, _) = broadcast::channel(100);
|
||||
let config = config.unwrap_or_default();
|
||||
|
||||
Self {
|
||||
agent_id,
|
||||
config: Arc::new(Mutex::new(config)),
|
||||
pattern_detector: Arc::new(Mutex::new(PatternDetector::new(None))),
|
||||
recommender: Arc::new(Mutex::new(WorkflowRecommender::new(None))),
|
||||
recommendation_sender: sender,
|
||||
last_analysis: Arc::new(Mutex::new(None)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Analyze current context and generate recommendations
|
||||
pub async fn analyze(&self) -> Result<MeshAnalysisResult, String> {
|
||||
let config = self.config.lock().await.clone();
|
||||
|
||||
if !config.enabled {
|
||||
return Ok(MeshAnalysisResult {
|
||||
recommendations: vec![],
|
||||
patterns_detected: 0,
|
||||
timestamp: Utc::now(),
|
||||
});
|
||||
}
|
||||
|
||||
// Get patterns from detector (clone to avoid borrow issues)
|
||||
let patterns: Vec<BehaviorPattern> = {
|
||||
let detector = self.pattern_detector.lock().await;
|
||||
let patterns_ref = detector.get_patterns();
|
||||
patterns_ref.into_iter().cloned().collect()
|
||||
};
|
||||
let patterns_detected = patterns.len();
|
||||
|
||||
// Generate recommendations from patterns
|
||||
let recommender = self.recommender.lock().await;
|
||||
let pattern_refs: Vec<&BehaviorPattern> = patterns.iter().collect();
|
||||
let mut recommendations = recommender.recommend(&pattern_refs);
|
||||
|
||||
// Filter by confidence
|
||||
recommendations.retain(|r| r.confidence >= config.min_confidence);
|
||||
|
||||
// Limit count
|
||||
recommendations.truncate(config.max_recommendations);
|
||||
|
||||
// Update timestamps
|
||||
for rec in &mut recommendations {
|
||||
rec.timestamp = Utc::now();
|
||||
}
|
||||
|
||||
// Update last analysis time
|
||||
*self.last_analysis.lock().await = Some(Utc::now());
|
||||
|
||||
Ok(MeshAnalysisResult {
|
||||
recommendations: recommendations.clone(),
|
||||
patterns_detected,
|
||||
timestamp: Utc::now(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Record user activity for pattern detection
|
||||
pub async fn record_activity(
|
||||
&self,
|
||||
activity_type: ActivityType,
|
||||
context: PatternContext,
|
||||
) -> Result<(), String> {
|
||||
let mut detector = self.pattern_detector.lock().await;
|
||||
|
||||
match activity_type {
|
||||
ActivityType::SkillUsed { skill_ids } => {
|
||||
detector.record_skill_usage(skill_ids);
|
||||
}
|
||||
ActivityType::PipelineExecuted {
|
||||
task_type,
|
||||
pipeline_id,
|
||||
} => {
|
||||
detector.record_pipeline_execution(&task_type, &pipeline_id, context);
|
||||
}
|
||||
ActivityType::InputReceived { keywords, intent } => {
|
||||
detector.record_input_pattern(keywords, &intent, context);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Subscribe to recommendations
|
||||
pub fn subscribe(&self) -> broadcast::Receiver<WorkflowRecommendation> {
|
||||
self.recommendation_sender.subscribe()
|
||||
}
|
||||
|
||||
/// Get current patterns
|
||||
pub async fn get_patterns(&self) -> Vec<BehaviorPattern> {
|
||||
let detector = self.pattern_detector.lock().await;
|
||||
detector.get_patterns().into_iter().cloned().collect()
|
||||
}
|
||||
|
||||
/// Decay old patterns (call periodically)
|
||||
pub async fn decay_patterns(&self) {
|
||||
let mut detector = self.pattern_detector.lock().await;
|
||||
detector.decay_patterns();
|
||||
}
|
||||
|
||||
/// Update configuration
|
||||
pub async fn update_config(&self, config: MeshConfig) {
|
||||
*self.config.lock().await = config;
|
||||
}
|
||||
|
||||
/// Get configuration
|
||||
pub async fn get_config(&self) -> MeshConfig {
|
||||
self.config.lock().await.clone()
|
||||
}
|
||||
|
||||
/// Record a user correction (for pattern refinement)
|
||||
pub async fn record_correction(&self, correction_type: &str) {
|
||||
let mut detector = self.pattern_detector.lock().await;
|
||||
// Record as input pattern with negative signal
|
||||
detector.record_input_pattern(
|
||||
vec![format!("correction:{}", correction_type)],
|
||||
"user_preference",
|
||||
PatternContext::default(),
|
||||
);
|
||||
}
|
||||
|
||||
/// Get recommendation count
|
||||
pub async fn recommendation_count(&self) -> usize {
|
||||
let recommender = self.recommender.lock().await;
|
||||
recommender.recommendation_count()
|
||||
}
|
||||
|
||||
/// Accept a recommendation (returns the accepted recommendation)
|
||||
pub async fn accept_recommendation(&self, recommendation_id: &str) -> Option<WorkflowRecommendation> {
|
||||
let mut recommender = self.recommender.lock().await;
|
||||
recommender.accept_recommendation(recommendation_id)
|
||||
}
|
||||
|
||||
/// Dismiss a recommendation (returns true if found and dismissed)
|
||||
pub async fn dismiss_recommendation(&self, recommendation_id: &str) -> bool {
|
||||
let mut recommender = self.recommender.lock().await;
|
||||
recommender.dismiss_recommendation(recommendation_id)
|
||||
}
|
||||
}
|
||||
|
||||
/// Types of user activities that can be recorded
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ActivityType {
|
||||
/// Skills were used together
|
||||
SkillUsed { skill_ids: Vec<String> },
|
||||
/// A pipeline was executed
|
||||
PipelineExecuted { task_type: String, pipeline_id: String },
|
||||
/// User input was received
|
||||
InputReceived { keywords: Vec<String>, intent: String },
|
||||
}
|
||||
|
||||
// === Tauri Commands ===
|
||||
|
||||
/// Mesh coordinator state for Tauri
|
||||
pub type MeshCoordinatorState = Arc<Mutex<HashMap<String, MeshCoordinator>>>;
|
||||
|
||||
/// Initialize mesh coordinator for an agent
|
||||
#[tauri::command]
|
||||
pub async fn mesh_init(
|
||||
agent_id: String,
|
||||
config: Option<MeshConfig>,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<(), String> {
|
||||
let coordinator = MeshCoordinator::new(agent_id.clone(), config);
|
||||
let mut coordinators = state.lock().await;
|
||||
coordinators.insert(agent_id, coordinator);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Analyze and get recommendations
|
||||
#[tauri::command]
|
||||
pub async fn mesh_analyze(
|
||||
agent_id: String,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<MeshAnalysisResult, String> {
|
||||
let coordinators = state.lock().await;
|
||||
let coordinator = coordinators
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Mesh coordinator not initialized for agent: {}", agent_id))?;
|
||||
coordinator.analyze().await
|
||||
}
|
||||
|
||||
/// Record user activity
|
||||
#[tauri::command]
|
||||
pub async fn mesh_record_activity(
|
||||
agent_id: String,
|
||||
activity_type: ActivityType,
|
||||
context: PatternContext,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<(), String> {
|
||||
let coordinators = state.lock().await;
|
||||
let coordinator = coordinators
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Mesh coordinator not initialized for agent: {}", agent_id))?;
|
||||
coordinator.record_activity(activity_type, context).await
|
||||
}
|
||||
|
||||
/// Get current patterns
|
||||
#[tauri::command]
|
||||
pub async fn mesh_get_patterns(
|
||||
agent_id: String,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<Vec<BehaviorPattern>, String> {
|
||||
let coordinators = state.lock().await;
|
||||
let coordinator = coordinators
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Mesh coordinator not initialized for agent: {}", agent_id))?;
|
||||
Ok(coordinator.get_patterns().await)
|
||||
}
|
||||
|
||||
/// Update mesh configuration
|
||||
#[tauri::command]
|
||||
pub async fn mesh_update_config(
|
||||
agent_id: String,
|
||||
config: MeshConfig,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<(), String> {
|
||||
let coordinators = state.lock().await;
|
||||
let coordinator = coordinators
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Mesh coordinator not initialized for agent: {}", agent_id))?;
|
||||
coordinator.update_config(config).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Decay old patterns
|
||||
#[tauri::command]
|
||||
pub async fn mesh_decay_patterns(
|
||||
agent_id: String,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<(), String> {
|
||||
let coordinators = state.lock().await;
|
||||
let coordinator = coordinators
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Mesh coordinator not initialized for agent: {}", agent_id))?;
|
||||
coordinator.decay_patterns().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Accept a recommendation (removes it and returns the accepted recommendation)
|
||||
#[tauri::command]
|
||||
pub async fn mesh_accept_recommendation(
|
||||
agent_id: String,
|
||||
recommendation_id: String,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<Option<WorkflowRecommendation>, String> {
|
||||
let coordinators = state.lock().await;
|
||||
let coordinator = coordinators
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Mesh coordinator not initialized for agent: {}", agent_id))?;
|
||||
Ok(coordinator.accept_recommendation(&recommendation_id).await)
|
||||
}
|
||||
|
||||
/// Dismiss a recommendation (removes it without acting on it)
|
||||
#[tauri::command]
|
||||
pub async fn mesh_dismiss_recommendation(
|
||||
agent_id: String,
|
||||
recommendation_id: String,
|
||||
state: tauri::State<'_, MeshCoordinatorState>,
|
||||
) -> Result<bool, String> {
|
||||
let coordinators = state.lock().await;
|
||||
let coordinator = coordinators
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Mesh coordinator not initialized for agent: {}", agent_id))?;
|
||||
Ok(coordinator.dismiss_recommendation(&recommendation_id).await)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_mesh_config_default() {
|
||||
let config = MeshConfig::default();
|
||||
assert!(config.enabled);
|
||||
assert_eq!(config.min_confidence, 0.6);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_mesh_coordinator_creation() {
|
||||
let coordinator = MeshCoordinator::new("test_agent".to_string(), None);
|
||||
let config = coordinator.get_config().await;
|
||||
assert!(config.enabled);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_mesh_analysis() {
|
||||
let coordinator = MeshCoordinator::new("test_agent".to_string(), None);
|
||||
let result = coordinator.analyze().await;
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
}
|
||||
@@ -9,6 +9,11 @@
|
||||
//! - `compactor` - Context compaction for infinite-length conversations
|
||||
//! - `reflection` - Agent self-improvement through conversation analysis
|
||||
//! - `identity` - Agent identity file management (SOUL.md, AGENTS.md, USER.md)
|
||||
//! - `pattern_detector` - Behavior pattern detection for adaptive mesh
|
||||
//! - `recommender` - Workflow recommendation engine
|
||||
//! - `mesh` - Adaptive Intelligence Mesh coordinator
|
||||
//! - `trigger_evaluator` - Context-aware hand triggers with semantic matching
|
||||
//! - `persona_evolver` - Memory-powered persona evolution system
|
||||
//!
|
||||
//! ## Migration Status
|
||||
//!
|
||||
@@ -18,8 +23,13 @@
|
||||
//! | Context Compactor | ✅ Phase 2 | Complete |
|
||||
//! | Reflection Engine | ✅ Phase 3 | Complete |
|
||||
//! | Agent Identity | ✅ Phase 3 | Complete |
|
||||
//! | Agent Swarm | 🚧 Phase 3 | TODO |
|
||||
//! | Vector Memory | 📋 Phase 4 | Planned |
|
||||
//! | Pattern Detector | ✅ Phase 4 | Complete |
|
||||
//! | Workflow Recommender | ✅ Phase 4 | Complete |
|
||||
//! | Adaptive Mesh | ✅ Phase 4 | Complete |
|
||||
//! | Trigger Evaluator | ✅ Phase 4 | Complete |
|
||||
//! | Persona Evolver | ✅ Phase 4 | Complete |
|
||||
//! | Agent Swarm | 🚧 Phase 4 | TODO |
|
||||
//! | Vector Memory | 📋 Phase 5 | Planned |
|
||||
//!
|
||||
//! Reference: docs/plans/INTELLIGENCE-LAYER-MIGRATION.md
|
||||
|
||||
@@ -27,12 +37,47 @@ pub mod heartbeat;
|
||||
pub mod compactor;
|
||||
pub mod reflection;
|
||||
pub mod identity;
|
||||
pub mod pattern_detector;
|
||||
pub mod recommender;
|
||||
pub mod mesh;
|
||||
pub mod trigger_evaluator;
|
||||
pub mod persona_evolver;
|
||||
pub mod validation;
|
||||
|
||||
// Re-export main types for convenience
|
||||
// These exports are reserved for external use and future integration
|
||||
#[allow(unused_imports)]
|
||||
pub use heartbeat::HeartbeatEngineState;
|
||||
#[allow(unused_imports)]
|
||||
pub use reflection::{
|
||||
ReflectionEngine, ReflectionEngineState,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
pub use identity::{
|
||||
AgentIdentityManager, IdentityManagerState,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
pub use pattern_detector::{
|
||||
BehaviorPattern, PatternContext, PatternDetector, PatternDetectorConfig, PatternType,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
pub use recommender::{
|
||||
PipelineMetadata, RecommendationRule, RecommenderConfig, WorkflowRecommender,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
pub use mesh::{
|
||||
ActivityType, MeshAnalysisResult, MeshConfig, MeshCoordinator, MeshCoordinatorState,
|
||||
WorkflowRecommendation,
|
||||
};
|
||||
#[allow(unused_imports)] // Module not yet integrated - exports reserved for future use
|
||||
pub use trigger_evaluator::{
|
||||
ComparisonOperator, ConditionCombination, ContextConditionClause, ContextConditionConfig,
|
||||
ContextField, ExtendedTriggerType, IdentityFile, IdentityStateConfig,
|
||||
MemoryQueryConfig, CompositeTriggerConfig, TriggerContextCache, TriggerEvaluator,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
pub use persona_evolver::{
|
||||
PersonaEvolver, PersonaEvolverConfig, PersonaEvolverState, PersonaEvolverStateHandle,
|
||||
EvolutionResult, EvolutionProposal, EvolutionChangeType, EvolutionInsight,
|
||||
ProfileUpdate, InsightCategory,
|
||||
};
|
||||
|
||||
421
desktop/src-tauri/src/intelligence/pattern_detector.rs
Normal file
421
desktop/src-tauri/src/intelligence/pattern_detector.rs
Normal file
@@ -0,0 +1,421 @@
|
||||
//! Pattern Detector - Behavior pattern detection for Adaptive Intelligence Mesh
|
||||
//!
|
||||
//! Detects patterns from user activities including:
|
||||
//! - Skill combinations (frequently used together)
|
||||
//! - Temporal triggers (time-based patterns)
|
||||
//! - Task-pipeline mappings (task types mapped to pipelines)
|
||||
//! - Input patterns (keyword/intent patterns)
|
||||
//!
|
||||
//! NOTE: Analysis and export methods are reserved for future dashboard integration.
|
||||
|
||||
#![allow(dead_code)] // Analysis and export methods reserved for future dashboard features
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
// === Pattern Types ===
|
||||
|
||||
/// Unique identifier for a pattern
|
||||
pub type PatternId = String;
|
||||
|
||||
/// Behavior pattern detected from user activities
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct BehaviorPattern {
|
||||
/// Unique pattern identifier
|
||||
pub id: PatternId,
|
||||
/// Type of pattern detected
|
||||
pub pattern_type: PatternType,
|
||||
/// How many times this pattern has occurred
|
||||
pub frequency: usize,
|
||||
/// When this pattern was last detected
|
||||
pub last_occurrence: DateTime<Utc>,
|
||||
/// When this pattern was first detected
|
||||
pub first_occurrence: DateTime<Utc>,
|
||||
/// Confidence score (0.0-1.0)
|
||||
pub confidence: f32,
|
||||
/// Context when pattern was detected
|
||||
pub context: PatternContext,
|
||||
}
|
||||
|
||||
/// Types of detectable patterns
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum PatternType {
|
||||
/// Skills frequently used together
|
||||
SkillCombination {
|
||||
skill_ids: Vec<String>,
|
||||
},
|
||||
/// Time-based trigger pattern
|
||||
TemporalTrigger {
|
||||
hand_id: String,
|
||||
time_pattern: String, // Cron-like pattern or time range
|
||||
},
|
||||
/// Task type mapped to a pipeline
|
||||
TaskPipelineMapping {
|
||||
task_type: String,
|
||||
pipeline_id: String,
|
||||
},
|
||||
/// Input keyword/intent pattern
|
||||
InputPattern {
|
||||
keywords: Vec<String>,
|
||||
intent: String,
|
||||
},
|
||||
}
|
||||
|
||||
/// Context information when pattern was detected
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct PatternContext {
|
||||
/// Skills involved in the session
|
||||
pub skill_ids: Option<Vec<String>>,
|
||||
/// Topics discussed recently
|
||||
pub recent_topics: Option<Vec<String>>,
|
||||
/// Detected intent
|
||||
pub intent: Option<String>,
|
||||
/// Time of day when detected (hour 0-23)
|
||||
pub time_of_day: Option<u8>,
|
||||
/// Day of week (0=Monday, 6=Sunday)
|
||||
pub day_of_week: Option<u8>,
|
||||
}
|
||||
|
||||
/// Pattern detection configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PatternDetectorConfig {
|
||||
/// Minimum occurrences before pattern is recognized
|
||||
pub min_frequency: usize,
|
||||
/// Minimum confidence threshold
|
||||
pub min_confidence: f32,
|
||||
/// Days after which pattern confidence decays
|
||||
pub decay_days: u32,
|
||||
/// Maximum patterns to keep
|
||||
pub max_patterns: usize,
|
||||
}
|
||||
|
||||
impl Default for PatternDetectorConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
min_frequency: 3,
|
||||
min_confidence: 0.5,
|
||||
decay_days: 30,
|
||||
max_patterns: 100,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// === Pattern Detector ===
|
||||
|
||||
/// Pattern detector that identifies behavior patterns from activities
|
||||
pub struct PatternDetector {
|
||||
/// Detected patterns
|
||||
patterns: HashMap<PatternId, BehaviorPattern>,
|
||||
/// Configuration
|
||||
config: PatternDetectorConfig,
|
||||
/// Skill combination history for pattern detection
|
||||
skill_combination_history: Vec<(Vec<String>, DateTime<Utc>)>,
|
||||
}
|
||||
|
||||
impl PatternDetector {
|
||||
/// Create a new pattern detector
|
||||
pub fn new(config: Option<PatternDetectorConfig>) -> Self {
|
||||
Self {
|
||||
patterns: HashMap::new(),
|
||||
config: config.unwrap_or_default(),
|
||||
skill_combination_history: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Record skill usage for combination detection
|
||||
pub fn record_skill_usage(&mut self, skill_ids: Vec<String>) {
|
||||
let now = Utc::now();
|
||||
self.skill_combination_history.push((skill_ids, now));
|
||||
|
||||
// Keep only recent history (last 1000 entries)
|
||||
if self.skill_combination_history.len() > 1000 {
|
||||
self.skill_combination_history.drain(0..500);
|
||||
}
|
||||
|
||||
// Detect patterns
|
||||
self.detect_skill_combinations();
|
||||
}
|
||||
|
||||
/// Record a pipeline execution for task mapping detection
|
||||
pub fn record_pipeline_execution(
|
||||
&mut self,
|
||||
task_type: &str,
|
||||
pipeline_id: &str,
|
||||
context: PatternContext,
|
||||
) {
|
||||
let pattern_key = format!("task_pipeline:{}:{}", task_type, pipeline_id);
|
||||
|
||||
self.update_or_create_pattern(
|
||||
&pattern_key,
|
||||
PatternType::TaskPipelineMapping {
|
||||
task_type: task_type.to_string(),
|
||||
pipeline_id: pipeline_id.to_string(),
|
||||
},
|
||||
context,
|
||||
);
|
||||
}
|
||||
|
||||
/// Record an input pattern
|
||||
pub fn record_input_pattern(
|
||||
&mut self,
|
||||
keywords: Vec<String>,
|
||||
intent: &str,
|
||||
context: PatternContext,
|
||||
) {
|
||||
let pattern_key = format!("input_pattern:{}:{}", keywords.join(","), intent);
|
||||
|
||||
self.update_or_create_pattern(
|
||||
&pattern_key,
|
||||
PatternType::InputPattern {
|
||||
keywords,
|
||||
intent: intent.to_string(),
|
||||
},
|
||||
context,
|
||||
);
|
||||
}
|
||||
|
||||
/// Update existing pattern or create new one
|
||||
fn update_or_create_pattern(
|
||||
&mut self,
|
||||
key: &str,
|
||||
pattern_type: PatternType,
|
||||
context: PatternContext,
|
||||
) {
|
||||
let now = Utc::now();
|
||||
let decay_days = self.config.decay_days;
|
||||
|
||||
if let Some(pattern) = self.patterns.get_mut(key) {
|
||||
// Update existing pattern
|
||||
pattern.frequency += 1;
|
||||
pattern.last_occurrence = now;
|
||||
pattern.context = context;
|
||||
|
||||
// Recalculate confidence inline to avoid borrow issues
|
||||
let days_since_last = (now - pattern.last_occurrence).num_days() as f32;
|
||||
let frequency_score = (pattern.frequency as f32 / 10.0).min(1.0);
|
||||
let decay_factor = if days_since_last > decay_days as f32 {
|
||||
0.5
|
||||
} else {
|
||||
1.0 - (days_since_last / decay_days as f32) * 0.3
|
||||
};
|
||||
pattern.confidence = (frequency_score * decay_factor).min(1.0);
|
||||
} else {
|
||||
// Create new pattern
|
||||
let pattern = BehaviorPattern {
|
||||
id: key.to_string(),
|
||||
pattern_type,
|
||||
frequency: 1,
|
||||
first_occurrence: now,
|
||||
last_occurrence: now,
|
||||
confidence: 0.1, // Low initial confidence
|
||||
context,
|
||||
};
|
||||
|
||||
self.patterns.insert(key.to_string(), pattern);
|
||||
|
||||
// Enforce max patterns limit
|
||||
self.enforce_max_patterns();
|
||||
}
|
||||
}
|
||||
|
||||
/// Detect skill combination patterns from history
|
||||
fn detect_skill_combinations(&mut self) {
|
||||
// Group skill combinations
|
||||
let mut combination_counts: HashMap<String, (Vec<String>, usize, DateTime<Utc>)> =
|
||||
HashMap::new();
|
||||
|
||||
for (skills, time) in &self.skill_combination_history {
|
||||
if skills.len() < 2 {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Sort skills for consistent grouping
|
||||
let mut sorted_skills = skills.clone();
|
||||
sorted_skills.sort();
|
||||
let key = sorted_skills.join("|");
|
||||
|
||||
let entry = combination_counts.entry(key).or_insert((
|
||||
sorted_skills,
|
||||
0,
|
||||
*time,
|
||||
));
|
||||
entry.1 += 1;
|
||||
entry.2 = *time; // Update last occurrence
|
||||
}
|
||||
|
||||
// Create patterns for combinations meeting threshold
|
||||
for (key, (skills, count, last_time)) in combination_counts {
|
||||
if count >= self.config.min_frequency {
|
||||
let pattern = BehaviorPattern {
|
||||
id: format!("skill_combo:{}", key),
|
||||
pattern_type: PatternType::SkillCombination { skill_ids: skills },
|
||||
frequency: count,
|
||||
first_occurrence: last_time,
|
||||
last_occurrence: last_time,
|
||||
confidence: self.calculate_confidence_from_frequency(count),
|
||||
context: PatternContext::default(),
|
||||
};
|
||||
|
||||
self.patterns.insert(pattern.id.clone(), pattern);
|
||||
}
|
||||
}
|
||||
|
||||
self.enforce_max_patterns();
|
||||
}
|
||||
|
||||
/// Calculate confidence based on frequency and recency
|
||||
fn calculate_confidence(&self, pattern: &BehaviorPattern) -> f32 {
|
||||
let now = Utc::now();
|
||||
let days_since_last = (now - pattern.last_occurrence).num_days() as f32;
|
||||
|
||||
// Base confidence from frequency (capped at 1.0)
|
||||
let frequency_score = (pattern.frequency as f32 / 10.0).min(1.0);
|
||||
|
||||
// Decay factor based on time since last occurrence
|
||||
let decay_factor = if days_since_last > self.config.decay_days as f32 {
|
||||
0.5 // Significant decay for old patterns
|
||||
} else {
|
||||
1.0 - (days_since_last / self.config.decay_days as f32) * 0.3
|
||||
};
|
||||
|
||||
(frequency_score * decay_factor).min(1.0)
|
||||
}
|
||||
|
||||
/// Calculate confidence from frequency alone
|
||||
fn calculate_confidence_from_frequency(&self, frequency: usize) -> f32 {
|
||||
(frequency as f32 / self.config.min_frequency.max(1) as f32).min(1.0)
|
||||
}
|
||||
|
||||
/// Enforce maximum patterns limit by removing lowest confidence patterns
|
||||
fn enforce_max_patterns(&mut self) {
|
||||
if self.patterns.len() <= self.config.max_patterns {
|
||||
return;
|
||||
}
|
||||
|
||||
// Sort patterns by confidence and remove lowest
|
||||
let mut patterns_vec: Vec<_> = self.patterns.drain().collect();
|
||||
patterns_vec.sort_by(|a, b| b.1.confidence.partial_cmp(&a.1.confidence).unwrap());
|
||||
|
||||
// Keep top patterns
|
||||
self.patterns = patterns_vec
|
||||
.into_iter()
|
||||
.take(self.config.max_patterns)
|
||||
.collect();
|
||||
}
|
||||
|
||||
/// Get all patterns above confidence threshold
|
||||
pub fn get_patterns(&self) -> Vec<&BehaviorPattern> {
|
||||
self.patterns
|
||||
.values()
|
||||
.filter(|p| p.confidence >= self.config.min_confidence)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get patterns of a specific type
|
||||
pub fn get_patterns_by_type(&self, pattern_type: &PatternType) -> Vec<&BehaviorPattern> {
|
||||
self.patterns
|
||||
.values()
|
||||
.filter(|p| std::mem::discriminant(&p.pattern_type) == std::mem::discriminant(pattern_type))
|
||||
.filter(|p| p.confidence >= self.config.min_confidence)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get patterns sorted by confidence
|
||||
pub fn get_patterns_sorted(&self) -> Vec<&BehaviorPattern> {
|
||||
let mut patterns: Vec<_> = self.get_patterns();
|
||||
patterns.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap());
|
||||
patterns
|
||||
}
|
||||
|
||||
/// Decay old patterns (should be called periodically)
|
||||
pub fn decay_patterns(&mut self) {
|
||||
let now = Utc::now();
|
||||
|
||||
for pattern in self.patterns.values_mut() {
|
||||
let days_since_last = (now - pattern.last_occurrence).num_days() as f32;
|
||||
|
||||
if days_since_last > self.config.decay_days as f32 {
|
||||
// Reduce confidence for old patterns
|
||||
let decay_amount = 0.1 * (days_since_last / self.config.decay_days as f32);
|
||||
pattern.confidence = (pattern.confidence - decay_amount).max(0.0);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove patterns below threshold
|
||||
self.patterns
|
||||
.retain(|_, p| p.confidence >= self.config.min_confidence * 0.5);
|
||||
}
|
||||
|
||||
/// Clear all patterns
|
||||
pub fn clear(&mut self) {
|
||||
self.patterns.clear();
|
||||
self.skill_combination_history.clear();
|
||||
}
|
||||
|
||||
/// Get pattern count
|
||||
pub fn pattern_count(&self) -> usize {
|
||||
self.patterns.len()
|
||||
}
|
||||
|
||||
/// Export patterns for persistence
|
||||
pub fn export_patterns(&self) -> Vec<BehaviorPattern> {
|
||||
self.patterns.values().cloned().collect()
|
||||
}
|
||||
|
||||
/// Import patterns from persistence
|
||||
pub fn import_patterns(&mut self, patterns: Vec<BehaviorPattern>) {
|
||||
for pattern in patterns {
|
||||
self.patterns.insert(pattern.id.clone(), pattern);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // A newly created detector must start with no patterns.
    #[test]
    fn test_pattern_creation() {
        let detector = PatternDetector::new(None);
        assert_eq!(detector.pattern_count(), 0);
    }

    // With min_frequency lowered to 2, two identical skill combinations
    // are enough for a pattern to surface via get_patterns().
    #[test]
    fn test_skill_combination_detection() {
        let mut detector = PatternDetector::new(Some(PatternDetectorConfig {
            min_frequency: 2,
            ..Default::default()
        }));

        // Record skill usage multiple times
        detector.record_skill_usage(vec!["skill_a".to_string(), "skill_b".to_string()]);
        detector.record_skill_usage(vec!["skill_a".to_string(), "skill_b".to_string()]);

        // Should detect pattern after 2 occurrences
        let patterns = detector.get_patterns();
        assert!(!patterns.is_empty());
    }

    // Confidence computed for a hand-built pattern must lie in (0.0, 1.0].
    #[test]
    fn test_confidence_calculation() {
        let detector = PatternDetector::new(None);

        let pattern = BehaviorPattern {
            id: "test".to_string(),
            pattern_type: PatternType::TaskPipelineMapping {
                task_type: "test".to_string(),
                pipeline_id: "pipeline".to_string(),
            },
            frequency: 5,
            first_occurrence: Utc::now(),
            last_occurrence: Utc::now(),
            confidence: 0.5,
            context: PatternContext::default(),
        };

        let confidence = detector.calculate_confidence(&pattern);
        assert!(confidence > 0.0 && confidence <= 1.0);
    }
}
|
||||
819
desktop/src-tauri/src/intelligence/persona_evolver.rs
Normal file
819
desktop/src-tauri/src/intelligence/persona_evolver.rs
Normal file
@@ -0,0 +1,819 @@
|
||||
//! Persona Evolver - Memory-powered persona evolution system
|
||||
//!
|
||||
//! Automatically evolves agent persona based on:
|
||||
//! - User interaction patterns (preferences, communication style)
|
||||
//! - Reflection insights (positive/negative patterns)
|
||||
//! - Memory accumulation (facts, lessons, context)
|
||||
//!
|
||||
//! Key features:
|
||||
//! - Automatic user_profile enrichment from preferences
|
||||
//! - Instruction refinement proposals based on patterns
|
||||
//! - Soul evolution suggestions (requires explicit user approval)
|
||||
//!
|
||||
//! Phase 4 of Intelligence Layer - P1 Innovation Task.
|
||||
//!
|
||||
//! NOTE: Tauri commands defined here are not yet registered with the app.
|
||||
|
||||
#![allow(dead_code)] // Tauri commands not yet registered with application
|
||||
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
use super::reflection::{ReflectionResult, Sentiment, MemoryEntryForAnalysis};
|
||||
use super::identity::{IdentityFiles, IdentityFile, ProposalStatus};
|
||||
|
||||
// === Types ===
|
||||
|
||||
/// Persona evolution configuration
///
/// Every field carries a serde default so a partially specified config
/// deserializes cleanly; the default values mirror `Default::default()`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PersonaEvolverConfig {
    /// Enable automatic user_profile updates
    #[serde(default = "default_auto_profile_update")]
    pub auto_profile_update: bool,
    /// Minimum preferences before suggesting profile update
    #[serde(default = "default_min_preferences")]
    pub min_preferences_for_update: usize,
    /// Minimum conversations before evolution
    // NOTE(review): this threshold is not read anywhere in this file —
    // confirm whether callers enforce it or it is reserved for later.
    #[serde(default = "default_min_conversations")]
    pub min_conversations_for_evolution: usize,
    /// Enable instruction refinement proposals
    #[serde(default = "default_enable_instruction_refinement")]
    pub enable_instruction_refinement: bool,
    /// Enable soul evolution (requires explicit approval)
    #[serde(default = "default_enable_soul_evolution")]
    pub enable_soul_evolution: bool,
    /// Maximum proposals per evolution cycle
    #[serde(default = "default_max_proposals")]
    pub max_proposals_per_cycle: usize,
}
|
||||
|
||||
// Serde default helpers for `PersonaEvolverConfig`.
// Kept as free functions because `#[serde(default = "...")]` requires a
// function path, not a value.
fn default_auto_profile_update() -> bool {
    true
}

fn default_min_preferences() -> usize {
    3
}

fn default_min_conversations() -> usize {
    5
}

fn default_enable_instruction_refinement() -> bool {
    true
}

fn default_enable_soul_evolution() -> bool {
    true
}

fn default_max_proposals() -> usize {
    3
}
|
||||
|
||||
impl Default for PersonaEvolverConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
auto_profile_update: true,
|
||||
min_preferences_for_update: 3,
|
||||
min_conversations_for_evolution: 5,
|
||||
enable_instruction_refinement: true,
|
||||
enable_soul_evolution: true,
|
||||
max_proposals_per_cycle: 3,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Persona evolution result
///
/// Complete outcome of one `PersonaEvolver::evolve` call.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EvolutionResult {
    /// Agent ID
    pub agent_id: String,
    /// Timestamp (RFC 3339, set at the end of the cycle)
    pub timestamp: String,
    /// Profile updates applied (auto)
    pub profile_updates: Vec<ProfileUpdate>,
    /// Proposals generated (require approval)
    pub proposals: Vec<EvolutionProposal>,
    /// Evolution insights (advisory only, never auto-applied)
    pub insights: Vec<EvolutionInsight>,
    /// Whether evolution occurred (any update or proposal produced)
    pub evolved: bool,
}
|
||||
|
||||
/// Profile update (auto-applied)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProfileUpdate {
    /// Identity section that was rewritten (e.g. "user_profile").
    pub section: String,
    /// Content before the update.
    pub previous: String,
    /// Content after the update.
    pub updated: String,
    /// Human-readable description of what the update was derived from.
    pub source: String,
}
|
||||
|
||||
/// Evolution proposal (requires approval)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EvolutionProposal {
    /// Unique id ("evo_inst_*" / "evo_soul_*" built from a timestamp).
    pub id: String,
    /// Agent the proposal targets.
    pub agent_id: String,
    /// Identity file the change applies to (soul or instructions).
    pub target_file: IdentityFile,
    /// Kind of change being proposed.
    pub change_type: EvolutionChangeType,
    /// Human-readable justification.
    pub reason: String,
    /// File content before the change.
    pub current_content: String,
    /// Full replacement content, applied verbatim on approval.
    pub proposed_content: String,
    /// Heuristic confidence in [0.0, 1.0].
    pub confidence: f32,
    /// Supporting evidence snippets gathered from reflection patterns.
    pub evidence: Vec<String>,
    /// Lifecycle status (created as Pending).
    pub status: ProposalStatus,
    /// Creation time (RFC 3339).
    pub created_at: String,
}
|
||||
|
||||
/// Type of evolution change
///
/// Serialized in snake_case for the frontend.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum EvolutionChangeType {
    /// Add new instruction section
    InstructionAddition,
    /// Refine existing instruction
    InstructionRefinement,
    /// Add personality trait
    TraitAddition,
    /// Communication style adjustment
    StyleAdjustment,
    /// Knowledge domain expansion
    DomainExpansion,
}
|
||||
|
||||
/// Evolution insight
///
/// Advisory observation returned to the caller; never auto-applied and
/// never turned into a proposal.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EvolutionInsight {
    /// Broad area the insight concerns.
    pub category: InsightCategory,
    /// What was observed.
    pub observation: String,
    /// Suggested follow-up action.
    pub recommendation: String,
    /// Heuristic confidence in [0.0, 1.0].
    pub confidence: f32,
}
|
||||
|
||||
/// Broad area an `EvolutionInsight` refers to.
///
/// Serialized with `rename_all = "lowercase"` (variant names flattened
/// to all-lowercase strings).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum InsightCategory {
    CommunicationStyle,
    TechnicalExpertise,
    TaskEfficiency,
    UserPreference,
    KnowledgeGap,
}
|
||||
|
||||
/// Persona evolution state
///
/// Lightweight counters describing evolver activity over time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PersonaEvolverState {
    /// RFC 3339 time of the last cycle that produced changes, if any.
    pub last_evolution: Option<String>,
    /// Number of cycles that produced at least one update or proposal.
    pub total_evolutions: usize,
    /// Proposals generated but not yet approved/rejected.
    pub pending_proposals: usize,
    /// Score of how enriched the profile-relevant memory pool is.
    pub profile_enrichment_score: f32,
}
|
||||
|
||||
impl Default for PersonaEvolverState {
    // NOTE(review): every field here is its type's own default, so this
    // manual impl could be replaced by `#[derive(Default)]` on the struct.
    fn default() -> Self {
        Self {
            last_evolution: None,
            total_evolutions: 0,
            pending_proposals: 0,
            profile_enrichment_score: 0.0,
        }
    }
}
|
||||
|
||||
// === Persona Evolver ===
|
||||
|
||||
/// Memory-powered persona evolution engine.
pub struct PersonaEvolver {
    // Active configuration (see `PersonaEvolverConfig` for field meanings).
    config: PersonaEvolverConfig,
    // Running counters/timestamps describing past evolution activity.
    state: PersonaEvolverState,
    // Bounded history of past cycles (trimmed inside `evolve`).
    evolution_history: Vec<EvolutionResult>,
}
|
||||
|
||||
impl PersonaEvolver {
|
||||
pub fn new(config: Option<PersonaEvolverConfig>) -> Self {
|
||||
Self {
|
||||
config: config.unwrap_or_default(),
|
||||
state: PersonaEvolverState::default(),
|
||||
evolution_history: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Run evolution cycle for an agent
|
||||
pub fn evolve(
|
||||
&mut self,
|
||||
agent_id: &str,
|
||||
memories: &[MemoryEntryForAnalysis],
|
||||
reflection_result: &ReflectionResult,
|
||||
current_identity: &IdentityFiles,
|
||||
) -> EvolutionResult {
|
||||
let mut profile_updates = Vec::new();
|
||||
let mut proposals = Vec::new();
|
||||
#[allow(unused_assignments)] // Overwritten by generate_insights below
|
||||
let mut insights = Vec::new();
|
||||
|
||||
// 1. Extract user preferences and auto-update profile
|
||||
if self.config.auto_profile_update {
|
||||
profile_updates = self.extract_profile_updates(memories, current_identity);
|
||||
}
|
||||
|
||||
// 2. Generate instruction refinement proposals
|
||||
if self.config.enable_instruction_refinement {
|
||||
let instruction_proposals = self.generate_instruction_proposals(
|
||||
agent_id,
|
||||
reflection_result,
|
||||
current_identity,
|
||||
);
|
||||
proposals.extend(instruction_proposals);
|
||||
}
|
||||
|
||||
// 3. Generate soul evolution proposals (rare, high bar)
|
||||
if self.config.enable_soul_evolution {
|
||||
let soul_proposals = self.generate_soul_proposals(
|
||||
agent_id,
|
||||
reflection_result,
|
||||
current_identity,
|
||||
);
|
||||
proposals.extend(soul_proposals);
|
||||
}
|
||||
|
||||
// 4. Generate insights
|
||||
insights = self.generate_insights(memories, reflection_result);
|
||||
|
||||
// 5. Limit proposals
|
||||
proposals.truncate(self.config.max_proposals_per_cycle);
|
||||
|
||||
// 6. Update state
|
||||
let evolved = !profile_updates.is_empty() || !proposals.is_empty();
|
||||
if evolved {
|
||||
self.state.last_evolution = Some(Utc::now().to_rfc3339());
|
||||
self.state.total_evolutions += 1;
|
||||
self.state.pending_proposals += proposals.len();
|
||||
self.state.profile_enrichment_score = self.calculate_profile_score(memories);
|
||||
}
|
||||
|
||||
let result = EvolutionResult {
|
||||
agent_id: agent_id.to_string(),
|
||||
timestamp: Utc::now().to_rfc3339(),
|
||||
profile_updates,
|
||||
proposals,
|
||||
insights,
|
||||
evolved,
|
||||
};
|
||||
|
||||
// Store in history
|
||||
self.evolution_history.push(result.clone());
|
||||
if self.evolution_history.len() > 20 {
|
||||
self.evolution_history = self.evolution_history.split_off(10);
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Extract profile updates from memory
|
||||
fn extract_profile_updates(
|
||||
&self,
|
||||
memories: &[MemoryEntryForAnalysis],
|
||||
current_identity: &IdentityFiles,
|
||||
) -> Vec<ProfileUpdate> {
|
||||
let mut updates = Vec::new();
|
||||
|
||||
// Extract preferences
|
||||
let preferences: Vec<_> = memories
|
||||
.iter()
|
||||
.filter(|m| m.memory_type == "preference")
|
||||
.collect();
|
||||
|
||||
if preferences.len() >= self.config.min_preferences_for_update {
|
||||
// Check if user_profile needs updating
|
||||
let current_profile = ¤t_identity.user_profile;
|
||||
let default_profile = "尚未收集到用户偏好信息";
|
||||
|
||||
if current_profile.contains(default_profile) || current_profile.len() < 100 {
|
||||
// Build new profile from preferences
|
||||
let mut sections = Vec::new();
|
||||
|
||||
// Group preferences by category
|
||||
let mut categories: HashMap<String, Vec<String>> = HashMap::new();
|
||||
for pref in &preferences {
|
||||
// Simple categorization based on keywords
|
||||
let category = self.categorize_preference(&pref.content);
|
||||
categories
|
||||
.entry(category)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(pref.content.clone());
|
||||
}
|
||||
|
||||
// Build sections
|
||||
for (category, items) in categories {
|
||||
if !items.is_empty() {
|
||||
sections.push(format!("### {}\n{}", category, items.iter()
|
||||
.map(|i| format!("- {}", i))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")));
|
||||
}
|
||||
}
|
||||
|
||||
if !sections.is_empty() {
|
||||
let new_profile = format!("# 用户画像\n\n{}\n\n_自动生成于 {}_",
|
||||
sections.join("\n\n"),
|
||||
Utc::now().format("%Y-%m-%d")
|
||||
);
|
||||
|
||||
updates.push(ProfileUpdate {
|
||||
section: "user_profile".to_string(),
|
||||
previous: current_profile.clone(),
|
||||
updated: new_profile,
|
||||
source: format!("{} 个偏好记忆", preferences.len()),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updates
|
||||
}
|
||||
|
||||
/// Categorize a preference
|
||||
fn categorize_preference(&self, content: &str) -> String {
|
||||
let content_lower = content.to_lowercase();
|
||||
|
||||
if content_lower.contains("语言") || content_lower.contains("沟通") || content_lower.contains("回复") {
|
||||
"沟通偏好".to_string()
|
||||
} else if content_lower.contains("技术") || content_lower.contains("框架") || content_lower.contains("工具") {
|
||||
"技术栈".to_string()
|
||||
} else if content_lower.contains("项目") || content_lower.contains("工作") || content_lower.contains("任务") {
|
||||
"工作习惯".to_string()
|
||||
} else if content_lower.contains("格式") || content_lower.contains("风格") || content_lower.contains("风格") {
|
||||
"输出风格".to_string()
|
||||
} else {
|
||||
"其他偏好".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate instruction refinement proposals.
///
/// Two sources feed proposals:
/// 1. Negative reflection patterns whose observation text is not yet
///    present in the current instructions are collected into a single
///    "self-improvement" addition proposal.
/// 2. Each high-priority improvement suggestion becomes its own
///    refinement proposal.
///
/// All proposals start as `ProposalStatus::Pending` and require explicit
/// user approval before being applied.
fn generate_instruction_proposals(
    &self,
    agent_id: &str,
    reflection_result: &ReflectionResult,
    current_identity: &IdentityFiles,
) -> Vec<EvolutionProposal> {
    let mut proposals = Vec::new();

    // Only propose if there are negative patterns
    let negative_patterns: Vec<_> = reflection_result.patterns
        .iter()
        .filter(|p| matches!(p.sentiment, Sentiment::Negative))
        .collect();

    if negative_patterns.is_empty() {
        return proposals;
    }

    // Check if instructions already contain these warnings
    let current_instructions = &current_identity.instructions;

    // Build proposed additions
    let mut additions = Vec::new();
    let mut evidence = Vec::new();

    for pattern in &negative_patterns {
        // Check if this pattern is already addressed
        // (plain substring match on the observation text)
        let key_phrase = &pattern.observation;
        if !current_instructions.contains(key_phrase) {
            additions.push(format!("- **注意事项**: {}", pattern.observation));
            evidence.extend(pattern.evidence.clone());
        }
    }

    if !additions.is_empty() {
        // Append a self-improvement section after the existing text.
        let proposed = format!(
            "{}\n\n## 🔄 自我改进建议\n\n{}\n\n_基于交互模式分析自动生成_",
            current_instructions.trim_end(),
            additions.join("\n")
        );

        proposals.push(EvolutionProposal {
            id: format!("evo_inst_{}", Utc::now().timestamp()),
            agent_id: agent_id.to_string(),
            target_file: IdentityFile::Instructions,
            change_type: EvolutionChangeType::InstructionAddition,
            reason: format!(
                "基于 {} 个负面模式观察,建议在指令中增加自我改进提醒",
                negative_patterns.len()
            ),
            current_content: current_instructions.clone(),
            proposed_content: proposed,
            // Confidence grows with the number of patterns, capped at 0.9.
            confidence: 0.7 + (negative_patterns.len() as f32 * 0.05).min(0.2),
            evidence,
            status: ProposalStatus::Pending,
            created_at: Utc::now().to_rfc3339(),
        });
    }

    // Check for improvement suggestions that could become instructions
    for improvement in &reflection_result.improvements {
        if current_instructions.contains(&improvement.suggestion) {
            continue;
        }

        // High priority improvements become instruction proposals
        if matches!(improvement.priority, super::reflection::Priority::High) {
            proposals.push(EvolutionProposal {
                // rand_suffix() disambiguates multiple proposals created
                // within the same second.
                id: format!("evo_inst_{}_{}", Utc::now().timestamp(), rand_suffix()),
                agent_id: agent_id.to_string(),
                target_file: IdentityFile::Instructions,
                change_type: EvolutionChangeType::InstructionRefinement,
                reason: format!("高优先级改进建议: {}", improvement.area),
                current_content: current_instructions.clone(),
                proposed_content: format!(
                    "{}\n\n### {}\n\n{}",
                    current_instructions.trim_end(),
                    improvement.area,
                    improvement.suggestion
                ),
                confidence: 0.75,
                evidence: vec![improvement.suggestion.clone()],
                status: ProposalStatus::Pending,
                created_at: Utc::now().to_rfc3339(),
            });
        }
    }

    proposals
}
|
||||
|
||||
/// Generate soul evolution proposals (deliberately high bar).
///
/// Requires at least 3 positive reflection patterns with an average
/// frequency of 5+ before proposing any soul change. Observations are
/// mapped to growth traits by keyword matching and appended as a new
/// section; explicit user approval is always required.
fn generate_soul_proposals(
    &self,
    agent_id: &str,
    reflection_result: &ReflectionResult,
    current_identity: &IdentityFiles,
) -> Vec<EvolutionProposal> {
    let mut proposals = Vec::new();

    // Soul evolution requires strong positive patterns
    let positive_patterns: Vec<_> = reflection_result.patterns
        .iter()
        .filter(|p| matches!(p.sentiment, Sentiment::Positive))
        .collect();

    // Need at least 3 strong positive patterns
    if positive_patterns.len() < 3 {
        return proposals;
    }

    // Calculate overall confidence
    // (integer division is safe: the guard above guarantees len() >= 3)
    let avg_frequency: usize = positive_patterns.iter()
        .map(|p| p.frequency)
        .sum::<usize>() / positive_patterns.len();

    if avg_frequency < 5 {
        return proposals;
    }

    // Build soul enhancement proposal
    let current_soul = &current_identity.soul;
    let mut traits = Vec::new();
    let mut evidence = Vec::new();

    for pattern in &positive_patterns {
        // Extract trait from observation
        // (keyword match; multiple patterns may yield duplicate traits)
        if pattern.observation.contains("偏好") {
            traits.push("深入理解用户偏好");
        } else if pattern.observation.contains("经验") {
            traits.push("持续积累经验教训");
        } else if pattern.observation.contains("知识") {
            traits.push("构建核心知识体系");
        }
        evidence.extend(pattern.evidence.clone());
    }

    if !traits.is_empty() {
        let traits_section = traits.iter()
            .map(|t| format!("- {}", t))
            .collect::<Vec<_>>()
            .join("\n");

        // Append a growth-traits section after the existing soul text.
        let proposed = format!(
            "{}\n\n## 🌱 成长特质\n\n{}\n\n_通过交互学习持续演化_",
            current_soul.trim_end(),
            traits_section
        );

        proposals.push(EvolutionProposal {
            id: format!("evo_soul_{}", Utc::now().timestamp()),
            agent_id: agent_id.to_string(),
            target_file: IdentityFile::Soul,
            change_type: EvolutionChangeType::TraitAddition,
            reason: format!(
                "基于 {} 个强正面模式,建议增加成长特质",
                positive_patterns.len()
            ),
            current_content: current_soul.clone(),
            proposed_content: proposed,
            confidence: 0.85,
            evidence,
            status: ProposalStatus::Pending,
            created_at: Utc::now().to_rfc3339(),
        });
    }

    proposals
}
|
||||
|
||||
/// Generate advisory evolution insights from memories and reflection.
///
/// Insights are informational only — they are returned to the caller but
/// never auto-applied and never become proposals. Each branch fires on a
/// simple count/keyword heuristic.
fn generate_insights(
    &self,
    memories: &[MemoryEntryForAnalysis],
    reflection_result: &ReflectionResult,
) -> Vec<EvolutionInsight> {
    let mut insights = Vec::new();

    // Communication style insight
    // (preference memories mentioning replies, language or brevity)
    let comm_prefs: Vec<_> = memories
        .iter()
        .filter(|m| m.memory_type == "preference" &&
            (m.content.contains("回复") || m.content.contains("语言") || m.content.contains("简洁")))
        .collect();

    if !comm_prefs.is_empty() {
        insights.push(EvolutionInsight {
            category: InsightCategory::CommunicationStyle,
            observation: format!("用户有 {} 个沟通风格偏好", comm_prefs.len()),
            recommendation: "在回复中应用这些偏好,提高用户满意度".to_string(),
            confidence: 0.8,
        });
    }

    // Technical expertise insight (tag-based, threshold of 5 memories)
    let tech_memories: Vec<_> = memories
        .iter()
        .filter(|m| m.tags.iter().any(|t| t.contains("技术") || t.contains("代码")))
        .collect();

    if tech_memories.len() >= 5 {
        insights.push(EvolutionInsight {
            category: InsightCategory::TechnicalExpertise,
            observation: format!("积累了 {} 个技术相关记忆", tech_memories.len()),
            recommendation: "考虑构建技术知识图谱,提高检索效率".to_string(),
            confidence: 0.7,
        });
    }

    // Task efficiency insight from negative patterns
    let has_task_issues = reflection_result.patterns
        .iter()
        .any(|p| p.observation.contains("任务") && matches!(p.sentiment, Sentiment::Negative));

    if has_task_issues {
        insights.push(EvolutionInsight {
            category: InsightCategory::TaskEfficiency,
            observation: "存在任务管理相关问题".to_string(),
            recommendation: "建议增加任务跟踪和提醒机制".to_string(),
            confidence: 0.75,
        });
    }

    // Knowledge gap insight (more than 10 recorded lessons)
    let lesson_count = memories.iter()
        .filter(|m| m.memory_type == "lesson")
        .count();

    if lesson_count > 10 {
        insights.push(EvolutionInsight {
            category: InsightCategory::KnowledgeGap,
            observation: format!("已记录 {} 条经验教训", lesson_count),
            recommendation: "定期回顾教训,避免重复错误".to_string(),
            confidence: 0.8,
        });
    }

    insights
}
|
||||
|
||||
/// Calculate profile enrichment score
|
||||
fn calculate_profile_score(&self, memories: &[MemoryEntryForAnalysis]) -> f32 {
|
||||
let pref_count = memories.iter().filter(|m| m.memory_type == "preference").count();
|
||||
let fact_count = memories.iter().filter(|m| m.memory_type == "fact").count();
|
||||
|
||||
// Score based on diversity and quantity
|
||||
let pref_score = (pref_count as f32 / 10.0).min(1.0) * 0.5;
|
||||
let fact_score = (fact_count as f32 / 20.0).min(1.0) * 0.3;
|
||||
let diversity = if pref_count > 0 && fact_count > 0 { 0.2 } else { 0.0 };
|
||||
|
||||
pref_score + fact_score + diversity
|
||||
}
|
||||
|
||||
/// Get evolution history
|
||||
pub fn get_history(&self, limit: usize) -> Vec<&EvolutionResult> {
|
||||
self.evolution_history.iter().rev().take(limit).collect()
|
||||
}
|
||||
|
||||
/// Get current state
|
||||
pub fn get_state(&self) -> &PersonaEvolverState {
|
||||
&self.state
|
||||
}
|
||||
|
||||
/// Get configuration
|
||||
pub fn get_config(&self) -> &PersonaEvolverConfig {
|
||||
&self.config
|
||||
}
|
||||
|
||||
/// Update configuration
|
||||
pub fn update_config(&mut self, config: PersonaEvolverConfig) {
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Mark proposal as handled (approved/rejected)
|
||||
pub fn proposal_handled(&mut self) {
|
||||
if self.state.pending_proposals > 0 {
|
||||
self.state.pending_proposals -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate a short 4-hex-digit id suffix.
///
/// Despite the name, this is a monotonically increasing process-wide
/// counter (wrapping at 0x10000), not a random value — uniqueness only
/// holds within one process until the counter wraps.
fn rand_suffix() -> String {
    use std::sync::atomic::{AtomicU64, Ordering};
    static COUNTER: AtomicU64 = AtomicU64::new(0);
    // Relaxed ordering suffices: only the counter value itself matters.
    let id = COUNTER.fetch_add(1, Ordering::Relaxed) % 0x10000;
    format!("{:04x}", id)
}
|
||||
|
||||
// === Tauri Commands ===
|
||||
|
||||
/// Type alias for Tauri state management (shared evolver handle)
///
/// An `Arc<tokio::sync::Mutex<_>>` so async commands can share a single
/// evolver instance and hold the lock across await points.
pub type PersonaEvolverStateHandle = Arc<Mutex<PersonaEvolver>>;
|
||||
|
||||
/// Initialize persona evolver
///
/// Optionally replaces the configuration of the already-managed evolver.
/// Always returns `Ok(true)`; the `Result` wrapper only satisfies the
/// Tauri command signature.
#[tauri::command]
pub async fn persona_evolver_init(
    config: Option<PersonaEvolverConfig>,
    state: tauri::State<'_, PersonaEvolverStateHandle>,
) -> Result<bool, String> {
    let mut evolver = state.lock().await;
    if let Some(cfg) = config {
        evolver.update_config(cfg);
    }
    Ok(true)
}
|
||||
|
||||
/// Run evolution cycle
///
/// Full pipeline: reflect over the supplied memories, evolve the persona,
/// then auto-apply any resulting profile updates. The first three locks
/// are taken and dropped sequentially to avoid holding several mutexes
/// across the same await.
#[tauri::command]
pub async fn persona_evolve(
    agent_id: String,
    memories: Vec<MemoryEntryForAnalysis>,
    reflection_state: tauri::State<'_, super::reflection::ReflectionEngineState>,
    identity_state: tauri::State<'_, super::identity::IdentityManagerState>,
    evolver_state: tauri::State<'_, PersonaEvolverStateHandle>,
) -> Result<EvolutionResult, String> {
    // 1. Run reflection first
    let mut reflection = reflection_state.lock().await;
    let reflection_result = reflection.reflect(&agent_id, &memories);
    drop(reflection);

    // 2. Get current identity
    let mut identity = identity_state.lock().await;
    let current_identity = identity.get_identity(&agent_id);
    drop(identity);

    // 3. Run evolution
    let mut evolver = evolver_state.lock().await;
    let result = evolver.evolve(&agent_id, &memories, &reflection_result, &current_identity);

    // 4. Apply auto profile updates
    // NOTE(review): the evolver lock is still held while the identity
    // lock is re-acquired here — confirm no other code path takes these
    // two locks in the opposite order (potential deadlock otherwise).
    if !result.profile_updates.is_empty() {
        let mut identity = identity_state.lock().await;
        for update in &result.profile_updates {
            identity.update_user_profile(&agent_id, &update.updated);
        }
    }

    Ok(result)
}
|
||||
|
||||
/// Get evolution history
///
/// Returns up to `limit` (default 10) most recent results, newest first,
/// cloned so the lock is released before the data crosses to the frontend.
#[tauri::command]
pub async fn persona_evolution_history(
    limit: Option<usize>,
    state: tauri::State<'_, PersonaEvolverStateHandle>,
) -> Result<Vec<EvolutionResult>, String> {
    let evolver = state.lock().await;
    Ok(evolver.get_history(limit.unwrap_or(10)).into_iter().cloned().collect())
}
|
||||
|
||||
/// Get evolver state
///
/// Returns a clone so the lock is not held past this call.
#[tauri::command]
pub async fn persona_evolver_state(
    state: tauri::State<'_, PersonaEvolverStateHandle>,
) -> Result<PersonaEvolverState, String> {
    let evolver = state.lock().await;
    Ok(evolver.get_state().clone())
}
|
||||
|
||||
/// Get evolver config
///
/// Returns a clone so the lock is not held past this call.
#[tauri::command]
pub async fn persona_evolver_config(
    state: tauri::State<'_, PersonaEvolverStateHandle>,
) -> Result<PersonaEvolverConfig, String> {
    let evolver = state.lock().await;
    Ok(evolver.get_config().clone())
}
|
||||
|
||||
/// Update evolver config
///
/// Replaces the configuration wholesale; takes effect on the next cycle.
#[tauri::command]
pub async fn persona_evolver_update_config(
    config: PersonaEvolverConfig,
    state: tauri::State<'_, PersonaEvolverStateHandle>,
) -> Result<(), String> {
    let mut evolver = state.lock().await;
    evolver.update_config(config);
    Ok(())
}
|
||||
|
||||
/// Apply evolution proposal (approve)
|
||||
#[tauri::command]
|
||||
pub async fn persona_apply_proposal(
|
||||
proposal: EvolutionProposal,
|
||||
identity_state: tauri::State<'_, super::identity::IdentityManagerState>,
|
||||
evolver_state: tauri::State<'_, PersonaEvolverStateHandle>,
|
||||
) -> Result<IdentityFiles, String> {
|
||||
// Apply the proposal through identity manager
|
||||
let mut identity = identity_state.lock().await;
|
||||
|
||||
let result = match proposal.target_file {
|
||||
IdentityFile::Soul => {
|
||||
identity.update_file(&proposal.agent_id, "soul", &proposal.proposed_content)
|
||||
}
|
||||
IdentityFile::Instructions => {
|
||||
identity.update_file(&proposal.agent_id, "instructions", &proposal.proposed_content)
|
||||
}
|
||||
};
|
||||
|
||||
if result.is_err() {
|
||||
return result.map(|_| IdentityFiles {
|
||||
soul: String::new(),
|
||||
instructions: String::new(),
|
||||
user_profile: String::new(),
|
||||
heartbeat: None,
|
||||
});
|
||||
}
|
||||
|
||||
// Update evolver state
|
||||
let mut evolver = evolver_state.lock().await;
|
||||
evolver.proposal_handled();
|
||||
|
||||
// Return updated identity
|
||||
Ok(identity.get_identity(&proposal.agent_id))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // With no memories and an empty reflection, no evolution happens.
    #[test]
    fn test_evolve_empty() {
        let mut evolver = PersonaEvolver::new(None);
        let memories = vec![];
        let reflection = ReflectionResult {
            patterns: vec![],
            improvements: vec![],
            identity_proposals: vec![],
            new_memories: 0,
            timestamp: Utc::now().to_rfc3339(),
        };
        let identity = IdentityFiles {
            soul: "Test soul".to_string(),
            instructions: "Test instructions".to_string(),
            user_profile: "Test profile".to_string(),
            heartbeat: None,
        };

        let result = evolver.evolve("test-agent", &memories, &reflection, &identity);
        assert!(!result.evolved);
    }

    // Three preference memories against a placeholder profile must yield
    // an auto profile update containing the generated header.
    #[test]
    fn test_profile_update() {
        let mut evolver = PersonaEvolver::new(None);
        let memories = vec![
            MemoryEntryForAnalysis {
                memory_type: "preference".to_string(),
                content: "喜欢简洁的回复".to_string(),
                importance: 7,
                access_count: 3,
                tags: vec!["沟通".to_string()],
            },
            MemoryEntryForAnalysis {
                memory_type: "preference".to_string(),
                content: "使用中文".to_string(),
                importance: 8,
                access_count: 5,
                tags: vec!["语言".to_string()],
            },
            MemoryEntryForAnalysis {
                memory_type: "preference".to_string(),
                content: "代码使用 TypeScript".to_string(),
                importance: 7,
                access_count: 2,
                tags: vec!["技术".to_string()],
            },
        ];

        let identity = IdentityFiles {
            soul: "Test".to_string(),
            instructions: "Test".to_string(),
            user_profile: "尚未收集到用户偏好信息".to_string(),
            heartbeat: None,
        };

        let updates = evolver.extract_profile_updates(&memories, &identity);
        assert!(!updates.is_empty());
        assert!(updates[0].updated.contains("用户画像"));
    }
}
|
||||
519
desktop/src-tauri/src/intelligence/recommender.rs
Normal file
519
desktop/src-tauri/src/intelligence/recommender.rs
Normal file
@@ -0,0 +1,519 @@
|
||||
//! Workflow Recommender - Generates workflow recommendations from detected patterns
|
||||
//!
|
||||
//! This module analyzes behavior patterns and generates actionable workflow recommendations.
|
||||
//! It maps detected patterns to pipelines and provides confidence scoring.
|
||||
//!
|
||||
//! NOTE: Some methods are reserved for future integration with the UI.
|
||||
|
||||
#![allow(dead_code)] // Methods reserved for future UI integration
|
||||
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::mesh::WorkflowRecommendation;
|
||||
use super::pattern_detector::{BehaviorPattern, PatternType};
|
||||
|
||||
// === Types ===
|
||||
|
||||
/// Recommendation rule that maps patterns to pipelines
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RecommendationRule {
    /// Rule identifier
    pub id: String,
    /// Pattern types this rule matches
    pub pattern_types: Vec<String>,
    /// Pipeline to recommend
    pub pipeline_id: String,
    /// Base confidence for this rule
    pub base_confidence: f32,
    /// Human-readable description
    pub description: String,
    /// Input mappings (pattern context field -> pipeline input)
    pub input_mappings: HashMap<String, String>,
    /// Priority (higher = more important)
    pub priority: u8,
}
|
||||
|
||||
/// Recommender configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RecommenderConfig {
    /// Minimum confidence threshold; recommendations below it are dropped
    pub min_confidence: f32,
    /// Maximum recommendations to generate per `recommend` call
    pub max_recommendations: usize,
    /// Enable rule-based recommendations
    pub enable_rules: bool,
    /// Enable pattern-based recommendations (direct pattern -> pipeline mapping)
    pub enable_patterns: bool,
}
|
||||
|
||||
impl Default for RecommenderConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
min_confidence: 0.5,
|
||||
max_recommendations: 10,
|
||||
enable_rules: true,
|
||||
enable_patterns: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// === Workflow Recommender ===
|
||||
|
||||
/// Workflow recommendation engine
///
/// Holds the rule set, a registry of known pipelines, and a cache of
/// generated recommendations that can be accepted or dismissed by id.
pub struct WorkflowRecommender {
    /// Configuration
    config: RecommenderConfig,
    /// Recommendation rules
    rules: Vec<RecommendationRule>,
    /// Pipeline registry (pipeline_id -> metadata)
    #[allow(dead_code)] // Reserved for future pipeline-based recommendations
    pipeline_registry: HashMap<String, PipelineMetadata>,
    /// Generated recommendations cache
    recommendations_cache: Vec<WorkflowRecommendation>,
}

/// Metadata about a registered pipeline
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineMetadata {
    /// Unique pipeline identifier; used as the registry key
    pub id: String,
    /// Human-readable pipeline name
    pub name: String,
    /// Optional free-text description
    pub description: Option<String>,
    /// Classification tags
    pub tags: Vec<String>,
    /// Optional JSON schema describing the pipeline's expected inputs
    pub input_schema: Option<serde_json::Value>,
}
|
||||
|
||||
impl WorkflowRecommender {
|
||||
/// Create a new workflow recommender
|
||||
pub fn new(config: Option<RecommenderConfig>) -> Self {
|
||||
let mut recommender = Self {
|
||||
config: config.unwrap_or_default(),
|
||||
rules: Vec::new(),
|
||||
pipeline_registry: HashMap::new(),
|
||||
recommendations_cache: Vec::new(),
|
||||
};
|
||||
|
||||
// Initialize with built-in rules
|
||||
recommender.initialize_default_rules();
|
||||
recommender
|
||||
}
|
||||
|
||||
/// Initialize default recommendation rules
|
||||
fn initialize_default_rules(&mut self) {
|
||||
// Rule: Research + Analysis -> Report Generation
|
||||
self.rules.push(RecommendationRule {
|
||||
id: "rule_research_report".to_string(),
|
||||
pattern_types: vec!["SkillCombination".to_string()],
|
||||
pipeline_id: "research-report-generator".to_string(),
|
||||
base_confidence: 0.7,
|
||||
description: "Generate comprehensive research report".to_string(),
|
||||
input_mappings: HashMap::new(),
|
||||
priority: 8,
|
||||
});
|
||||
|
||||
// Rule: Code + Test -> Quality Check Pipeline
|
||||
self.rules.push(RecommendationRule {
|
||||
id: "rule_code_quality".to_string(),
|
||||
pattern_types: vec!["SkillCombination".to_string()],
|
||||
pipeline_id: "code-quality-check".to_string(),
|
||||
base_confidence: 0.75,
|
||||
description: "Run code quality and test pipeline".to_string(),
|
||||
input_mappings: HashMap::new(),
|
||||
priority: 7,
|
||||
});
|
||||
|
||||
// Rule: Daily morning -> Daily briefing
|
||||
self.rules.push(RecommendationRule {
|
||||
id: "rule_morning_briefing".to_string(),
|
||||
pattern_types: vec!["TemporalTrigger".to_string()],
|
||||
pipeline_id: "daily-briefing".to_string(),
|
||||
base_confidence: 0.6,
|
||||
description: "Generate daily briefing".to_string(),
|
||||
input_mappings: HashMap::new(),
|
||||
priority: 5,
|
||||
});
|
||||
|
||||
// Rule: Task + Deadline -> Priority sort
|
||||
self.rules.push(RecommendationRule {
|
||||
id: "rule_task_priority".to_string(),
|
||||
pattern_types: vec!["InputPattern".to_string()],
|
||||
pipeline_id: "task-priority-sorter".to_string(),
|
||||
base_confidence: 0.65,
|
||||
description: "Sort and prioritize tasks".to_string(),
|
||||
input_mappings: HashMap::new(),
|
||||
priority: 6,
|
||||
});
|
||||
}
|
||||
|
||||
/// Generate recommendations from detected patterns
|
||||
pub fn recommend(&self, patterns: &[&BehaviorPattern]) -> Vec<WorkflowRecommendation> {
|
||||
let mut recommendations = Vec::new();
|
||||
|
||||
if patterns.is_empty() {
|
||||
return recommendations;
|
||||
}
|
||||
|
||||
// Rule-based recommendations
|
||||
if self.config.enable_rules {
|
||||
for rule in &self.rules {
|
||||
if let Some(rec) = self.apply_rule(rule, patterns) {
|
||||
if rec.confidence >= self.config.min_confidence {
|
||||
recommendations.push(rec);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern-based recommendations (direct mapping)
|
||||
if self.config.enable_patterns {
|
||||
for pattern in patterns {
|
||||
if let Some(rec) = self.pattern_to_recommendation(pattern) {
|
||||
if rec.confidence >= self.config.min_confidence {
|
||||
recommendations.push(rec);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by confidence (descending) and priority
|
||||
recommendations.sort_by(|a, b| {
|
||||
let priority_diff = self.get_priority_for_recommendation(b)
|
||||
.cmp(&self.get_priority_for_recommendation(a));
|
||||
if priority_diff != std::cmp::Ordering::Equal {
|
||||
return priority_diff;
|
||||
}
|
||||
b.confidence.partial_cmp(&a.confidence).unwrap()
|
||||
});
|
||||
|
||||
// Limit recommendations
|
||||
recommendations.truncate(self.config.max_recommendations);
|
||||
|
||||
recommendations
|
||||
}
|
||||
|
||||
/// Apply a recommendation rule to patterns
|
||||
fn apply_rule(
|
||||
&self,
|
||||
rule: &RecommendationRule,
|
||||
patterns: &[&BehaviorPattern],
|
||||
) -> Option<WorkflowRecommendation> {
|
||||
let mut matched_patterns: Vec<String> = Vec::new();
|
||||
let mut total_confidence = 0.0;
|
||||
let mut match_count = 0;
|
||||
|
||||
for pattern in patterns {
|
||||
let pattern_type_name = self.get_pattern_type_name(&pattern.pattern_type);
|
||||
|
||||
if rule.pattern_types.contains(&pattern_type_name) {
|
||||
matched_patterns.push(pattern.id.clone());
|
||||
total_confidence += pattern.confidence;
|
||||
match_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if matched_patterns.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Calculate combined confidence
|
||||
let avg_pattern_confidence = total_confidence / match_count as f32;
|
||||
let final_confidence = (rule.base_confidence * 0.6 + avg_pattern_confidence * 0.4).min(1.0);
|
||||
|
||||
// Build suggested inputs from pattern context
|
||||
let suggested_inputs = self.build_suggested_inputs(&matched_patterns, patterns, rule);
|
||||
|
||||
Some(WorkflowRecommendation {
|
||||
id: format!("rec_{}", Uuid::new_v4()),
|
||||
pipeline_id: rule.pipeline_id.clone(),
|
||||
confidence: final_confidence,
|
||||
reason: rule.description.clone(),
|
||||
suggested_inputs,
|
||||
patterns_matched: matched_patterns,
|
||||
timestamp: Utc::now(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Convert a single pattern to a recommendation
|
||||
fn pattern_to_recommendation(&self, pattern: &BehaviorPattern) -> Option<WorkflowRecommendation> {
|
||||
let (pipeline_id, reason) = match &pattern.pattern_type {
|
||||
PatternType::TaskPipelineMapping { task_type, pipeline_id } => {
|
||||
(pipeline_id.clone(), format!("Detected task type: {}", task_type))
|
||||
}
|
||||
PatternType::SkillCombination { skill_ids } => {
|
||||
// Find a pipeline that uses these skills
|
||||
let pipeline_id = self.find_pipeline_for_skills(skill_ids)?;
|
||||
(pipeline_id, format!("Skills often used together: {}", skill_ids.join(", ")))
|
||||
}
|
||||
PatternType::InputPattern { keywords, intent } => {
|
||||
// Find a pipeline for this intent
|
||||
let pipeline_id = self.find_pipeline_for_intent(intent)?;
|
||||
(pipeline_id, format!("Intent detected: {} ({})", intent, keywords.join(", ")))
|
||||
}
|
||||
PatternType::TemporalTrigger { hand_id, time_pattern } => {
|
||||
(format!("scheduled_{}", hand_id), format!("Scheduled at: {}", time_pattern))
|
||||
}
|
||||
};
|
||||
|
||||
Some(WorkflowRecommendation {
|
||||
id: format!("rec_{}", Uuid::new_v4()),
|
||||
pipeline_id,
|
||||
confidence: pattern.confidence,
|
||||
reason,
|
||||
suggested_inputs: HashMap::new(),
|
||||
patterns_matched: vec![pattern.id.clone()],
|
||||
timestamp: Utc::now(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get string name for pattern type
|
||||
fn get_pattern_type_name(&self, pattern_type: &PatternType) -> String {
|
||||
match pattern_type {
|
||||
PatternType::SkillCombination { .. } => "SkillCombination".to_string(),
|
||||
PatternType::TemporalTrigger { .. } => "TemporalTrigger".to_string(),
|
||||
PatternType::TaskPipelineMapping { .. } => "TaskPipelineMapping".to_string(),
|
||||
PatternType::InputPattern { .. } => "InputPattern".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get priority for a recommendation
|
||||
fn get_priority_for_recommendation(&self, rec: &WorkflowRecommendation) -> u8 {
|
||||
self.rules
|
||||
.iter()
|
||||
.find(|r| r.pipeline_id == rec.pipeline_id)
|
||||
.map(|r| r.priority)
|
||||
.unwrap_or(5)
|
||||
}
|
||||
|
||||
/// Build suggested inputs from patterns and rule
|
||||
fn build_suggested_inputs(
|
||||
&self,
|
||||
matched_pattern_ids: &[String],
|
||||
patterns: &[&BehaviorPattern],
|
||||
rule: &RecommendationRule,
|
||||
) -> HashMap<String, serde_json::Value> {
|
||||
let mut inputs = HashMap::new();
|
||||
|
||||
for pattern_id in matched_pattern_ids {
|
||||
if let Some(pattern) = patterns.iter().find(|p| p.id == *pattern_id) {
|
||||
// Add context-based inputs
|
||||
if let Some(ref topics) = pattern.context.recent_topics {
|
||||
if !topics.is_empty() {
|
||||
inputs.insert(
|
||||
"topics".to_string(),
|
||||
serde_json::Value::Array(
|
||||
topics.iter().map(|t| serde_json::Value::String(t.clone())).collect()
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref intent) = pattern.context.intent {
|
||||
inputs.insert("intent".to_string(), serde_json::Value::String(intent.clone()));
|
||||
}
|
||||
|
||||
// Add pattern-specific inputs
|
||||
match &pattern.pattern_type {
|
||||
PatternType::InputPattern { keywords, .. } => {
|
||||
inputs.insert(
|
||||
"keywords".to_string(),
|
||||
serde_json::Value::Array(
|
||||
keywords.iter().map(|k| serde_json::Value::String(k.clone())).collect()
|
||||
),
|
||||
);
|
||||
}
|
||||
PatternType::SkillCombination { skill_ids } => {
|
||||
inputs.insert(
|
||||
"skills".to_string(),
|
||||
serde_json::Value::Array(
|
||||
skill_ids.iter().map(|s| serde_json::Value::String(s.clone())).collect()
|
||||
),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Apply rule mappings
|
||||
for (source, target) in &rule.input_mappings {
|
||||
if let Some(value) = inputs.get(source) {
|
||||
inputs.insert(target.clone(), value.clone());
|
||||
}
|
||||
}
|
||||
|
||||
inputs
|
||||
}
|
||||
|
||||
/// Find a pipeline that uses the given skills
|
||||
fn find_pipeline_for_skills(&self, skill_ids: &[String]) -> Option<String> {
|
||||
// In production, this would query the pipeline registry
|
||||
// For now, return a default
|
||||
if skill_ids.len() >= 2 {
|
||||
Some("skill-orchestration-pipeline".to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Find a pipeline for an intent
|
||||
fn find_pipeline_for_intent(&self, intent: &str) -> Option<String> {
|
||||
// Map common intents to pipelines
|
||||
match intent {
|
||||
"research" => Some("research-pipeline".to_string()),
|
||||
"analysis" => Some("analysis-pipeline".to_string()),
|
||||
"report" => Some("report-generation".to_string()),
|
||||
"code" => Some("code-generation".to_string()),
|
||||
"task" | "tasks" => Some("task-management".to_string()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Register a pipeline
|
||||
pub fn register_pipeline(&mut self, metadata: PipelineMetadata) {
|
||||
self.pipeline_registry.insert(metadata.id.clone(), metadata);
|
||||
}
|
||||
|
||||
/// Unregister a pipeline
|
||||
pub fn unregister_pipeline(&mut self, pipeline_id: &str) {
|
||||
self.pipeline_registry.remove(pipeline_id);
|
||||
}
|
||||
|
||||
/// Add a custom recommendation rule
|
||||
pub fn add_rule(&mut self, rule: RecommendationRule) {
|
||||
self.rules.push(rule);
|
||||
// Sort by priority
|
||||
self.rules.sort_by(|a, b| b.priority.cmp(&a.priority));
|
||||
}
|
||||
|
||||
/// Remove a rule
|
||||
pub fn remove_rule(&mut self, rule_id: &str) {
|
||||
self.rules.retain(|r| r.id != rule_id);
|
||||
}
|
||||
|
||||
/// Get all rules
|
||||
pub fn get_rules(&self) -> &[RecommendationRule] {
|
||||
&self.rules
|
||||
}
|
||||
|
||||
/// Update configuration
|
||||
pub fn update_config(&mut self, config: RecommenderConfig) {
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
/// Get configuration
|
||||
pub fn get_config(&self) -> &RecommenderConfig {
|
||||
&self.config
|
||||
}
|
||||
|
||||
/// Get recommendation count
|
||||
pub fn recommendation_count(&self) -> usize {
|
||||
self.recommendations_cache.len()
|
||||
}
|
||||
|
||||
/// Clear recommendation cache
|
||||
pub fn clear_cache(&mut self) {
|
||||
self.recommendations_cache.clear();
|
||||
}
|
||||
|
||||
/// Accept a recommendation (remove from cache and return it)
|
||||
/// Returns the accepted recommendation if found
|
||||
pub fn accept_recommendation(&mut self, recommendation_id: &str) -> Option<WorkflowRecommendation> {
|
||||
if let Some(pos) = self.recommendations_cache.iter().position(|r| r.id == recommendation_id) {
|
||||
Some(self.recommendations_cache.remove(pos))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Dismiss a recommendation (remove from cache without acting on it)
|
||||
/// Returns true if the recommendation was found and dismissed
|
||||
pub fn dismiss_recommendation(&mut self, recommendation_id: &str) -> bool {
|
||||
if let Some(pos) = self.recommendations_cache.iter().position(|r| r.id == recommendation_id) {
|
||||
self.recommendations_cache.remove(pos);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a recommendation by ID
|
||||
pub fn get_recommendation(&self, recommendation_id: &str) -> Option<&WorkflowRecommendation> {
|
||||
self.recommendations_cache.iter().find(|r| r.id == recommendation_id)
|
||||
}
|
||||
|
||||
/// Load recommendations from file
|
||||
pub fn load_from_file(&mut self, path: &str) -> Result<(), String> {
|
||||
let content = std::fs::read_to_string(path)
|
||||
.map_err(|e| format!("Failed to read file: {}", e))?;
|
||||
|
||||
let recommendations: Vec<WorkflowRecommendation> = serde_json::from_str(&content)
|
||||
.map_err(|e| format!("Failed to parse recommendations: {}", e))?;
|
||||
|
||||
self.recommendations_cache = recommendations;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Save recommendations to file
|
||||
pub fn save_to_file(&self, path: &str) -> Result<(), String> {
|
||||
let content = serde_json::to_string_pretty(&self.recommendations_cache)
|
||||
.map_err(|e| format!("Failed to serialize recommendations: {}", e))?;
|
||||
|
||||
std::fs::write(path, content)
|
||||
.map_err(|e| format!("Failed to write file: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_recommender_creation() {
        // A fresh recommender must come pre-seeded with the built-in rules.
        let sut = WorkflowRecommender::new(None);
        assert!(!sut.get_rules().is_empty());
    }

    #[test]
    fn test_recommend_from_empty_patterns() {
        // No detected patterns -> no recommendations.
        let sut = WorkflowRecommender::new(None);
        assert!(sut.recommend(&[]).is_empty());
    }

    #[test]
    fn test_rule_priority() {
        // A custom rule keeps its priority after insertion (rules are re-sorted).
        let mut sut = WorkflowRecommender::new(None);

        let custom = RecommendationRule {
            id: "high_priority".to_string(),
            pattern_types: vec!["SkillCombination".to_string()],
            pipeline_id: "important-pipeline".to_string(),
            base_confidence: 0.9,
            description: "High priority rule".to_string(),
            input_mappings: HashMap::new(),
            priority: 10,
        };
        sut.add_rule(custom);

        assert!(sut.get_rules().iter().any(|r| r.priority == 10));
    }

    #[test]
    fn test_register_pipeline() {
        // Registering a pipeline makes it retrievable via its id.
        let mut sut = WorkflowRecommender::new(None);

        let meta = PipelineMetadata {
            id: "test-pipeline".to_string(),
            name: "Test Pipeline".to_string(),
            description: Some("A test pipeline".to_string()),
            tags: vec!["test".to_string()],
            input_schema: None,
        };
        sut.register_pipeline(meta);

        assert!(sut.pipeline_registry.contains_key("test-pipeline"));
    }
}
|
||||
@@ -8,6 +8,10 @@
|
||||
//!
|
||||
//! Phase 3 of Intelligence Layer Migration.
|
||||
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.4.2
|
||||
//!
|
||||
//! NOTE: Some methods are reserved for future self-improvement features.
|
||||
|
||||
#![allow(dead_code)] // Methods reserved for future self-improvement features
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
845
desktop/src-tauri/src/intelligence/trigger_evaluator.rs
Normal file
845
desktop/src-tauri/src/intelligence/trigger_evaluator.rs
Normal file
@@ -0,0 +1,845 @@
|
||||
//! Trigger Evaluator - Evaluates context-aware triggers for Hands
|
||||
//!
|
||||
//! This module extends the basic trigger system with semantic matching:
|
||||
//! Supports MemoryQuery, ContextCondition, and IdentityState triggers.
|
||||
//!
|
||||
//! NOTE: This module is not yet integrated into the main application.
|
||||
//! Components are still being developed and will be connected in a future release.
|
||||
|
||||
#![allow(dead_code)] // Module not yet integrated - components under development
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::pin::Pin;
|
||||
use tokio::sync::Mutex;
|
||||
use chrono::{DateTime, Utc, Timelike, Datelike};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value as JsonValue;
|
||||
use zclaw_memory::MemoryStore;
|
||||
|
||||
// === ReDoS Protection Constants ===

/// Maximum allowed length for regex patterns (prevents memory exhaustion)
const MAX_REGEX_PATTERN_LENGTH: usize = 500;

/// Maximum allowed nesting depth for regex quantifiers/groups
/// (deep group nesting is a precondition for exponential backtracking)
const MAX_REGEX_NESTING_DEPTH: usize = 10;
|
||||
|
||||
/// Error type for regex validation failures
#[derive(Debug, Clone, PartialEq)]
pub enum RegexValidationError {
    /// Pattern exceeds maximum length (`MAX_REGEX_PATTERN_LENGTH`)
    TooLong { length: usize, max: usize },
    /// Pattern has excessive nesting depth (`MAX_REGEX_NESTING_DEPTH`)
    TooDeeplyNested { depth: usize, max: usize },
    /// Pattern contains dangerous ReDoS-prone constructs (e.g. nested quantifiers);
    /// the payload is a human-readable reason
    DangerousPattern(String),
    /// Invalid regex syntax, as reported by the regex engine
    InvalidSyntax(String),
}
|
||||
|
||||
impl std::fmt::Display for RegexValidationError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
RegexValidationError::TooLong { length, max } => {
|
||||
write!(f, "Regex pattern too long: {} bytes (max: {})", length, max)
|
||||
}
|
||||
RegexValidationError::TooDeeplyNested { depth, max } => {
|
||||
write!(f, "Regex pattern too deeply nested: {} levels (max: {})", depth, max)
|
||||
}
|
||||
RegexValidationError::DangerousPattern(reason) => {
|
||||
write!(f, "Dangerous regex pattern detected: {}", reason)
|
||||
}
|
||||
RegexValidationError::InvalidSyntax(err) => {
|
||||
write!(f, "Invalid regex syntax: {}", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for RegexValidationError {}
|
||||
|
||||
/// Validate a regex pattern for ReDoS safety
|
||||
///
|
||||
/// This function checks for:
|
||||
/// 1. Pattern length (prevents memory exhaustion)
|
||||
/// 2. Nesting depth (prevents exponential backtracking)
|
||||
/// 3. Dangerous patterns (nested quantifiers on overlapping character classes)
|
||||
fn validate_regex_pattern(pattern: &str) -> Result<(), RegexValidationError> {
|
||||
// Check length
|
||||
if pattern.len() > MAX_REGEX_PATTERN_LENGTH {
|
||||
return Err(RegexValidationError::TooLong {
|
||||
length: pattern.len(),
|
||||
max: MAX_REGEX_PATTERN_LENGTH,
|
||||
});
|
||||
}
|
||||
|
||||
// Check nesting depth by counting unescaped parentheses and brackets
|
||||
let nesting_depth = calculate_nesting_depth(pattern);
|
||||
if nesting_depth > MAX_REGEX_NESTING_DEPTH {
|
||||
return Err(RegexValidationError::TooDeeplyNested {
|
||||
depth: nesting_depth,
|
||||
max: MAX_REGEX_NESTING_DEPTH,
|
||||
});
|
||||
}
|
||||
|
||||
// Check for dangerous ReDoS patterns:
|
||||
// - Nested quantifiers on overlapping patterns like (a+)+
|
||||
// - Alternation with overlapping patterns like (a|a)+
|
||||
if contains_dangerous_redos_pattern(pattern) {
|
||||
return Err(RegexValidationError::DangerousPattern(
|
||||
"Pattern contains nested quantifiers on overlapping character classes".to_string()
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Calculate the maximum nesting depth of groups in a regex pattern.
///
/// Counts unescaped `(`...`)` groups and `[`...`]` character classes;
/// escaped metacharacters (`\(`, `\[`, ...) are skipped. An unmatched
/// closing paren never drives the depth below zero.
fn calculate_nesting_depth(pattern: &str) -> usize {
    let chars: Vec<char> = pattern.chars().collect();
    let len = chars.len();
    let mut max_depth = 0usize;
    let mut depth = 0usize;
    let mut i = 0usize;

    while i < len {
        match chars[i] {
            // Escape sequence: skip the backslash and the escaped character.
            '\\' if i + 1 < len => {
                i += 2;
                continue;
            }
            // Character class: counts one level until its closing ']'.
            '[' => {
                depth += 1;
                max_depth = max_depth.max(depth);
                i += 1;
                while i < len {
                    match chars[i] {
                        '\\' if i + 1 < len => {
                            i += 2;
                            continue;
                        }
                        ']' => {
                            depth -= 1;
                            break;
                        }
                        _ => i += 1,
                    }
                }
            }
            // Group opening: one more level (non-capturing groups and
            // lookarounds are intentionally treated the same).
            '(' => {
                depth += 1;
                max_depth = max_depth.max(depth);
            }
            // Group closing: never underflow on unbalanced ')'.
            ')' => depth = depth.saturating_sub(1),
            _ => {}
        }
        i += 1;
    }

    max_depth
}
|
||||
|
||||
/// Check for dangerous ReDoS patterns
///
/// Detects nested quantification: a `+`, `*`, or `?` applied to a group whose
/// body contains a top-level quantifier, e.g.:
/// - (a+)+ - nested quantifiers
/// - (a*)+ - nested quantifiers
/// - (a+)* - nested quantifiers
/// - (.*)* - nested quantifiers on wildcard
///
/// Escaped metacharacters (`\+`, `\)`, ...) are treated as literals, so
/// patterns like `(\))+` are not flagged. This is a heuristic pre-filter,
/// not a complete ReDoS analysis.
fn contains_dangerous_redos_pattern(pattern: &str) -> bool {
    let chars: Vec<char> = pattern.chars().collect();

    // Forward pass: mark every character preceded by an unescaped backslash.
    // FIX: the previous implementation tried to detect escapes while scanning
    // backwards, which is unreliable and decremented usize indices without a
    // lower-bound check — an input such as "(\))+" underflowed the index and
    // panicked in debug builds.
    let mut escaped = vec![false; chars.len()];
    let mut pending_escape = false;
    for (idx, &c) in chars.iter().enumerate() {
        if pending_escape {
            escaped[idx] = true;
            pending_escape = false;
        } else if c == '\\' {
            pending_escape = true;
        }
    }

    for i in 1..chars.len() {
        // Only unescaped quantifiers applied to an unescaped ')' matter here.
        if !matches!(chars[i], '+' | '*' | '?') || escaped[i] {
            continue;
        }
        let close = i - 1;
        if chars[close] != ')' || escaped[close] {
            continue;
        }

        // Find the matching '(' for the quantified group.
        let mut depth = 1usize;
        let mut open: Option<usize> = None;
        for j in (0..close).rev() {
            if escaped[j] {
                continue;
            }
            match chars[j] {
                ')' => depth += 1,
                '(' => {
                    depth -= 1;
                    if depth == 0 {
                        open = Some(j);
                        break;
                    }
                }
                _ => {}
            }
        }
        // Unbalanced pattern: scan from the start, mirroring the old behavior.
        let start = open.map_or(0, |o| o + 1);

        // A quantifier at the top level of the group body means the group's
        // contents are quantified twice -> exponential backtracking risk.
        // Quantifiers inside nested sub-groups are skipped, matching the
        // original heuristic.
        let mut nested = 0usize;
        for k in start..close {
            if escaped[k] {
                continue;
            }
            match chars[k] {
                '(' => nested += 1,
                ')' => nested = nested.saturating_sub(1),
                '+' | '*' | '?' if nested == 0 => return true,
                _ => {}
            }
        }
    }

    false
}
|
||||
|
||||
/// Safely compile a regex pattern with ReDoS protection
|
||||
///
|
||||
/// This function validates the pattern for safety before compilation.
|
||||
/// Returns a compiled regex or an error describing why validation failed.
|
||||
pub fn compile_safe_regex(pattern: &str) -> Result<regex::Regex, RegexValidationError> {
|
||||
validate_regex_pattern(pattern)?;
|
||||
|
||||
regex::Regex::new(pattern).map_err(|e| RegexValidationError::InvalidSyntax(e.to_string()))
|
||||
}
|
||||
|
||||
// === Extended Trigger Types ===
|
||||
|
||||
/// Memory query trigger configuration
///
/// The trigger fires when at least `min_count` memories match the given
/// filters. NOTE(review): evaluation is currently stubbed out in
/// `evaluate_memory_query` and always reports no matches.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryQueryConfig {
    /// Memory type to filter (e.g., "task", "preference"); `None` = any type
    pub memory_type: Option<String>,
    /// Content pattern to match (regex or substring)
    pub content_pattern: String,
    /// Minimum count of matching memories
    pub min_count: usize,
    /// Minimum importance threshold
    pub min_importance: Option<i32>,
    /// Time window for memories (hours)
    pub time_window_hours: Option<u64>,
}
|
||||
|
||||
/// Context condition configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContextConditionConfig {
    /// Conditions to check
    pub conditions: Vec<ContextConditionClause>,
    /// How to combine the individual condition results (All, Any, None)
    pub combination: ConditionCombination,
}

/// Single context condition clause: `field <operator> value`
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContextConditionClause {
    /// Field to check
    pub field: ContextField,
    /// Comparison operator
    pub operator: ComparisonOperator,
    /// Value to compare against (JSON; interpreted according to the field)
    pub value: JsonValue,
}

/// Context fields that can be checked
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum ContextField {
    /// Current hour of day (0-23)
    TimeOfDay,
    /// Day of week (0=Monday, 6=Sunday)
    DayOfWeek,
    /// Currently active project (if any)
    ActiveProject,
    /// Topics discussed recently
    RecentTopic,
    /// Number of pending tasks
    PendingTasks,
    /// Count of memories in storage
    MemoryCount,
    /// Hours since last interaction
    LastInteractionHours,
    /// Current conversation intent
    ConversationIntent,
}

/// Comparison operators for context conditions
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum ComparisonOperator {
    Equals,
    NotEquals,
    Contains,
    GreaterThan,
    LessThan,
    Exists,
    NotExists,
    Matches, // regex match
}

/// How to combine multiple conditions
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum ConditionCombination {
    /// All conditions must be true (logical AND)
    All,
    /// Any one condition being true is enough (logical OR)
    Any,
    /// None of the conditions should be true (logical NOR)
    None,
}
|
||||
|
||||
/// Identity state trigger configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IdentityStateConfig {
    /// Identity file to check
    pub file: IdentityFile,
    /// Content pattern to match (regex)
    pub content_pattern: Option<String>,
    /// Trigger on any change to the file
    pub any_change: bool,
}

/// Identity files that can be monitored
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum IdentityFile {
    Soul,
    Instructions,
    User,
}

/// Composite trigger configuration
///
/// Combines multiple sub-triggers into one result using the same
/// All/Any/None semantics as context conditions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompositeTriggerConfig {
    /// Sub-triggers to combine
    pub triggers: Vec<ExtendedTriggerType>,
    /// How to combine results
    pub combination: ConditionCombination,
}

/// Extended trigger type that includes semantic triggers
///
/// Serialized with an internal `"type"` tag in snake_case
/// (e.g. `{"type": "memory_query", ...}`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ExtendedTriggerType {
    /// Standard interval trigger
    Interval {
        /// Interval in seconds
        seconds: u64,
    },
    /// Time-of-day trigger
    TimeOfDay {
        /// Hour (0-23)
        hour: u8,
        /// Optional minute (0-59)
        minute: Option<u8>,
    },
    /// Memory query trigger
    MemoryQuery(MemoryQueryConfig),
    /// Context condition trigger
    ContextCondition(ContextConditionConfig),
    /// Identity state trigger
    IdentityState(IdentityStateConfig),
    /// Composite trigger
    Composite(CompositeTriggerConfig),
}
|
||||
|
||||
// === Trigger Evaluator ===
|
||||
|
||||
/// Evaluator for context-aware triggers
///
/// Shared subsystems are held behind `Arc` (and `tokio::sync::Mutex` where
/// mutation is required) so the evaluator can be used from async tasks.
pub struct TriggerEvaluator {
    /// Memory store for memory queries
    memory_store: Arc<MemoryStore>,
    /// Identity manager for identity triggers
    identity_manager: Arc<Mutex<super::identity::AgentIdentityManager>>,
    /// Heartbeat engine for context
    heartbeat_engine: Arc<Mutex<super::heartbeat::HeartbeatEngine>>,
    /// Cached context data used by context-condition triggers
    context_cache: Arc<Mutex<TriggerContextCache>>,
}

/// Cached context for trigger evaluation
#[derive(Debug, Clone, Default)]
pub struct TriggerContextCache {
    /// Last known active project
    pub active_project: Option<String>,
    /// Recent topics discussed
    pub recent_topics: Vec<String>,
    /// Last conversation intent
    pub conversation_intent: Option<String>,
    /// Last update time (UTC); `None` until the cache is first populated
    pub last_updated: Option<DateTime<Utc>>,
}
|
||||
|
||||
impl TriggerEvaluator {
    /// Create a new trigger evaluator.
    ///
    /// The context cache starts empty; `ContextCondition` triggers will see
    /// `NotExists` for all fields until the cache is populated elsewhere.
    pub fn new(
        memory_store: Arc<MemoryStore>,
        identity_manager: Arc<Mutex<super::identity::AgentIdentityManager>>,
        heartbeat_engine: Arc<Mutex<super::heartbeat::HeartbeatEngine>>,
    ) -> Self {
        Self {
            memory_store,
            identity_manager,
            heartbeat_engine,
            context_cache: Arc::new(Mutex::new(TriggerContextCache::default())),
        }
    }

    /// Evaluate a trigger, returning `Ok(true)` when it should fire.
    ///
    /// `Interval` triggers always return `true` here — interval pacing is
    /// assumed to be handled by the caller/scheduler, not this evaluator.
    pub async fn evaluate(
        &self,
        trigger: &ExtendedTriggerType,
        agent_id: &str,
    ) -> Result<bool, String> {
        match trigger {
            ExtendedTriggerType::Interval { .. } => Ok(true),
            ExtendedTriggerType::TimeOfDay { hour, minute } => {
                // NOTE(review): comparison is done in UTC. If trigger hours
                // are meant to be the user's local wall-clock time, this
                // should use `Local` — confirm the intended timezone.
                let now = Utc::now();
                let current_hour = now.hour() as u8;
                let current_minute = now.minute() as u8;

                if current_hour != *hour {
                    return Ok(false);
                }

                // A missing `minute` matches the whole hour.
                if let Some(min) = minute {
                    if current_minute != *min {
                        return Ok(false);
                    }
                }

                Ok(true)
            }
            ExtendedTriggerType::MemoryQuery(config) => {
                self.evaluate_memory_query(config, agent_id).await
            }
            ExtendedTriggerType::ContextCondition(config) => {
                self.evaluate_context_condition(config, agent_id).await
            }
            ExtendedTriggerType::IdentityState(config) => {
                self.evaluate_identity_state(config, agent_id).await
            }
            ExtendedTriggerType::Composite(config) => {
                self.evaluate_composite(config, agent_id, None).await
            }
        }
    }

    /// Evaluate memory query trigger.
    ///
    /// Currently a stub: always returns `Ok(false)` (never fires) and logs a
    /// warning, because `MemoryStore` does not yet expose the search needed.
    async fn evaluate_memory_query(
        &self,
        config: &MemoryQueryConfig,
        _agent_id: &str,
    ) -> Result<bool, String> {
        // TODO: Implement proper memory search when MemoryStore supports it
        // For now, use KV store to check if we have enough keys matching pattern
        // This is a simplified implementation

        // Memory search is not fully implemented in current MemoryStore
        // Return false to indicate no matches until proper search is available
        tracing::warn!(
            pattern = %config.content_pattern,
            min_count = config.min_count,
            "Memory query trigger evaluation not fully implemented"
        );

        Ok(false)
    }

    /// Evaluate context condition trigger.
    ///
    /// Evaluates each clause against the cached context and combines the
    /// boolean results with the configured `ConditionCombination` mode.
    async fn evaluate_context_condition(
        &self,
        config: &ContextConditionConfig,
        agent_id: &str,
    ) -> Result<bool, String> {
        let context = self.get_cached_context(agent_id).await;

        let mut results = Vec::new();

        for condition in &config.conditions {
            let result = self.evaluate_condition_clause(condition, &context);
            results.push(result);
        }

        // Combine results based on combination mode:
        // All = AND, Any = OR, None = NOR.
        let final_result = match config.combination {
            ConditionCombination::All => results.iter().all(|r| *r),
            ConditionCombination::Any => results.iter().any(|r| *r),
            ConditionCombination::None => results.iter().all(|r| !*r),
        };

        Ok(final_result)
    }

    /// Evaluate a single condition clause against the cached context.
    ///
    /// For optional context fields, a missing value only satisfies the
    /// `NotExists` operator; every other operator evaluates to `false`.
    /// `PendingTasks` and `MemoryCount` are stubs that always return `false`.
    fn evaluate_condition_clause(
        &self,
        clause: &ContextConditionClause,
        context: &TriggerContextCache,
    ) -> bool {
        match clause.field {
            ContextField::TimeOfDay => {
                // NOTE(review): UTC hour, not local — confirm intended timezone.
                let now = Utc::now();
                let current_hour = now.hour() as i32;
                self.compare_values(current_hour, &clause.operator, &clause.value)
            }
            ContextField::DayOfWeek => {
                // 0 = Monday .. 6 = Sunday (chrono's num_days_from_monday).
                let now = Utc::now();
                let current_day = now.weekday().num_days_from_monday() as i32;
                self.compare_values(current_day, &clause.operator, &clause.value)
            }
            ContextField::ActiveProject => {
                if let Some(project) = &context.active_project {
                    self.compare_values(project.clone(), &clause.operator, &clause.value)
                } else {
                    matches!(clause.operator, ComparisonOperator::NotExists)
                }
            }
            ContextField::RecentTopic => {
                // Only the most recent topic is considered.
                if let Some(topic) = context.recent_topics.first() {
                    self.compare_values(topic.clone(), &clause.operator, &clause.value)
                } else {
                    matches!(clause.operator, ComparisonOperator::NotExists)
                }
            }
            ContextField::PendingTasks => {
                // Would need to query memory store
                false // Not implemented yet
            }
            ContextField::MemoryCount => {
                // Would need to query memory store
                false // Not implemented yet
            }
            ContextField::LastInteractionHours => {
                if let Some(last_updated) = context.last_updated {
                    // Whole hours elapsed since the cache was last updated.
                    let hours = (Utc::now() - last_updated).num_hours();
                    self.compare_values(hours as i32, &clause.operator, &clause.value)
                } else {
                    // No timestamp at all: clause cannot be satisfied
                    // (note: unlike other optional fields, `NotExists` is
                    // NOT honored here — possibly intentional, worth a look).
                    false
                }
            }
            ContextField::ConversationIntent => {
                if let Some(intent) = &context.conversation_intent {
                    self.compare_values(intent.clone(), &clause.operator, &clause.value)
                } else {
                    matches!(clause.operator, ComparisonOperator::NotExists)
                }
            }
        }
    }

    /// Compare an actual value against an expected JSON value using `operator`.
    ///
    /// Type-mismatched comparisons (e.g. `Contains` on non-strings,
    /// `GreaterThan` on non-numbers) evaluate to `false` rather than erroring.
    /// `Matches` compiles the expected string through `compile_safe_regex`;
    /// invalid/dangerous patterns are logged and treated as no match.
    fn compare_values<T>(&self, actual: T, operator: &ComparisonOperator, expected: &JsonValue) -> bool
    where
        T: Into<JsonValue>,
    {
        let actual_value = actual.into();

        match operator {
            ComparisonOperator::Equals => &actual_value == expected,
            ComparisonOperator::NotEquals => &actual_value != expected,
            ComparisonOperator::Contains => {
                if let (Some(actual_str), Some(expected_str)) =
                    (actual_value.as_str(), expected.as_str())
                {
                    actual_str.contains(expected_str)
                } else {
                    false
                }
            }
            ComparisonOperator::GreaterThan => {
                // Try integer comparison first, then fall back to float.
                if let (Some(actual_num), Some(expected_num)) =
                    (actual_value.as_i64(), expected.as_i64())
                {
                    actual_num > expected_num
                } else if let (Some(actual_num), Some(expected_num)) =
                    (actual_value.as_f64(), expected.as_f64())
                {
                    actual_num > expected_num
                } else {
                    false
                }
            }
            ComparisonOperator::LessThan => {
                if let (Some(actual_num), Some(expected_num)) =
                    (actual_value.as_i64(), expected.as_i64())
                {
                    actual_num < expected_num
                } else if let (Some(actual_num), Some(expected_num)) =
                    (actual_value.as_f64(), expected.as_f64())
                {
                    actual_num < expected_num
                } else {
                    false
                }
            }
            ComparisonOperator::Exists => !actual_value.is_null(),
            ComparisonOperator::NotExists => actual_value.is_null(),
            ComparisonOperator::Matches => {
                if let (Some(actual_str), Some(expected_str)) =
                    (actual_value.as_str(), expected.as_str())
                {
                    compile_safe_regex(expected_str)
                        .map(|re| re.is_match(actual_str))
                        .unwrap_or_else(|e| {
                            tracing::warn!(
                                pattern = %expected_str,
                                error = %e,
                                "Regex pattern validation failed, treating as no match"
                            );
                            false
                        })
                } else {
                    false
                }
            }
        }
    }

    /// Evaluate identity state trigger.
    ///
    /// Selects one identity file (soul / instructions / user profile) and
    /// checks it against the optional `content_pattern` regex. Returns an
    /// `Err` (rather than `Ok(false)`) when the configured pattern itself is
    /// invalid, so misconfiguration is surfaced to the caller.
    async fn evaluate_identity_state(
        &self,
        config: &IdentityStateConfig,
        agent_id: &str,
    ) -> Result<bool, String> {
        let mut manager = self.identity_manager.lock().await;
        let identity = manager.get_identity(agent_id);

        // Get the target file content
        let content = match config.file {
            IdentityFile::Soul => identity.soul,
            IdentityFile::Instructions => identity.instructions,
            IdentityFile::User => identity.user_profile,
        };

        // Check content pattern if specified
        if let Some(pattern) = &config.content_pattern {
            let re = compile_safe_regex(pattern)
                .map_err(|e| format!("Invalid regex pattern: {}", e))?;
            if !re.is_match(&content) {
                return Ok(false);
            }
        }

        // If any_change is true, we would need to track changes
        // For now, just return true

        Ok(true)
    }

    /// Get cached context for an agent.
    ///
    /// NOTE(review): the cache is currently global — `_agent_id` is ignored,
    /// so all agents share one `TriggerContextCache`. Confirm this is
    /// intended for multi-agent deployments.
    async fn get_cached_context(&self, _agent_id: &str) -> TriggerContextCache {
        self.context_cache.lock().await.clone()
    }

    /// Evaluate composite trigger.
    ///
    /// Recursively evaluates each child trigger via `evaluate` and combines
    /// the results per the configured combination mode. Returns a boxed
    /// future because the recursion (`evaluate` -> `evaluate_composite` ->
    /// `evaluate`) would otherwise make the future type infinitely sized.
    ///
    /// NOTE(review): `_depth` is accepted but never enforced, and `evaluate`
    /// always passes `None` — deeply/cyclically nested composites have no
    /// recursion guard. Worth wiring up a depth limit.
    fn evaluate_composite<'a>(
        &'a self,
        config: &'a CompositeTriggerConfig,
        agent_id: &'a str,
        _depth: Option<usize>,
    ) -> Pin<Box<dyn std::future::Future<Output = Result<bool, String>> + 'a>> {
        Box::pin(async move {
            let mut results = Vec::new();

            for trigger in &config.triggers {
                let result = self.evaluate(trigger, agent_id).await?;
                results.push(result);
            }

            // Combine results based on combination mode
            let final_result = match config.combination {
                ConditionCombination::All => results.iter().all(|r| *r),
                ConditionCombination::Any => results.iter().any(|r| *r),
                ConditionCombination::None => results.iter().all(|r| !*r),
            };

            Ok(final_result)
        })
    }
}
|
||||
|
||||
// === Unit Tests ===
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Tests for the safe-regex compilation pipeline: syntax, length,
    /// nesting, and ReDoS checks.
    mod regex_validation {
        use super::*;

        #[test]
        fn test_valid_simple_pattern() {
            assert!(compile_safe_regex(r"hello").is_ok());
        }

        #[test]
        fn test_valid_pattern_with_quantifiers() {
            assert!(compile_safe_regex(r"\d+").is_ok());
        }

        #[test]
        fn test_valid_pattern_with_groups() {
            assert!(compile_safe_regex(r"(foo|bar)\d{2,4}").is_ok());
        }

        #[test]
        fn test_valid_character_class() {
            assert!(compile_safe_regex(r"[a-zA-Z0-9_]+").is_ok());
        }

        #[test]
        fn test_pattern_too_long() {
            // One byte over the 500-character limit.
            let oversized = "a".repeat(501);
            assert!(matches!(
                compile_safe_regex(&oversized),
                Err(RegexValidationError::TooLong { .. })
            ));
        }

        #[test]
        fn test_pattern_at_max_length() {
            // Exactly at the limit: still accepted.
            let max_sized = "a".repeat(500);
            assert!(compile_safe_regex(&max_sized).is_ok());
        }

        #[test]
        fn test_nested_quantifier_detection_simple() {
            // Classic ReDoS pattern: (a+)+
            // Our implementation detects this as dangerous
            assert!(
                matches!(
                    validate_regex_pattern(r"(a+)+"),
                    Err(RegexValidationError::DangerousPattern(_))
                ),
                "Expected nested quantifier pattern to be detected as dangerous"
            );
        }

        #[test]
        fn test_deeply_nested_groups() {
            // 15 levels of nesting exceeds the allowed depth.
            let deep = format!("{}{}{}", "(".repeat(15), "a".repeat(10), ")".repeat(15));
            assert!(matches!(
                compile_safe_regex(&deep),
                Err(RegexValidationError::TooDeeplyNested { .. })
            ));
        }

        #[test]
        fn test_reasonably_nested_groups() {
            // Three levels of nesting is within limits.
            assert!(compile_safe_regex("(((foo|bar)))").is_ok());
        }

        #[test]
        fn test_invalid_regex_syntax() {
            assert!(matches!(
                compile_safe_regex(r"[unclosed"),
                Err(RegexValidationError::InvalidSyntax(_))
            ));
        }

        #[test]
        fn test_escaped_characters_in_pattern() {
            assert!(compile_safe_regex(r"\[hello\]").is_ok());
        }

        #[test]
        fn test_complex_valid_pattern() {
            // Email-like pattern (simplified)
            assert!(compile_safe_regex(r"[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}").is_ok());
        }
    }

    /// Tests for the group/class nesting-depth counter used by the
    /// regex validator.
    mod nesting_depth_calculation {
        use super::*;

        #[test]
        fn test_no_nesting() {
            assert_eq!(calculate_nesting_depth("abc"), 0);
        }

        #[test]
        fn test_single_group() {
            assert_eq!(calculate_nesting_depth("(abc)"), 1);
        }

        #[test]
        fn test_nested_groups() {
            assert_eq!(calculate_nesting_depth("((abc))"), 2);
        }

        #[test]
        fn test_character_class() {
            assert_eq!(calculate_nesting_depth("[abc]"), 1);
        }

        #[test]
        fn test_mixed_nesting() {
            // A character class inside a group counts as two levels.
            assert_eq!(calculate_nesting_depth("([a-z]+)"), 2);
        }

        #[test]
        fn test_escaped_parens() {
            // Escaped parens shouldn't count toward nesting
            assert_eq!(calculate_nesting_depth(r"\(abc\)"), 0);
        }

        #[test]
        fn test_multiple_groups_same_level() {
            // Sibling groups don't accumulate depth.
            assert_eq!(calculate_nesting_depth("(abc)(def)"), 1);
        }
    }

    /// Tests confirming that common, safe constructs are NOT flagged as
    /// ReDoS-prone.
    mod dangerous_pattern_detection {
        use super::*;

        #[test]
        fn test_simple_quantifier_not_dangerous() {
            assert!(!contains_dangerous_redos_pattern(r"a+"));
        }

        #[test]
        fn test_simple_group_not_dangerous() {
            assert!(!contains_dangerous_redos_pattern(r"(abc)"));
        }

        #[test]
        fn test_quantified_group_not_dangerous() {
            assert!(!contains_dangerous_redos_pattern(r"(abc)+"));
        }

        #[test]
        fn test_alternation_not_dangerous() {
            assert!(!contains_dangerous_redos_pattern(r"(a|b)+"));
        }
    }
}
|
||||
|
||||
272
desktop/src-tauri/src/intelligence/validation.rs
Normal file
272
desktop/src-tauri/src/intelligence/validation.rs
Normal file
@@ -0,0 +1,272 @@
|
||||
//! Input validation utilities for the Intelligence Layer
|
||||
//!
|
||||
//! This module provides validation functions for common input types
|
||||
//! to prevent injection attacks, path traversal, and memory exhaustion.
|
||||
//!
|
||||
//! NOTE: Some functions are defined for future use and external API exposure.
|
||||
|
||||
#![allow(dead_code)] // Validation functions reserved for future API endpoints
|
||||
|
||||
use std::fmt;
|
||||
|
||||
/// Maximum length for identifier strings (agent_id, pipeline_id, skill_id, etc.)
/// Note: enforced on the byte length of the string, not the char count.
pub const MAX_IDENTIFIER_LENGTH: usize = 128;

/// Minimum length for identifier strings
pub const MIN_IDENTIFIER_LENGTH: usize = 1;

/// Allowed characters in identifiers: alphanumeric, hyphen, underscore
/// (ASCII only; membership is checked per-character by `validate_identifier`).
const IDENTIFIER_ALLOWED_CHARS: &str = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_";
|
||||
|
||||
/// Validation error types
///
/// Each variant carries the offending field's name so callers can produce
/// actionable error messages (see the `Display` impl).
#[derive(Debug, Clone)]
pub enum ValidationError {
    /// Identifier is too long (byte length exceeds `max`)
    IdentifierTooLong { field: String, max: usize, actual: usize },
    /// Identifier is too short or empty
    IdentifierTooShort { field: String, min: usize, actual: usize },
    /// Identifier contains invalid characters (also used when an identifier
    /// starts with a reserved '-' or '_')
    InvalidCharacters { field: String, invalid_chars: String },
    /// String exceeds maximum length (byte length exceeds `max`)
    StringTooLong { field: String, max: usize, actual: usize },
    /// Required field is missing or empty
    RequiredFieldEmpty { field: String },
}
|
||||
|
||||
impl fmt::Display for ValidationError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::IdentifierTooLong { field, max, actual } => {
|
||||
write!(f, "Field '{}' is too long: {} characters (max: {})", field, actual, max)
|
||||
}
|
||||
Self::IdentifierTooShort { field, min, actual } => {
|
||||
write!(f, "Field '{}' is too short: {} characters (min: {})", field, actual, min)
|
||||
}
|
||||
Self::InvalidCharacters { field, invalid_chars } => {
|
||||
write!(f, "Field '{}' contains invalid characters: '{}'. Allowed: alphanumeric, '-', '_'", field, invalid_chars)
|
||||
}
|
||||
Self::StringTooLong { field, max, actual } => {
|
||||
write!(f, "Field '{}' is too long: {} characters (max: {})", field, actual, max)
|
||||
}
|
||||
Self::RequiredFieldEmpty { field } => {
|
||||
write!(f, "Required field '{}' is empty", field)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Marker impl so ValidationError composes with `Box<dyn Error>` / `anyhow`-style callers.
impl std::error::Error for ValidationError {}
|
||||
|
||||
/// Validate an identifier (agent_id, pipeline_id, skill_id, etc.)
|
||||
///
|
||||
/// # Rules
|
||||
/// - Length: 1-128 characters
|
||||
/// - Characters: alphanumeric, hyphen (-), underscore (_)
|
||||
/// - Cannot start with hyphen or underscore
|
||||
///
|
||||
/// # Examples
|
||||
/// ```ignore
|
||||
/// use desktop_lib::intelligence::validation::validate_identifier;
|
||||
///
|
||||
/// assert!(validate_identifier("agent-123", "agent_id").is_ok());
|
||||
/// assert!(validate_identifier("my_skill", "skill_id").is_ok());
|
||||
/// assert!(validate_identifier("", "agent_id").is_err());
|
||||
/// assert!(validate_identifier("invalid@id", "agent_id").is_err());
|
||||
/// ```
|
||||
pub fn validate_identifier(value: &str, field_name: &str) -> Result<(), ValidationError> {
|
||||
let len = value.len();
|
||||
|
||||
// Check minimum length
|
||||
if len < MIN_IDENTIFIER_LENGTH {
|
||||
return Err(ValidationError::IdentifierTooShort {
|
||||
field: field_name.to_string(),
|
||||
min: MIN_IDENTIFIER_LENGTH,
|
||||
actual: len,
|
||||
});
|
||||
}
|
||||
|
||||
// Check maximum length
|
||||
if len > MAX_IDENTIFIER_LENGTH {
|
||||
return Err(ValidationError::IdentifierTooLong {
|
||||
field: field_name.to_string(),
|
||||
max: MAX_IDENTIFIER_LENGTH,
|
||||
actual: len,
|
||||
});
|
||||
}
|
||||
|
||||
// Check for invalid characters
|
||||
let invalid_chars: String = value
|
||||
.chars()
|
||||
.filter(|c| !IDENTIFIER_ALLOWED_CHARS.contains(*c))
|
||||
.collect();
|
||||
|
||||
if !invalid_chars.is_empty() {
|
||||
return Err(ValidationError::InvalidCharacters {
|
||||
field: field_name.to_string(),
|
||||
invalid_chars,
|
||||
});
|
||||
}
|
||||
|
||||
// Cannot start with hyphen or underscore (reserved for system use)
|
||||
if value.starts_with('-') || value.starts_with('_') {
|
||||
return Err(ValidationError::InvalidCharacters {
|
||||
field: field_name.to_string(),
|
||||
invalid_chars: value.chars().next().unwrap().to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Validate a string field with a maximum length
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `value` - The string to validate
|
||||
/// * `field_name` - Name of the field for error messages
|
||||
/// * `max_length` - Maximum allowed length
|
||||
///
|
||||
/// # Examples
|
||||
/// ```ignore
|
||||
/// use desktop_lib::intelligence::validation::validate_string_length;
|
||||
///
|
||||
/// assert!(validate_string_length("hello", "message", 100).is_ok());
|
||||
/// assert!(validate_string_length("", "message", 100).is_err());
|
||||
/// ```
|
||||
pub fn validate_string_length(value: &str, field_name: &str, max_length: usize) -> Result<(), ValidationError> {
|
||||
let len = value.len();
|
||||
|
||||
if len == 0 {
|
||||
return Err(ValidationError::RequiredFieldEmpty {
|
||||
field: field_name.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
if len > max_length {
|
||||
return Err(ValidationError::StringTooLong {
|
||||
field: field_name.to_string(),
|
||||
max: max_length,
|
||||
actual: len,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Validate an optional identifier field
|
||||
///
|
||||
/// Returns Ok if the value is None or if it contains a valid identifier.
|
||||
pub fn validate_optional_identifier(value: Option<&str>, field_name: &str) -> Result<(), ValidationError> {
|
||||
match value {
|
||||
None => Ok(()),
|
||||
Some(v) if v.is_empty() => Ok(()), // Empty string treated as None
|
||||
Some(v) => validate_identifier(v, field_name),
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate a list of identifiers
|
||||
pub fn validate_identifiers<'a, I>(values: I, field_name: &str) -> Result<(), ValidationError>
|
||||
where
|
||||
I: IntoIterator<Item = &'a str>,
|
||||
{
|
||||
for value in values {
|
||||
validate_identifier(value, field_name)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sanitize a string for safe logging.
///
/// Strips control characters (except `\n` and `\t`) and truncates the result
/// to at most `max_len` characters, appending `"..."` only when characters
/// were actually dropped by the truncation.
///
/// Fix: the truncation marker used to be decided by comparing the *byte*
/// length of the raw input against the *character* budget, so multi-byte or
/// control-char-laden inputs that fully fit after filtering still got a
/// spurious `"..."` suffix. We now detect truncation on the filtered
/// character stream itself.
pub fn sanitize_for_logging(value: &str, max_len: usize) -> String {
    // Lazily filter out control characters, keeping newline and tab.
    let mut printable = value
        .chars()
        .filter(|c| !c.is_control() || *c == '\n' || *c == '\t');

    // Take at most `max_len` characters of the filtered stream.
    let sanitized: String = printable.by_ref().take(max_len).collect();

    // If anything remains after the budget, we truncated — mark it.
    if printable.next().is_some() {
        format!("{}...", sanitized)
    } else {
        sanitized
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_valid_identifiers() {
        // Representative accepted forms: hyphenated, underscored,
        // mixed-case, and single-character.
        for id in ["agent-123", "my_skill", "Pipeline42", "a"] {
            assert!(validate_identifier(id, "test").is_ok());
        }
    }

    #[test]
    fn test_invalid_identifiers() {
        // Too short
        assert!(matches!(
            validate_identifier("", "agent_id"),
            Err(ValidationError::IdentifierTooShort { .. })
        ));

        // Too long
        assert!(matches!(
            validate_identifier(&"a".repeat(200), "agent_id"),
            Err(ValidationError::IdentifierTooLong { .. })
        ));

        // Invalid characters ('@' and space are outside the allowed set)
        for bad in ["invalid@id", "invalid id"] {
            assert!(matches!(
                validate_identifier(bad, "agent_id"),
                Err(ValidationError::InvalidCharacters { .. })
            ));
        }

        // Starts with reserved characters
        for bad in ["-invalid", "_invalid"] {
            assert!(matches!(
                validate_identifier(bad, "agent_id"),
                Err(ValidationError::InvalidCharacters { .. })
            ));
        }
    }

    #[test]
    fn test_string_length_validation() {
        assert!(validate_string_length("hello", "message", 100).is_ok());

        // Empty strings are rejected as missing required fields.
        assert!(matches!(
            validate_string_length("", "message", 100),
            Err(ValidationError::RequiredFieldEmpty { .. })
        ));

        // Over-length strings are rejected.
        assert!(matches!(
            validate_string_length(&"a".repeat(200), "message", 100),
            Err(ValidationError::StringTooLong { .. })
        ));
    }

    #[test]
    fn test_optional_identifier() {
        // None and empty string both count as "not provided".
        assert!(validate_optional_identifier(None, "agent_id").is_ok());
        assert!(validate_optional_identifier(Some(""), "agent_id").is_ok());
        // Present values go through full identifier validation.
        assert!(validate_optional_identifier(Some("valid-id"), "agent_id").is_ok());
        assert!(validate_optional_identifier(Some("invalid@id"), "agent_id").is_err());
    }

    #[test]
    fn test_sanitize_for_logging() {
        assert_eq!(sanitize_for_logging("hello", 100), "hello");
        assert_eq!(sanitize_for_logging("hello\x00world", 100), "helloworld");
        assert_eq!(sanitize_for_logging("hello\nworld", 100), "hello\nworld");
        assert_eq!(sanitize_for_logging("hello world", 5), "hello...");
    }
}
|
||||
@@ -11,9 +11,25 @@ use tokio::sync::Mutex;
|
||||
use zclaw_kernel::Kernel;
|
||||
use zclaw_types::{AgentConfig, AgentId, AgentInfo};
|
||||
|
||||
use crate::intelligence::validation::{validate_identifier, validate_string_length};
|
||||
|
||||
/// Kernel state wrapper for Tauri
|
||||
pub type KernelState = Arc<Mutex<Option<Kernel>>>;
|
||||
|
||||
/// Validate an agent ID string with clear error messages
|
||||
fn validate_agent_id(agent_id: &str) -> Result<String, String> {
|
||||
validate_identifier(agent_id, "agent_id")
|
||||
.map_err(|e| format!("Invalid agent_id: {}", e))?;
|
||||
Ok(agent_id.to_string())
|
||||
}
|
||||
|
||||
/// Validate a generic ID string (for skills, hands, triggers, etc.)
|
||||
fn validate_id(id: &str, field_name: &str) -> Result<String, String> {
|
||||
validate_identifier(id, field_name)
|
||||
.map_err(|e| format!("Invalid {}: {}", field_name, e))?;
|
||||
Ok(id.to_string())
|
||||
}
|
||||
|
||||
/// Agent creation request
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
@@ -295,6 +311,9 @@ pub async fn agent_get(
|
||||
state: State<'_, KernelState>,
|
||||
agent_id: String,
|
||||
) -> Result<Option<AgentInfo>, String> {
|
||||
// Validate input
|
||||
let agent_id = validate_agent_id(&agent_id)?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
@@ -312,6 +331,9 @@ pub async fn agent_delete(
|
||||
state: State<'_, KernelState>,
|
||||
agent_id: String,
|
||||
) -> Result<(), String> {
|
||||
// Validate input
|
||||
let agent_id = validate_agent_id(&agent_id)?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
@@ -331,6 +353,11 @@ pub async fn agent_chat(
|
||||
state: State<'_, KernelState>,
|
||||
request: ChatRequest,
|
||||
) -> Result<ChatResponse, String> {
|
||||
// Validate inputs
|
||||
validate_agent_id(&request.agent_id)?;
|
||||
validate_string_length(&request.message, "message", 100000)
|
||||
.map_err(|e| format!("Invalid message: {}", e))?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
@@ -391,6 +418,11 @@ pub async fn agent_chat_stream(
|
||||
state: State<'_, KernelState>,
|
||||
request: StreamChatRequest,
|
||||
) -> Result<(), String> {
|
||||
// Validate inputs
|
||||
validate_agent_id(&request.agent_id)?;
|
||||
validate_string_length(&request.message, "message", 100000)
|
||||
.map_err(|e| format!("Invalid message: {}", e))?;
|
||||
|
||||
// Parse agent ID first
|
||||
let id: AgentId = request.agent_id.parse()
|
||||
.map_err(|_| "Invalid agent ID format".to_string())?;
|
||||
@@ -613,6 +645,9 @@ pub async fn skill_execute(
|
||||
context: SkillContext,
|
||||
input: serde_json::Value,
|
||||
) -> Result<SkillResult, String> {
|
||||
// Validate skill ID
|
||||
let id = validate_id(&id, "skill_id")?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
@@ -760,3 +795,286 @@ pub async fn hand_execute(
|
||||
|
||||
Ok(HandResult::from(result))
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Trigger Commands
|
||||
// ============================================================
|
||||
|
||||
/// Trigger configuration for creation/update
///
/// camelCase on the wire to match the frontend's JSON conventions.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TriggerConfigRequest {
    // Unique trigger identifier supplied by the caller.
    pub id: String,
    // Human-readable trigger name.
    pub name: String,
    // The hand this trigger fires.
    pub hand_id: String,
    // What causes the trigger to fire (schedule, event, webhook, ...).
    pub trigger_type: TriggerTypeRequest,
    // Defaults to true when omitted from the request payload.
    #[serde(default = "default_trigger_enabled")]
    pub enabled: bool,
    // Optional free-form description.
    #[serde(default)]
    pub description: Option<String>,
    // Optional organizational tags.
    #[serde(default)]
    pub tags: Vec<String>,
}
|
||||
|
||||
// serde default for TriggerConfigRequest::enabled — triggers are on unless explicitly disabled.
fn default_trigger_enabled() -> bool { true }
|
||||
|
||||
/// Trigger type for API
///
/// Serialized as an internally-tagged enum: `{"type": "schedule", "cron": ...}`.
/// Mirrors `zclaw_hands::TriggerType`; conversions live in the
/// `trigger_create` command and the `TriggerResponse` `From` impl.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum TriggerTypeRequest {
    /// Cron-expression schedule.
    Schedule { cron: String },
    /// Internal event matching a pattern.
    Event { pattern: String },
    /// Inbound webhook on `path`, optionally secret-protected.
    Webhook { path: String, secret: Option<String> },
    /// Fires when a chat message matches `pattern`.
    MessagePattern { pattern: String },
    /// Filesystem watcher; `events` are lowercase names ("created", "modified", "deleted", "any").
    FileSystem { path: String, events: Vec<String> },
    /// Only fired explicitly via `trigger_execute`.
    Manual,
}
|
||||
|
||||
/// Trigger response
///
/// Frontend-facing view of a stored trigger; built from a kernel
/// `TriggerEntry` via the `From` impl below. Timestamps are RFC 3339 strings.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TriggerResponse {
    pub id: String,
    pub name: String,
    pub hand_id: String,
    pub trigger_type: TriggerTypeRequest,
    pub enabled: bool,
    // RFC 3339 creation timestamp.
    pub created_at: String,
    // RFC 3339 last-modification timestamp.
    pub modified_at: String,
    pub description: Option<String>,
    pub tags: Vec<String>,
}
|
||||
|
||||
impl From<zclaw_kernel::trigger_manager::TriggerEntry> for TriggerResponse {
    /// Convert a kernel trigger entry into the API response shape,
    /// mapping the kernel trigger-type enum onto the wire enum and
    /// formatting timestamps as RFC 3339.
    fn from(entry: zclaw_kernel::trigger_manager::TriggerEntry) -> Self {
        let trigger_type = match entry.config.trigger_type {
            zclaw_hands::TriggerType::Schedule { cron } => {
                TriggerTypeRequest::Schedule { cron }
            }
            zclaw_hands::TriggerType::Event { pattern } => {
                TriggerTypeRequest::Event { pattern }
            }
            zclaw_hands::TriggerType::Webhook { path, secret } => {
                TriggerTypeRequest::Webhook { path, secret }
            }
            zclaw_hands::TriggerType::MessagePattern { pattern } => {
                TriggerTypeRequest::MessagePattern { pattern }
            }
            zclaw_hands::TriggerType::FileSystem { path, events } => {
                TriggerTypeRequest::FileSystem {
                    path,
                    // Debug-format the event enum and lowercase it, e.g.
                    // FileEvent::Created -> "created". NOTE(review): relies on
                    // the Debug representation matching the strings parsed in
                    // trigger_create — keep the two in sync.
                    events: events.iter().map(|e| format!("{:?}", e).to_lowercase()).collect(),
                }
            }
            zclaw_hands::TriggerType::Manual => TriggerTypeRequest::Manual,
        };

        Self {
            id: entry.config.id,
            name: entry.config.name,
            hand_id: entry.config.hand_id,
            trigger_type,
            enabled: entry.config.enabled,
            created_at: entry.created_at.to_rfc3339(),
            modified_at: entry.modified_at.to_rfc3339(),
            description: entry.description,
            tags: entry.tags,
        }
    }
}
|
||||
|
||||
/// List all triggers
///
/// Returns every registered trigger (enabled or not) as API responses.
/// Errors only when the kernel has not been initialized yet.
#[tauri::command]
pub async fn trigger_list(
    state: State<'_, KernelState>,
) -> Result<Vec<TriggerResponse>, String> {
    let kernel_lock = state.lock().await;
    let kernel = kernel_lock.as_ref()
        .ok_or_else(|| "Kernel not initialized".to_string())?;

    let triggers = kernel.list_triggers().await;
    Ok(triggers.into_iter().map(TriggerResponse::from).collect())
}
|
||||
|
||||
/// Get a specific trigger
///
/// Returns `Ok(None)` when no trigger with the given (valid) ID exists;
/// an invalid ID is rejected before touching the kernel.
#[tauri::command]
pub async fn trigger_get(
    state: State<'_, KernelState>,
    id: String,
) -> Result<Option<TriggerResponse>, String> {
    // Validate trigger ID
    let id = validate_id(&id, "trigger_id")?;

    let kernel_lock = state.lock().await;
    let kernel = kernel_lock.as_ref()
        .ok_or_else(|| "Kernel not initialized".to_string())?;

    Ok(kernel.get_trigger(&id).await.map(TriggerResponse::from))
}
|
||||
|
||||
/// Create a new trigger
|
||||
#[tauri::command]
|
||||
pub async fn trigger_create(
|
||||
state: State<'_, KernelState>,
|
||||
request: TriggerConfigRequest,
|
||||
) -> Result<TriggerResponse, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
// Convert request to config
|
||||
let trigger_type = match request.trigger_type {
|
||||
TriggerTypeRequest::Schedule { cron } => {
|
||||
zclaw_hands::TriggerType::Schedule { cron }
|
||||
}
|
||||
TriggerTypeRequest::Event { pattern } => {
|
||||
zclaw_hands::TriggerType::Event { pattern }
|
||||
}
|
||||
TriggerTypeRequest::Webhook { path, secret } => {
|
||||
zclaw_hands::TriggerType::Webhook { path, secret }
|
||||
}
|
||||
TriggerTypeRequest::MessagePattern { pattern } => {
|
||||
zclaw_hands::TriggerType::MessagePattern { pattern }
|
||||
}
|
||||
TriggerTypeRequest::FileSystem { path, events } => {
|
||||
zclaw_hands::TriggerType::FileSystem {
|
||||
path,
|
||||
events: events.iter().filter_map(|e| match e.as_str() {
|
||||
"created" => Some(zclaw_hands::FileEvent::Created),
|
||||
"modified" => Some(zclaw_hands::FileEvent::Modified),
|
||||
"deleted" => Some(zclaw_hands::FileEvent::Deleted),
|
||||
"any" => Some(zclaw_hands::FileEvent::Any),
|
||||
_ => None,
|
||||
}).collect(),
|
||||
}
|
||||
}
|
||||
TriggerTypeRequest::Manual => zclaw_hands::TriggerType::Manual,
|
||||
};
|
||||
|
||||
let config = zclaw_hands::TriggerConfig {
|
||||
id: request.id,
|
||||
name: request.name,
|
||||
hand_id: request.hand_id,
|
||||
trigger_type,
|
||||
enabled: request.enabled,
|
||||
max_executions_per_hour: 10,
|
||||
};
|
||||
|
||||
let entry = kernel.create_trigger(config).await
|
||||
.map_err(|e| format!("Failed to create trigger: {}", e))?;
|
||||
|
||||
Ok(TriggerResponse::from(entry))
|
||||
}
|
||||
|
||||
/// Update a trigger
|
||||
#[tauri::command]
|
||||
pub async fn trigger_update(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
name: Option<String>,
|
||||
enabled: Option<bool>,
|
||||
hand_id: Option<String>,
|
||||
) -> Result<TriggerResponse, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let update = zclaw_kernel::trigger_manager::TriggerUpdateRequest {
|
||||
name,
|
||||
enabled,
|
||||
hand_id,
|
||||
trigger_type: None,
|
||||
};
|
||||
|
||||
let entry = kernel.update_trigger(&id, update).await
|
||||
.map_err(|e| format!("Failed to update trigger: {}", e))?;
|
||||
|
||||
Ok(TriggerResponse::from(entry))
|
||||
}
|
||||
|
||||
/// Delete a trigger
|
||||
#[tauri::command]
|
||||
pub async fn trigger_delete(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
) -> Result<(), String> {
|
||||
// Validate trigger ID
|
||||
let id = validate_id(&id, "trigger_id")?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
kernel.delete_trigger(&id).await
|
||||
.map_err(|e| format!("Failed to delete trigger: {}", e))
|
||||
}
|
||||
|
||||
/// Execute a trigger manually
|
||||
#[tauri::command]
|
||||
pub async fn trigger_execute(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
input: serde_json::Value,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
// Validate trigger ID
|
||||
let id = validate_id(&id, "trigger_id")?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let result = kernel.execute_trigger(&id, input).await
|
||||
.map_err(|e| format!("Failed to execute trigger: {}", e))?;
|
||||
|
||||
Ok(serde_json::to_value(result).unwrap_or(serde_json::json!({})))
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Approval Commands
|
||||
// ============================================================
|
||||
|
||||
/// Approval response
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ApprovalResponse {
|
||||
pub id: String,
|
||||
pub hand_id: String,
|
||||
pub status: String,
|
||||
pub created_at: String,
|
||||
pub input: serde_json::Value,
|
||||
}
|
||||
|
||||
/// List pending approvals
|
||||
#[tauri::command]
|
||||
pub async fn approval_list(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<Vec<ApprovalResponse>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let approvals = kernel.list_approvals().await;
|
||||
Ok(approvals.into_iter().map(|a| ApprovalResponse {
|
||||
id: a.id,
|
||||
hand_id: a.hand_id,
|
||||
status: a.status,
|
||||
created_at: a.created_at.to_rfc3339(),
|
||||
input: a.input,
|
||||
}).collect())
|
||||
}
|
||||
|
||||
/// Respond to an approval
|
||||
#[tauri::command]
|
||||
pub async fn approval_respond(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
approved: bool,
|
||||
reason: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
kernel.respond_to_approval(&id, approved, reason).await
|
||||
.map_err(|e| format!("Failed to respond to approval: {}", e))
|
||||
}
|
||||
|
||||
@@ -965,6 +965,7 @@ fn openfang_version(app: AppHandle) -> Result<VersionResponse, String> {
|
||||
/// Health status enum
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
#[allow(dead_code)] // Reserved for future health check expansion
|
||||
enum HealthStatus {
|
||||
Healthy,
|
||||
Unhealthy,
|
||||
@@ -1313,6 +1314,7 @@ pub fn run() {
|
||||
let heartbeat_state: intelligence::HeartbeatEngineState = std::sync::Arc::new(tokio::sync::Mutex::new(std::collections::HashMap::new()));
|
||||
let reflection_state: intelligence::ReflectionEngineState = std::sync::Arc::new(tokio::sync::Mutex::new(intelligence::ReflectionEngine::new(None)));
|
||||
let identity_state: intelligence::IdentityManagerState = std::sync::Arc::new(tokio::sync::Mutex::new(intelligence::AgentIdentityManager::new()));
|
||||
let persona_evolver_state: intelligence::PersonaEvolverStateHandle = std::sync::Arc::new(tokio::sync::Mutex::new(intelligence::PersonaEvolver::new(None)));
|
||||
|
||||
// Initialize internal ZCLAW Kernel state
|
||||
let kernel_state = kernel_commands::create_kernel_state();
|
||||
@@ -1327,6 +1329,7 @@ pub fn run() {
|
||||
.manage(heartbeat_state)
|
||||
.manage(reflection_state)
|
||||
.manage(identity_state)
|
||||
.manage(persona_evolver_state)
|
||||
.manage(kernel_state)
|
||||
.manage(pipeline_state)
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
@@ -1436,6 +1439,16 @@ pub fn run() {
|
||||
memory_commands::memory_get,
|
||||
memory_commands::memory_search,
|
||||
memory_commands::memory_delete,
|
||||
// Trigger management commands
|
||||
kernel_commands::trigger_list,
|
||||
kernel_commands::trigger_get,
|
||||
kernel_commands::trigger_create,
|
||||
kernel_commands::trigger_update,
|
||||
kernel_commands::trigger_delete,
|
||||
kernel_commands::trigger_execute,
|
||||
// Approval management commands
|
||||
kernel_commands::approval_list,
|
||||
kernel_commands::approval_respond,
|
||||
memory_commands::memory_delete_all,
|
||||
memory_commands::memory_stats,
|
||||
memory_commands::memory_export,
|
||||
@@ -1479,7 +1492,24 @@ pub fn run() {
|
||||
intelligence::identity::identity_get_snapshots,
|
||||
intelligence::identity::identity_restore_snapshot,
|
||||
intelligence::identity::identity_list_agents,
|
||||
intelligence::identity::identity_delete_agent
|
||||
intelligence::identity::identity_delete_agent,
|
||||
// Adaptive Intelligence Mesh (Phase 4)
|
||||
intelligence::mesh::mesh_init,
|
||||
intelligence::mesh::mesh_analyze,
|
||||
intelligence::mesh::mesh_record_activity,
|
||||
intelligence::mesh::mesh_get_patterns,
|
||||
intelligence::mesh::mesh_update_config,
|
||||
intelligence::mesh::mesh_decay_patterns,
|
||||
intelligence::mesh::mesh_accept_recommendation,
|
||||
intelligence::mesh::mesh_dismiss_recommendation,
|
||||
// Persona Evolver (Phase 4)
|
||||
intelligence::persona_evolver::persona_evolver_init,
|
||||
intelligence::persona_evolver::persona_evolve,
|
||||
intelligence::persona_evolver::persona_evolution_history,
|
||||
intelligence::persona_evolver::persona_evolver_state,
|
||||
intelligence::persona_evolver::persona_evolver_config,
|
||||
intelligence::persona_evolver::persona_evolver_update_config,
|
||||
intelligence::persona_evolver::persona_apply_proposal
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running tauri application");
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
//! Memory Encryption Module
|
||||
//!
|
||||
//! Provides AES-256-GCM encryption for sensitive memory content.
|
||||
//!
|
||||
//! NOTE: Some constants and types are defined for future use.
|
||||
|
||||
#![allow(dead_code)] // Crypto utilities reserved for future encryption features
|
||||
|
||||
use aes_gcm::{
|
||||
aead::{Aead, KeyInit, OsRng},
|
||||
|
||||
@@ -59,6 +59,7 @@ impl<'r> sqlx::FromRow<'r, SqliteRow> for PersistentMemory {
|
||||
pub struct MemorySearchQuery {
|
||||
pub agent_id: Option<String>,
|
||||
pub memory_type: Option<String>,
|
||||
#[allow(dead_code)] // Reserved for future tag-based filtering
|
||||
pub tags: Option<Vec<String>>,
|
||||
pub query: Option<String>,
|
||||
pub min_importance: Option<i32>,
|
||||
|
||||
@@ -7,11 +7,11 @@ use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Emitter, State};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::{Mutex, RwLock};
|
||||
use tokio::sync::RwLock;
|
||||
use serde_json::Value;
|
||||
|
||||
use zclaw_pipeline::{
|
||||
Pipeline, PipelineRun, PipelineProgress, RunStatus,
|
||||
Pipeline, RunStatus,
|
||||
parse_pipeline_yaml,
|
||||
PipelineExecutor,
|
||||
ActionRegistry,
|
||||
@@ -146,7 +146,7 @@ pub async fn pipeline_list(
|
||||
|
||||
// Update state
|
||||
let mut state_pipelines = state.pipelines.write().await;
|
||||
let mut state_paths = state.pipeline_paths.write().await;
|
||||
let state_paths = state.pipeline_paths.write().await;
|
||||
|
||||
for info in &pipelines {
|
||||
if let Some(path) = state_paths.get(&info.id) {
|
||||
|
||||
@@ -21,7 +21,6 @@ import {
|
||||
Grid,
|
||||
Volume2,
|
||||
VolumeX,
|
||||
Settings,
|
||||
Download,
|
||||
Share2,
|
||||
} from 'lucide-react';
|
||||
@@ -78,7 +77,7 @@ interface SceneRendererProps {
|
||||
showNarration: boolean;
|
||||
}
|
||||
|
||||
function SceneRenderer({ scene, isPlaying, showNarration }: SceneRendererProps) {
|
||||
function SceneRenderer({ scene, showNarration }: SceneRendererProps) {
|
||||
const renderContent = () => {
|
||||
switch (scene.type) {
|
||||
case 'title':
|
||||
@@ -240,7 +239,7 @@ function OutlinePanel({
|
||||
{section.title}
|
||||
</p>
|
||||
<div className="space-y-1">
|
||||
{section.scenes.map((sceneId, sceneIndex) => {
|
||||
{section.scenes.map((sceneId) => {
|
||||
const globalIndex = scenes.findIndex(s => s.id === sceneId);
|
||||
const isActive = globalIndex === currentIndex;
|
||||
const scene = scenes.find(s => s.id === sceneId);
|
||||
@@ -271,7 +270,6 @@ function OutlinePanel({
|
||||
|
||||
export function ClassroomPreviewer({
|
||||
data,
|
||||
onClose,
|
||||
onExport,
|
||||
}: ClassroomPreviewerProps) {
|
||||
const [currentSceneIndex, setCurrentSceneIndex] = useState(0);
|
||||
@@ -281,7 +279,7 @@ export function ClassroomPreviewer({
|
||||
const [isFullscreen, setIsFullscreen] = useState(false);
|
||||
const [viewMode, setViewMode] = useState<'slides' | 'grid'>('slides');
|
||||
|
||||
const { showToast } = useToast();
|
||||
const { toast } = useToast();
|
||||
const currentScene = data.scenes[currentSceneIndex];
|
||||
const totalScenes = data.scenes.length;
|
||||
|
||||
@@ -310,12 +308,12 @@ export function ClassroomPreviewer({
|
||||
nextScene();
|
||||
} else {
|
||||
setIsPlaying(false);
|
||||
showToast('课堂播放完成', 'success');
|
||||
toast('课堂播放完成', 'success');
|
||||
}
|
||||
}, duration);
|
||||
|
||||
return () => clearTimeout(timer);
|
||||
}, [isPlaying, currentSceneIndex, currentScene, totalScenes, nextScene, showToast]);
|
||||
}, [isPlaying, currentSceneIndex, currentScene, totalScenes, nextScene, toast]);
|
||||
|
||||
// Keyboard navigation
|
||||
useEffect(() => {
|
||||
@@ -352,7 +350,7 @@ export function ClassroomPreviewer({
|
||||
if (onExport) {
|
||||
onExport(format);
|
||||
} else {
|
||||
showToast(`导出 ${format.toUpperCase()} 功能开发中...`, 'info');
|
||||
toast(`导出 ${format.toUpperCase()} 功能开发中...`, 'info');
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ interface PipelineResultPreviewProps {
|
||||
onClose?: () => void;
|
||||
}
|
||||
|
||||
type PreviewMode = 'auto' | 'json' | 'markdown' | 'classroom';
|
||||
type PreviewMode = 'auto' | 'json' | 'markdown' | 'classroom' | 'files';
|
||||
|
||||
// === Utility Functions ===
|
||||
|
||||
@@ -123,14 +123,14 @@ interface JsonPreviewProps {
|
||||
|
||||
function JsonPreview({ data }: JsonPreviewProps) {
|
||||
const [copied, setCopied] = useState(false);
|
||||
const { showToast } = useToast();
|
||||
const { toast } = useToast();
|
||||
|
||||
const jsonString = JSON.stringify(data, null, 2);
|
||||
|
||||
const handleCopy = async () => {
|
||||
await navigator.clipboard.writeText(jsonString);
|
||||
setCopied(true);
|
||||
showToast('已复制到剪贴板', 'success');
|
||||
toast('已复制到剪贴板', 'success');
|
||||
setTimeout(() => setCopied(false), 2000);
|
||||
};
|
||||
|
||||
@@ -190,7 +190,6 @@ export function PipelineResultPreview({
|
||||
onClose,
|
||||
}: PipelineResultPreviewProps) {
|
||||
const [mode, setMode] = useState<PreviewMode>('auto');
|
||||
const { showToast } = useToast();
|
||||
|
||||
// Determine the best preview mode
|
||||
const outputs = result.outputs as Record<string, unknown> | undefined;
|
||||
|
||||
@@ -7,16 +7,13 @@
|
||||
* Pipelines orchestrate Skills and Hands to accomplish complex tasks.
|
||||
*/
|
||||
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
import { useState } from 'react';
|
||||
import {
|
||||
Play,
|
||||
RefreshCw,
|
||||
Search,
|
||||
ChevronRight,
|
||||
Loader2,
|
||||
CheckCircle,
|
||||
XCircle,
|
||||
Clock,
|
||||
Package,
|
||||
Filter,
|
||||
X,
|
||||
@@ -26,7 +23,6 @@ import {
|
||||
PipelineInfo,
|
||||
PipelineRunResponse,
|
||||
usePipelines,
|
||||
usePipelineRun,
|
||||
validateInputs,
|
||||
getDefaultForType,
|
||||
formatInputType,
|
||||
@@ -378,7 +374,7 @@ export function PipelinesPanel() {
|
||||
const [selectedCategory, setSelectedCategory] = useState<string | null>(null);
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const [selectedPipeline, setSelectedPipeline] = useState<PipelineInfo | null>(null);
|
||||
const { showToast } = useToast();
|
||||
const { toast } = useToast();
|
||||
|
||||
const { pipelines, loading, error, refresh } = usePipelines({
|
||||
category: selectedCategory ?? undefined,
|
||||
@@ -406,9 +402,9 @@ export function PipelinesPanel() {
|
||||
const handleRunComplete = (result: PipelineRunResponse) => {
|
||||
setSelectedPipeline(null);
|
||||
if (result.status === 'completed') {
|
||||
showToast('Pipeline 执行完成', 'success');
|
||||
toast('Pipeline 执行完成', 'success');
|
||||
} else {
|
||||
showToast(`Pipeline 执行失败: ${result.error}`, 'error');
|
||||
toast(`Pipeline 执行失败: ${result.error}`, 'error');
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,21 +1,208 @@
|
||||
import { useEffect } from 'react';
|
||||
import { Radio, RefreshCw, MessageCircle, Settings2 } from 'lucide-react';
|
||||
/**
|
||||
* IMChannels - IM Channel Management UI
|
||||
*
|
||||
* Displays and manages IM channel configurations.
|
||||
* Supports viewing, configuring, and adding new channels.
|
||||
*/
|
||||
import { useState, useEffect } from 'react';
|
||||
import { Radio, RefreshCw, MessageCircle, Settings2, Plus, X, Check, AlertCircle, ExternalLink } from 'lucide-react';
|
||||
import { useConnectionStore } from '../../store/connectionStore';
|
||||
import { useConfigStore } from '../../store/configStore';
|
||||
import { useConfigStore, type ChannelInfo } from '../../store/configStore';
|
||||
import { useAgentStore } from '../../store/agentStore';
|
||||
|
||||
const CHANNEL_ICONS: Record<string, string> = {
|
||||
feishu: '飞',
|
||||
qqbot: 'QQ',
|
||||
wechat: '微',
|
||||
discord: 'D',
|
||||
slack: 'S',
|
||||
telegram: 'T',
|
||||
};
|
||||
|
||||
const CHANNEL_CONFIG_FIELDS: Record<string, { key: string; label: string; type: string; placeholder: string; required: boolean }[]> = {
|
||||
feishu: [
|
||||
{ key: 'appId', label: 'App ID', type: 'text', placeholder: 'cli_xxx', required: true },
|
||||
{ key: 'appSecret', label: 'App Secret', type: 'password', placeholder: '••••••••', required: true },
|
||||
],
|
||||
discord: [
|
||||
{ key: 'botToken', label: 'Bot Token', type: 'password', placeholder: 'OTk2NzY4...', required: true },
|
||||
{ key: 'guildId', label: 'Guild ID (可选)', type: 'text', placeholder: '123456789', required: false },
|
||||
],
|
||||
slack: [
|
||||
{ key: 'botToken', label: 'Bot Token', type: 'password', placeholder: 'xoxb-...', required: true },
|
||||
{ key: 'appToken', label: 'App Token', type: 'password', placeholder: 'xapp-...', required: false },
|
||||
],
|
||||
telegram: [
|
||||
{ key: 'botToken', label: 'Bot Token', type: 'password', placeholder: '123456:ABC...', required: true },
|
||||
],
|
||||
qqbot: [
|
||||
{ key: 'appId', label: 'App ID', type: 'text', placeholder: '1234567890', required: true },
|
||||
{ key: 'token', label: 'Token', type: 'password', placeholder: '••••••••', required: true },
|
||||
],
|
||||
wechat: [
|
||||
{ key: 'corpId', label: 'Corp ID', type: 'text', placeholder: 'wwxxx', required: true },
|
||||
{ key: 'agentId', label: 'Agent ID', type: 'text', placeholder: '1000001', required: true },
|
||||
{ key: 'secret', label: 'Secret', type: 'password', placeholder: '••••••••', required: true },
|
||||
],
|
||||
};
|
||||
|
||||
const KNOWN_CHANNELS = [
|
||||
{ type: 'feishu', label: '飞书 (Feishu/Lark)', description: '企业即时通讯平台' },
|
||||
{ type: 'discord', label: 'Discord', description: '游戏社区和语音聊天' },
|
||||
{ type: 'slack', label: 'Slack', description: '团队协作平台' },
|
||||
{ type: 'telegram', label: 'Telegram', description: '加密即时通讯' },
|
||||
{ type: 'qqbot', label: 'QQ 机器人', description: '腾讯QQ官方机器人' },
|
||||
{ type: 'wechat', label: '企业微信', description: '企业微信机器人' },
|
||||
];
|
||||
|
||||
interface ChannelConfigModalProps {
|
||||
channel: ChannelInfo | null;
|
||||
channelType: string | null;
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
onSave: (config: Record<string, string>) => Promise<void>;
|
||||
isSaving: boolean;
|
||||
}
|
||||
|
||||
function ChannelConfigModal({ channel, channelType, isOpen, onClose, onSave, isSaving }: ChannelConfigModalProps) {
|
||||
const [config, setConfig] = useState<Record<string, string>>({});
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const fields = channelType ? CHANNEL_CONFIG_FIELDS[channelType] || [] : [];
|
||||
|
||||
useEffect(() => {
|
||||
if (channel?.config) {
|
||||
setConfig(channel.config as Record<string, string>);
|
||||
} else {
|
||||
setConfig({});
|
||||
}
|
||||
setError(null);
|
||||
}, [channel, channelType]);
|
||||
|
||||
if (!isOpen || !channelType) return null;
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setError(null);
|
||||
|
||||
// Validate required fields
|
||||
for (const field of fields) {
|
||||
if (field.required && !config[field.key]?.trim()) {
|
||||
setError(`请填写 ${field.label}`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await onSave(config);
|
||||
onClose();
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : '保存失败');
|
||||
}
|
||||
};
|
||||
|
||||
const channelInfo = KNOWN_CHANNELS.find(c => c.type === channelType);
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 bg-black/50 flex items-center justify-center z-50">
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl shadow-xl w-full max-w-md mx-4">
|
||||
<div className="flex items-center justify-between p-4 border-b border-gray-200 dark:border-gray-700">
|
||||
<h3 className="text-lg font-semibold text-gray-900 dark:text-white">
|
||||
{channel ? `配置 ${channel.label}` : `添加 ${channelInfo?.label || channelType}`}
|
||||
</h3>
|
||||
<button
|
||||
onClick={onClose}
|
||||
className="p-1 hover:bg-gray-100 dark:hover:bg-gray-700 rounded"
|
||||
>
|
||||
<X className="w-5 h-5 text-gray-500" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<form onSubmit={handleSubmit} className="p-4 space-y-4">
|
||||
{channelInfo && (
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">
|
||||
{channelInfo.description}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{fields.length === 0 ? (
|
||||
<div className="text-center py-8 text-gray-500 dark:text-gray-400">
|
||||
<AlertCircle className="w-8 h-8 mx-auto mb-2 opacity-50" />
|
||||
<p>该通道类型暂不支持通过 UI 配置</p>
|
||||
<p className="text-xs mt-1">请通过配置文件或 CLI 进行配置</p>
|
||||
</div>
|
||||
) : (
|
||||
fields.map((field) => (
|
||||
<div key={field.key}>
|
||||
<label className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">
|
||||
{field.label}
|
||||
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||
</label>
|
||||
<input
|
||||
type={field.type}
|
||||
value={config[field.key] || ''}
|
||||
onChange={(e) => setConfig({ ...config, [field.key]: e.target.value })}
|
||||
placeholder={field.placeholder}
|
||||
className="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
|
||||
{error && (
|
||||
<div className="p-3 bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg text-sm text-red-600 dark:text-red-400">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{fields.length > 0 && (
|
||||
<div className="flex gap-3 pt-2">
|
||||
<button
|
||||
type="button"
|
||||
onClick={onClose}
|
||||
className="flex-1 px-4 py-2 border border-gray-300 dark:border-gray-600 rounded-lg text-gray-700 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-700"
|
||||
>
|
||||
取消
|
||||
</button>
|
||||
<button
|
||||
type="submit"
|
||||
disabled={isSaving}
|
||||
className="flex-1 px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 disabled:opacity-50 flex items-center justify-center gap-2"
|
||||
>
|
||||
{isSaving ? (
|
||||
<>
|
||||
<RefreshCw className="w-4 h-4 animate-spin" />
|
||||
保存中...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Check className="w-4 h-4" />
|
||||
保存
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function IMChannels() {
|
||||
const channels = useConfigStore((s) => s.channels);
|
||||
const loadChannels = useConfigStore((s) => s.loadChannels);
|
||||
const createChannel = useConfigStore((s) => s.createChannel);
|
||||
const updateChannel = useConfigStore((s) => s.updateChannel);
|
||||
const connectionState = useConnectionStore((s) => s.connectionState);
|
||||
const loadPluginStatus = useAgentStore((s) => s.loadPluginStatus);
|
||||
|
||||
const [isModalOpen, setIsModalOpen] = useState(false);
|
||||
const [selectedChannel, setSelectedChannel] = useState<ChannelInfo | null>(null);
|
||||
const [newChannelType, setNewChannelType] = useState<string | null>(null);
|
||||
const [isSaving, setIsSaving] = useState(false);
|
||||
const [showAddMenu, setShowAddMenu] = useState(false);
|
||||
|
||||
const connected = connectionState === 'connected';
|
||||
const loading = connectionState === 'connecting' || connectionState === 'reconnecting' || connectionState === 'handshaking';
|
||||
|
||||
@@ -29,20 +216,47 @@ export function IMChannels() {
|
||||
loadPluginStatus().then(() => loadChannels());
|
||||
};
|
||||
|
||||
const knownChannels = [
|
||||
{ id: 'feishu', type: 'feishu', label: '飞书 (Feishu)' },
|
||||
{ id: 'qqbot', type: 'qqbot', label: 'QQ 机器人' },
|
||||
{ id: 'wechat', type: 'wechat', label: '微信' },
|
||||
];
|
||||
const handleConfigure = (channel: ChannelInfo) => {
|
||||
setSelectedChannel(channel);
|
||||
setNewChannelType(channel.type);
|
||||
setIsModalOpen(true);
|
||||
};
|
||||
|
||||
const availableChannels = knownChannels.filter(
|
||||
const handleAddChannel = (type: string) => {
|
||||
setSelectedChannel(null);
|
||||
setNewChannelType(type);
|
||||
setIsModalOpen(true);
|
||||
setShowAddMenu(false);
|
||||
};
|
||||
|
||||
const handleSaveConfig = async (config: Record<string, string>) => {
|
||||
setIsSaving(true);
|
||||
try {
|
||||
if (selectedChannel) {
|
||||
await updateChannel(selectedChannel.id, { config });
|
||||
} else if (newChannelType) {
|
||||
const channelInfo = KNOWN_CHANNELS.find(c => c.type === newChannelType);
|
||||
await createChannel({
|
||||
type: newChannelType,
|
||||
name: channelInfo?.label || newChannelType,
|
||||
config,
|
||||
enabled: true,
|
||||
});
|
||||
}
|
||||
await loadChannels();
|
||||
} finally {
|
||||
setIsSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
const availableChannels = KNOWN_CHANNELS.filter(
|
||||
(channel) => !channels.some((item) => item.type === channel.type)
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="max-w-3xl">
|
||||
<div className="flex justify-between items-center mb-6">
|
||||
<h1 className="text-xl font-bold text-gray-900">IM 频道</h1>
|
||||
<h1 className="text-xl font-bold text-gray-900 dark:text-white">IM 频道</h1>
|
||||
<div className="flex gap-2">
|
||||
<span className="text-xs text-gray-400 flex items-center">
|
||||
{connected ? `${channels.length} 个已识别频道` : loading ? '连接中...' : '未连接 Gateway'}
|
||||
@@ -58,12 +272,12 @@ export function IMChannels() {
|
||||
</div>
|
||||
|
||||
{!connected ? (
|
||||
<div className="bg-white rounded-xl border border-gray-200 h-64 flex flex-col items-center justify-center mb-6 shadow-sm text-gray-400">
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 h-64 flex flex-col items-center justify-center mb-6 shadow-sm text-gray-400">
|
||||
<Radio className="w-8 h-8 mb-3 opacity-40" />
|
||||
<span className="text-sm">连接 Gateway 后查看真实 IM 频道状态</span>
|
||||
</div>
|
||||
) : (
|
||||
<div className="bg-white rounded-xl border border-gray-200 mb-6 shadow-sm divide-y divide-gray-100">
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 mb-6 shadow-sm divide-y divide-gray-100 dark:divide-gray-700">
|
||||
{channels.length > 0 ? channels.map((channel) => (
|
||||
<div key={channel.id} className="p-4 flex items-center gap-4">
|
||||
<div className={`w-10 h-10 rounded-xl flex items-center justify-center text-white text-sm font-semibold ${
|
||||
@@ -71,24 +285,30 @@ export function IMChannels() {
|
||||
? 'bg-gradient-to-br from-blue-500 to-indigo-500'
|
||||
: channel.status === 'error'
|
||||
? 'bg-gradient-to-br from-red-500 to-rose-500'
|
||||
: 'bg-gray-300'
|
||||
: 'bg-gray-300 dark:bg-gray-600'
|
||||
}`}>
|
||||
{CHANNEL_ICONS[channel.type] || <MessageCircle className="w-4 h-4" />}
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="text-sm font-medium text-gray-900">{channel.label}</div>
|
||||
<div className="text-sm font-medium text-gray-900 dark:text-white">{channel.label}</div>
|
||||
<div className={`text-xs mt-1 ${
|
||||
channel.status === 'active'
|
||||
? 'text-green-600'
|
||||
? 'text-green-600 dark:text-green-400'
|
||||
: channel.status === 'error'
|
||||
? 'text-red-500'
|
||||
? 'text-red-500 dark:text-red-400'
|
||||
: 'text-gray-400'
|
||||
}`}>
|
||||
{channel.status === 'active' ? '已连接' : channel.status === 'error' ? channel.error || '错误' : '未配置'}
|
||||
{channel.accounts !== undefined && channel.accounts > 0 ? ` · ${channel.accounts} 个账号` : ''}
|
||||
</div>
|
||||
</div>
|
||||
<div className="text-xs text-gray-400">{channel.type}</div>
|
||||
<button
|
||||
onClick={() => handleConfigure(channel)}
|
||||
className="p-2 text-gray-400 hover:text-blue-600 dark:hover:text-blue-400 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors"
|
||||
title="配置"
|
||||
>
|
||||
<Settings2 className="w-4 h-4" />
|
||||
</button>
|
||||
</div>
|
||||
)) : (
|
||||
<div className="h-40 flex items-center justify-center text-sm text-gray-400">
|
||||
@@ -98,23 +318,88 @@ export function IMChannels() {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Add Channel Section */}
|
||||
{connected && availableChannels.length > 0 && (
|
||||
<div className="mb-6">
|
||||
<div className="flex items-center justify-between mb-3">
|
||||
<div className="text-xs text-gray-500 dark:text-gray-400">添加新频道</div>
|
||||
<div className="relative">
|
||||
<button
|
||||
onClick={() => setShowAddMenu(!showAddMenu)}
|
||||
className="text-xs text-white bg-blue-500 hover:bg-blue-600 px-3 py-1.5 rounded-lg flex items-center gap-1 transition-colors"
|
||||
>
|
||||
<Plus className="w-3 h-3" /> 添加频道
|
||||
</button>
|
||||
{showAddMenu && (
|
||||
<div className="absolute right-0 mt-1 w-48 bg-white dark:bg-gray-800 rounded-lg shadow-lg border border-gray-200 dark:border-gray-700 z-10">
|
||||
{availableChannels.map((channel) => (
|
||||
<button
|
||||
key={channel.type}
|
||||
onClick={() => handleAddChannel(channel.type)}
|
||||
className="w-full px-4 py-2 text-left text-sm hover:bg-gray-100 dark:hover:bg-gray-700 first:rounded-t-lg last:rounded-b-lg flex items-center gap-2"
|
||||
>
|
||||
<span className="w-6 h-6 rounded bg-gray-100 dark:bg-gray-700 flex items-center justify-center text-xs">
|
||||
{CHANNEL_ICONS[channel.type] || '?'}
|
||||
</span>
|
||||
<div>
|
||||
<div className="font-medium text-gray-900 dark:text-white">{channel.label}</div>
|
||||
<div className="text-xs text-gray-500">{channel.description}</div>
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Planned Channels */}
|
||||
<div>
|
||||
<div className="text-xs text-gray-500 mb-3">规划中的接入渠道</div>
|
||||
<div className="text-xs text-gray-500 dark:text-gray-400 mb-3">规划中的接入渠道</div>
|
||||
<div className="flex flex-wrap gap-3">
|
||||
{availableChannels.map((channel) => (
|
||||
<span
|
||||
key={channel.id}
|
||||
className="text-xs text-gray-500 bg-gray-100 px-4 py-2 rounded-lg"
|
||||
key={channel.type}
|
||||
className="text-xs text-gray-500 dark:text-gray-400 bg-gray-100 dark:bg-gray-700 px-4 py-2 rounded-lg"
|
||||
>
|
||||
{channel.label}
|
||||
</span>
|
||||
))}
|
||||
<div className="text-xs text-gray-400 flex items-center gap-1">
|
||||
<Settings2 className="w-3 h-3" />
|
||||
当前页面仅展示已识别到的真实频道状态;channel、account、binding 的创建与配置仍需通过 Gateway 或插件侧完成。
|
||||
{availableChannels.length === 0 && (
|
||||
<div className="text-xs text-green-600 dark:text-green-400 flex items-center gap-1">
|
||||
<Check className="w-3 h-3" />
|
||||
所有支持的渠道已配置
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* External Link Notice */}
|
||||
<div className="mt-6 p-4 bg-blue-50 dark:bg-blue-900/20 rounded-lg border border-blue-200 dark:border-blue-800">
|
||||
<div className="flex items-start gap-2">
|
||||
<ExternalLink className="w-4 h-4 text-blue-500 mt-0.5" />
|
||||
<div className="text-xs text-blue-700 dark:text-blue-300">
|
||||
<p className="font-medium mb-1">高级配置</p>
|
||||
<p>账号绑定、消息路由等高级功能需要在 Gateway 配置文件中完成。</p>
|
||||
<p className="mt-1">配置文件路径: <code className="bg-blue-100 dark:bg-blue-800 px-1 rounded">~/.openfang/openfang.toml</code></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Config Modal */}
|
||||
<ChannelConfigModal
|
||||
channel={selectedChannel}
|
||||
channelType={newChannelType}
|
||||
isOpen={isModalOpen}
|
||||
onClose={() => {
|
||||
setIsModalOpen(false);
|
||||
setSelectedChannel(null);
|
||||
setNewChannelType(null);
|
||||
}}
|
||||
onSave={handleSaveConfig}
|
||||
isSaving={isSaving}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
382
desktop/src/components/Settings/SecureStorage.tsx
Normal file
382
desktop/src/components/Settings/SecureStorage.tsx
Normal file
@@ -0,0 +1,382 @@
|
||||
/**
|
||||
* SecureStorage - OS Keyring/Keychain Management UI
|
||||
*
|
||||
* Allows users to view, add, and delete securely stored credentials
|
||||
* using the OS keyring (Windows DPAPI, macOS Keychain, Linux Secret Service).
|
||||
*/
|
||||
import { useState, useEffect } from 'react';
|
||||
import {
|
||||
Key,
|
||||
Plus,
|
||||
Trash2,
|
||||
Eye,
|
||||
EyeOff,
|
||||
RefreshCw,
|
||||
AlertCircle,
|
||||
CheckCircle,
|
||||
Shield,
|
||||
ShieldOff,
|
||||
} from 'lucide-react';
|
||||
import { secureStorage, isSecureStorageAvailable } from '../../lib/secure-storage';
|
||||
|
||||
interface StoredKey {
|
||||
key: string;
|
||||
hasValue: boolean;
|
||||
preview?: string;
|
||||
}
|
||||
|
||||
// Known storage keys used by the application
|
||||
const KNOWN_KEYS = [
|
||||
{ key: 'zclaw_api_key', label: 'API Key', description: 'LLM API 密钥' },
|
||||
{ key: 'zclaw_device_keys_private', label: 'Device Private Key', description: '设备私钥 (Ed25519)' },
|
||||
{ key: 'zclaw_gateway_token', label: 'Gateway Token', description: 'Gateway 认证令牌' },
|
||||
{ key: 'zclaw_feishu_secret', label: '飞书 Secret', description: '飞书应用密钥' },
|
||||
{ key: 'zclaw_discord_token', label: 'Discord Token', description: 'Discord Bot Token' },
|
||||
{ key: 'zclaw_slack_token', label: 'Slack Token', description: 'Slack Bot Token' },
|
||||
{ key: 'zclaw_telegram_token', label: 'Telegram Token', description: 'Telegram Bot Token' },
|
||||
];
|
||||
|
||||
export function SecureStorage() {
|
||||
const [isAvailable, setIsAvailable] = useState<boolean | null>(null);
|
||||
const [storedKeys, setStoredKeys] = useState<StoredKey[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [showAddForm, setShowAddForm] = useState(false);
|
||||
const [newKey, setNewKey] = useState('');
|
||||
const [newValue, setNewValue] = useState('');
|
||||
const [showValue, setShowValue] = useState<Record<string, boolean>>({});
|
||||
const [revealedValues, setRevealedValues] = useState<Record<string, string>>({});
|
||||
const [isSaving, setIsSaving] = useState(false);
|
||||
const [isDeleting, setIsDeleting] = useState<string | null>(null);
|
||||
const [message, setMessage] = useState<{ type: 'success' | 'error'; text: string } | null>(null);
|
||||
|
||||
const loadStoredKeys = async () => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const available = await isSecureStorageAvailable();
|
||||
setIsAvailable(available);
|
||||
|
||||
const keys: StoredKey[] = [];
|
||||
for (const knownKey of KNOWN_KEYS) {
|
||||
const value = await secureStorage.get(knownKey.key);
|
||||
keys.push({
|
||||
key: knownKey.key,
|
||||
hasValue: !!value,
|
||||
preview: value ? `${value.slice(0, 8)}${value.length > 8 ? '...' : ''}` : undefined,
|
||||
});
|
||||
}
|
||||
setStoredKeys(keys);
|
||||
} catch (error) {
|
||||
console.error('Failed to load stored keys:', error);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
loadStoredKeys();
|
||||
}, []);
|
||||
|
||||
const handleReveal = async (key: string) => {
|
||||
if (revealedValues[key]) {
|
||||
// Hide if already revealed
|
||||
setRevealedValues((prev) => {
|
||||
const next = { ...prev };
|
||||
delete next[key];
|
||||
return next;
|
||||
});
|
||||
setShowValue((prev) => ({ ...prev, [key]: false }));
|
||||
} else {
|
||||
// Reveal the value
|
||||
const value = await secureStorage.get(key);
|
||||
if (value) {
|
||||
setRevealedValues((prev) => ({ ...prev, [key]: value }));
|
||||
setShowValue((prev) => ({ ...prev, [key]: true }));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleAddKey = async () => {
|
||||
if (!newKey.trim() || !newValue.trim()) {
|
||||
setMessage({ type: 'error', text: '请填写密钥名称和值' });
|
||||
return;
|
||||
}
|
||||
|
||||
setIsSaving(true);
|
||||
setMessage(null);
|
||||
try {
|
||||
await secureStorage.set(newKey.trim(), newValue.trim());
|
||||
setMessage({ type: 'success', text: '密钥已保存' });
|
||||
setNewKey('');
|
||||
setNewValue('');
|
||||
setShowAddForm(false);
|
||||
await loadStoredKeys();
|
||||
} catch (error) {
|
||||
setMessage({ type: 'error', text: `保存失败: ${error instanceof Error ? error.message : '未知错误'}` });
|
||||
} finally {
|
||||
setIsSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDeleteKey = async (key: string) => {
|
||||
if (!confirm(`确定要删除密钥 "${key}" 吗?此操作无法撤销。`)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setIsDeleting(key);
|
||||
setMessage(null);
|
||||
try {
|
||||
await secureStorage.delete(key);
|
||||
setMessage({ type: 'success', text: '密钥已删除' });
|
||||
setRevealedValues((prev) => {
|
||||
const next = { ...prev };
|
||||
delete next[key];
|
||||
return next;
|
||||
});
|
||||
await loadStoredKeys();
|
||||
} catch (error) {
|
||||
setMessage({ type: 'error', text: `删除失败: ${error instanceof Error ? error.message : '未知错误'}` });
|
||||
} finally {
|
||||
setIsDeleting(null);
|
||||
}
|
||||
};
|
||||
|
||||
const getKeyLabel = (key: string) => {
|
||||
const known = KNOWN_KEYS.find((k) => k.key === key);
|
||||
return known ? known.label : key;
|
||||
};
|
||||
|
||||
const getKeyDescription = (key: string) => {
|
||||
const known = KNOWN_KEYS.find((k) => k.key === key);
|
||||
return known?.description;
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="max-w-3xl">
|
||||
<div className="flex justify-between items-center mb-6">
|
||||
<div>
|
||||
<h1 className="text-xl font-bold text-gray-900 dark:text-white">安全存储</h1>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||
使用系统密钥库 (Keyring/Keychain) 安全存储敏感信息
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex gap-2 items-center">
|
||||
{isAvailable !== null && (
|
||||
<span className={`text-xs flex items-center gap-1 ${isAvailable ? 'text-green-600' : 'text-amber-600'}`}>
|
||||
{isAvailable ? (
|
||||
<>
|
||||
<Shield className="w-3 h-3" /> Keyring 可用
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<ShieldOff className="w-3 h-3" /> 使用加密本地存储
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
)}
|
||||
<button
|
||||
onClick={loadStoredKeys}
|
||||
disabled={isLoading}
|
||||
className="text-xs text-white bg-orange-500 hover:bg-orange-600 px-3 py-1.5 rounded-lg flex items-center gap-1 transition-colors disabled:opacity-50"
|
||||
>
|
||||
<RefreshCw className={`w-3 h-3 ${isLoading ? 'animate-spin' : ''}`} /> 刷新
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Status Banner */}
|
||||
{isAvailable === false && (
|
||||
<div className="mb-6 p-4 bg-amber-50 dark:bg-amber-900/20 border border-amber-200 dark:border-amber-800 rounded-lg">
|
||||
<div className="flex items-start gap-2">
|
||||
<AlertCircle className="w-4 h-4 text-amber-500 mt-0.5" />
|
||||
<div className="text-xs text-amber-700 dark:text-amber-300">
|
||||
<p className="font-medium">Keyring 不可用</p>
|
||||
<p className="mt-1">
|
||||
系统密钥库不可用,将使用 AES-GCM 加密的本地存储作为后备方案。
|
||||
建议在 Tauri 环境中运行以获得最佳安全性。
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Message */}
|
||||
{message && (
|
||||
<div className={`mb-4 p-3 rounded-lg flex items-center gap-2 ${
|
||||
message.type === 'success'
|
||||
? 'bg-green-50 dark:bg-green-900/20 text-green-700 dark:text-green-300'
|
||||
: 'bg-red-50 dark:bg-red-900/20 text-red-700 dark:text-red-300'
|
||||
}`}>
|
||||
{message.type === 'success' ? (
|
||||
<CheckCircle className="w-4 h-4" />
|
||||
) : (
|
||||
<AlertCircle className="w-4 h-4" />
|
||||
)}
|
||||
{message.text}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Stored Keys List */}
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 mb-6 shadow-sm">
|
||||
{isLoading ? (
|
||||
<div className="h-40 flex items-center justify-center text-sm text-gray-400">
|
||||
<RefreshCw className="w-4 h-4 animate-spin mr-2" />
|
||||
加载中...
|
||||
</div>
|
||||
) : storedKeys.length > 0 ? (
|
||||
<div className="divide-y divide-gray-100 dark:divide-gray-700">
|
||||
{storedKeys.map((item) => (
|
||||
<div key={item.key} className="p-4">
|
||||
<div className="flex items-center gap-4">
|
||||
<div className={`w-10 h-10 rounded-xl flex items-center justify-center ${
|
||||
item.hasValue
|
||||
? 'bg-gradient-to-br from-green-500 to-emerald-500 text-white'
|
||||
: 'bg-gray-200 dark:bg-gray-700 text-gray-400'
|
||||
}`}>
|
||||
<Key className="w-4 h-4" />
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="text-sm font-medium text-gray-900 dark:text-white">
|
||||
{getKeyLabel(item.key)}
|
||||
</div>
|
||||
<div className="text-xs text-gray-500 dark:text-gray-400 mt-0.5">
|
||||
{getKeyDescription(item.key) || item.key}
|
||||
</div>
|
||||
{item.hasValue && (
|
||||
<div className="text-xs text-gray-400 dark:text-gray-500 mt-1 font-mono">
|
||||
{showValue[item.key] ? (
|
||||
<span className="break-all">{revealedValues[item.key]}</span>
|
||||
) : (
|
||||
<span>{item.preview}</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{item.hasValue && (
|
||||
<>
|
||||
<button
|
||||
onClick={() => handleReveal(item.key)}
|
||||
className="p-2 text-gray-400 hover:text-blue-600 dark:hover:text-blue-400 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors"
|
||||
title={showValue[item.key] ? '隐藏' : '显示'}
|
||||
>
|
||||
{showValue[item.key] ? (
|
||||
<EyeOff className="w-4 h-4" />
|
||||
) : (
|
||||
<Eye className="w-4 h-4" />
|
||||
)}
|
||||
</button>
|
||||
<button
|
||||
onClick={() => handleDeleteKey(item.key)}
|
||||
disabled={isDeleting === item.key}
|
||||
className="p-2 text-gray-400 hover:text-red-600 dark:hover:text-red-400 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors disabled:opacity-50"
|
||||
title="删除"
|
||||
>
|
||||
{isDeleting === item.key ? (
|
||||
<RefreshCw className="w-4 h-4 animate-spin" />
|
||||
) : (
|
||||
<Trash2 className="w-4 h-4" />
|
||||
)}
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{!item.hasValue && (
|
||||
<span className="text-xs text-gray-400 dark:text-gray-500 px-2">未设置</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : (
|
||||
<div className="h-40 flex items-center justify-center text-sm text-gray-400">
|
||||
暂无存储的密钥
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Add New Key */}
|
||||
<div className="mb-6">
|
||||
{!showAddForm ? (
|
||||
<button
|
||||
onClick={() => setShowAddForm(true)}
|
||||
className="w-full p-4 border-2 border-dashed border-gray-300 dark:border-gray-600 rounded-xl text-gray-500 dark:text-gray-400 hover:border-orange-400 hover:text-orange-500 transition-colors flex items-center justify-center gap-2"
|
||||
>
|
||||
<Plus className="w-4 h-4" />
|
||||
<span className="text-sm">添加新密钥</span>
|
||||
</button>
|
||||
) : (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 shadow-sm">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-4">添加新密钥</h3>
|
||||
<div className="space-y-3">
|
||||
<div>
|
||||
<label className="block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1">
|
||||
密钥名称
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={newKey}
|
||||
onChange={(e) => setNewKey(e.target.value)}
|
||||
placeholder="例如: zclaw_custom_key"
|
||||
className="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1">
|
||||
密钥值
|
||||
</label>
|
||||
<input
|
||||
type="password"
|
||||
value={newValue}
|
||||
onChange={(e) => setNewValue(e.target.value)}
|
||||
placeholder="输入密钥值"
|
||||
className="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex gap-2 pt-2">
|
||||
<button
|
||||
onClick={() => {
|
||||
setShowAddForm(false);
|
||||
setNewKey('');
|
||||
setNewValue('');
|
||||
setMessage(null);
|
||||
}}
|
||||
className="flex-1 px-4 py-2 border border-gray-300 dark:border-gray-600 rounded-lg text-gray-700 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-700 text-sm"
|
||||
>
|
||||
取消
|
||||
</button>
|
||||
<button
|
||||
onClick={handleAddKey}
|
||||
disabled={isSaving || !newKey.trim() || !newValue.trim()}
|
||||
className="flex-1 px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 disabled:opacity-50 flex items-center justify-center gap-2 text-sm"
|
||||
>
|
||||
{isSaving ? (
|
||||
<>
|
||||
<RefreshCw className="w-3 h-3 animate-spin" />
|
||||
保存中...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<CheckCircle className="w-3 h-3" />
|
||||
保存
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Info Section */}
|
||||
<div className="p-4 bg-gray-50 dark:bg-gray-800/50 rounded-lg border border-gray-200 dark:border-gray-700">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-2">关于安全存储</h3>
|
||||
<ul className="text-xs text-gray-500 dark:text-gray-400 space-y-1">
|
||||
<li>• Windows: 使用 DPAPI 加密</li>
|
||||
<li>• macOS: 使用 Keychain 存储</li>
|
||||
<li>• Linux: 使用 Secret Service API (gnome-keyring, kwallet 等)</li>
|
||||
<li>• 后备方案: AES-GCM 加密的 localStorage</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -16,6 +16,8 @@ import {
|
||||
ClipboardList,
|
||||
Clock,
|
||||
Heart,
|
||||
Key,
|
||||
Database,
|
||||
} from 'lucide-react';
|
||||
import { silentErrorHandler } from '../../lib/error-utils';
|
||||
import { General } from './General';
|
||||
@@ -33,6 +35,8 @@ import { SecurityStatus } from '../SecurityStatus';
|
||||
import { SecurityLayersPanel } from '../SecurityLayersPanel';
|
||||
import { TaskList } from '../TaskList';
|
||||
import { HeartbeatConfig } from '../HeartbeatConfig';
|
||||
import { SecureStorage } from './SecureStorage';
|
||||
import { VikingPanel } from '../VikingPanel';
|
||||
|
||||
interface SettingsLayoutProps {
|
||||
onBack: () => void;
|
||||
@@ -49,6 +53,8 @@ type SettingsPage =
|
||||
| 'workspace'
|
||||
| 'privacy'
|
||||
| 'security'
|
||||
| 'storage'
|
||||
| 'viking'
|
||||
| 'audit'
|
||||
| 'tasks'
|
||||
| 'heartbeat'
|
||||
@@ -65,6 +71,8 @@ const menuItems: { id: SettingsPage; label: string; icon: React.ReactNode }[] =
|
||||
{ id: 'im', label: 'IM 频道', icon: <MessageSquare className="w-4 h-4" /> },
|
||||
{ id: 'workspace', label: '工作区', icon: <FolderOpen className="w-4 h-4" /> },
|
||||
{ id: 'privacy', label: '数据与隐私', icon: <Shield className="w-4 h-4" /> },
|
||||
{ id: 'storage', label: '安全存储', icon: <Key className="w-4 h-4" /> },
|
||||
{ id: 'viking', label: '语义记忆', icon: <Database className="w-4 h-4" /> },
|
||||
{ id: 'security', label: '安全状态', icon: <Shield className="w-4 h-4" /> },
|
||||
{ id: 'audit', label: '审计日志', icon: <ClipboardList className="w-4 h-4" /> },
|
||||
{ id: 'tasks', label: '定时任务', icon: <Clock className="w-4 h-4" /> },
|
||||
@@ -88,6 +96,7 @@ export function SettingsLayout({ onBack }: SettingsLayoutProps) {
|
||||
case 'im': return <IMChannels />;
|
||||
case 'workspace': return <Workspace />;
|
||||
case 'privacy': return <Privacy />;
|
||||
case 'storage': return <SecureStorage />;
|
||||
case 'security': return (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
@@ -121,6 +130,7 @@ export function SettingsLayout({ onBack }: SettingsLayoutProps) {
|
||||
<HeartbeatConfig />
|
||||
</div>
|
||||
);
|
||||
case 'viking': return <VikingPanel />;
|
||||
case 'feedback': return <Feedback />;
|
||||
case 'about': return <About />;
|
||||
default: return <General />;
|
||||
|
||||
288
desktop/src/components/VikingPanel.tsx
Normal file
288
desktop/src/components/VikingPanel.tsx
Normal file
@@ -0,0 +1,288 @@
|
||||
/**
|
||||
* VikingPanel - OpenViking Semantic Memory UI
|
||||
*
|
||||
* Provides interface for semantic search and knowledge base management.
|
||||
* OpenViking is an optional sidecar for semantic memory operations.
|
||||
*/
|
||||
import { useState, useEffect } from 'react';
|
||||
import {
|
||||
Search,
|
||||
RefreshCw,
|
||||
AlertCircle,
|
||||
CheckCircle,
|
||||
FileText,
|
||||
Server,
|
||||
Play,
|
||||
Square,
|
||||
} from 'lucide-react';
|
||||
import {
|
||||
getVikingStatus,
|
||||
findVikingResources,
|
||||
getVikingServerStatus,
|
||||
startVikingServer,
|
||||
stopVikingServer,
|
||||
} from '../lib/viking-client';
|
||||
import type { VikingStatus, VikingFindResult } from '../lib/viking-client';
|
||||
|
||||
export function VikingPanel() {
|
||||
const [status, setStatus] = useState<VikingStatus | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const [searchResults, setSearchResults] = useState<VikingFindResult[]>([]);
|
||||
const [isSearching, setIsSearching] = useState(false);
|
||||
const [serverRunning, setServerRunning] = useState(false);
|
||||
const [message, setMessage] = useState<{ type: 'success' | 'error'; text: string } | null>(null);
|
||||
|
||||
const loadStatus = async () => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const vikingStatus = await getVikingStatus();
|
||||
setStatus(vikingStatus);
|
||||
|
||||
const serverStatus = await getVikingServerStatus();
|
||||
setServerRunning(serverStatus.running);
|
||||
} catch (error) {
|
||||
console.error('Failed to load Viking status:', error);
|
||||
setStatus({ available: false, error: String(error) });
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
loadStatus();
|
||||
}, []);
|
||||
|
||||
const handleSearch = async () => {
|
||||
if (!searchQuery.trim()) return;
|
||||
|
||||
setIsSearching(true);
|
||||
setMessage(null);
|
||||
try {
|
||||
const results = await findVikingResources(searchQuery, undefined, 10);
|
||||
setSearchResults(results);
|
||||
if (results.length === 0) {
|
||||
setMessage({ type: 'error', text: '未找到匹配的资源' });
|
||||
}
|
||||
} catch (error) {
|
||||
setMessage({
|
||||
type: 'error',
|
||||
text: `搜索失败: ${error instanceof Error ? error.message : '未知错误'}`,
|
||||
});
|
||||
} finally {
|
||||
setIsSearching(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleServerToggle = async () => {
|
||||
try {
|
||||
if (serverRunning) {
|
||||
await stopVikingServer();
|
||||
setServerRunning(false);
|
||||
setMessage({ type: 'success', text: '服务器已停止' });
|
||||
} else {
|
||||
await startVikingServer();
|
||||
setServerRunning(true);
|
||||
setMessage({ type: 'success', text: '服务器已启动' });
|
||||
}
|
||||
} catch (error) {
|
||||
setMessage({
|
||||
type: 'error',
|
||||
text: `操作失败: ${error instanceof Error ? error.message : '未知错误'}`,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="max-w-4xl">
|
||||
{/* Header */}
|
||||
<div className="flex justify-between items-center mb-6">
|
||||
<div>
|
||||
<h1 className="text-xl font-bold text-gray-900 dark:text-white">语义记忆</h1>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||
OpenViking 语义搜索引擎
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex gap-2 items-center">
|
||||
{status?.available && (
|
||||
<span className="text-xs flex items-center gap-1 text-green-600">
|
||||
<CheckCircle className="w-3 h-3" /> 可用
|
||||
</span>
|
||||
)}
|
||||
<button
|
||||
onClick={loadStatus}
|
||||
disabled={isLoading}
|
||||
className="text-xs text-white bg-orange-500 hover:bg-orange-600 px-3 py-1.5 rounded-lg flex items-center gap-1 transition-colors disabled:opacity-50"
|
||||
>
|
||||
<RefreshCw className={`w-3 h-3 ${isLoading ? 'animate-spin' : ''}`} /> 刷新
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Status Banner */}
|
||||
{!status?.available && (
|
||||
<div className="mb-6 p-4 bg-amber-50 dark:bg-amber-900/20 border border-amber-200 dark:border-amber-800 rounded-lg">
|
||||
<div className="flex items-start gap-2">
|
||||
<AlertCircle className="w-4 h-4 text-amber-500 mt-0.5" />
|
||||
<div className="text-xs text-amber-700 dark:text-amber-300">
|
||||
<p className="font-medium">OpenViking CLI 不可用</p>
|
||||
<p className="mt-1">
|
||||
请安装 OpenViking CLI 或设置{' '}
|
||||
<code className="bg-amber-100 dark:bg-amber-800 px-1 rounded">ZCLAW_VIKING_BIN</code> 环境变量。
|
||||
</p>
|
||||
{status?.error && (
|
||||
<p className="mt-1 text-amber-600 dark:text-amber-400 font-mono text-xs">
|
||||
{status.error}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Message */}
|
||||
{message && (
|
||||
<div
|
||||
className={`mb-4 p-3 rounded-lg flex items-center gap-2 ${
|
||||
message.type === 'success'
|
||||
? 'bg-green-50 dark:bg-green-900/20 text-green-700 dark:text-green-300'
|
||||
: 'bg-red-50 dark:bg-red-900/20 text-red-700 dark:text-red-300'
|
||||
}`}
|
||||
>
|
||||
{message.type === 'success' ? (
|
||||
<CheckCircle className="w-4 h-4" />
|
||||
) : (
|
||||
<AlertCircle className="w-4 h-4" />
|
||||
)}
|
||||
{message.text}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Server Control */}
|
||||
{status?.available && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-3">
|
||||
<div
|
||||
className={`w-10 h-10 rounded-xl flex items-center justify-center ${
|
||||
serverRunning
|
||||
? 'bg-gradient-to-br from-green-500 to-emerald-500 text-white'
|
||||
: 'bg-gray-200 dark:bg-gray-700 text-gray-400'
|
||||
}`}
|
||||
>
|
||||
<Server className="w-4 h-4" />
|
||||
</div>
|
||||
<div>
|
||||
<div className="text-sm font-medium text-gray-900 dark:text-white">
|
||||
Viking Server
|
||||
</div>
|
||||
<div className="text-xs text-gray-500 dark:text-gray-400">
|
||||
{serverRunning ? '运行中' : '已停止'}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<button
|
||||
onClick={handleServerToggle}
|
||||
className={`px-4 py-2 rounded-lg flex items-center gap-2 text-sm transition-colors ${
|
||||
serverRunning
|
||||
? 'bg-red-100 text-red-600 hover:bg-red-200 dark:bg-red-900/30 dark:text-red-400'
|
||||
: 'bg-green-100 text-green-600 hover:bg-green-200 dark:bg-green-900/30 dark:text-green-400'
|
||||
}`}
|
||||
>
|
||||
{serverRunning ? (
|
||||
<>
|
||||
<Square className="w-4 h-4" /> 停止
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Play className="w-4 h-4" /> 启动
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Search Box */}
|
||||
{status?.available && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-3">语义搜索</h3>
|
||||
<div className="flex gap-2">
|
||||
<input
|
||||
type="text"
|
||||
value={searchQuery}
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
|
||||
placeholder="输入自然语言查询..."
|
||||
className="flex-1 px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
<button
|
||||
onClick={handleSearch}
|
||||
disabled={isSearching || !searchQuery.trim()}
|
||||
className="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 disabled:opacity-50 flex items-center gap-2 text-sm"
|
||||
>
|
||||
{isSearching ? (
|
||||
<RefreshCw className="w-4 h-4 animate-spin" />
|
||||
) : (
|
||||
<Search className="w-4 h-4" />
|
||||
)}
|
||||
搜索
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Search Results */}
|
||||
{searchResults.length > 0 && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 shadow-sm divide-y divide-gray-100 dark:divide-gray-700">
|
||||
<div className="p-3 border-b border-gray-200 dark:border-gray-700">
|
||||
<span className="text-xs text-gray-500">
|
||||
找到 {searchResults.length} 个结果
|
||||
</span>
|
||||
</div>
|
||||
{searchResults.map((result, index) => (
|
||||
<div key={`${result.uri}-${index}`} className="p-4">
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="w-8 h-8 rounded-lg bg-blue-100 dark:bg-blue-900/30 flex items-center justify-center flex-shrink-0">
|
||||
<FileText className="w-4 h-4 text-blue-600 dark:text-blue-400" />
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-sm font-medium text-gray-900 dark:text-white truncate">
|
||||
{result.uri}
|
||||
</span>
|
||||
<span className="text-xs text-gray-400 bg-gray-100 dark:bg-gray-700 px-2 py-0.5 rounded">
|
||||
{result.level}
|
||||
</span>
|
||||
<span className="text-xs text-blue-600 dark:text-blue-400">
|
||||
{Math.round(result.score * 100)}%
|
||||
</span>
|
||||
</div>
|
||||
{result.overview && (
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1 line-clamp-2">
|
||||
{result.overview}
|
||||
</p>
|
||||
)}
|
||||
<p className="text-xs text-gray-600 dark:text-gray-300 mt-2 line-clamp-3 font-mono">
|
||||
{result.content}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Info Section */}
|
||||
<div className="mt-6 p-4 bg-gray-50 dark:bg-gray-800/50 rounded-lg border border-gray-200 dark:border-gray-700">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-2">关于 OpenViking</h3>
|
||||
<ul className="text-xs text-gray-500 dark:text-gray-400 space-y-1">
|
||||
<li>• 语义搜索引擎,支持自然语言查询</li>
|
||||
<li>• 自动提取和索引知识资源</li>
|
||||
<li>• 支持多种文档格式和代码文件</li>
|
||||
<li>• 可作为本地知识库增强 AI 对话</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -4,7 +4,7 @@
|
||||
* Draggable palette of available node types.
|
||||
*/
|
||||
|
||||
import React, { DragEvent } from 'react';
|
||||
import { DragEvent } from 'react';
|
||||
import type { NodePaletteItem, NodeCategory } from '../../lib/workflow-builder/types';
|
||||
|
||||
interface NodePaletteProps {
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* Panel for editing node properties.
|
||||
*/
|
||||
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { useState, useEffect } from 'react';
|
||||
import type { WorkflowNodeData } from '../../lib/workflow-builder/types';
|
||||
|
||||
interface PropertyPanelProps {
|
||||
@@ -16,7 +16,6 @@ interface PropertyPanelProps {
|
||||
}
|
||||
|
||||
export function PropertyPanel({
|
||||
nodeId,
|
||||
nodeData,
|
||||
onUpdate,
|
||||
onDelete,
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* Pipeline DSL configurations.
|
||||
*/
|
||||
|
||||
import React, { useCallback, useRef, useEffect } from 'react';
|
||||
import { useCallback, useRef, useEffect } from 'react';
|
||||
import {
|
||||
ReactFlow,
|
||||
Controls,
|
||||
@@ -17,17 +17,17 @@ import {
|
||||
useNodesState,
|
||||
useEdgesState,
|
||||
Node,
|
||||
NodeChange,
|
||||
EdgeChange,
|
||||
Edge,
|
||||
NodeTypes,
|
||||
Panel,
|
||||
ReactFlowProvider,
|
||||
useReactFlow,
|
||||
} from '@xyflow/react';
|
||||
import '@xyflow/react/dist/style.css';
|
||||
|
||||
import { useWorkflowBuilderStore, nodePaletteItems, paletteCategories } from '../../store/workflowBuilderStore';
|
||||
import type { WorkflowNodeType, WorkflowNodeData } from '../../lib/workflow-builder/types';
|
||||
import { validateCanvas } from '../../lib/workflow-builder/yaml-converter';
|
||||
import { useWorkflowBuilderStore, paletteCategories } from '../../store/workflowBuilderStore';
|
||||
import type { WorkflowNodeData, WorkflowNodeType } from '../../lib/workflow-builder/types';
|
||||
|
||||
// Import custom node components
|
||||
import { InputNode } from './nodes/InputNode';
|
||||
@@ -66,7 +66,7 @@ const nodeTypes: NodeTypes = {
|
||||
|
||||
export function WorkflowBuilderInternal() {
|
||||
const reactFlowWrapper = useRef<HTMLDivElement>(null);
|
||||
const { screenToFlowPosition, fitView } = useReactFlow();
|
||||
const { screenToFlowPosition } = useReactFlow();
|
||||
|
||||
const {
|
||||
canvas,
|
||||
@@ -84,8 +84,8 @@ export function WorkflowBuilderInternal() {
|
||||
} = useWorkflowBuilderStore();
|
||||
|
||||
// Local state for React Flow
|
||||
const [nodes, setNodes, onNodesChange] = useNodesState([]);
|
||||
const [edges, setEdges, onEdgesChange] = useEdgesState([]);
|
||||
const [nodes, setNodes, onNodesChange] = useNodesState<Node<WorkflowNodeData>>([]);
|
||||
const [edges, setEdges, onEdgesChange] = useEdgesState<Edge>([]);
|
||||
|
||||
// Sync canvas state with React Flow
|
||||
useEffect(() => {
|
||||
@@ -94,7 +94,7 @@ export function WorkflowBuilderInternal() {
|
||||
id: n.id,
|
||||
type: n.type,
|
||||
position: n.position,
|
||||
data: n.data,
|
||||
data: n.data as WorkflowNodeData,
|
||||
})));
|
||||
setEdges(canvas.edges.map(e => ({
|
||||
id: e.id,
|
||||
@@ -111,7 +111,7 @@ export function WorkflowBuilderInternal() {
|
||||
|
||||
// Handle node changes (position, selection)
|
||||
const handleNodesChange = useCallback(
|
||||
(changes) => {
|
||||
(changes: NodeChange<Node<WorkflowNodeData>>[]) => {
|
||||
onNodesChange(changes);
|
||||
|
||||
// Sync position changes back to store
|
||||
@@ -132,7 +132,7 @@ export function WorkflowBuilderInternal() {
|
||||
|
||||
// Handle edge changes
|
||||
const handleEdgesChange = useCallback(
|
||||
(changes) => {
|
||||
(changes: EdgeChange[]) => {
|
||||
onEdgesChange(changes);
|
||||
},
|
||||
[onEdgesChange]
|
||||
@@ -235,7 +235,7 @@ export function WorkflowBuilderInternal() {
|
||||
{/* Node Palette */}
|
||||
<NodePalette
|
||||
categories={paletteCategories}
|
||||
onDragStart={(type) => {
|
||||
onDragStart={() => {
|
||||
setDragging(true);
|
||||
}}
|
||||
onDragEnd={() => {
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* Toolbar with actions for the workflow builder.
|
||||
*/
|
||||
|
||||
import React, { useState } from 'react';
|
||||
import { useState } from 'react';
|
||||
import type { ValidationResult } from '../../lib/workflow-builder/types';
|
||||
import { canvasToYaml } from '../../lib/workflow-builder/yaml-converter';
|
||||
import { useWorkflowBuilderStore } from '../../store/workflowBuilderStore';
|
||||
|
||||
@@ -4,11 +4,13 @@
|
||||
* Node for conditional branching.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { ConditionNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const ConditionNode = memo(({ data, selected }: NodeProps<ConditionNodeData>) => {
|
||||
type ConditionNodeType = Node<ConditionNodeData>;
|
||||
|
||||
export const ConditionNode = memo(({ data, selected }: NodeProps<ConditionNodeType>) => {
|
||||
const branchCount = data.branches.length + (data.hasDefault ? 1 : 0);
|
||||
|
||||
return (
|
||||
@@ -39,7 +41,7 @@ export const ConditionNode = memo(({ data, selected }: NodeProps<ConditionNodeDa
|
||||
|
||||
{/* Branches */}
|
||||
<div className="space-y-1">
|
||||
{data.branches.map((branch, index) => (
|
||||
{data.branches.map((branch: { label?: string; when: string }, index: number) => (
|
||||
<div key={index} className="flex items-center justify-between">
|
||||
<div className="relative">
|
||||
{/* Branch Output Handle */}
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
* Node for exporting workflow results to various formats.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { ExportNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const ExportNode = memo(({ data, selected }: NodeProps<ExportNodeData>) => {
|
||||
export const ExportNode = memo(({ data, selected }: NodeProps<Node<ExportNodeData>>) => {
|
||||
const formatLabels: Record<string, string> = {
|
||||
pptx: 'PowerPoint',
|
||||
html: 'HTML',
|
||||
|
||||
@@ -4,11 +4,13 @@
|
||||
* Node for executing hand actions.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { HandNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const HandNode = memo(({ data, selected }: NodeProps<HandNodeData>) => {
|
||||
type HandNodeType = Node<HandNodeData>;
|
||||
|
||||
export const HandNode = memo(({ data, selected }: NodeProps<HandNodeType>) => {
|
||||
const hasHand = Boolean(data.handId);
|
||||
const hasAction = Boolean(data.action);
|
||||
|
||||
|
||||
@@ -4,8 +4,8 @@
|
||||
* Node for making HTTP requests.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { HttpNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
const methodColors: Record<string, string> = {
|
||||
@@ -16,7 +16,7 @@ const methodColors: Record<string, string> = {
|
||||
PATCH: 'bg-purple-100 text-purple-700',
|
||||
};
|
||||
|
||||
export const HttpNode = memo(({ data, selected }: NodeProps<HttpNodeData>) => {
|
||||
export const HttpNode = memo(({ data, selected }: NodeProps<Node<HttpNodeData>>) => {
|
||||
const hasUrl = Boolean(data.url);
|
||||
|
||||
return (
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
* Node for defining workflow input variables.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { InputNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const InputNode = memo(({ data, selected }: NodeProps<InputNodeData>) => {
|
||||
export const InputNode = memo(({ data, selected }: NodeProps<Node<InputNodeData>>) => {
|
||||
return (
|
||||
<div
|
||||
className={`
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
* Node for LLM generation actions.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { LlmNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const LlmNode = memo(({ data, selected }: NodeProps<LlmNodeData>) => {
|
||||
export const LlmNode = memo(({ data, selected }: NodeProps<Node<LlmNodeData>>) => {
|
||||
const templatePreview = data.template.length > 50
|
||||
? data.template.slice(0, 50) + '...'
|
||||
: data.template || 'No template';
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
* Node for executing skill orchestration graphs (DAGs).
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { OrchestrationNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const OrchestrationNode = memo(({ data, selected }: NodeProps<OrchestrationNodeData>) => {
|
||||
export const OrchestrationNode = memo(({ data, selected }: NodeProps<Node<OrchestrationNodeData>>) => {
|
||||
const hasGraphId = Boolean(data.graphId);
|
||||
const hasGraph = Boolean(data.graph);
|
||||
const inputCount = Object.keys(data.inputMappings).length;
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
* Node for parallel execution of steps.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { ParallelNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const ParallelNode = memo(({ data, selected }: NodeProps<ParallelNodeData>) => {
|
||||
export const ParallelNode = memo(({ data, selected }: NodeProps<Node<ParallelNodeData>>) => {
|
||||
return (
|
||||
<div
|
||||
className={`
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
* Node for executing skills.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import { memo } from 'react';
|
||||
import { Handle, Position, NodeProps, Node } from '@xyflow/react';
|
||||
import type { SkillNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const SkillNode = memo(({ data, selected }: NodeProps<SkillNodeData>) => {
|
||||
export const SkillNode = memo(({ data, selected }: NodeProps<Node<SkillNodeData>>) => {
|
||||
const hasSkill = Boolean(data.skillId);
|
||||
|
||||
return (
|
||||
|
||||
340
desktop/src/components/WorkflowRecommendations.tsx
Normal file
340
desktop/src/components/WorkflowRecommendations.tsx
Normal file
@@ -0,0 +1,340 @@
|
||||
/**
|
||||
* Workflow Recommendations Component
|
||||
*
|
||||
* Displays proactive workflow recommendations from the Adaptive Intelligence Mesh.
|
||||
* Shows detected patterns and suggested workflows based on user behavior.
|
||||
*/
|
||||
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { motion, AnimatePresence } from 'framer-motion';
|
||||
import { useMeshStore } from '../store/meshStore';
|
||||
import type { WorkflowRecommendation, BehaviorPattern, PatternTypeVariant } from '../lib/intelligence-client';
|
||||
|
||||
// === Main Component ===
|
||||
|
||||
export const WorkflowRecommendations: React.FC = () => {
|
||||
const {
|
||||
recommendations,
|
||||
patterns,
|
||||
isLoading,
|
||||
error,
|
||||
analyze,
|
||||
acceptRecommendation,
|
||||
dismissRecommendation,
|
||||
} = useMeshStore();
|
||||
|
||||
const [selectedPattern, setSelectedPattern] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
// Initial analysis
|
||||
analyze();
|
||||
}, [analyze]);
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex items-center justify-center p-8">
|
||||
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-500" />
|
||||
<span className="ml-3 text-gray-400">Analyzing patterns...</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className="p-4 bg-red-500/10 border border-red-500/20 rounded-lg">
|
||||
<p className="text-red-400 text-sm">{error}</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Recommendations Section */}
|
||||
<section>
|
||||
<h3 className="text-lg font-semibold text-white mb-4 flex items-center gap-2">
|
||||
<span className="text-2xl">💡</span>
|
||||
Recommended Workflows
|
||||
{recommendations.length > 0 && (
|
||||
<span className="ml-2 px-2 py-0.5 bg-blue-500/20 text-blue-400 text-xs rounded-full">
|
||||
{recommendations.length}
|
||||
</span>
|
||||
)}
|
||||
</h3>
|
||||
|
||||
<AnimatePresence mode="popLayout">
|
||||
{recommendations.length === 0 ? (
|
||||
<motion.div
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
className="p-6 bg-gray-800/30 rounded-lg border border-gray-700/50 text-center"
|
||||
>
|
||||
<p className="text-gray-400">No recommendations available yet.</p>
|
||||
<p className="text-gray-500 text-sm mt-2">
|
||||
Continue using the app to build up behavior patterns.
|
||||
</p>
|
||||
</motion.div>
|
||||
) : (
|
||||
<div className="space-y-3">
|
||||
{recommendations.map((rec) => (
|
||||
<RecommendationCard
|
||||
key={rec.id}
|
||||
recommendation={rec}
|
||||
onAccept={() => acceptRecommendation(rec.id)}
|
||||
onDismiss={() => dismissRecommendation(rec.id)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</section>
|
||||
|
||||
{/* Detected Patterns Section */}
|
||||
<section>
|
||||
<h3 className="text-lg font-semibold text-white mb-4 flex items-center gap-2">
|
||||
<span className="text-2xl">📊</span>
|
||||
Detected Patterns
|
||||
{patterns.length > 0 && (
|
||||
<span className="ml-2 px-2 py-0.5 bg-purple-500/20 text-purple-400 text-xs rounded-full">
|
||||
{patterns.length}
|
||||
</span>
|
||||
)}
|
||||
</h3>
|
||||
|
||||
{patterns.length === 0 ? (
|
||||
<div className="p-6 bg-gray-800/30 rounded-lg border border-gray-700/50 text-center">
|
||||
<p className="text-gray-400">No patterns detected yet.</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="grid gap-3">
|
||||
{patterns.map((pattern) => (
|
||||
<PatternCard
|
||||
key={pattern.id}
|
||||
pattern={pattern}
|
||||
isSelected={selectedPattern === pattern.id}
|
||||
onClick={() =>
|
||||
setSelectedPattern(
|
||||
selectedPattern === pattern.id ? null : pattern.id
|
||||
)
|
||||
}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</section>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
// === Sub-Components ===
|
||||
|
||||
interface RecommendationCardProps {
|
||||
recommendation: WorkflowRecommendation;
|
||||
onAccept: () => void;
|
||||
onDismiss: () => void;
|
||||
}
|
||||
|
||||
const RecommendationCard: React.FC<RecommendationCardProps> = ({
|
||||
recommendation,
|
||||
onAccept,
|
||||
onDismiss,
|
||||
}) => {
|
||||
const confidencePercent = Math.round(recommendation.confidence * 100);
|
||||
|
||||
const getConfidenceColor = (confidence: number) => {
|
||||
if (confidence >= 0.8) return 'text-green-400';
|
||||
if (confidence >= 0.6) return 'text-yellow-400';
|
||||
return 'text-orange-400';
|
||||
};
|
||||
|
||||
return (
|
||||
<motion.div
|
||||
layout
|
||||
initial={{ opacity: 0, y: -10 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
exit={{ opacity: 0, scale: 0.95 }}
|
||||
className="p-4 bg-gray-800/50 rounded-lg border border-gray-700/50 hover:border-blue-500/30 transition-colors"
|
||||
>
|
||||
<div className="flex items-start justify-between gap-4">
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<h4 className="text-white font-medium truncate">
|
||||
{recommendation.pipeline_id}
|
||||
</h4>
|
||||
<span
|
||||
className={`text-xs font-mono ${getConfidenceColor(
|
||||
recommendation.confidence
|
||||
)}`}
|
||||
>
|
||||
{confidencePercent}%
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<p className="text-gray-400 text-sm mb-3">{recommendation.reason}</p>
|
||||
|
||||
{/* Suggested Inputs */}
|
||||
{Object.keys(recommendation.suggested_inputs).length > 0 && (
|
||||
<div className="mb-3">
|
||||
<p className="text-xs text-gray-500 mb-1">Suggested inputs:</p>
|
||||
<div className="flex flex-wrap gap-1">
|
||||
{Object.entries(recommendation.suggested_inputs).map(
|
||||
([key, value]) => (
|
||||
<span
|
||||
key={key}
|
||||
className="px-2 py-0.5 bg-gray-700/50 text-gray-300 text-xs rounded"
|
||||
>
|
||||
{key}: {String(value).slice(0, 20)}
|
||||
</span>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Matched Patterns */}
|
||||
{recommendation.patterns_matched.length > 0 && (
|
||||
<div className="text-xs text-gray-500">
|
||||
Based on {recommendation.patterns_matched.length} pattern(s)
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="flex gap-2 shrink-0">
|
||||
<button
|
||||
onClick={onAccept}
|
||||
className="px-3 py-1.5 bg-blue-500 hover:bg-blue-600 text-white text-sm rounded transition-colors"
|
||||
>
|
||||
Accept
|
||||
</button>
|
||||
<button
|
||||
onClick={onDismiss}
|
||||
className="px-3 py-1.5 bg-gray-700 hover:bg-gray-600 text-gray-300 text-sm rounded transition-colors"
|
||||
>
|
||||
Dismiss
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Confidence Bar */}
|
||||
<div className="mt-3 h-1 bg-gray-700 rounded-full overflow-hidden">
|
||||
<motion.div
|
||||
initial={{ width: 0 }}
|
||||
animate={{ width: `${confidencePercent}%` }}
|
||||
className={`h-full ${
|
||||
recommendation.confidence >= 0.8
|
||||
? 'bg-green-500'
|
||||
: recommendation.confidence >= 0.6
|
||||
? 'bg-yellow-500'
|
||||
: 'bg-orange-500'
|
||||
}`}
|
||||
/>
|
||||
</div>
|
||||
</motion.div>
|
||||
);
|
||||
};
|
||||
|
||||
interface PatternCardProps {
|
||||
pattern: BehaviorPattern;
|
||||
isSelected: boolean;
|
||||
onClick: () => void;
|
||||
}
|
||||
|
||||
const PatternCard: React.FC<PatternCardProps> = ({
|
||||
pattern,
|
||||
isSelected,
|
||||
onClick,
|
||||
}) => {
|
||||
const getPatternTypeLabel = (type: PatternTypeVariant | string) => {
|
||||
// Handle object format
|
||||
const typeStr = typeof type === 'string' ? type : type.type;
|
||||
|
||||
switch (typeStr) {
|
||||
case 'SkillCombination':
|
||||
return { label: 'Skill Combo', icon: '⚡' };
|
||||
case 'TemporalTrigger':
|
||||
return { label: 'Time Trigger', icon: '⏰' };
|
||||
case 'TaskPipelineMapping':
|
||||
return { label: 'Task Mapping', icon: '🔄' };
|
||||
case 'InputPattern':
|
||||
return { label: 'Input Pattern', icon: '📝' };
|
||||
default:
|
||||
return { label: typeStr, icon: '📊' };
|
||||
}
|
||||
};
|
||||
|
||||
const { label, icon } = getPatternTypeLabel(pattern.pattern_type as PatternTypeVariant);
|
||||
const confidencePercent = Math.round(pattern.confidence * 100);
|
||||
|
||||
return (
|
||||
<motion.div
|
||||
layout
|
||||
onClick={onClick}
|
||||
className={`p-3 rounded-lg border cursor-pointer transition-colors ${
|
||||
isSelected
|
||||
? 'bg-purple-500/10 border-purple-500/50'
|
||||
: 'bg-gray-800/30 border-gray-700/50 hover:border-gray-600'
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-lg">{icon}</span>
|
||||
<span className="text-white font-medium">{label}</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-xs text-gray-400">
|
||||
{pattern.frequency}x used
|
||||
</span>
|
||||
<span
|
||||
className={`text-xs font-mono ${
|
||||
pattern.confidence >= 0.6
|
||||
? 'text-green-400'
|
||||
: 'text-yellow-400'
|
||||
}`}
|
||||
>
|
||||
{confidencePercent}%
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<AnimatePresence>
|
||||
{isSelected && (
|
||||
<motion.div
|
||||
initial={{ height: 0, opacity: 0 }}
|
||||
animate={{ height: 'auto', opacity: 1 }}
|
||||
exit={{ height: 0, opacity: 0 }}
|
||||
className="mt-3 pt-3 border-t border-gray-700/50 overflow-hidden"
|
||||
>
|
||||
<div className="space-y-2 text-sm">
|
||||
<div>
|
||||
<span className="text-gray-500">ID:</span>{' '}
|
||||
<span className="text-gray-300 font-mono text-xs">
|
||||
{pattern.id}
|
||||
</span>
|
||||
</div>
|
||||
<div>
|
||||
<span className="text-gray-500">First seen:</span>{' '}
|
||||
<span className="text-gray-300">
|
||||
{new Date(pattern.first_occurrence).toLocaleDateString()}
|
||||
</span>
|
||||
</div>
|
||||
<div>
|
||||
<span className="text-gray-500">Last seen:</span>{' '}
|
||||
<span className="text-gray-300">
|
||||
{new Date(pattern.last_occurrence).toLocaleDateString()}
|
||||
</span>
|
||||
</div>
|
||||
{pattern.context.intent && (
|
||||
<div>
|
||||
<span className="text-gray-500">Intent:</span>{' '}
|
||||
<span className="text-gray-300">{pattern.context.intent}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</motion.div>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</motion.div>
|
||||
);
|
||||
};
|
||||
|
||||
export default WorkflowRecommendations;
|
||||
@@ -128,8 +128,142 @@ export type {
|
||||
IdentityFiles,
|
||||
IdentityChangeProposal,
|
||||
IdentitySnapshot,
|
||||
MemoryEntryForAnalysis,
|
||||
} from './intelligence-backend';
|
||||
|
||||
// === Mesh Types ===
|
||||
|
||||
export interface BehaviorPattern {
|
||||
id: string;
|
||||
pattern_type: PatternTypeVariant;
|
||||
frequency: number;
|
||||
last_occurrence: string;
|
||||
first_occurrence: string;
|
||||
confidence: number;
|
||||
context: PatternContext;
|
||||
}
|
||||
|
||||
export function getPatternTypeString(patternType: PatternTypeVariant): string {
|
||||
if (typeof patternType === 'string') {
|
||||
return patternType;
|
||||
}
|
||||
return patternType.type;
|
||||
}
|
||||
|
||||
export type PatternTypeVariant =
|
||||
| { type: 'SkillCombination'; skill_ids: string[] }
|
||||
| { type: 'TemporalTrigger'; hand_id: string; time_pattern: string }
|
||||
| { type: 'TaskPipelineMapping'; task_type: string; pipeline_id: string }
|
||||
| { type: 'InputPattern'; keywords: string[]; intent: string };
|
||||
|
||||
export interface PatternContext {
|
||||
skill_ids?: string[];
|
||||
recent_topics?: string[];
|
||||
intent?: string;
|
||||
time_of_day?: number;
|
||||
day_of_week?: number;
|
||||
}
|
||||
|
||||
export interface WorkflowRecommendation {
|
||||
id: string;
|
||||
pipeline_id: string;
|
||||
confidence: number;
|
||||
reason: string;
|
||||
suggested_inputs: Record<string, unknown>;
|
||||
patterns_matched: string[];
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
export interface MeshConfig {
|
||||
enabled: boolean;
|
||||
min_confidence: number;
|
||||
max_recommendations: number;
|
||||
analysis_window_hours: number;
|
||||
}
|
||||
|
||||
export interface MeshAnalysisResult {
|
||||
recommendations: WorkflowRecommendation[];
|
||||
patterns_detected: number;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
export type ActivityType =
|
||||
| { type: 'skill_used'; skill_ids: string[] }
|
||||
| { type: 'pipeline_executed'; task_type: string; pipeline_id: string }
|
||||
| { type: 'input_received'; keywords: string[]; intent: string };
|
||||
|
||||
// === Persona Evolver Types ===
|
||||
|
||||
export type EvolutionChangeType =
|
||||
| 'instruction_addition'
|
||||
| 'instruction_refinement'
|
||||
| 'trait_addition'
|
||||
| 'style_adjustment'
|
||||
| 'domain_expansion';
|
||||
|
||||
export type InsightCategory =
|
||||
| 'communication_style'
|
||||
| 'technical_expertise'
|
||||
| 'task_efficiency'
|
||||
| 'user_preference'
|
||||
| 'knowledge_gap';
|
||||
|
||||
export type IdentityFileType = 'soul' | 'instructions';
|
||||
export type ProposalStatus = 'pending' | 'approved' | 'rejected';
|
||||
|
||||
export interface EvolutionProposal {
|
||||
id: string;
|
||||
agent_id: string;
|
||||
target_file: IdentityFileType;
|
||||
change_type: EvolutionChangeType;
|
||||
reason: string;
|
||||
current_content: string;
|
||||
proposed_content: string;
|
||||
confidence: number;
|
||||
evidence: string[];
|
||||
status: ProposalStatus;
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
export interface ProfileUpdate {
|
||||
section: string;
|
||||
previous: string;
|
||||
updated: string;
|
||||
source: string;
|
||||
}
|
||||
|
||||
export interface EvolutionInsight {
|
||||
category: InsightCategory;
|
||||
observation: string;
|
||||
recommendation: string;
|
||||
confidence: number;
|
||||
}
|
||||
|
||||
export interface EvolutionResult {
|
||||
agent_id: string;
|
||||
timestamp: string;
|
||||
profile_updates: ProfileUpdate[];
|
||||
proposals: EvolutionProposal[];
|
||||
insights: EvolutionInsight[];
|
||||
evolved: boolean;
|
||||
}
|
||||
|
||||
export interface PersonaEvolverConfig {
|
||||
auto_profile_update: boolean;
|
||||
min_preferences_for_update: number;
|
||||
min_conversations_for_evolution: number;
|
||||
enable_instruction_refinement: boolean;
|
||||
enable_soul_evolution: boolean;
|
||||
max_proposals_per_cycle: number;
|
||||
}
|
||||
|
||||
export interface PersonaEvolverState {
|
||||
last_evolution: string | null;
|
||||
total_evolutions: number;
|
||||
pending_proposals: number;
|
||||
profile_enrichment_score: number;
|
||||
}
|
||||
|
||||
// === Type Conversion Utilities ===
|
||||
|
||||
/**
|
||||
|
||||
@@ -753,36 +753,210 @@ export class KernelClient {
|
||||
});
|
||||
}
|
||||
|
||||
// === Triggers API (stubs for compatibility) ===
|
||||
// === Triggers API ===
|
||||
|
||||
async listTriggers(): Promise<{ triggers?: { id: string; type: string; enabled: boolean }[] }> {
|
||||
return { triggers: [] };
|
||||
/**
|
||||
* List all triggers
|
||||
* Returns empty array on error for graceful degradation
|
||||
*/
|
||||
async listTriggers(): Promise<{
|
||||
triggers?: Array<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
}>
|
||||
}> {
|
||||
try {
|
||||
const triggers = await invoke<Array<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
}>>('trigger_list');
|
||||
return { triggers };
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] listTriggers failed: ${this.formatError(error)}`);
|
||||
return { triggers: [] };
|
||||
}
|
||||
}
|
||||
|
||||
async getTrigger(_id: string): Promise<{ id: string; type: string; enabled: boolean } | null> {
|
||||
return null;
|
||||
/**
|
||||
* Get a single trigger by ID
|
||||
* Returns null on error for graceful degradation
|
||||
*/
|
||||
async getTrigger(id: string): Promise<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
} | null> {
|
||||
try {
|
||||
return await invoke<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
} | null>('trigger_get', { id });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] getTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async createTrigger(_trigger: { type: string; name?: string; enabled?: boolean; config?: Record<string, unknown>; handName?: string; workflowId?: string }): Promise<{ id?: string } | null> {
|
||||
return null;
|
||||
/**
|
||||
* Create a new trigger
|
||||
* Returns null on error for graceful degradation
|
||||
*/
|
||||
async createTrigger(trigger: {
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: { type: string; cron?: string; pattern?: string; path?: string; secret?: string; events?: string[] };
|
||||
enabled?: boolean;
|
||||
description?: string;
|
||||
tags?: string[];
|
||||
}): Promise<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
} | null> {
|
||||
try {
|
||||
return await invoke<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
}>('trigger_create', { request: trigger });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] createTrigger(${trigger.id}) failed: ${this.formatError(error)}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async updateTrigger(_id: string, _updates: { name?: string; enabled?: boolean; config?: Record<string, unknown>; handName?: string; workflowId?: string }): Promise<{ id: string }> {
|
||||
throw new Error('Triggers not implemented');
|
||||
/**
|
||||
* Update an existing trigger
|
||||
* Throws on error as this is a mutation operation that callers need to handle
|
||||
*/
|
||||
async updateTrigger(id: string, updates: {
|
||||
name?: string;
|
||||
enabled?: boolean;
|
||||
handId?: string;
|
||||
triggerType?: { type: string; cron?: string; pattern?: string; path?: string; secret?: string; events?: string[] };
|
||||
}): Promise<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
}> {
|
||||
try {
|
||||
return await invoke<{
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
}>('trigger_update', { id, updates });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] updateTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async deleteTrigger(_id: string): Promise<{ status: string }> {
|
||||
throw new Error('Triggers not implemented');
|
||||
/**
|
||||
* Delete a trigger
|
||||
* Throws on error as this is a destructive operation that callers need to handle
|
||||
*/
|
||||
async deleteTrigger(id: string): Promise<void> {
|
||||
try {
|
||||
await invoke('trigger_delete', { id });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] deleteTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// === Approvals API (stubs for compatibility) ===
|
||||
|
||||
async listApprovals(_status?: string): Promise<{ approvals?: unknown[] }> {
|
||||
return { approvals: [] };
|
||||
/**
|
||||
* Execute a trigger
|
||||
* Throws on error as callers need to know if execution failed
|
||||
*/
|
||||
async executeTrigger(id: string, input?: Record<string, unknown>): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
return await invoke<Record<string, unknown>>('trigger_execute', { id, input: input || {} });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] executeTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async respondToApproval(_approvalId: string, _approved: boolean, _reason?: string): Promise<{ status: string }> {
|
||||
throw new Error('Approvals not implemented');
|
||||
// === Approvals API ===
|
||||
|
||||
async listApprovals(_status?: string): Promise<{
|
||||
approvals: Array<{
|
||||
id: string;
|
||||
handId: string;
|
||||
status: string;
|
||||
createdAt: string;
|
||||
input: Record<string, unknown>;
|
||||
}>
|
||||
}> {
|
||||
try {
|
||||
const approvals = await invoke<Array<{
|
||||
id: string;
|
||||
handId: string;
|
||||
status: string;
|
||||
createdAt: string;
|
||||
input: Record<string, unknown>;
|
||||
}>>('approval_list');
|
||||
return { approvals };
|
||||
} catch (error) {
|
||||
console.error('[kernel-client] listApprovals error:', error);
|
||||
return { approvals: [] };
|
||||
}
|
||||
}
|
||||
|
||||
async respondToApproval(approvalId: string, approved: boolean, reason?: string): Promise<void> {
|
||||
return invoke('approval_respond', { id: approvalId, approved, reason });
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -871,6 +1045,16 @@ export class KernelClient {
|
||||
private log(level: string, message: string): void {
|
||||
this.onLog?.(level, message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Format error for consistent logging
|
||||
*/
|
||||
private formatError(error: unknown): string {
|
||||
if (error instanceof Error) {
|
||||
return error.message;
|
||||
}
|
||||
return String(error);
|
||||
}
|
||||
}
|
||||
|
||||
// === Singleton ===
|
||||
|
||||
@@ -139,7 +139,6 @@ export class PipelineRecommender {
|
||||
}
|
||||
|
||||
const recommendations: PipelineRecommendation[] = [];
|
||||
const messageLower = message.toLowerCase();
|
||||
|
||||
for (const pattern of INTENT_PATTERNS) {
|
||||
const matches = pattern.keywords
|
||||
|
||||
174
desktop/src/lib/viking-client.ts
Normal file
174
desktop/src/lib/viking-client.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
/**
|
||||
* OpenViking Client - Semantic Memory Operations
|
||||
*
|
||||
* Client for interacting with OpenViking CLI sidecar.
|
||||
* Provides semantic search, resource management, and knowledge base operations.
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
|
||||
// === Types ===
|
||||
|
||||
export interface VikingStatus {
|
||||
available: boolean;
|
||||
version?: string;
|
||||
dataDir?: string;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface VikingResource {
|
||||
uri: string;
|
||||
name: string;
|
||||
resourceType: string;
|
||||
size?: number;
|
||||
modifiedAt?: string;
|
||||
}
|
||||
|
||||
export interface VikingFindResult {
|
||||
uri: string;
|
||||
score: number;
|
||||
content: string;
|
||||
level: string;
|
||||
overview?: string;
|
||||
}
|
||||
|
||||
export interface VikingGrepResult {
|
||||
uri: string;
|
||||
line: number;
|
||||
content: string;
|
||||
matchStart: number;
|
||||
matchEnd: number;
|
||||
}
|
||||
|
||||
export interface VikingAddResult {
|
||||
uri: string;
|
||||
status: string;
|
||||
}
|
||||
|
||||
// === Client Functions ===
|
||||
|
||||
/**
|
||||
* Check if OpenViking CLI is available
|
||||
*/
|
||||
export async function getVikingStatus(): Promise<VikingStatus> {
|
||||
return invoke<VikingStatus>('viking_status');
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a resource to OpenViking from file
|
||||
*/
|
||||
export async function addVikingResource(
|
||||
uri: string,
|
||||
content: string
|
||||
): Promise<VikingAddResult> {
|
||||
return invoke<VikingAddResult>('viking_add', { uri, content });
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a resource with inline content
|
||||
*/
|
||||
export async function addVikingResourceInline(
|
||||
uri: string,
|
||||
content: string
|
||||
): Promise<VikingAddResult> {
|
||||
return invoke<VikingAddResult>('viking_add_inline', { uri, content });
|
||||
}
|
||||
|
||||
/**
|
||||
* Find resources by semantic search
|
||||
*/
|
||||
export async function findVikingResources(
|
||||
query: string,
|
||||
scope?: string,
|
||||
limit?: number
|
||||
): Promise<VikingFindResult[]> {
|
||||
return invoke<VikingFindResult[]>('viking_find', { query, scope, limit });
|
||||
}
|
||||
|
||||
/**
|
||||
* Grep resources by pattern
|
||||
*/
|
||||
export async function grepVikingResources(
|
||||
pattern: string,
|
||||
uri?: string,
|
||||
caseSensitive?: boolean,
|
||||
limit?: number
|
||||
): Promise<VikingGrepResult[]> {
|
||||
return invoke<VikingGrepResult[]>('viking_grep', {
|
||||
pattern,
|
||||
uri,
|
||||
caseSensitive,
|
||||
limit,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* List resources at a path
|
||||
*/
|
||||
export async function listVikingResources(path: string): Promise<VikingResource[]> {
|
||||
return invoke<VikingResource[]>('viking_ls', { path });
|
||||
}
|
||||
|
||||
/**
|
||||
* Read resource content
|
||||
*/
|
||||
export async function readVikingResource(
|
||||
uri: string,
|
||||
level?: string
|
||||
): Promise<string> {
|
||||
return invoke<string>('viking_read', { uri, level });
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a resource
|
||||
*/
|
||||
export async function removeVikingResource(uri: string): Promise<void> {
|
||||
return invoke<void>('viking_remove', { uri });
|
||||
}
|
||||
|
||||
/**
|
||||
* Get resource tree
|
||||
*/
|
||||
export async function getVikingTree(
|
||||
path: string,
|
||||
depth?: number
|
||||
): Promise<Record<string, unknown>> {
|
||||
return invoke<Record<string, unknown>>('viking_tree', { path, depth });
|
||||
}
|
||||
|
||||
// === Server Functions ===
|
||||
|
||||
export interface VikingServerStatus {
|
||||
running: boolean;
|
||||
port?: number;
|
||||
pid?: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Viking server status
|
||||
*/
|
||||
export async function getVikingServerStatus(): Promise<VikingServerStatus> {
|
||||
return invoke<VikingServerStatus>('viking_server_status');
|
||||
}
|
||||
|
||||
/**
|
||||
* Start Viking server
|
||||
*/
|
||||
export async function startVikingServer(): Promise<void> {
|
||||
return invoke<void>('viking_server_start');
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop Viking server
|
||||
*/
|
||||
export async function stopVikingServer(): Promise<void> {
|
||||
return invoke<void>('viking_server_stop');
|
||||
}
|
||||
|
||||
/**
|
||||
* Restart Viking server
|
||||
*/
|
||||
export async function restartVikingServer(): Promise<void> {
|
||||
return invoke<void>('viking_server_restart');
|
||||
}
|
||||
@@ -43,10 +43,13 @@ export interface WorkspaceInfo {
|
||||
export interface ChannelInfo {
|
||||
id: string;
|
||||
type: string;
|
||||
name: string;
|
||||
label: string;
|
||||
status: 'active' | 'inactive' | 'error';
|
||||
enabled?: boolean;
|
||||
accounts?: number;
|
||||
error?: string;
|
||||
config?: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface ScheduledTask {
|
||||
@@ -292,12 +295,13 @@ export const useConfigStore = create<ConfigStateSlice & ConfigActionsSlice>((set
|
||||
channels.push({
|
||||
id: 'feishu',
|
||||
type: 'feishu',
|
||||
name: 'feishu',
|
||||
label: '飞书 (Feishu)',
|
||||
status: feishu?.configured ? 'active' : 'inactive',
|
||||
accounts: feishu?.accounts || 0,
|
||||
});
|
||||
} catch {
|
||||
channels.push({ id: 'feishu', type: 'feishu', label: '飞书 (Feishu)', status: 'inactive' });
|
||||
channels.push({ id: 'feishu', type: 'feishu', name: 'feishu', label: '飞书 (Feishu)', status: 'inactive' });
|
||||
}
|
||||
|
||||
set({ channels });
|
||||
|
||||
161
desktop/src/store/meshStore.ts
Normal file
161
desktop/src/store/meshStore.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
/**
|
||||
* Mesh Store - State management for Adaptive Intelligence Mesh
|
||||
*
|
||||
* Manages workflow recommendations and behavior patterns.
|
||||
*/
|
||||
|
||||
import { create } from 'zustand';
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import type {
|
||||
WorkflowRecommendation,
|
||||
BehaviorPattern,
|
||||
MeshConfig,
|
||||
MeshAnalysisResult,
|
||||
PatternContext,
|
||||
ActivityType,
|
||||
} from '../lib/intelligence-client';
|
||||
|
||||
// === Types ===
|
||||
|
||||
export interface MeshState {
|
||||
// State
|
||||
recommendations: WorkflowRecommendation[];
|
||||
patterns: BehaviorPattern[];
|
||||
config: MeshConfig;
|
||||
isLoading: boolean;
|
||||
error: string | null;
|
||||
lastAnalysis: string | null;
|
||||
|
||||
// Actions
|
||||
analyze: () => Promise<void>;
|
||||
acceptRecommendation: (recommendationId: string) => Promise<void>;
|
||||
dismissRecommendation: (recommendationId: string) => Promise<void>;
|
||||
recordActivity: (activity: ActivityType, context: PatternContext) => Promise<void>;
|
||||
getPatterns: () => Promise<void>;
|
||||
updateConfig: (config: Partial<MeshConfig>) => Promise<void>;
|
||||
decayPatterns: () => Promise<void>;
|
||||
clearError: () => void;
|
||||
}
|
||||
|
||||
// === Store ===
|
||||
|
||||
export const useMeshStore = create<MeshState>((set, get) => ({
|
||||
// Initial state
|
||||
recommendations: [],
|
||||
patterns: [],
|
||||
config: {
|
||||
enabled: true,
|
||||
min_confidence: 0.6,
|
||||
max_recommendations: 5,
|
||||
analysis_window_hours: 24,
|
||||
},
|
||||
isLoading: false,
|
||||
error: null,
|
||||
lastAnalysis: null,
|
||||
|
||||
// Actions
|
||||
analyze: async () => {
|
||||
set({ isLoading: true, error: null });
|
||||
try {
|
||||
const agentId = localStorage.getItem('currentAgentId') || 'default';
|
||||
const result = await invoke<MeshAnalysisResult>('mesh_analyze', { agentId });
|
||||
|
||||
set({
|
||||
recommendations: result.recommendations,
|
||||
patterns: [], // Will be populated by getPatterns
|
||||
lastAnalysis: result.timestamp,
|
||||
isLoading: false,
|
||||
});
|
||||
|
||||
// Also fetch patterns
|
||||
await get().getPatterns();
|
||||
} catch (err) {
|
||||
set({
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
isLoading: false,
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
acceptRecommendation: async (recommendationId: string) => {
|
||||
try {
|
||||
const agentId = localStorage.getItem('currentAgentId') || 'default';
|
||||
await invoke('mesh_accept_recommendation', { agentId, recommendationId });
|
||||
|
||||
// Remove from local state
|
||||
set((state) => ({
|
||||
recommendations: state.recommendations.filter((r) => r.id !== recommendationId),
|
||||
}));
|
||||
} catch (err) {
|
||||
set({ error: err instanceof Error ? err.message : String(err) });
|
||||
}
|
||||
},
|
||||
|
||||
dismissRecommendation: async (recommendationId: string) => {
|
||||
try {
|
||||
const agentId = localStorage.getItem('currentAgentId') || 'default';
|
||||
await invoke('mesh_dismiss_recommendation', { agentId, recommendationId });
|
||||
|
||||
// Remove from local state
|
||||
set((state) => ({
|
||||
recommendations: state.recommendations.filter((r) => r.id !== recommendationId),
|
||||
}));
|
||||
} catch (err) {
|
||||
set({ error: err instanceof Error ? err.message : String(err) });
|
||||
}
|
||||
},
|
||||
|
||||
recordActivity: async (activity: ActivityType, context: PatternContext) => {
|
||||
try {
|
||||
const agentId = localStorage.getItem('currentAgentId') || 'default';
|
||||
await invoke('mesh_record_activity', { agentId, activityType: activity, context });
|
||||
} catch (err) {
|
||||
console.error('Failed to record activity:', err);
|
||||
}
|
||||
},
|
||||
|
||||
getPatterns: async () => {
|
||||
try {
|
||||
const agentId = localStorage.getItem('currentAgentId') || 'default';
|
||||
const patterns = await invoke<BehaviorPattern[]>('mesh_get_patterns', { agentId });
|
||||
set({ patterns });
|
||||
} catch (err) {
|
||||
console.error('Failed to get patterns:', err);
|
||||
}
|
||||
},
|
||||
|
||||
updateConfig: async (config: Partial<MeshConfig>) => {
|
||||
try {
|
||||
const agentId = localStorage.getItem('currentAgentId') || 'default';
|
||||
const newConfig = { ...get().config, ...config };
|
||||
await invoke('mesh_update_config', { agentId, config: newConfig });
|
||||
set({ config: newConfig });
|
||||
} catch (err) {
|
||||
set({ error: err instanceof Error ? err.message : String(err) });
|
||||
}
|
||||
},
|
||||
|
||||
decayPatterns: async () => {
|
||||
try {
|
||||
const agentId = localStorage.getItem('currentAgentId') || 'default';
|
||||
await invoke('mesh_decay_patterns', { agentId });
|
||||
// Refresh patterns after decay
|
||||
await get().getPatterns();
|
||||
} catch (err) {
|
||||
console.error('Failed to decay patterns:', err);
|
||||
}
|
||||
},
|
||||
|
||||
clearError: () => set({ error: null }),
|
||||
}));
|
||||
|
||||
// === Types for intelligence-client ===
|
||||
|
||||
export type {
|
||||
WorkflowRecommendation,
|
||||
BehaviorPattern,
|
||||
MeshConfig,
|
||||
MeshAnalysisResult,
|
||||
PatternContext,
|
||||
ActivityType,
|
||||
};
|
||||
195
desktop/src/store/personaStore.ts
Normal file
195
desktop/src/store/personaStore.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
/**
|
||||
* Persona Evolution Store
|
||||
*
|
||||
* Manages persona evolution state and proposals.
|
||||
*/
|
||||
|
||||
import { create } from 'zustand';
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import type {
|
||||
EvolutionResult,
|
||||
EvolutionProposal,
|
||||
PersonaEvolverConfig,
|
||||
PersonaEvolverState,
|
||||
MemoryEntryForAnalysis,
|
||||
} from '../lib/intelligence-client';
|
||||
|
||||
export interface PersonaEvolutionStore {
|
||||
// State
|
||||
currentAgentId: string;
|
||||
proposals: EvolutionProposal[];
|
||||
history: EvolutionResult[];
|
||||
isLoading: boolean;
|
||||
error: string | null;
|
||||
config: PersonaEvolverConfig | null;
|
||||
state: PersonaEvolverState | null;
|
||||
showProposalsPanel: boolean;
|
||||
|
||||
// Actions
|
||||
setCurrentAgentId: (agentId: string) => void;
|
||||
setShowProposalsPanel: (show: boolean) => void;
|
||||
|
||||
// Evolution Actions
|
||||
runEvolution: (memories: MemoryEntryForAnalysis[]) => Promise<EvolutionResult | null>;
|
||||
loadEvolutionHistory: (limit?: number) => Promise<void>;
|
||||
loadEvolverState: () => Promise<void>;
|
||||
loadEvolverConfig: () => Promise<void>;
|
||||
updateConfig: (config: Partial<PersonaEvolverConfig>) => Promise<void>;
|
||||
|
||||
// Proposal Actions
|
||||
getPendingProposals: () => EvolutionProposal[];
|
||||
applyProposal: (proposal: EvolutionProposal) => Promise<boolean>;
|
||||
dismissProposal: (proposalId: string) => void;
|
||||
clearProposals: () => void;
|
||||
}
|
||||
|
||||
export const usePersonaEvolutionStore = create<PersonaEvolutionStore>((set, get) => ({
|
||||
// Initial State
|
||||
currentAgentId: '',
|
||||
proposals: [],
|
||||
history: [],
|
||||
isLoading: false,
|
||||
error: null,
|
||||
config: null,
|
||||
state: null,
|
||||
showProposalsPanel: false,
|
||||
|
||||
// Setters
|
||||
setCurrentAgentId: (agentId: string) => set({ currentAgentId: agentId }),
|
||||
setShowProposalsPanel: (show: boolean) => set({ showProposalsPanel: show }),
|
||||
|
||||
// Run evolution cycle for current agent
|
||||
runEvolution: async (memories: MemoryEntryForAnalysis[]) => {
|
||||
const { currentAgentId } = get();
|
||||
if (!currentAgentId) {
|
||||
set({ error: 'No agent selected' });
|
||||
return null;
|
||||
}
|
||||
|
||||
set({ isLoading: true, error: null });
|
||||
|
||||
try {
|
||||
const result = await invoke<EvolutionResult>('persona_evolve', {
|
||||
agentId: currentAgentId,
|
||||
memories,
|
||||
});
|
||||
|
||||
// Update state with results
|
||||
set((state) => ({
|
||||
history: [result, ...state.history].slice(0, 20),
|
||||
proposals: [...result.proposals, ...state.proposals],
|
||||
isLoading: false,
|
||||
showProposalsPanel: result.proposals.length > 0,
|
||||
}));
|
||||
|
||||
return result;
|
||||
} catch (err) {
|
||||
const errorMsg = err instanceof Error ? err.message : String(err);
|
||||
set({ error: errorMsg, isLoading: false });
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
// Load evolution history
|
||||
loadEvolutionHistory: async (limit = 10) => {
|
||||
set({ isLoading: true, error: null });
|
||||
|
||||
try {
|
||||
const history = await invoke<EvolutionResult[]>('persona_evolution_history', {
|
||||
limit,
|
||||
});
|
||||
set({ history, isLoading: false });
|
||||
} catch (err) {
|
||||
const errorMsg = err instanceof Error ? err.message : String(err);
|
||||
set({ error: errorMsg, isLoading: false });
|
||||
}
|
||||
},
|
||||
|
||||
// Load evolver state
|
||||
loadEvolverState: async () => {
|
||||
try {
|
||||
const state = await invoke<PersonaEvolverState>('persona_evolver_state');
|
||||
set({ state });
|
||||
} catch (err) {
|
||||
console.error('[PersonaStore] Failed to load evolver state:', err);
|
||||
}
|
||||
},
|
||||
|
||||
// Load evolver config
|
||||
loadEvolverConfig: async () => {
|
||||
try {
|
||||
const config = await invoke<PersonaEvolverConfig>('persona_evolver_config');
|
||||
set({ config });
|
||||
} catch (err) {
|
||||
console.error('[PersonaStore] Failed to load evolver config:', err);
|
||||
}
|
||||
},
|
||||
|
||||
// Update evolver config
|
||||
updateConfig: async (newConfig: Partial<PersonaEvolverConfig>) => {
|
||||
const { config } = get();
|
||||
if (!config) return;
|
||||
|
||||
const updatedConfig = { ...config, ...newConfig };
|
||||
|
||||
try {
|
||||
await invoke('persona_evolver_update_config', { config: updatedConfig });
|
||||
set({ config: updatedConfig });
|
||||
} catch (err) {
|
||||
const errorMsg = err instanceof Error ? err.message : String(err);
|
||||
set({ error: errorMsg });
|
||||
}
|
||||
},
|
||||
|
||||
// Get pending proposals sorted by confidence
|
||||
getPendingProposals: () => {
|
||||
const { proposals } = get();
|
||||
return proposals
|
||||
.filter((p) => p.status === 'pending')
|
||||
.sort((a, b) => b.confidence - a.confidence);
|
||||
},
|
||||
|
||||
// Apply a proposal (approve)
|
||||
applyProposal: async (proposal: EvolutionProposal) => {
|
||||
set({ isLoading: true, error: null });
|
||||
|
||||
try {
|
||||
await invoke('persona_apply_proposal', { proposal });
|
||||
|
||||
// Remove from pending list
|
||||
set((state) => ({
|
||||
proposals: state.proposals.filter((p) => p.id !== proposal.id),
|
||||
isLoading: false,
|
||||
}));
|
||||
|
||||
return true;
|
||||
} catch (err) {
|
||||
const errorMsg = err instanceof Error ? err.message : String(err);
|
||||
set({ error: errorMsg, isLoading: false });
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
// Dismiss a proposal (reject)
|
||||
dismissProposal: (proposalId: string) => {
|
||||
set((state) => ({
|
||||
proposals: state.proposals.filter((p) => p.id !== proposalId),
|
||||
}));
|
||||
},
|
||||
|
||||
// Clear all proposals
|
||||
clearProposals: () => set({ proposals: [] }),
|
||||
}));
|
||||
|
||||
// Export convenience hooks
|
||||
export const usePendingProposals = () =>
|
||||
usePersonaEvolutionStore((state) => state.getPendingProposals());
|
||||
|
||||
export const useEvolutionHistory = () =>
|
||||
usePersonaEvolutionStore((state) => state.history);
|
||||
|
||||
export const useEvolverConfig = () =>
|
||||
usePersonaEvolutionStore((state) => state.config);
|
||||
|
||||
export const useEvolverState = () =>
|
||||
usePersonaEvolutionStore((state) => state.state);
|
||||
Reference in New Issue
Block a user