Files
zclaw_openfang/desktop/src-tauri/src/intelligence/reflection.rs
iven 2843bd204f
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
chore: 更新测试注释 — 阈值已从 5 降为 3
2026-04-11 14:26:53 +08:00

1016 lines
36 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

//! Reflection Engine - Agent self-improvement through conversation analysis
//!
//! Periodically analyzes recent conversations to:
//! - Identify behavioral patterns (positive and negative)
//! - Generate improvement suggestions
//! - Propose identity file changes (with user approval)
//! - Create meta-memories about agent performance
//!
//! Phase 3 of Intelligence Layer Migration.
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.4.2
//!
//! NOTE: Some methods are reserved for future self-improvement features.
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime,
// which the Rust compiler does not track as "use". Module-level allow required
// for Tauri-commanded functions. Genuinely unused methods annotated individually.
#![allow(dead_code)]
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
// Re-export from zclaw-runtime for LLM integration
use zclaw_runtime::driver::{CompletionRequest, ContentBlock, LlmDriver};
// === Types ===
/// Reflection configuration
///
/// Controls when a reflection cycle triggers and how analysis is performed.
/// The serde defaults mirror the `default_*` helper functions below.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReflectionConfig {
/// Number of recorded conversations that triggers a reflection cycle.
#[serde(default = "default_trigger_conversations")]
pub trigger_after_conversations: usize,
/// Wall-clock hours since the last reflection that triggers a new one.
#[serde(default = "default_trigger_hours")]
pub trigger_after_hours: u64,
/// Whether reflection may propose identity ("soul") changes.
/// NOTE(review): serde's bare `#[serde(default)]` yields `false` here while
/// `Default::default()` sets `true` — confirm which is intended.
#[serde(default)]
pub allow_soul_modification: bool,
/// Whether identity proposals require user approval before being applied.
#[serde(default = "default_require_approval")]
pub require_approval: bool,
/// Use LLM-based pattern analysis when a driver is available.
#[serde(default = "default_use_llm")]
pub use_llm: bool,
/// Fall back to rule-based analysis when the LLM call fails.
#[serde(default = "default_llm_fallback")]
pub llm_fallback_to_rules: bool,
}
// Serde default helpers — keep in sync with `impl Default for ReflectionConfig`.
fn default_trigger_conversations() -> usize { 5 }
fn default_trigger_hours() -> u64 { 24 }
fn default_require_approval() -> bool { true }
fn default_use_llm() -> bool { true }
fn default_llm_fallback() -> bool { true }
impl Default for ReflectionConfig {
fn default() -> Self {
Self {
trigger_after_conversations: 5,
trigger_after_hours: 24,
allow_soul_modification: true, // Allow soul modification by default for self-evolution
require_approval: true,
use_llm: true,
llm_fallback_to_rules: true,
}
}
}
/// Observed pattern from analysis
///
/// One behavioral observation plus supporting evidence snippets.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PatternObservation {
/// Human-readable description of the pattern.
pub observation: String,
/// How often the pattern was seen (a count in rule-based analysis;
/// an LLM-estimated 1-10 in the LLM path).
pub frequency: usize,
/// Whether the pattern is good, bad, or neutral for the agent.
pub sentiment: Sentiment,
/// Memory-content snippets supporting the observation.
pub evidence: Vec<String>,
}
/// Sentiment of an observed pattern; serialized lowercase ("positive", ...).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Sentiment {
Positive,
Negative,
Neutral,
}
/// Improvement suggestion
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImprovementSuggestion {
/// Area of agent behavior the suggestion applies to.
pub area: String,
/// The suggested action, in natural language.
pub suggestion: String,
/// How urgent the suggestion is.
pub priority: Priority,
}
/// Suggestion priority; serialized lowercase ("high", "medium", "low").
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Priority {
High,
Medium,
Low,
}
/// Identity change proposal
///
/// A proposed edit to one field of an agent's identity file; subject to
/// user approval when the configuration requires it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IdentityChangeProposal {
/// Agent whose identity would change.
pub agent_id: String,
/// Identity field to modify (e.g. "instructions").
pub field: String,
/// Current field value ("..." when not expanded by the proposer).
pub current_value: String,
/// Proposed text (appears to be an append fragment — see
/// `propose_identity_changes`).
pub proposed_value: String,
/// Why this change is being proposed.
pub reason: String,
}
/// Result of reflection
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReflectionResult {
/// Behavioral patterns observed in the analyzed memories.
pub patterns: Vec<PatternObservation>,
/// Actionable improvement suggestions derived from the patterns.
pub improvements: Vec<ImprovementSuggestion>,
/// Proposed identity changes (empty unless soul modification is allowed).
pub identity_proposals: Vec<IdentityChangeProposal>,
/// Number of meta-memories that would be created from this reflection.
pub new_memories: usize,
/// RFC 3339 timestamp of when the reflection ran.
pub timestamp: String,
/// P2-07: Whether rules-based fallback was used instead of LLM
#[serde(default)]
pub used_fallback: bool,
}
/// Reflection state
///
/// Trigger-tracking counters, persisted between sessions via VikingStorage.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReflectionState {
/// Conversations recorded since the last reflection (reset by `reflect`).
pub conversations_since_reflection: usize,
/// RFC 3339 timestamp of the last reflection, if any.
pub last_reflection_time: Option<String>,
/// Agent the last reflection ran for, if any.
pub last_reflection_agent_id: Option<String>,
}
// NOTE(review): every field defaults to zero/None, so this manual impl could
// be replaced by adding `Default` to the struct's derive list.
impl Default for ReflectionState {
fn default() -> Self {
Self {
conversations_since_reflection: 0,
last_reflection_time: None,
last_reflection_agent_id: None,
}
}
}
// === Memory Entry (simplified for analysis) ===
/// Simplified memory entry used as input to pattern analysis.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryEntryForAnalysis {
/// Category string; the rule analyzer matches "task", "preference",
/// "lesson" and "fact" exactly.
pub memory_type: String,
/// The remembered text.
pub content: String,
/// Importance score (heuristics here treat >= 7 as high, <= 3 as low).
pub importance: usize,
/// How many times this memory has been accessed.
pub access_count: usize,
/// Free-form tags ("auto-extracted" is skipped by tag analysis).
pub tags: Vec<String>,
}
// === Reflection Engine ===
/// The reflection engine: configuration, trigger state and recent results.
pub struct ReflectionEngine {
// Active configuration (replaceable via `update_config`).
config: ReflectionConfig,
// Trigger counters; reset each time `reflect` runs.
state: ReflectionState,
// Most recent reflection results (in-memory, bounded in `reflect`).
history: Vec<ReflectionResult>,
}
impl ReflectionEngine {
pub fn new(config: Option<ReflectionConfig>) -> Self {
Self {
config: config.unwrap_or_default(),
state: ReflectionState::default(),
history: Vec::new(),
}
}
/// Record that a conversation happened.
///
/// Uses `saturating_add` so the counter can never overflow: a plain
/// `+= 1` would panic in debug builds (and wrap in release) at
/// `usize::MAX`. Saturation is safe because every trigger check is a
/// `>=` comparison.
pub fn record_conversation(&mut self) {
    self.state.conversations_since_reflection =
        self.state.conversations_since_reflection.saturating_add(1);
}
/// Check if it's time for reflection.
///
/// Triggers when enough conversations have accumulated, or enough
/// wall-clock time has passed since the last reflection. Before the
/// first reflection ever runs, a lower bootstrap threshold of 3
/// conversations applies.
pub fn should_reflect(&self) -> bool {
    // Conversation count trigger
    if self.state.conversations_since_reflection >= self.config.trigger_after_conversations {
        return true;
    }
    // Time-based trigger
    if let Some(last_time) = &self.state.last_reflection_time {
        if let Ok(last) = DateTime::parse_from_rfc3339(last_time) {
            let elapsed = Utc::now().signed_duration_since(last);
            // BUG FIX: num_hours() is signed. A future-dated timestamp
            // (clock skew, bad persisted data) previously wrapped to a
            // huge value via `as u64` and spuriously triggered reflection.
            // Only compare when the elapsed time is non-negative.
            let hours_since = elapsed.num_hours();
            if hours_since >= 0 && (hours_since as u64) >= self.config.trigger_after_hours {
                return true;
            }
        }
    } else {
        // Never reflected before: bootstrap after 3 initial conversations.
        return self.state.conversations_since_reflection >= 3;
    }
    false
}
/// Execute reflection cycle
///
/// Analyzes `memories` (LLM-backed when configured and a driver is
/// supplied, rule-based otherwise), derives improvement suggestions and
/// optional identity proposals, resets the trigger counter, records the
/// result in the bounded in-memory history, and persists state, latest
/// result and history to VikingStorage in a detached background task
/// (fire-and-forget: persistence failures are logged, never surfaced).
pub async fn reflect(
    &mut self,
    agent_id: &str,
    memories: &[MemoryEntryForAnalysis],
    driver: Option<Arc<dyn LlmDriver>>,
) -> ReflectionResult {
    // P2-07: Track whether rules-based fallback was used
    let mut used_fallback = !self.config.use_llm;
    // 1. Analyze memory patterns (LLM if configured, rules fallback)
    let patterns = if self.config.use_llm {
        if let Some(ref llm) = driver {
            match self.analyze_patterns_with_llm(memories, llm).await {
                Ok(p) => p,
                Err(e) => {
                    tracing::warn!("[reflection] LLM analysis failed, falling back to rules: {}", e);
                    used_fallback = true;
                    if self.config.llm_fallback_to_rules {
                        self.analyze_patterns(memories)
                    } else {
                        Vec::new()
                    }
                }
            }
        } else {
            tracing::debug!("[reflection] use_llm=true but no driver available, using rules");
            used_fallback = true;
            self.analyze_patterns(memories)
        }
    } else {
        self.analyze_patterns(memories)
    };
    // 2. Generate improvement suggestions
    let improvements = self.generate_improvements(&patterns, memories);
    // 3. Propose identity changes if patterns warrant it
    let identity_proposals: Vec<IdentityChangeProposal> = if self.config.allow_soul_modification {
        self.propose_identity_changes(agent_id, &patterns)
    } else {
        vec![]
    };
    // 4. Count new memories (would be saved): every pattern observed at
    // least once plus every high-priority improvement.
    // (Was `p.frequency >= 1 || p.frequency >= 2` — the second clause was
    // redundant; simplified to the equivalent single check.)
    let new_memories = patterns.iter().filter(|p| p.frequency >= 1).count()
        + improvements
            .iter()
            .filter(|i| matches!(i.priority, Priority::High))
            .count();
    // 5. Build result
    let result = ReflectionResult {
        patterns,
        improvements,
        identity_proposals,
        new_memories,
        timestamp: Utc::now().to_rfc3339(),
        used_fallback, // P2-07: expose fallback status to callers
    };
    // 6. Update state
    self.state.conversations_since_reflection = 0;
    self.state.last_reflection_time = Some(result.timestamp.clone());
    self.state.last_reflection_agent_id = Some(agent_id.to_string());
    // 7. Store in history, keeping the most recent 20 entries.
    // FIX: the previous `split_off(10)` kept `len - 10` entries, which was
    // inconsistent with the 20-entry cap used for the persisted history below.
    self.history.push(result.clone());
    if self.history.len() > 20 {
        let excess = self.history.len() - 20;
        self.history.drain(..excess);
    }
    // 8. Persist result, state, and history to VikingStorage (fire-and-forget)
    let state_to_persist = self.state.clone();
    let result_to_persist = result.clone();
    let agent_id_owned = agent_id.to_string();
    tokio::spawn(async move {
        if let Ok(storage) = crate::viking_commands::get_storage().await {
            // Persist state as JSON string
            let state_key = format!("reflection:state:{}", agent_id_owned);
            if let Ok(state_json) = serde_json::to_string(&state_to_persist) {
                if let Err(e) = zclaw_growth::VikingStorage::store_metadata_json(
                    &*storage, &state_key, &state_json,
                ).await {
                    tracing::warn!("[reflection] Failed to persist state: {}", e);
                }
            }
            // Persist latest result as JSON string
            let result_key = format!("reflection:latest:{}", agent_id_owned);
            if let Ok(result_json) = serde_json::to_string(&result_to_persist) {
                if let Err(e) = zclaw_growth::VikingStorage::store_metadata_json(
                    &*storage, &result_key, &result_json,
                ).await {
                    tracing::warn!("[reflection] Failed to persist result: {}", e);
                }
            }
            // Persist full history array (append new result, oldest first)
            let history_key = format!("reflection:history:{}", agent_id_owned);
            let mut history: Vec<ReflectionResult> =
                match zclaw_growth::VikingStorage::get_metadata_json(
                    &*storage, &history_key,
                ).await {
                    Ok(Some(json)) => serde_json::from_str(&json).unwrap_or_default(),
                    _ => Vec::new(),
                };
            history.push(result_to_persist);
            // Keep last 20 entries
            if history.len() > 20 {
                history = history.split_off(history.len() - 20);
            }
            if let Ok(history_json) = serde_json::to_string(&history) {
                if let Err(e) = zclaw_growth::VikingStorage::store_metadata_json(
                    &*storage, &history_key, &history_json,
                ).await {
                    tracing::warn!("[reflection] Failed to persist history: {}", e);
                }
            }
        }
    });
    result
}
/// Analyze patterns using LLM for deeper behavioral insights.
///
/// Builds a numbered summary of the memories, asks the driver for a JSON
/// array of observations, and parses it into `PatternObservation`s.
/// Returns `Err` (with a char-safe response preview) on call or parse
/// failure so callers can fall back to rule-based analysis.
async fn analyze_patterns_with_llm(
    &self,
    memories: &[MemoryEntryForAnalysis],
    driver: &Arc<dyn LlmDriver>,
) -> Result<Vec<PatternObservation>, String> {
    if memories.is_empty() {
        return Ok(Vec::new());
    }
    // Build memory summary for the prompt
    let memory_summary: String = memories.iter().enumerate().map(|(i, m)| {
        format!("{}. [{}] (重要性:{}, 访问:{}) {}",
            i + 1, m.memory_type, m.importance, m.access_count, m.content)
    }).collect::<Vec<_>>().join("\n");
    let system_prompt = r#"你是行为分析专家。分析以下 Agent 记忆条目,识别行为模式和趋势。
请返回 JSON 数组,每个元素包含:
- "observation": string — 模式描述(中文)
- "frequency": number — 该模式出现的频率估计1-10
- "sentiment": "positive" | "negative" | "neutral" — 情感倾向
- "evidence": string[] — 支持该观察的证据记忆内容摘要最多3条
只返回 JSON 数组,不要其他内容。如果没有明显模式,返回空数组。"#
        .to_string();
    // NOTE(review): `model` is set to the provider name rather than a model
    // id, and the memory summary is sent with the *assistant* role — both
    // look suspicious; confirm against the driver/Message API before changing.
    let request = CompletionRequest {
        model: driver.provider().to_string(),
        system: Some(system_prompt),
        messages: vec![zclaw_types::Message::assistant(
            format!("分析以下记忆条目:\n\n{}", memory_summary)
        )],
        max_tokens: Some(2048),
        temperature: Some(0.3),
        stream: false,
        ..Default::default()
    };
    let response = driver.complete(request).await
        .map_err(|e| format!("LLM 调用失败: {}", e))?;
    // Extract text from response (concatenate all text blocks)
    let text = response.content.iter()
        .filter_map(|block| match block {
            ContentBlock::Text { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("");
    // Parse JSON response (handle markdown code blocks)
    let json_str = extract_json_from_llm_response(&text);
    serde_json::from_str::<Vec<PatternObservation>>(&json_str).map_err(|e| {
        // BUG FIX: the previous `&text[..text.len().min(200)]` byte-slice
        // can panic mid-codepoint on multi-byte (e.g. CJK) responses;
        // truncate by characters instead.
        let preview: String = text.chars().take(200).collect();
        format!("解析 LLM 响应失败: {} — 原始响应: {}", e, preview)
    })
}
/// Analyze patterns in memories (rule-based fallback)
fn analyze_patterns(&self, memories: &[MemoryEntryForAnalysis]) -> Vec<PatternObservation> {
let mut patterns = Vec::new();
// Analyze memory type distribution
let mut type_counts: HashMap<String, usize> = HashMap::new();
for m in memories {
*type_counts.entry(m.memory_type.clone()).or_insert(0) += 1;
}
// Pattern: Too many tasks accumulating
let task_count = *type_counts.get("task").unwrap_or(&0);
if task_count >= 3 {
let evidence: Vec<String> = memories
.iter()
.filter(|m| m.memory_type == "task")
.take(3)
.map(|m| m.content.clone())
.collect();
patterns.push(PatternObservation {
observation: format!("积累了 {} 个待办任务,可能存在任务管理不善", task_count),
frequency: task_count,
sentiment: Sentiment::Negative,
evidence,
});
}
// Pattern: Strong preference accumulation
let pref_count = *type_counts.get("preference").unwrap_or(&0);
if pref_count >= 3 {
let evidence: Vec<String> = memories
.iter()
.filter(|m| m.memory_type == "preference")
.take(3)
.map(|m| m.content.clone())
.collect();
patterns.push(PatternObservation {
observation: format!("已记录 {} 个用户偏好,对用户习惯有较好理解", pref_count),
frequency: pref_count,
sentiment: Sentiment::Positive,
evidence,
});
}
// Pattern: Many lessons learned
let lesson_count = *type_counts.get("lesson").unwrap_or(&0);
if lesson_count >= 3 {
let evidence: Vec<String> = memories
.iter()
.filter(|m| m.memory_type == "lesson")
.take(3)
.map(|m| m.content.clone())
.collect();
patterns.push(PatternObservation {
observation: format!("积累了 {} 条经验教训,知识库在成长", lesson_count),
frequency: lesson_count,
sentiment: Sentiment::Positive,
evidence,
});
}
// Pattern: High-importance items being accessed frequently
let high_access: Vec<_> = memories
.iter()
.filter(|m| m.access_count >= 5 && m.importance >= 7)
.collect();
if high_access.len() >= 2 {
let evidence: Vec<String> = high_access.iter().take(3).map(|m| m.content.clone()).collect();
patterns.push(PatternObservation {
observation: format!("有 {} 条高频访问的重要记忆,核心知识正在形成", high_access.len()),
frequency: high_access.len(),
sentiment: Sentiment::Positive,
evidence,
});
}
// Pattern: Low-importance memories accumulating
let low_importance_count = memories.iter().filter(|m| m.importance <= 3).count();
if low_importance_count > 15 {
patterns.push(PatternObservation {
observation: format!("有 {} 条低重要性记忆,可考虑清理", low_importance_count),
frequency: low_importance_count,
sentiment: Sentiment::Neutral,
evidence: vec![],
});
}
// Pattern: Tag analysis - recurring topics
let mut tag_counts: HashMap<String, usize> = HashMap::new();
for m in memories {
for tag in &m.tags {
if tag != "auto-extracted" {
*tag_counts.entry(tag.clone()).or_insert(0) += 1;
}
}
}
let mut frequent_tags: Vec<_> = tag_counts
.iter()
.filter(|(_, count)| **count >= 5)
.map(|(tag, count)| (tag.clone(), *count))
.collect();
frequent_tags.sort_by(|a, b| b.1.cmp(&a.1));
if !frequent_tags.is_empty() {
let tag_str: Vec<String> = frequent_tags
.iter()
.take(5)
.map(|(tag, count)| format!("{}({}次)", tag, count))
.collect();
patterns.push(PatternObservation {
observation: format!("反复出现的主题: {}", tag_str.join(", ")),
frequency: frequent_tags[0].1,
sentiment: Sentiment::Neutral,
evidence: frequent_tags.iter().take(5).map(|(t, _)| t.clone()).collect(),
});
}
patterns
}
/// Generate improvement suggestions
///
/// Maps observed patterns plus raw memory statistics onto a fixed set of
/// actionable suggestions: task cleanup, memory pruning, user-profile
/// enrichment and knowledge consolidation.
fn generate_improvements(
    &self,
    patterns: &[PatternObservation],
    memories: &[MemoryEntryForAnalysis],
) -> Vec<ImprovementSuggestion> {
    let has_observation =
        |needle: &str| patterns.iter().any(|p| p.observation.contains(needle));
    let count_of_type =
        |ty: &str| memories.iter().filter(|m| m.memory_type == ty).count();
    let mut suggestions = Vec::new();
    // Pending-task backlog reported by the pattern analyzer
    if has_observation("待办任务") {
        suggestions.push(ImprovementSuggestion {
            area: "任务管理".to_string(),
            suggestion: "清理已完成的任务记忆,对长期未处理的任务降低重要性或标记为已取消".to_string(),
            priority: Priority::High,
        });
    }
    // Low-importance clutter reported by the pattern analyzer
    if has_observation("低重要性") {
        suggestions.push(ImprovementSuggestion {
            area: "记忆管理".to_string(),
            suggestion: "执行记忆清理移除30天以上未访问且重要性低于3的记忆".to_string(),
            priority: Priority::Medium,
        });
    }
    // Sparse user-preference coverage
    if count_of_type("preference") < 3 {
        suggestions.push(ImprovementSuggestion {
            area: "用户理解".to_string(),
            suggestion: "主动在对话中了解用户偏好(沟通风格、技术栈、工作习惯),丰富用户画像".to_string(),
            priority: Priority::Medium,
        });
    }
    // Large fact base worth consolidating
    if count_of_type("fact") > 20 {
        suggestions.push(ImprovementSuggestion {
            area: "知识整合".to_string(),
            suggestion: "合并相似的事实记忆,提高检索效率。可将相关事实整合为结构化的项目/用户档案".to_string(),
            priority: Priority::Low,
        });
    }
    suggestions
}
/// Propose identity changes based on patterns
///
/// When at least two negative patterns were observed, proposes appending
/// a self-reflection section to the agent's instructions; otherwise
/// returns no proposals.
fn propose_identity_changes(
    &self,
    agent_id: &str,
    patterns: &[PatternObservation],
) -> Vec<IdentityChangeProposal> {
    let negatives: Vec<&PatternObservation> = patterns
        .iter()
        .filter(|p| matches!(p.sentiment, Sentiment::Negative))
        .collect();
    if negatives.len() < 2 {
        return Vec::new();
    }
    // One bullet per negative observation, joined into a single section.
    let additions = negatives
        .iter()
        .map(|p| format!("- 注意: {}", p.observation))
        .collect::<Vec<_>>()
        .join("\n");
    vec![IdentityChangeProposal {
        agent_id: agent_id.to_string(),
        field: "instructions".to_string(),
        current_value: "...".to_string(),
        proposed_value: format!("\n\n## 自我反思改进\n{}", additions),
        reason: format!(
            "基于 {} 个负面模式观察,建议在指令中增加自我改进提醒",
            negatives.len()
        ),
    }]
}
/// Get reflection history
///
/// Returns up to `limit` most recent in-memory results, newest first.
pub fn get_history(&self, limit: usize) -> Vec<&ReflectionResult> {
self.history.iter().rev().take(limit).collect()
}
/// Get current state
pub fn get_state(&self) -> &ReflectionState {
&self.state
}
/// Get configuration
pub fn get_config(&self) -> &ReflectionConfig {
&self.config
}
/// Update configuration
///
/// Replaces the whole configuration; trigger state is unaffected.
pub fn update_config(&mut self, config: ReflectionConfig) {
self.config = config;
}
/// Restore state from VikingStorage metadata (called during init)
///
/// Spawns an async task to read persisted state and result from VikingStorage.
/// Results are placed in global caches, consumed one-shot by intelligence_hooks.
///
/// NOTE(review): `Handle::current()` panics when called outside a Tokio
/// runtime — callers must already be on the runtime; confirm all call sites.
pub fn restore_state(&self, agent_id: &str) {
let rt = tokio::runtime::Handle::current();
let state_key = format!("reflection:state:{}", agent_id);
let result_key = format!("reflection:latest:{}", agent_id);
let agent_id_owned = agent_id.to_string();
rt.spawn(async move {
match crate::viking_commands::get_storage().await {
Ok(storage) => {
// Restore state
match zclaw_growth::VikingStorage::get_metadata_json(
&*storage, &state_key,
).await {
Ok(Some(state_json)) => {
// Malformed JSON is silently skipped (no log) — only valid state is cached.
if let Ok(persisted_state) = serde_json::from_str::<ReflectionState>(&state_json) {
tracing::info!(
"[reflection] Restored state for {}: {} conversations since last reflection",
agent_id_owned,
persisted_state.conversations_since_reflection
);
// Stash in the global cache; intelligence_hooks pops it one-shot later.
let cache = get_state_cache();
if let Ok(mut cache) = cache.write() {
cache.insert(agent_id_owned.clone(), persisted_state);
}
}
}
Ok(None) => {
tracing::debug!("[reflection] No persisted state for {}", agent_id_owned);
}
Err(e) => {
tracing::warn!("[reflection] Failed to read state: {}", e);
}
}
// Restore latest result into history
match zclaw_growth::VikingStorage::get_metadata_json(
&*storage, &result_key,
).await {
Ok(Some(result_json)) => {
if let Ok(persisted_result) = serde_json::from_str::<ReflectionResult>(&result_json) {
let cache = get_result_cache();
if let Ok(mut cache) = cache.write() {
cache.insert(agent_id_owned.clone(), persisted_result);
}
}
}
// Missing result is normal on first run — nothing to log.
Ok(None) => {}
Err(e) => {
tracing::warn!("[reflection] Failed to read result: {}", e);
}
}
}
Err(e) => {
tracing::warn!("[reflection] Storage unavailable during restore: {}", e);
}
}
});
}
/// Apply a restored state (called from intelligence_hooks after restore completes)
///
/// Overwrites the current trigger state wholesale.
pub fn apply_restored_state(&mut self, state: ReflectionState) {
self.state = state;
}
/// Apply a restored latest result to history
///
/// Appends without trimming; the 20-entry cap is only enforced in `reflect`.
pub fn apply_restored_result(&mut self, result: ReflectionResult) {
self.history.push(result);
}
}
// === State Restoration Cache ===
use std::sync::RwLock as StdRwLock;
use std::sync::OnceLock as StdOnceLock;
/// Temporary cache for restored reflection state (bridges async init ↔ sync apply)
static REFLECTION_STATE_CACHE: StdOnceLock<StdRwLock<HashMap<String, ReflectionState>>> = StdOnceLock::new();
/// Temporary cache for restored reflection result
static REFLECTION_RESULT_CACHE: StdOnceLock<StdRwLock<HashMap<String, ReflectionResult>>> = StdOnceLock::new();
// Lazily initialize the state cache on first access.
fn get_state_cache() -> &'static StdRwLock<HashMap<String, ReflectionState>> {
REFLECTION_STATE_CACHE.get_or_init(|| StdRwLock::new(HashMap::new()))
}
// Lazily initialize the result cache on first access.
fn get_result_cache() -> &'static StdRwLock<HashMap<String, ReflectionResult>> {
REFLECTION_RESULT_CACHE.get_or_init(|| StdRwLock::new(HashMap::new()))
}
/// Pop restored state from cache (one-shot, removes after read).
///
/// A poisoned lock yields `None` rather than panicking.
pub fn pop_restored_state(agent_id: &str) -> Option<ReflectionState> {
    get_state_cache().write().ok()?.remove(agent_id)
}
/// Pop restored result from cache (one-shot, removes after read).
///
/// A poisoned lock yields `None` rather than panicking.
pub fn pop_restored_result(agent_id: &str) -> Option<ReflectionResult> {
    get_result_cache().write().ok()?.remove(agent_id)
}
/// Peek restored state from cache (non-destructive read).
pub fn peek_restored_state(agent_id: &str) -> Option<ReflectionState> {
    match get_state_cache().read() {
        Ok(guard) => guard.get(agent_id).cloned(),
        Err(_) => None,
    }
}
/// Peek restored result from cache (non-destructive read).
pub fn peek_restored_result(agent_id: &str) -> Option<ReflectionResult> {
    match get_result_cache().read() {
        Ok(guard) => guard.get(agent_id).cloned(),
        Err(_) => None,
    }
}
// === Tauri Commands ===
use tokio::sync::Mutex;
/// Shared reflection engine handle managed as Tauri state (async mutex,
/// so the guard may be held across `.await` points).
pub type ReflectionEngineState = Arc<Mutex<ReflectionEngine>>;
/// Initialize reflection engine with config
///
/// Applies the supplied configuration to the shared engine when one is
/// given; always reports success.
// @connected
#[tauri::command]
pub async fn reflection_init(
    config: Option<ReflectionConfig>,
    state: tauri::State<'_, ReflectionEngineState>,
) -> Result<bool, String> {
    if let Some(cfg) = config {
        state.lock().await.update_config(cfg);
    }
    Ok(true)
}
/// Record a conversation
///
/// Increments the engine's conversation counter.
// @connected
#[tauri::command]
pub async fn reflection_record_conversation(
    state: tauri::State<'_, ReflectionEngineState>,
) -> Result<(), String> {
    state.lock().await.record_conversation();
    Ok(())
}
/// Check if reflection should run
// @connected
#[tauri::command]
pub async fn reflection_should_reflect(
    state: tauri::State<'_, ReflectionEngineState>,
) -> Result<bool, String> {
    Ok(state.lock().await.should_reflect())
}
/// Execute reflection
///
/// Clones the LLM driver out of the kernel state in a short-lived scope,
/// so the kernel lock is released before the engine lock is taken and the
/// two locks are never held simultaneously.
// @connected
#[tauri::command]
pub async fn reflection_reflect(
agent_id: String,
memories: Vec<MemoryEntryForAnalysis>,
state: tauri::State<'_, ReflectionEngineState>,
kernel_state: tauri::State<'_, crate::kernel_commands::KernelState>,
) -> Result<ReflectionResult, String> {
// Driver is None when no kernel is running; reflect() then uses rules.
let driver = {
let kernel_lock = kernel_state.lock().await;
kernel_lock.as_ref().map(|k| k.driver())
};
let mut engine = state.lock().await;
Ok(engine.reflect(&agent_id, &memories, driver).await)
}
/// Get reflection history
///
/// Returns in-memory history first. If empty and an agent_id is provided,
/// falls back to the persisted history array from VikingStorage metadata,
/// then to the single latest result for backward compatibility.
///
/// Note: the engine lock is held across the storage reads below; it is a
/// tokio (async) mutex, so holding it across `.await` is permitted.
// @connected
#[tauri::command]
pub async fn reflection_get_history(
limit: Option<usize>,
agent_id: Option<String>,
state: tauri::State<'_, ReflectionEngineState>,
) -> Result<Vec<ReflectionResult>, String> {
let limit = limit.unwrap_or(10);
let engine = state.lock().await;
let mut results: Vec<ReflectionResult> = engine.get_history(limit)
.into_iter()
.cloned()
.collect();
// If no in-memory results and we have an agent_id, load persisted history
if results.is_empty() {
if let Some(ref aid) = agent_id {
if let Ok(storage) = crate::viking_commands::get_storage().await {
let history_key = format!("reflection:history:{}", aid);
match zclaw_growth::VikingStorage::get_metadata_json(&*storage, &history_key).await {
Ok(Some(json)) => {
if let Ok(mut persisted) = serde_json::from_str::<Vec<ReflectionResult>>(&json) {
// Persisted history is appended oldest-first; reverse so the
// newest comes first, matching get_history()'s ordering.
persisted.reverse();
persisted.truncate(limit);
results = persisted;
}
}
Ok(None) => {
// Fallback: try loading single latest result (pre-history format)
let latest_key = format!("reflection:latest:{}", aid);
if let Ok(Some(json)) = zclaw_growth::VikingStorage::get_metadata_json(
&*storage, &latest_key,
).await {
if let Ok(persisted) = serde_json::from_str::<ReflectionResult>(&json) {
results.push(persisted);
}
}
}
Err(e) => {
tracing::warn!("[reflection] Failed to load persisted history: {}", e);
}
}
}
}
}
Ok(results)
}
/// Get reflection state
///
/// Returns a clone of the engine's current trigger state.
// @connected
#[tauri::command]
pub async fn reflection_get_state(
    state: tauri::State<'_, ReflectionEngineState>,
) -> Result<ReflectionState, String> {
    let snapshot = state.lock().await.get_state().clone();
    Ok(snapshot)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_should_reflect_initial() {
    // A fresh engine must not reflect immediately; the bootstrap
    // threshold (3 conversations before any first reflection) applies.
    let mut engine = ReflectionEngine::new(None);
    assert!(!engine.should_reflect());
    (0..3).for_each(|_| engine.record_conversation());
    assert!(engine.should_reflect());
}
#[test]
fn test_analyze_patterns() {
// Two "task" memories are below the task-pattern threshold of 3, so no
// task-management pattern should be reported.
let engine = ReflectionEngine::new(None);
let memories = vec![
MemoryEntryForAnalysis {
memory_type: "task".to_string(),
content: "Task 1".to_string(),
importance: 7,
access_count: 1,
tags: vec![],
},
MemoryEntryForAnalysis {
memory_type: "task".to_string(),
content: "Task 2".to_string(),
importance: 8,
access_count: 2,
tags: vec![],
},
];
let patterns = engine.analyze_patterns(&memories);
// Should not trigger (only 2 tasks, threshold is 3)
assert!(!patterns.iter().any(|p| p.observation.contains("待办任务")));
}
#[tokio::test]
async fn test_reflection_cycle_full() {
// Use config with use_llm=false to test rules-based path without driver
let config = ReflectionConfig {
use_llm: false,
trigger_after_conversations: 5,
..Default::default()
};
let mut engine = ReflectionEngine::new(Some(config));
// Record 5 conversations
for _ in 0..5 {
engine.record_conversation();
}
assert!(engine.should_reflect(), "Should trigger after 5 conversations");
// Provide memories with enough task entries to exceed the task-pattern threshold (3)
let mut memories = Vec::new();
for i in 0..6 {
memories.push(MemoryEntryForAnalysis {
memory_type: "task".to_string(),
content: format!("待办任务 {}", i),
importance: 7,
access_count: 1,
tags: vec!["任务".to_string()],
});
}
// Add some preferences and knowledge
// NOTE(review): memory_type "preferences" does not match the "preference"
// string the analyzer filters on; here that keeps pref_count < 3 and so
// triggers the user-understanding suggestion — confirm the mismatch is
// intentional.
memories.push(MemoryEntryForAnalysis {
memory_type: "preferences".to_string(),
content: "用户偏好简洁回复".to_string(),
importance: 8,
access_count: 3,
tags: vec!["偏好".to_string()],
});
memories.push(MemoryEntryForAnalysis {
memory_type: "knowledge".to_string(),
content: "用户熟悉 Rust 编程".to_string(),
importance: 6,
access_count: 2,
tags: vec!["知识".to_string()],
});
// Run reflection (no LLM driver, rules-based)
let result = engine.reflect("agent-test", &memories, None).await;
// Verify result structure
assert!(!result.patterns.is_empty(), "Should detect patterns from memories");
assert!(!result.improvements.is_empty(), "Should generate improvements");
assert!(!result.timestamp.is_empty(), "Should have timestamp");
assert!(result.used_fallback, "Should use rules-based fallback when no LLM driver");
// Verify state reset after reflection
assert!(!engine.should_reflect(), "Counter should reset after reflection");
// Verify history stored
assert_eq!(engine.history.len(), 1, "History should contain 1 reflection result");
}
#[tokio::test]
async fn test_reflection_generates_identity_proposals() {
let config = ReflectionConfig {
use_llm: false,
allow_soul_modification: true,
trigger_after_conversations: 3,
..Default::default()
};
let mut engine = ReflectionEngine::new(Some(config));
for _ in 0..3 {
engine.record_conversation();
}
// High-frequency preference pattern should trigger identity proposal
// NOTE(review): rule-based analysis only proposes identity changes from
// *negative* patterns, and these memories produce positive/neutral ones —
// so the assertion below is intentionally loose and mainly checks that the
// cycle completes without panicking.
let memories: Vec<MemoryEntryForAnalysis> = (0..7)
.map(|i| MemoryEntryForAnalysis {
memory_type: "preferences".to_string(),
content: format!("偏好项目 {}", i),
importance: 8,
access_count: 5,
tags: vec!["偏好".to_string()],
})
.collect();
let result = engine.reflect("agent-identity-test", &memories, None).await;
// Identity proposals are generated when patterns are strong enough
// (rules-based may not always produce proposals, but result structure should be valid)
assert!(result.identity_proposals.len() <= 5, "Identity proposals should be bounded");
}
}
// === Helpers ===
/// Extract JSON from LLM response, handling markdown code blocks and extra text.
///
/// Preference order: content of the first ```json fenced block, then the
/// outermost bare `[...]` array, then the trimmed text unchanged.
///
/// BUG FIX: the previous version mixed relative and absolute indices when
/// locating the fence content (`find` on a subslice used as an index into
/// the full string), returning garbage — or panicking — whenever the fence
/// was not at the very start of the response. The bare-array path could
/// also panic when `]` appeared before `[`.
fn extract_json_from_llm_response(text: &str) -> String {
    let trimmed = text.trim();
    // Try to find a JSON array inside a ```json fenced block: take everything
    // between the newline after the fence marker and the closing fence.
    if let Some(fence) = trimmed.find("```json") {
        let after_fence = &trimmed[fence + "```json".len()..];
        if let Some(nl) = after_fence.find('\n') {
            let content = &after_fence[nl + 1..];
            if let Some(close) = content.find("```") {
                return content[..close].trim().to_string();
            }
        }
    }
    // Try to find a bare JSON array; guard start < end so reversed
    // brackets cannot produce an invalid (panicking) range.
    if let Some(start) = trimmed.find('[') {
        if let Some(end) = trimmed.rfind(']') {
            if start < end {
                return trimmed[start..=end].to_string();
            }
        }
    }
    trimmed.to_string()
}