feat(audit): 审计修复第四轮 — 跨会话搜索、LLM压缩集成、Presentation渲染器
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

- S9: MessageSearch 新增 Session/Global 双模式,Global 调用 VikingStorage memory_search
- M4b: LLM 压缩器集成到 kernel AgentLoop,支持 use_llm 配置切换
- M4c: 压缩时自动提取记忆到 VikingStorage (runtime + tauri 双路径)
- H6: 新增 ChartRenderer(recharts)、Document/Slideshow 完整渲染
- 累计修复 23 项,整体完成度 ~72%,真实可用率 ~80%
This commit is contained in:
iven
2026-03-27 11:44:14 +08:00
parent 7ae6990c97
commit 30b2515f07
16 changed files with 2121 additions and 245 deletions

View File

@@ -539,14 +539,26 @@ pub fn compactor_check_threshold(
/// Execute compaction
///
/// Tauri command entry point. When `config.memory_flush_enabled` is true,
/// the pre-compaction messages are first flushed to VikingStorage via
/// `flush_old_messages_to_memory`, and the count of flushed entries is
/// recorded on the returned `CompactionResult` (`flushed_memories`).
///
/// NOTE(review): this span is a rendered diff hunk — the line
/// `pub fn compactor_compact(` is the pre-change (sync) signature and the
/// `pub async fn compactor_compact(` line below it is its replacement;
/// likewise the bare `compactor.compact(...)` line is the pre-change tail
/// replaced by the `let mut result = ...; result` sequence. Only one of
/// each pair exists in the actual post-change source file.
#[tauri::command]
pub fn compactor_compact(
pub async fn compactor_compact(
    messages: Vec<CompactableMessage>,
    agent_id: String,
    conversation_id: Option<String>,
    config: Option<CompactionConfig>,
) -> CompactionResult {
    // Memory flush is opt-in; absent config means no flush.
    let memory_flush = config
        .as_ref()
        .map(|c| c.memory_flush_enabled)
        .unwrap_or(false);
    // Flush BEFORE compacting so the soon-to-be-dropped messages are
    // persisted; best-effort — a failed flush yields 0, not an error.
    let flushed = if memory_flush {
        flush_old_messages_to_memory(&messages, &agent_id, conversation_id.as_deref()).await
    } else {
        0
    };
    let compactor = ContextCompactor::new(config);
    compactor.compact(&messages, &agent_id, conversation_id.as_deref())
    let mut result = compactor.compact(&messages, &agent_id, conversation_id.as_deref());
    result.flushed_memories = flushed;
    result
}
/// Execute compaction with optional LLM-based summary
@@ -558,10 +570,95 @@ pub async fn compactor_compact_llm(
compaction_config: Option<CompactionConfig>,
llm_config: Option<LlmSummaryConfig>,
) -> CompactionResult {
let memory_flush = compaction_config
.as_ref()
.map(|c| c.memory_flush_enabled)
.unwrap_or(false);
let flushed = if memory_flush {
flush_old_messages_to_memory(&messages, &agent_id, conversation_id.as_deref()).await
} else {
0
};
let compactor = ContextCompactor::new(compaction_config);
compactor
let mut result = compactor
.compact_with_llm(&messages, &agent_id, conversation_id.as_deref(), llm_config.as_ref())
.await
.await;
result.flushed_memories = flushed;
result
}
/// Flush important messages from the old (pre-compaction) portion to VikingStorage.
///
/// Extracts user messages and key assistant responses as session memories
/// so that information is preserved even after messages are compacted away.
///
/// Returns the number of memory entries successfully stored. Everything here
/// is best-effort: failure to obtain storage or to persist an individual
/// entry is logged and skipped — compaction must never fail because memory
/// flushing did.
async fn flush_old_messages_to_memory(
    messages: &[CompactableMessage],
    agent_id: &str,
    _conversation_id: Option<&str>,
) -> usize {
    // No storage, no flush — warn and bail out with a zero count.
    let storage = match crate::viking_commands::get_storage().await {
        Ok(s) => s,
        Err(e) => {
            tracing::warn!("[Compactor] Cannot get storage for memory flush: {}", e);
            return 0;
        }
    };

    let mut flushed = 0usize;
    // True only when the previous message was a substantial user message;
    // arms flushing of the assistant reply that answers it.
    let mut prev_was_user = false;

    for msg in messages {
        // Decide whether this message is worth persisting and at what
        // importance: user messages carry intent/preferences (4), the
        // assistant reply directly following a substantial user message
        // carries the answer (3). Short messages are skipped.
        // NOTE(review): `len()` is a byte length, so the >10/>20 thresholds
        // count UTF-8 bytes, not characters — presumably intentional.
        let importance = if msg.role == "user" && msg.content.len() > 10 {
            Some(4)
        } else if msg.role == "assistant" && prev_was_user && msg.content.len() > 20 {
            Some(3)
        } else {
            None
        };
        // Only a substantial user message arms the next-reply flush; short
        // user messages, assistant messages, and any other role clear it.
        prev_was_user = msg.role == "user" && msg.content.len() > 10;

        if let Some(level) = importance {
            let entry = zclaw_growth::MemoryEntry::new(
                agent_id,
                zclaw_growth::MemoryType::Session,
                "compaction_flush",
                msg.content.clone(),
            )
            .with_importance(level);

            match zclaw_growth::VikingStorage::store(storage.as_ref(), &entry).await {
                Ok(_) => flushed += 1,
                // Per-entry failures are debug-level only: flushing is opportunistic.
                Err(e) if level == 4 => {
                    tracing::debug!("[Compactor] Memory flush failed for user msg: {}", e);
                }
                Err(e) => {
                    tracing::debug!("[Compactor] Memory flush failed for assistant msg: {}", e);
                }
            }
        }
    }

    if flushed > 0 {
        tracing::info!(
            "[Compactor] Flushed {} memories before compaction for agent {}",
            flushed,
            agent_id
        );
    }
    flushed
}
#[cfg(test)]