Compare commits
31 Commits
70229119be
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a504a40395 | ||
|
|
1309101a94 | ||
|
|
0d79993691 | ||
|
|
a0d1392371 | ||
|
|
7db9eb29a0 | ||
|
|
1e65b56a0f | ||
|
|
3c01754c40 | ||
|
|
08af78aa83 | ||
|
|
b69dc6115d | ||
|
|
7dea456fda | ||
|
|
f6c5dd21ce | ||
|
|
47250a3b70 | ||
|
|
215c079d29 | ||
|
|
043824c722 | ||
|
|
bd12bdb62b | ||
|
|
28c892fd31 | ||
|
|
9715f542b6 | ||
|
|
5121a3c599 | ||
|
|
ee1c9ef3ea | ||
|
|
76d36f62a6 | ||
|
|
be2a136392 | ||
|
|
76cdfd0c00 | ||
|
|
02a4ba5e75 | ||
|
|
a8a0751005 | ||
|
|
9c59e6e82a | ||
|
|
27b98cae6f | ||
|
|
d0aabf5f2e | ||
|
|
3c42e0d692 | ||
|
|
e0eb7173c5 | ||
|
|
6721a1cc6e | ||
|
|
d2a0c8efc0 |
@@ -529,7 +529,7 @@ refactor(store): 统一 Store 数据获取方式
|
||||
***
|
||||
|
||||
<!-- ARCH-SNAPSHOT-START -->
|
||||
<!-- 此区域由 auto-sync 自动更新,请勿手动编辑。更新时间: 2026-04-09 -->
|
||||
<!-- 此区域由 auto-sync 自动更新,请勿手动编辑。更新时间: 2026-04-15 -->
|
||||
|
||||
## 13. 当前架构快照
|
||||
|
||||
@@ -539,13 +539,14 @@ refactor(store): 统一 Store 数据获取方式
|
||||
|--------|------|----------|
|
||||
| 管家模式 (Butler) | ✅ 活跃 | 04-12 行业配置4行业 + 跨会话连续性 + <butler-context> XML fencing |
|
||||
| Hermes 管线 | ✅ 活跃 | 04-12 触发信号持久化 + 经验行业维度 + 注入格式优化 |
|
||||
| Intelligence Heartbeat | ✅ 活跃 | 04-15 统一健康快照 (health_snapshot.rs) + HeartbeatManager 重构 + HealthPanel 前端 |
|
||||
| 聊天流 (ChatStream) | ✅ 稳定 | 04-02 ChatStore 拆分为 4 Store (stream/conversation/message/chat) |
|
||||
| 记忆管道 (Memory) | ✅ 稳定 | 04-02 闭环修复: 对话→提取→FTS5+TF-IDF→检索→注入 |
|
||||
| SaaS 认证 (Auth) | ✅ 稳定 | Token池 RPM/TPM 轮换 + JWT password_version 失效机制 |
|
||||
| Pipeline DSL | ✅ 稳定 | 04-01 17 个 YAML 模板 + DAG 执行器 |
|
||||
| Hands 系统 | ✅ 稳定 | 9 启用 (Browser/Collector/Researcher/Twitter/Whiteboard/Slideshow/Speech/Quiz/Clip) |
|
||||
| 技能系统 (Skills) | ✅ 稳定 | 75 个 SKILL.md + 语义路由 |
|
||||
| 中间件链 | ✅ 稳定 | 15 层 (含 DataMasking@90, ButlerRouter, TrajectoryRecorder@650 — V13注册) |
|
||||
| 中间件链 | ✅ 稳定 | 14 层 (ButlerRouter@80, DataMasking@90, Compaction@100, Memory@150, Title@180, SkillIndex@200, DanglingTool@300, ToolError@350, ToolOutputGuard@360, Guardrail@400, LoopGuard@500, SubagentLimit@550, TrajectoryRecorder@650, TokenCalibration@700) |
|
||||
|
||||
### 关键架构模式
|
||||
|
||||
@@ -559,7 +560,8 @@ refactor(store): 统一 Store 数据获取方式
|
||||
|
||||
### 最近变更
|
||||
|
||||
1. [04-12] 行业配置+管家主动性 全栈 5 Phase: 行业数据模型+4内置配置+ButlerRouter动态关键词+触发信号+Tauri加载+Admin管理页面+跨会话连续性+XML fencing注入格式
|
||||
1. [04-15] Heartbeat 统一健康系统: health_snapshot.rs 统一收集器(LLM连接/记忆/会话/系统资源) + heartbeat.rs HeartbeatManager 重构 + HealthPanel.tsx 前端面板 + Tauri 命令 182→183 + intelligence 模块 15→16 文件 + 删除 intelligence-client/ 9 废弃文件
|
||||
2. [04-12] 行业配置+管家主动性 全栈 5 Phase: 行业数据模型+4内置配置+ButlerRouter动态关键词+触发信号+Tauri加载+Admin管理页面+跨会话连续性+XML fencing注入格式
|
||||
2. [04-09] Hermes Intelligence Pipeline 4 Chunk: ExperienceStore+Extractor, UserProfileStore+Profiler, NlScheduleParser, TrajectoryRecorder+Compressor (684 tests, 0 failed)
|
||||
3. [04-09] 管家模式6交付物完成: ButlerRouter + 冷启动 + 简洁模式UI + 桥测试 + 发布文档
|
||||
3. [04-07] @reserved 标注 5 个 butler Tauri 命令 + 痛点持久化 SQLite
|
||||
|
||||
@@ -9,6 +9,7 @@ import type { ProColumns } from '@ant-design/pro-components'
|
||||
import { ProTable } from '@ant-design/pro-components'
|
||||
import { accountService } from '@/services/accounts'
|
||||
import { industryService } from '@/services/industries'
|
||||
import { billingService } from '@/services/billing'
|
||||
import { PageHeader } from '@/components/PageHeader'
|
||||
import type { AccountPublic } from '@/types'
|
||||
|
||||
@@ -70,6 +71,12 @@ export default function Accounts() {
|
||||
}
|
||||
}, [accountIndustries, editingId, form])
|
||||
|
||||
// 获取所有活跃计划(用于管理员切换)
|
||||
const { data: plansData } = useQuery({
|
||||
queryKey: ['billing-plans'],
|
||||
queryFn: ({ signal }) => billingService.listPlans(signal),
|
||||
})
|
||||
|
||||
const updateMutation = useMutation({
|
||||
mutationFn: ({ id, data }: { id: string; data: Partial<AccountPublic> }) =>
|
||||
accountService.update(id, data),
|
||||
@@ -101,6 +108,14 @@ export default function Accounts() {
|
||||
onError: (err: Error) => message.error(err.message || '行业授权更新失败'),
|
||||
})
|
||||
|
||||
// 管理员切换用户计划
|
||||
const switchPlanMutation = useMutation({
|
||||
mutationFn: ({ accountId, planId }: { accountId: string; planId: string }) =>
|
||||
billingService.adminSwitchPlan(accountId, planId),
|
||||
onSuccess: () => message.success('计划切换成功'),
|
||||
onError: (err: Error) => message.error(err.message || '计划切换失败'),
|
||||
})
|
||||
|
||||
const columns: ProColumns<AccountPublic>[] = [
|
||||
{ title: '用户名', dataIndex: 'username', width: 120, tooltip: '搜索用户名、邮箱或显示名' },
|
||||
{ title: '显示名', dataIndex: 'display_name', width: 120, hideInSearch: true },
|
||||
@@ -186,7 +201,7 @@ export default function Accounts() {
|
||||
|
||||
try {
|
||||
// 更新基础信息
|
||||
const { industry_ids, ...accountData } = values
|
||||
const { industry_ids, plan_id, ...accountData } = values
|
||||
await updateMutation.mutateAsync({ id: editingId, data: accountData })
|
||||
|
||||
// 更新行业授权(如果变更了)
|
||||
@@ -201,6 +216,11 @@ export default function Accounts() {
|
||||
queryClient.invalidateQueries({ queryKey: ['account-industries'] })
|
||||
}
|
||||
|
||||
// 切换订阅计划(如果选择了新计划)
|
||||
if (plan_id) {
|
||||
await switchPlanMutation.mutateAsync({ accountId: editingId, planId: plan_id })
|
||||
}
|
||||
|
||||
handleClose()
|
||||
} catch {
|
||||
// Errors handled by mutation onError callbacks
|
||||
@@ -218,6 +238,11 @@ export default function Accounts() {
|
||||
label: `${item.icon} ${item.name}`,
|
||||
}))
|
||||
|
||||
const planOptions = (plansData || []).map((plan) => ({
|
||||
value: plan.id,
|
||||
label: `${plan.display_name} (¥${(plan.price_cents / 100).toFixed(0)}/月)`,
|
||||
}))
|
||||
|
||||
return (
|
||||
<div>
|
||||
<PageHeader title="账号管理" description="管理系统用户账号、角色、权限与行业授权" />
|
||||
@@ -256,7 +281,7 @@ export default function Accounts() {
|
||||
open={modalOpen}
|
||||
onOk={handleSave}
|
||||
onCancel={handleClose}
|
||||
confirmLoading={updateMutation.isPending || setIndustriesMutation.isPending}
|
||||
confirmLoading={updateMutation.isPending || setIndustriesMutation.isPending || switchPlanMutation.isPending}
|
||||
width={560}
|
||||
>
|
||||
<Form form={form} layout="vertical" className="mt-4">
|
||||
@@ -280,6 +305,21 @@ export default function Accounts() {
|
||||
]} />
|
||||
</Form.Item>
|
||||
|
||||
<Divider>订阅计划</Divider>
|
||||
|
||||
<Form.Item
|
||||
name="plan_id"
|
||||
label="切换计划"
|
||||
extra="选择新计划后保存将立即切换。留空则不修改当前计划。"
|
||||
>
|
||||
<Select
|
||||
allowClear
|
||||
placeholder="不修改当前计划"
|
||||
options={planOptions}
|
||||
loading={!plansData}
|
||||
/>
|
||||
</Form.Item>
|
||||
|
||||
<Divider>行业授权</Divider>
|
||||
|
||||
<Form.Item
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import request, { withSignal } from './request'
|
||||
import type { TokenInfo, CreateTokenRequest, PaginatedResponse } from '@/types'
|
||||
|
||||
// 使用 /tokens 路由 (api_tokens 表),前端 UI 字段 {name, expires_days, permissions} 与此后端匹配
|
||||
// 注: /keys 路由 (account_api_keys 表) 需要 {provider_id, key_value},属于不同的 Key 管理系统
|
||||
export const apiKeyService = {
|
||||
list: (params?: Record<string, unknown>, signal?: AbortSignal) =>
|
||||
request.get<PaginatedResponse<TokenInfo>>('/keys', withSignal({ params }, signal)).then((r) => r.data),
|
||||
request.get<PaginatedResponse<TokenInfo>>('/tokens', withSignal({ params }, signal)).then((r) => r.data),
|
||||
|
||||
create: (data: CreateTokenRequest, signal?: AbortSignal) =>
|
||||
request.post<TokenInfo>('/keys', data, withSignal({}, signal)).then((r) => r.data),
|
||||
request.post<TokenInfo>('/tokens', data, withSignal({}, signal)).then((r) => r.data),
|
||||
|
||||
revoke: (id: string, signal?: AbortSignal) =>
|
||||
request.delete(`/keys/${id}`, withSignal({}, signal)).then((r) => r.data),
|
||||
request.delete(`/tokens/${id}`, withSignal({}, signal)).then((r) => r.data),
|
||||
}
|
||||
|
||||
@@ -90,4 +90,9 @@ export const billingService = {
|
||||
getPaymentStatus: (id: string, signal?: AbortSignal) =>
|
||||
request.get<PaymentStatus>(`/billing/payments/${id}`, withSignal({}, signal))
|
||||
.then((r) => r.data),
|
||||
|
||||
/** 管理员切换用户订阅计划 (super_admin only) */
|
||||
adminSwitchPlan: (accountId: string, planId: string) =>
|
||||
request.put<{ success: boolean; subscription: Subscription }>(`/admin/accounts/${accountId}/subscription`, { plan_id: planId })
|
||||
.then((r) => r.data),
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ export default defineConfig({
|
||||
timeout: 600_000,
|
||||
proxyTimeout: 600_000,
|
||||
},
|
||||
'/api': {
|
||||
'/api/': {
|
||||
target: 'http://localhost:8080',
|
||||
changeOrigin: true,
|
||||
timeout: 30_000,
|
||||
|
||||
@@ -132,13 +132,16 @@ impl SqliteStorage {
|
||||
.map_err(|e| ZclawError::StorageError(format!("Failed to create memories table: {}", e)))?;
|
||||
|
||||
// Create FTS5 virtual table for full-text search
|
||||
// Use trigram tokenizer for CJK (Chinese/Japanese/Korean) support.
|
||||
// unicode61 cannot tokenize CJK characters, causing memory search to fail.
|
||||
// trigram indexes overlapping 3-character slices, works well for all languages.
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
|
||||
uri,
|
||||
content,
|
||||
keywords,
|
||||
tokenize='unicode61'
|
||||
tokenize='trigram'
|
||||
)
|
||||
"#,
|
||||
)
|
||||
@@ -176,6 +179,36 @@ impl SqliteStorage {
|
||||
.execute(&self.pool)
|
||||
.await;
|
||||
|
||||
// Backfill content_hash for existing entries that have NULL content_hash
|
||||
{
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
let rows: Vec<(String, String)> = sqlx::query_as(
|
||||
"SELECT uri, content FROM memories WHERE content_hash IS NULL"
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
if !rows.is_empty() {
|
||||
for (uri, content) in &rows {
|
||||
let normalized = content.trim().to_lowercase();
|
||||
let mut hasher = std::collections::hash_map::DefaultHasher::new();
|
||||
normalized.hash(&mut hasher);
|
||||
let hash = format!("{:016x}", hasher.finish());
|
||||
let _ = sqlx::query("UPDATE memories SET content_hash = ? WHERE uri = ?")
|
||||
.bind(&hash)
|
||||
.bind(uri)
|
||||
.execute(&self.pool)
|
||||
.await;
|
||||
}
|
||||
tracing::info!(
|
||||
"[SqliteStorage] Backfilled content_hash for {} existing entries",
|
||||
rows.len()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Create metadata table
|
||||
sqlx::query(
|
||||
r#"
|
||||
@@ -189,6 +222,46 @@ impl SqliteStorage {
|
||||
.await
|
||||
.map_err(|e| ZclawError::StorageError(format!("Failed to create metadata table: {}", e)))?;
|
||||
|
||||
// Migration: Rebuild FTS5 table if using old unicode61 tokenizer (can't handle CJK)
|
||||
// Check tokenizer by inspecting the existing FTS5 table definition
|
||||
let needs_rebuild: bool = sqlx::query_scalar::<_, i64>(
|
||||
"SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='memories_fts' AND sql LIKE '%unicode61%'"
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await
|
||||
.unwrap_or(0) > 0;
|
||||
|
||||
if needs_rebuild {
|
||||
tracing::info!("[SqliteStorage] Rebuilding FTS5 table: unicode61 → trigram for CJK support");
|
||||
// Drop old FTS5 table
|
||||
let _ = sqlx::query("DROP TABLE IF EXISTS memories_fts")
|
||||
.execute(&self.pool)
|
||||
.await;
|
||||
// Recreate with trigram tokenizer
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
|
||||
uri,
|
||||
content,
|
||||
keywords,
|
||||
tokenize='trigram'
|
||||
)
|
||||
"#,
|
||||
)
|
||||
.execute(&self.pool)
|
||||
.await
|
||||
.map_err(|e| ZclawError::StorageError(format!("Failed to recreate FTS5 table: {}", e)))?;
|
||||
// Reindex all existing memories into FTS5
|
||||
let reindexed = sqlx::query(
|
||||
"INSERT INTO memories_fts (uri, content, keywords) SELECT uri, content, keywords FROM memories"
|
||||
)
|
||||
.execute(&self.pool)
|
||||
.await
|
||||
.map(|r| r.rows_affected())
|
||||
.unwrap_or(0);
|
||||
tracing::info!("[SqliteStorage] FTS5 rebuild complete, reindexed {} entries", reindexed);
|
||||
}
|
||||
|
||||
tracing::info!("[SqliteStorage] Database schema initialized");
|
||||
Ok(())
|
||||
}
|
||||
@@ -378,19 +451,37 @@ impl SqliteStorage {
|
||||
/// Strips these and keeps only alphanumeric + CJK tokens with length > 1,
|
||||
/// then joins them with `OR` for broad matching.
|
||||
fn sanitize_fts_query(query: &str) -> String {
|
||||
let terms: Vec<String> = query
|
||||
.to_lowercase()
|
||||
.split(|c: char| !c.is_alphanumeric())
|
||||
.filter(|s| !s.is_empty() && s.len() > 1)
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
// trigram tokenizer requires quoted phrases for substring matching
|
||||
// and needs at least 3 characters per term to produce results.
|
||||
let lower = query.to_lowercase();
|
||||
|
||||
if terms.is_empty() {
|
||||
return String::new();
|
||||
// Check if query contains CJK characters — trigram handles them natively
|
||||
let has_cjk = lower.chars().any(|c| {
|
||||
matches!(c, '\u{4E00}'..='\u{9FFF}' | '\u{3400}'..='\u{4DBF}' | '\u{F900}'..='\u{FAFF}')
|
||||
});
|
||||
|
||||
if has_cjk {
|
||||
// For CJK, use the full query as a quoted phrase for substring matching
|
||||
// trigram will match any 3-char subsequence
|
||||
if lower.len() >= 3 {
|
||||
format!("\"{}\"", lower)
|
||||
} else {
|
||||
String::new()
|
||||
}
|
||||
} else {
|
||||
// For non-CJK, split into terms and join with OR
|
||||
let terms: Vec<String> = lower
|
||||
.split(|c: char| !c.is_alphanumeric())
|
||||
.filter(|s| !s.is_empty() && s.len() > 1)
|
||||
.map(|s| format!("\"{}\"", s))
|
||||
.collect();
|
||||
|
||||
if terms.is_empty() {
|
||||
return String::new();
|
||||
}
|
||||
|
||||
terms.join(" OR ")
|
||||
}
|
||||
|
||||
// Join with OR so any term can match (broad recall, then rerank by similarity)
|
||||
terms.join(" OR ")
|
||||
}
|
||||
|
||||
/// Fetch memories by scope with importance-based ordering.
|
||||
|
||||
@@ -20,6 +20,7 @@ mod researcher;
|
||||
mod collector;
|
||||
mod clip;
|
||||
mod twitter;
|
||||
pub mod reminder;
|
||||
|
||||
pub use whiteboard::*;
|
||||
pub use slideshow::*;
|
||||
@@ -30,3 +31,4 @@ pub use researcher::*;
|
||||
pub use collector::*;
|
||||
pub use clip::*;
|
||||
pub use twitter::*;
|
||||
pub use reminder::*;
|
||||
|
||||
77
crates/zclaw-hands/src/hands/reminder.rs
Normal file
77
crates/zclaw-hands/src/hands/reminder.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
//! Reminder Hand - Internal hand for scheduled reminders
|
||||
//!
|
||||
//! This is a system hand (id `_reminder`) used by the schedule interception
|
||||
//! layer in `agent_chat_stream`. When the NlScheduleParser detects a schedule
|
||||
//! intent in chat, it creates a trigger targeting this hand. The SchedulerService
|
||||
//! fires the trigger at the scheduled time.
|
||||
|
||||
use async_trait::async_trait;
|
||||
use serde_json::Value;
|
||||
use zclaw_types::Result;
|
||||
|
||||
use crate::{Hand, HandConfig, HandContext, HandResult, HandStatus};
|
||||
|
||||
/// Internal reminder hand for scheduled tasks
|
||||
pub struct ReminderHand {
|
||||
config: HandConfig,
|
||||
}
|
||||
|
||||
impl ReminderHand {
|
||||
/// Create a new reminder hand
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
config: HandConfig {
|
||||
id: "_reminder".to_string(),
|
||||
name: "定时提醒".to_string(),
|
||||
description: "Internal hand for scheduled reminders".to_string(),
|
||||
needs_approval: false,
|
||||
dependencies: vec![],
|
||||
input_schema: None,
|
||||
tags: vec!["system".to_string()],
|
||||
enabled: true,
|
||||
max_concurrent: 0,
|
||||
timeout_secs: 0,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Hand for ReminderHand {
|
||||
fn config(&self) -> &HandConfig {
|
||||
&self.config
|
||||
}
|
||||
|
||||
async fn execute(&self, _context: &HandContext, input: Value) -> Result<HandResult> {
|
||||
let task_desc = input
|
||||
.get("task_description")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("定时提醒");
|
||||
|
||||
let cron = input
|
||||
.get("cron")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
let fired_at = input
|
||||
.get("fired_at")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("unknown time");
|
||||
|
||||
tracing::info!(
|
||||
"[ReminderHand] Fired at {} — task: {}, cron: {}",
|
||||
fired_at, task_desc, cron
|
||||
);
|
||||
|
||||
Ok(HandResult::success(serde_json::json!({
|
||||
"task": task_desc,
|
||||
"cron": cron,
|
||||
"fired_at": fired_at,
|
||||
"status": "reminded",
|
||||
})))
|
||||
}
|
||||
|
||||
fn status(&self) -> HandStatus {
|
||||
HandStatus::Idle
|
||||
}
|
||||
}
|
||||
@@ -27,7 +27,7 @@ use crate::config::KernelConfig;
|
||||
use zclaw_memory::MemoryStore;
|
||||
use zclaw_runtime::{LlmDriver, ToolRegistry, tool::SkillExecutor};
|
||||
use zclaw_skills::SkillRegistry;
|
||||
use zclaw_hands::{HandRegistry, hands::{BrowserHand, SlideshowHand, SpeechHand, QuizHand, WhiteboardHand, ResearcherHand, CollectorHand, ClipHand, TwitterHand, quiz::LlmQuizGenerator}};
|
||||
use zclaw_hands::{HandRegistry, hands::{BrowserHand, SlideshowHand, SpeechHand, QuizHand, WhiteboardHand, ResearcherHand, CollectorHand, ClipHand, TwitterHand, ReminderHand, quiz::LlmQuizGenerator}};
|
||||
|
||||
pub use adapters::KernelSkillExecutor;
|
||||
pub use messaging::ChatModeConfig;
|
||||
@@ -101,6 +101,7 @@ impl Kernel {
|
||||
hands.register(Arc::new(CollectorHand::new())).await;
|
||||
hands.register(Arc::new(ClipHand::new())).await;
|
||||
hands.register(Arc::new(TwitterHand::new())).await;
|
||||
hands.register(Arc::new(ReminderHand::new())).await;
|
||||
|
||||
// Create skill executor
|
||||
let skill_executor = Arc::new(KernelSkillExecutor::new(skills.clone(), driver.clone()));
|
||||
|
||||
@@ -77,7 +77,7 @@ impl SchedulerService {
|
||||
kernel_lock: &Arc<Mutex<Option<Kernel>>>,
|
||||
) -> Result<()> {
|
||||
// Collect due triggers under lock
|
||||
let to_execute: Vec<(String, String, String)> = {
|
||||
let to_execute: Vec<(String, String, String, String)> = {
|
||||
let kernel_guard = kernel_lock.lock().await;
|
||||
let kernel = match kernel_guard.as_ref() {
|
||||
Some(k) => k,
|
||||
@@ -103,7 +103,8 @@ impl SchedulerService {
|
||||
.filter_map(|t| {
|
||||
if let zclaw_hands::TriggerType::Schedule { ref cron } = t.config.trigger_type {
|
||||
if Self::should_fire_cron(cron, &now) {
|
||||
Some((t.config.id.clone(), t.config.hand_id.clone(), cron.clone()))
|
||||
// (trigger_id, hand_id, cron_expr, trigger_name)
|
||||
Some((t.config.id.clone(), t.config.hand_id.clone(), cron.clone(), t.config.name.clone()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -123,7 +124,7 @@ impl SchedulerService {
|
||||
// If parallel execution is needed, spawn each execute_hand in a separate task
|
||||
// and collect results via JoinSet.
|
||||
let now = chrono::Utc::now();
|
||||
for (trigger_id, hand_id, cron_expr) in to_execute {
|
||||
for (trigger_id, hand_id, cron_expr, trigger_name) in to_execute {
|
||||
tracing::info!(
|
||||
"[Scheduler] Firing scheduled trigger '{}' → hand '{}' (cron: {})",
|
||||
trigger_id, hand_id, cron_expr
|
||||
@@ -138,6 +139,7 @@ impl SchedulerService {
|
||||
let input = serde_json::json!({
|
||||
"trigger_id": trigger_id,
|
||||
"trigger_type": "schedule",
|
||||
"task_description": trigger_name,
|
||||
"cron": cron_expr,
|
||||
"fired_at": now.to_rfc3339(),
|
||||
});
|
||||
|
||||
@@ -134,7 +134,9 @@ impl TriggerManager {
|
||||
/// Create a new trigger
|
||||
pub async fn create_trigger(&self, config: TriggerConfig) -> Result<TriggerEntry> {
|
||||
// Validate hand exists (outside of our lock to avoid holding two locks)
|
||||
if self.hand_registry.get(&config.hand_id).await.is_none() {
|
||||
// System hands (prefixed with '_') are exempt from validation — they are
|
||||
// registered at boot but may not appear in the hand registry scan path.
|
||||
if !config.hand_id.starts_with('_') && self.hand_registry.get(&config.hand_id).await.is_none() {
|
||||
return Err(zclaw_types::ZclawError::InvalidInput(
|
||||
format!("Hand '{}' not found", config.hand_id)
|
||||
));
|
||||
@@ -170,7 +172,7 @@ impl TriggerManager {
|
||||
) -> Result<TriggerEntry> {
|
||||
// Validate hand exists if being updated (outside of our lock)
|
||||
if let Some(hand_id) = &updates.hand_id {
|
||||
if self.hand_registry.get(hand_id).await.is_none() {
|
||||
if !hand_id.starts_with('_') && self.hand_registry.get(hand_id).await.is_none() {
|
||||
return Err(zclaw_types::ZclawError::InvalidInput(
|
||||
format!("Hand '{}' not found", hand_id)
|
||||
));
|
||||
@@ -303,9 +305,10 @@ impl TriggerManager {
|
||||
};
|
||||
|
||||
// Get hand (outside of our lock to avoid potential deadlock with hand_registry)
|
||||
// System hands (prefixed with '_') must be registered at boot — same rule as create_trigger.
|
||||
let hand = self.hand_registry.get(&hand_id).await
|
||||
.ok_or_else(|| zclaw_types::ZclawError::InvalidInput(
|
||||
format!("Hand '{}' not found", hand_id)
|
||||
format!("Hand '{}' not found (system hands must be registered at boot)", hand_id)
|
||||
))?;
|
||||
|
||||
// Update state before execution
|
||||
|
||||
@@ -130,7 +130,7 @@ impl DataMasker {
|
||||
fn recover_read<T>(lock: &RwLock<T>) -> std::sync::LockResult<std::sync::RwLockReadGuard<'_, T>> {
|
||||
match lock.read() {
|
||||
Ok(guard) => Ok(guard),
|
||||
Err(e) => {
|
||||
Err(_e) => {
|
||||
tracing::warn!("[DataMasker] RwLock poisoned during read, recovering");
|
||||
// Poison error still gives us access to the inner guard
|
||||
lock.read()
|
||||
@@ -141,7 +141,7 @@ impl DataMasker {
|
||||
fn recover_write<T>(lock: &RwLock<T>) -> std::sync::LockResult<std::sync::RwLockWriteGuard<'_, T>> {
|
||||
match lock.write() {
|
||||
Ok(guard) => Ok(guard),
|
||||
Err(e) => {
|
||||
Err(_e) => {
|
||||
tracing::warn!("[DataMasker] RwLock poisoned during write, recovering");
|
||||
lock.write()
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ use tokio::sync::RwLock;
|
||||
use zclaw_memory::trajectory_store::{
|
||||
TrajectoryEvent, TrajectoryStepType, TrajectoryStore,
|
||||
};
|
||||
use zclaw_types::{Result, SessionId};
|
||||
use zclaw_types::Result;
|
||||
use crate::driver::ContentBlock;
|
||||
use crate::middleware::{AgentMiddleware, MiddlewareContext, MiddlewareDecision};
|
||||
|
||||
|
||||
@@ -7,7 +7,10 @@
|
||||
//!
|
||||
//! Lives in `zclaw-runtime` because it's a pure text→cron utility with no kernel dependency.
|
||||
|
||||
use chrono::{Datelike, Timelike};
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use chrono::Timelike;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use zclaw_types::AgentId;
|
||||
|
||||
@@ -56,20 +59,79 @@ pub enum ScheduleParseResult {
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Regex pattern library
|
||||
// Pre-compiled regex patterns (LazyLock — compiled once, reused forever)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// A single pattern for matching Chinese time expressions.
|
||||
struct SchedulePattern {
|
||||
/// Regex pattern string
|
||||
regex: &'static str,
|
||||
/// Cron template — use {h} for hour, {m} for minute, {dow} for day-of-week, {dom} for day-of-month
|
||||
cron_template: &'static str,
|
||||
/// Human description template
|
||||
description: &'static str,
|
||||
/// Base confidence for this pattern
|
||||
confidence: f32,
|
||||
}
|
||||
/// Time-of-day period fragment used across multiple patterns.
|
||||
const PERIOD: &str = "(凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)?";
|
||||
|
||||
// extract_task_description
|
||||
static RE_TIME_STRIP: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(
|
||||
r"^(?:凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)?\d{1,2}[点时::]\d{0,2}分?"
|
||||
).unwrap()
|
||||
});
|
||||
|
||||
// try_every_day
|
||||
static RE_EVERY_DAY_EXACT: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(&format!(
|
||||
r"(?:每天|每日)(?:的)?{}(\d{{1,2}})[点时::](\d{{1,2}})?",
|
||||
PERIOD
|
||||
)).unwrap()
|
||||
});
|
||||
|
||||
static RE_EVERY_DAY_PERIOD: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(
|
||||
r"(?:每天|每日)(?:的)?(凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)"
|
||||
).unwrap()
|
||||
});
|
||||
|
||||
// try_every_week
|
||||
static RE_EVERY_WEEK: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(&format!(
|
||||
r"(?:每周|每个?星期|每个?礼拜)(一|二|三|四|五|六|日|天|周一|周二|周三|周四|周五|周六|周日|周天|星期一|星期二|星期三|星期四|星期五|星期六|星期日|星期天|礼拜一|礼拜二|礼拜三|礼拜四|礼拜五|礼拜六|礼拜日|礼拜天)(?:的)?{}(\d{{1,2}})[点时::](\d{{1,2}})?",
|
||||
PERIOD
|
||||
)).unwrap()
|
||||
});
|
||||
|
||||
// try_workday
|
||||
static RE_WORKDAY_EXACT: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(&format!(
|
||||
r"(?:工作日|每个?工作日|工作日(?:的)?){}(\d{{1,2}})[点时::](\d{{1,2}})?",
|
||||
PERIOD
|
||||
)).unwrap()
|
||||
});
|
||||
|
||||
static RE_WORKDAY_PERIOD: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(
|
||||
r"(?:工作日|每个?工作日)(?:的)?(凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)"
|
||||
).unwrap()
|
||||
});
|
||||
|
||||
// try_interval
|
||||
static RE_INTERVAL: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(r"每(\d{1,2})(小时|分钟|分|钟|个小时)").unwrap()
|
||||
});
|
||||
|
||||
// try_monthly
|
||||
static RE_MONTHLY: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(&format!(
|
||||
r"(?:每月|每个月)(?:的)?(\d{{1,2}})[号日](?:的)?{}(\d{{1,2}})?[点时::]?(\d{{1,2}})?",
|
||||
PERIOD
|
||||
)).unwrap()
|
||||
});
|
||||
|
||||
// try_one_shot
|
||||
static RE_ONE_SHOT: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(&format!(
|
||||
r"(明天|后天|大后天)(?:的)?{}(\d{{1,2}})[点时::](\d{{1,2}})?",
|
||||
PERIOD
|
||||
)).unwrap()
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helper lookups (pure functions, no allocation)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Chinese time period keywords → hour mapping
|
||||
fn period_to_hour(period: &str) -> Option<u32> {
|
||||
@@ -99,6 +161,23 @@ fn weekday_to_cron(day: &str) -> Option<&'static str> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Adjust hour based on time-of-day period. Chinese 12-hour convention:
|
||||
/// 下午3点 = 15, 晚上8点 = 20, etc. Morning hours stay as-is.
|
||||
fn adjust_hour_for_period(hour: u32, period: Option<&str>) -> u32 {
|
||||
if let Some(p) = period {
|
||||
match p {
|
||||
"下午" | "午后" => { if hour < 12 { hour + 12 } else { hour } }
|
||||
"晚上" | "晚间" | "夜里" | "夜晚" => { if hour < 12 { hour + 12 } else { hour } }
|
||||
"傍晚" | "黄昏" => { if hour < 12 { hour + 12 } else { hour } }
|
||||
"中午" => { if hour == 12 { 12 } else if hour < 12 { hour + 12 } else { hour } }
|
||||
"半夜" | "午夜" => { if hour == 12 { 0 } else { hour } }
|
||||
_ => hour,
|
||||
}
|
||||
} else {
|
||||
hour
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Parser implementation
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -113,35 +192,23 @@ pub fn parse_nl_schedule(input: &str, default_agent_id: &AgentId) -> SchedulePar
|
||||
return ScheduleParseResult::Unclear;
|
||||
}
|
||||
|
||||
// Extract task description (everything after keywords like "提醒我", "帮我")
|
||||
let task_description = extract_task_description(input);
|
||||
|
||||
// --- Pattern 1: 每天 + 时间 ---
|
||||
if let Some(result) = try_every_day(input, &task_description, default_agent_id) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// --- Pattern 2: 每周N + 时间 ---
|
||||
if let Some(result) = try_every_week(input, &task_description, default_agent_id) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// --- Pattern 3: 工作日 + 时间 ---
|
||||
if let Some(result) = try_workday(input, &task_description, default_agent_id) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// --- Pattern 4: 每N小时/分钟 ---
|
||||
if let Some(result) = try_interval(input, &task_description, default_agent_id) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// --- Pattern 5: 每月N号 ---
|
||||
if let Some(result) = try_monthly(input, &task_description, default_agent_id) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// --- Pattern 6: 明天/后天 + 时间 (one-shot) ---
|
||||
if let Some(result) = try_one_shot(input, &task_description, default_agent_id) {
|
||||
return result;
|
||||
}
|
||||
@@ -160,13 +227,7 @@ fn extract_task_description(input: &str) -> String {
|
||||
|
||||
let mut desc = input.to_string();
|
||||
|
||||
// Strip prefixes + time expressions in alternating passes until stable
|
||||
let time_re = regex::Regex::new(
|
||||
r"^(?:凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)?\d{1,2}[点时::]\d{0,2}分?"
|
||||
).unwrap_or_else(|_| regex::Regex::new("").unwrap());
|
||||
|
||||
for _ in 0..3 {
|
||||
// Pass 1: strip prefixes
|
||||
loop {
|
||||
let mut stripped = false;
|
||||
for prefix in &strip_prefixes {
|
||||
@@ -177,8 +238,7 @@ fn extract_task_description(input: &str) -> String {
|
||||
}
|
||||
if !stripped { break; }
|
||||
}
|
||||
// Pass 2: strip time expressions
|
||||
let new_desc = time_re.replace(&desc, "").to_string();
|
||||
let new_desc = RE_TIME_STRIP.replace(&desc, "").to_string();
|
||||
if new_desc == desc { break; }
|
||||
desc = new_desc;
|
||||
}
|
||||
@@ -186,32 +246,10 @@ fn extract_task_description(input: &str) -> String {
|
||||
desc.trim().to_string()
|
||||
}
|
||||
|
||||
// -- Pattern matchers --
|
||||
|
||||
/// Adjust hour based on time-of-day period. Chinese 12-hour convention:
|
||||
/// 下午3点 = 15, 晚上8点 = 20, etc. Morning hours stay as-is.
|
||||
fn adjust_hour_for_period(hour: u32, period: Option<&str>) -> u32 {
|
||||
if let Some(p) = period {
|
||||
match p {
|
||||
"下午" | "午后" => { if hour < 12 { hour + 12 } else { hour } }
|
||||
"晚上" | "晚间" | "夜里" | "夜晚" => { if hour < 12 { hour + 12 } else { hour } }
|
||||
"傍晚" | "黄昏" => { if hour < 12 { hour + 12 } else { hour } }
|
||||
"中午" => { if hour == 12 { 12 } else if hour < 12 { hour + 12 } else { hour } }
|
||||
"半夜" | "午夜" => { if hour == 12 { 0 } else { hour } }
|
||||
_ => hour,
|
||||
}
|
||||
} else {
|
||||
hour
|
||||
}
|
||||
}
|
||||
|
||||
const PERIOD_PATTERN: &str = "(凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)?";
|
||||
// -- Pattern matchers (all use pre-compiled statics) --
|
||||
|
||||
fn try_every_day(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<ScheduleParseResult> {
|
||||
let re = regex::Regex::new(
|
||||
&format!(r"(?:每天|每日)(?:的)?{}(\d{{1,2}})[点时::](\d{{1,2}})?", PERIOD_PATTERN)
|
||||
).ok()?;
|
||||
if let Some(caps) = re.captures(input) {
|
||||
if let Some(caps) = RE_EVERY_DAY_EXACT.captures(input) {
|
||||
let period = caps.get(1).map(|m| m.as_str());
|
||||
let raw_hour: u32 = caps.get(2)?.as_str().parse().ok()?;
|
||||
let minute: u32 = caps.get(3).map(|m| m.as_str().parse().unwrap_or(0)).unwrap_or(0);
|
||||
@@ -228,9 +266,7 @@ fn try_every_day(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<Sch
|
||||
}));
|
||||
}
|
||||
|
||||
// "每天早上/下午..." without explicit hour
|
||||
let re2 = regex::Regex::new(r"(?:每天|每日)(?:的)?(凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)").ok()?;
|
||||
if let Some(caps) = re2.captures(input) {
|
||||
if let Some(caps) = RE_EVERY_DAY_PERIOD.captures(input) {
|
||||
let period = caps.get(1)?.as_str();
|
||||
if let Some(hour) = period_to_hour(period) {
|
||||
return Some(ScheduleParseResult::Exact(ParsedSchedule {
|
||||
@@ -247,11 +283,7 @@ fn try_every_day(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<Sch
|
||||
}
|
||||
|
||||
fn try_every_week(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<ScheduleParseResult> {
|
||||
let re = regex::Regex::new(
|
||||
&format!(r"(?:每周|每个?星期|每个?礼拜)(一|二|三|四|五|六|日|天|周一|周二|周三|周四|周五|周六|周日|周天|星期一|星期二|星期三|星期四|星期五|星期六|星期日|星期天|礼拜一|礼拜二|礼拜三|礼拜四|礼拜五|礼拜六|礼拜日|礼拜天)(?:的)?{}(\d{{1,2}})[点时::](\d{{1,2}})?", PERIOD_PATTERN)
|
||||
).ok()?;
|
||||
|
||||
let caps = re.captures(input)?;
|
||||
let caps = RE_EVERY_WEEK.captures(input)?;
|
||||
let day_str = caps.get(1)?.as_str();
|
||||
let dow = weekday_to_cron(day_str)?;
|
||||
let period = caps.get(2).map(|m| m.as_str());
|
||||
@@ -272,11 +304,7 @@ fn try_every_week(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<Sc
|
||||
}
|
||||
|
||||
fn try_workday(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<ScheduleParseResult> {
|
||||
let re = regex::Regex::new(
|
||||
&format!(r"(?:工作日|每个?工作日|工作日(?:的)?){}(\d{{1,2}})[点时::](\d{{1,2}})?", PERIOD_PATTERN)
|
||||
).ok()?;
|
||||
|
||||
if let Some(caps) = re.captures(input) {
|
||||
if let Some(caps) = RE_WORKDAY_EXACT.captures(input) {
|
||||
let period = caps.get(1).map(|m| m.as_str());
|
||||
let raw_hour: u32 = caps.get(2)?.as_str().parse().ok()?;
|
||||
let minute: u32 = caps.get(3).map(|m| m.as_str().parse().unwrap_or(0)).unwrap_or(0);
|
||||
@@ -293,11 +321,7 @@ fn try_workday(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<Sched
|
||||
}));
|
||||
}
|
||||
|
||||
// "工作日下午3点" style
|
||||
let re2 = regex::Regex::new(
|
||||
r"(?:工作日|每个?工作日)(?:的)?(凌晨|早上|早晨|上午|中午|下午|午后|傍晚|黄昏|晚上|晚间|夜里|夜晚|半夜|午夜)"
|
||||
).ok()?;
|
||||
if let Some(caps) = re2.captures(input) {
|
||||
if let Some(caps) = RE_WORKDAY_PERIOD.captures(input) {
|
||||
let period = caps.get(1)?.as_str();
|
||||
if let Some(hour) = period_to_hour(period) {
|
||||
return Some(ScheduleParseResult::Exact(ParsedSchedule {
|
||||
@@ -314,9 +338,7 @@ fn try_workday(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<Sched
|
||||
}
|
||||
|
||||
fn try_interval(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<ScheduleParseResult> {
|
||||
// "每2小时", "每30分钟", "每N小时/分钟"
|
||||
let re = regex::Regex::new(r"每(\d{1,2})(小时|分钟|分|钟|个小时)").ok()?;
|
||||
if let Some(caps) = re.captures(input) {
|
||||
if let Some(caps) = RE_INTERVAL.captures(input) {
|
||||
let n: u32 = caps.get(1)?.as_str().parse().ok()?;
|
||||
if n == 0 {
|
||||
return None;
|
||||
@@ -340,11 +362,7 @@ fn try_interval(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<Sche
|
||||
}
|
||||
|
||||
fn try_monthly(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<ScheduleParseResult> {
|
||||
let re = regex::Regex::new(
|
||||
&format!(r"(?:每月|每个月)(?:的)?(\d{{1,2}})[号日](?:的)?{}(\d{{1,2}})?[点时::]?(\d{{1,2}})?", PERIOD_PATTERN)
|
||||
).ok()?;
|
||||
|
||||
if let Some(caps) = re.captures(input) {
|
||||
if let Some(caps) = RE_MONTHLY.captures(input) {
|
||||
let day: u32 = caps.get(1)?.as_str().parse().ok()?;
|
||||
let period = caps.get(2).map(|m| m.as_str());
|
||||
let raw_hour: u32 = caps.get(3).map(|m| m.as_str().parse().unwrap_or(9)).unwrap_or(9);
|
||||
@@ -366,11 +384,7 @@ fn try_monthly(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<Sched
|
||||
}
|
||||
|
||||
fn try_one_shot(input: &str, task_desc: &str, agent_id: &AgentId) -> Option<ScheduleParseResult> {
|
||||
let re = regex::Regex::new(
|
||||
&format!(r"(明天|后天|大后天)(?:的)?{}(\d{{1,2}})[点时::](\d{{1,2}})?", PERIOD_PATTERN)
|
||||
).ok()?;
|
||||
|
||||
let caps = re.captures(input)?;
|
||||
let caps = RE_ONE_SHOT.captures(input)?;
|
||||
let day_offset = match caps.get(1)?.as_str() {
|
||||
"明天" => 1,
|
||||
"后天" => 2,
|
||||
|
||||
@@ -16,7 +16,7 @@ pub fn routes() -> axum::Router<crate::state::AppState> {
|
||||
.route("/api/v1/tokens", post(handlers::create_token))
|
||||
.route("/api/v1/tokens/:id", delete(handlers::revoke_token))
|
||||
.route("/api/v1/logs/operations", get(handlers::list_operation_logs))
|
||||
.route("/api/v1/stats/dashboard", get(handlers::dashboard_stats))
|
||||
.route("/api/v1/admin/dashboard", get(handlers::dashboard_stats))
|
||||
.route("/api/v1/devices", get(handlers::list_devices))
|
||||
.route("/api/v1/devices/register", post(handlers::register_device))
|
||||
.route("/api/v1/devices/heartbeat", post(handlers::device_heartbeat))
|
||||
|
||||
@@ -215,7 +215,10 @@ pub async fn login(
|
||||
.bind(&r.id)
|
||||
.fetch_one(&state.db)
|
||||
.await
|
||||
.unwrap_or(false);
|
||||
.map_err(|e| {
|
||||
tracing::warn!(account_id = %r.id, error = %e, "Lockout check query failed");
|
||||
SaasError::Internal("账号状态检查失败,请重试".into())
|
||||
})?;
|
||||
|
||||
if is_locked {
|
||||
return Err(SaasError::AuthError("账号已被临时锁定,请稍后再试".into()));
|
||||
@@ -631,5 +634,32 @@ pub async fn logout(
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: 如果没有找到 refresh token,尝试从 access token cookie 提取 account_id
|
||||
// Tauri 桌面端使用 Bearer auth 时,logout body 可能不含 refresh_token
|
||||
if tokens_to_check.is_empty() {
|
||||
if let Some(access_cookie) = jar.get(ACCESS_TOKEN_COOKIE) {
|
||||
let access_val = access_cookie.value().to_string();
|
||||
if let Ok(claims) = verify_token_skip_expiry(&access_val, jwt_secret) {
|
||||
let now = chrono::Utc::now();
|
||||
let result = sqlx::query(
|
||||
"UPDATE refresh_tokens SET used_at = $1 WHERE account_id = $2 AND used_at IS NULL"
|
||||
)
|
||||
.bind(&now)
|
||||
.bind(&claims.sub)
|
||||
.execute(&state.db)
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Ok(r) => {
|
||||
tracing::info!(account_id = %claims.sub, n = r.rows_affected(), "Refresh tokens revoked via access token fallback");
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!(account_id = %claims.sub, error = %e, "Failed to revoke refresh tokens (access fallback)");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(clear_auth_cookies(jar), axum::http::StatusCode::NO_CONTENT)
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ use axum::{
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::auth::types::AuthContext;
|
||||
use crate::auth::handlers::{log_operation, check_permission};
|
||||
use crate::error::{SaasError, SaasResult};
|
||||
use crate::state::AppState;
|
||||
use super::service;
|
||||
@@ -115,6 +116,41 @@ pub async fn increment_usage_dimension(
|
||||
})))
|
||||
}
|
||||
|
||||
/// POST /api/v1/billing/payments — 创建支付订单
|
||||
|
||||
/// PUT /api/v1/admin/accounts/:id/subscription — 管理员切换用户订阅计划(仅 super_admin)
|
||||
pub async fn admin_switch_subscription(
|
||||
State(state): State<AppState>,
|
||||
Extension(ctx): Extension<AuthContext>,
|
||||
Path(account_id): Path<String>,
|
||||
Json(req): Json<AdminSwitchPlanRequest>,
|
||||
) -> SaasResult<Json<serde_json::Value>> {
|
||||
// 仅 super_admin 可操作
|
||||
check_permission(&ctx, "admin:full")?;
|
||||
|
||||
// 验证 plan_id 非空
|
||||
if req.plan_id.trim().is_empty() {
|
||||
return Err(SaasError::InvalidInput("plan_id 不能为空".into()));
|
||||
}
|
||||
|
||||
let sub = service::admin_switch_plan(&state.db, &account_id, &req.plan_id).await?;
|
||||
|
||||
log_operation(
|
||||
&state.db,
|
||||
&ctx.account_id,
|
||||
"billing.admin_switch_plan",
|
||||
"account",
|
||||
&account_id,
|
||||
Some(serde_json::json!({ "plan_id": req.plan_id })),
|
||||
None,
|
||||
).await.ok(); // 日志失败不影响主流程
|
||||
|
||||
Ok(Json(serde_json::json!({
|
||||
"success": true,
|
||||
"subscription": sub,
|
||||
})))
|
||||
}
|
||||
|
||||
/// POST /api/v1/billing/payments — 创建支付订单
|
||||
pub async fn create_payment(
|
||||
State(state): State<AppState>,
|
||||
|
||||
@@ -6,7 +6,7 @@ pub mod handlers;
|
||||
pub mod payment;
|
||||
pub mod invoice_pdf;
|
||||
|
||||
use axum::routing::{get, post};
|
||||
use axum::routing::{get, post, put};
|
||||
|
||||
/// 全部计费路由(用于 main.rs 一次性挂载)
|
||||
pub fn routes() -> axum::Router<crate::state::AppState> {
|
||||
@@ -51,3 +51,9 @@ pub fn mock_routes() -> axum::Router<crate::state::AppState> {
|
||||
.route("/api/v1/billing/mock-pay", get(handlers::mock_pay_page))
|
||||
.route("/api/v1/billing/mock-pay/confirm", post(handlers::mock_pay_confirm))
|
||||
}
|
||||
|
||||
/// 管理员计费路由(需 super_admin 权限)
|
||||
pub fn admin_routes() -> axum::Router<crate::state::AppState> {
|
||||
axum::Router::new()
|
||||
.route("/api/v1/admin/accounts/:id/subscription", put(handlers::admin_switch_subscription))
|
||||
}
|
||||
|
||||
@@ -101,6 +101,7 @@ pub async fn create_payment(
|
||||
|
||||
Ok(PaymentResult {
|
||||
payment_id,
|
||||
invoice_id,
|
||||
trade_no,
|
||||
pay_url,
|
||||
amount_cents: plan.price_cents,
|
||||
@@ -272,8 +273,8 @@ pub async fn query_payment_status(
|
||||
payment_id: &str,
|
||||
account_id: &str,
|
||||
) -> SaasResult<serde_json::Value> {
|
||||
let payment: (String, String, i32, String, String) = sqlx::query_as::<_, (String, String, i32, String, String)>(
|
||||
"SELECT id, method, amount_cents, currency, status \
|
||||
let payment: (String, String, String, i32, String, String) = sqlx::query_as::<_, (String, String, String, i32, String, String)>(
|
||||
"SELECT id, invoice_id, method, amount_cents, currency, status \
|
||||
FROM billing_payments WHERE id = $1 AND account_id = $2"
|
||||
)
|
||||
.bind(payment_id)
|
||||
@@ -282,9 +283,10 @@ pub async fn query_payment_status(
|
||||
.await?
|
||||
.ok_or_else(|| SaasError::NotFound("支付记录不存在".into()))?;
|
||||
|
||||
let (id, method, amount, currency, status) = payment;
|
||||
let (id, invoice_id, method, amount, currency, status) = payment;
|
||||
Ok(serde_json::json!({
|
||||
"id": id,
|
||||
"invoice_id": invoice_id,
|
||||
"method": method,
|
||||
"amount_cents": amount,
|
||||
"currency": currency,
|
||||
|
||||
@@ -300,6 +300,93 @@ pub async fn increment_dimension_by(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 管理员切换用户订阅计划(仅 super_admin 调用)
|
||||
///
|
||||
/// 1. 验证目标 plan_id 存在且 active
|
||||
/// 2. 取消用户当前 active 订阅
|
||||
/// 3. 创建新订阅(status=active, 30 天周期)
|
||||
/// 4. 更新当月 usage quota 的 max_* 列
|
||||
pub async fn admin_switch_plan(
|
||||
pool: &PgPool,
|
||||
account_id: &str,
|
||||
target_plan_id: &str,
|
||||
) -> SaasResult<Subscription> {
|
||||
// 1. 验证目标计划存在且 active
|
||||
let plan = get_plan(pool, target_plan_id).await?
|
||||
.ok_or_else(|| crate::error::SaasError::NotFound("目标计划不存在或已下架".into()))?;
|
||||
|
||||
// 2. 检查是否已订阅该计划
|
||||
if let Some(current_sub) = get_active_subscription(pool, account_id).await? {
|
||||
if current_sub.plan_id == target_plan_id {
|
||||
return Err(crate::error::SaasError::InvalidInput("用户已订阅该计划".into()));
|
||||
}
|
||||
}
|
||||
|
||||
let mut tx = pool.begin().await
|
||||
.map_err(|e| crate::error::SaasError::Internal(format!("开启事务失败: {}", e)))?;
|
||||
|
||||
let now = chrono::Utc::now();
|
||||
|
||||
// 3. 取消当前活跃订阅
|
||||
sqlx::query(
|
||||
"UPDATE billing_subscriptions SET status = 'canceled', canceled_at = $1, updated_at = $1 \
|
||||
WHERE account_id = $2 AND status IN ('trial', 'active', 'past_due')"
|
||||
)
|
||||
.bind(&now)
|
||||
.bind(account_id)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
|
||||
// 4. 创建新订阅
|
||||
let sub_id = uuid::Uuid::new_v4().to_string();
|
||||
let period_start = now;
|
||||
let period_end = now + chrono::Duration::days(30);
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO billing_subscriptions \
|
||||
(id, account_id, plan_id, status, current_period_start, current_period_end, created_at, updated_at) \
|
||||
VALUES ($1, $2, $3, 'active', $4, $5, $6, $6)"
|
||||
)
|
||||
.bind(&sub_id)
|
||||
.bind(account_id)
|
||||
.bind(&target_plan_id)
|
||||
.bind(&period_start)
|
||||
.bind(&period_end)
|
||||
.bind(&now)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
|
||||
// 5. 同步当月 usage quota 的 max_* 列
|
||||
let limits: PlanLimits = serde_json::from_value(plan.limits.clone())
|
||||
.unwrap_or_else(|_| PlanLimits::free());
|
||||
sqlx::query(
|
||||
"UPDATE billing_usage_quotas SET max_input_tokens=$1, max_output_tokens=$2, \
|
||||
max_relay_requests=$3, max_hand_executions=$4, max_pipeline_runs=$5, updated_at=NOW() \
|
||||
WHERE account_id=$6 AND period_start = DATE_TRUNC('month', NOW())"
|
||||
)
|
||||
.bind(limits.max_input_tokens_monthly)
|
||||
.bind(limits.max_output_tokens_monthly)
|
||||
.bind(limits.max_relay_requests_monthly)
|
||||
.bind(limits.max_hand_executions_monthly)
|
||||
.bind(limits.max_pipeline_runs_monthly)
|
||||
.bind(account_id)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
|
||||
tx.commit().await
|
||||
.map_err(|e| crate::error::SaasError::Internal(format!("事务提交失败: {}", e)))?;
|
||||
|
||||
// 查询返回新订阅
|
||||
let sub = sqlx::query_as::<_, Subscription>(
|
||||
"SELECT * FROM billing_subscriptions WHERE id = $1"
|
||||
)
|
||||
.bind(&sub_id)
|
||||
.fetch_one(pool)
|
||||
.await?;
|
||||
|
||||
Ok(sub)
|
||||
}
|
||||
|
||||
/// 检查用量配额
|
||||
///
|
||||
/// P1-7 修复: 从当前 Plan 读取限额(而非 stale 的 usage 表冗余列)
|
||||
|
||||
@@ -155,7 +155,14 @@ pub struct CreatePaymentRequest {
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct PaymentResult {
|
||||
pub payment_id: String,
|
||||
pub invoice_id: String,
|
||||
pub trade_no: String,
|
||||
pub pay_url: String,
|
||||
pub amount_cents: i32,
|
||||
}
|
||||
|
||||
/// 管理员切换计划请求
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct AdminSwitchPlanRequest {
|
||||
pub plan_id: String,
|
||||
}
|
||||
|
||||
@@ -742,7 +742,7 @@ async fn seed_demo_data(pool: &PgPool) -> SaasResult<()> {
|
||||
let id = format!("cfg-{}-{}", cat, key);
|
||||
sqlx::query(
|
||||
"INSERT INTO config_items (id, category, key_path, value_type, current_value, default_value, source, description, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'local', $7, $8, $8) ON CONFLICT (id) DO NOTHING"
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'local', $7, $8, $8) ON CONFLICT (category, key_path) DO NOTHING"
|
||||
).bind(&id).bind(cat).bind(key).bind(vtype).bind(current).bind(default).bind(desc).bind(&ts)
|
||||
.execute(pool).await?;
|
||||
}
|
||||
@@ -854,6 +854,7 @@ async fn fix_seed_data(pool: &PgPool) -> SaasResult<()> {
|
||||
let admin_ids: Vec<String> = admins.into_iter().map(|(id,)| id).collect();
|
||||
|
||||
// 2. 更新 config_items 分类名(旧 → 新)
|
||||
// 先删除目标 (category, key_path) 已存在的旧 category 行,避免唯一约束冲突
|
||||
let category_mappings = [
|
||||
("server", "general"),
|
||||
("llm", "model"),
|
||||
@@ -862,6 +863,13 @@ async fn fix_seed_data(pool: &PgPool) -> SaasResult<()> {
|
||||
("security", "rate_limit"),
|
||||
];
|
||||
for (old_cat, new_cat) in &category_mappings {
|
||||
// 删除旧 category 中与目标 category key_path 冲突的行
|
||||
sqlx::query(
|
||||
"DELETE FROM config_items WHERE category = $1 AND key_path IN \
|
||||
(SELECT key_path FROM config_items WHERE category = $2)"
|
||||
).bind(old_cat).bind(new_cat)
|
||||
.execute(pool).await?;
|
||||
|
||||
let result = sqlx::query(
|
||||
"UPDATE config_items SET category = $1, updated_at = $2 WHERE category = $3"
|
||||
).bind(new_cat).bind(&now).bind(old_cat)
|
||||
@@ -889,7 +897,7 @@ async fn fix_seed_data(pool: &PgPool) -> SaasResult<()> {
|
||||
let id = format!("cfg-{}-{}", cat, key);
|
||||
sqlx::query(
|
||||
"INSERT INTO config_items (id, category, key_path, value_type, current_value, default_value, source, description, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'local', $7, $8, $8) ON CONFLICT (id) DO NOTHING"
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'local', $7, $8, $8) ON CONFLICT (category, key_path) DO NOTHING"
|
||||
).bind(&id).bind(cat).bind(key).bind(vtype).bind(current).bind(default).bind(desc).bind(&now)
|
||||
.execute(pool).await?;
|
||||
}
|
||||
|
||||
@@ -15,24 +15,48 @@ pub async fn list_industries(
|
||||
) -> SaasResult<PaginatedResponse<IndustryListItem>> {
|
||||
let (page, page_size, offset) = normalize_pagination(query.page, query.page_size);
|
||||
|
||||
// 动态构建参数化查询 — 所有用户输入通过 $N 绑定
|
||||
let mut where_parts: Vec<String> = vec!["1=1".to_string()];
|
||||
let mut param_idx = 3; // $1=LIMIT, $2=OFFSET, $3+=filters
|
||||
let status_param: Option<String> = query.status.clone();
|
||||
let source_param: Option<String> = query.source.clone();
|
||||
|
||||
// 构建 WHERE 条件 — 每个查询独立的参数编号
|
||||
let mut where_parts: Vec<String> = vec!["1=1".to_string()];
|
||||
|
||||
// count 查询:参数从 $1 开始
|
||||
let mut count_params: Vec<String> = Vec::new();
|
||||
let mut count_idx = 1;
|
||||
if status_param.is_some() {
|
||||
where_parts.push(format!("status = ${}", param_idx));
|
||||
param_idx += 1;
|
||||
count_params.push(format!("status = ${}", count_idx));
|
||||
count_idx += 1;
|
||||
}
|
||||
if source_param.is_some() {
|
||||
where_parts.push(format!("source = ${}", param_idx));
|
||||
param_idx += 1;
|
||||
count_params.push(format!("source = ${}", count_idx));
|
||||
count_idx += 1;
|
||||
}
|
||||
let where_sql = where_parts.join(" AND ");
|
||||
let count_where = if count_params.is_empty() {
|
||||
"1=1".to_string()
|
||||
} else {
|
||||
format!("1=1 AND {}", count_params.join(" AND "))
|
||||
};
|
||||
|
||||
// items 查询:$1=LIMIT, $2=OFFSET, $3+=filters
|
||||
let mut items_params: Vec<String> = Vec::new();
|
||||
let mut items_idx = 3;
|
||||
if status_param.is_some() {
|
||||
items_params.push(format!("status = ${}", items_idx));
|
||||
items_idx += 1;
|
||||
}
|
||||
if source_param.is_some() {
|
||||
items_params.push(format!("source = ${}", items_idx));
|
||||
items_idx += 1;
|
||||
}
|
||||
let items_where = if items_params.is_empty() {
|
||||
"1=1".to_string()
|
||||
} else {
|
||||
format!("1=1 AND {}", items_params.join(" AND "))
|
||||
};
|
||||
|
||||
// count 查询
|
||||
let count_sql = format!("SELECT COUNT(*) FROM industries WHERE {}", where_sql);
|
||||
let count_sql = format!("SELECT COUNT(*) FROM industries WHERE {}", count_where);
|
||||
let mut count_q = sqlx::query_scalar::<_, i64>(&count_sql);
|
||||
if let Some(ref s) = status_param { count_q = count_q.bind(s); }
|
||||
if let Some(ref s) = source_param { count_q = count_q.bind(s); }
|
||||
@@ -44,7 +68,7 @@ pub async fn list_industries(
|
||||
COALESCE(jsonb_array_length(keywords), 0) as keywords_count, \
|
||||
created_at, updated_at \
|
||||
FROM industries WHERE {} ORDER BY source, id LIMIT $1 OFFSET $2",
|
||||
where_sql
|
||||
items_where
|
||||
);
|
||||
let mut items_q = sqlx::query_as::<_, IndustryListItem>(&items_sql)
|
||||
.bind(page_size as i64)
|
||||
|
||||
@@ -29,7 +29,7 @@ pub struct IndustryListItem {
|
||||
pub description: String,
|
||||
pub status: String,
|
||||
pub source: String,
|
||||
pub keywords_count: i64,
|
||||
pub keywords_count: i32,
|
||||
pub created_at: chrono::DateTime<chrono::Utc>,
|
||||
pub updated_at: chrono::DateTime<chrono::Utc>,
|
||||
}
|
||||
|
||||
@@ -99,6 +99,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
if let Err(e) = zclaw_saas::crypto::migrate_legacy_totp_secrets(&db, &enc_key).await {
|
||||
tracing::warn!("TOTP legacy migration check failed: {}", e);
|
||||
}
|
||||
// Self-heal: re-encrypt provider keys with current key
|
||||
zclaw_saas::relay::key_pool::heal_provider_keys(&db, &enc_key).await;
|
||||
} else {
|
||||
drop(config_for_migration);
|
||||
}
|
||||
@@ -359,6 +361,7 @@ async fn build_router(state: AppState) -> axum::Router {
|
||||
.merge(zclaw_saas::scheduled_task::routes())
|
||||
.merge(zclaw_saas::telemetry::routes())
|
||||
.merge(zclaw_saas::billing::routes())
|
||||
.merge(zclaw_saas::billing::admin_routes())
|
||||
.merge(zclaw_saas::knowledge::routes())
|
||||
.merge(zclaw_saas::industry::routes())
|
||||
.layer(middleware::from_fn_with_state(
|
||||
|
||||
@@ -258,7 +258,8 @@ pub async fn seed_default_config_items(db: &PgPool) -> SaasResult<usize> {
|
||||
let id = uuid::Uuid::new_v4().to_string();
|
||||
sqlx::query(
|
||||
"INSERT INTO config_items (id, category, key_path, value_type, current_value, default_value, source, description, requires_restart, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'local', $7, false, $8, $8)"
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'local', $7, false, $8, $8)
|
||||
ON CONFLICT (category, key_path) DO NOTHING"
|
||||
)
|
||||
.bind(&id).bind(category).bind(key_path).bind(value_type)
|
||||
.bind(current_value).bind(default_value).bind(description).bind(&now)
|
||||
@@ -374,7 +375,8 @@ pub async fn sync_config(
|
||||
let category = parts.first().unwrap_or(&"general").to_string();
|
||||
sqlx::query(
|
||||
"INSERT INTO config_items (id, category, key_path, value_type, current_value, default_value, source, description, requires_restart, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, 'string', $4, $4, 'local', '客户端推送', false, $5, $5)"
|
||||
VALUES ($1, $2, $3, 'string', $4, $4, 'local', '客户端推送', false, $5, $5)
|
||||
ON CONFLICT (category, key_path) DO NOTHING"
|
||||
)
|
||||
.bind(&id).bind(&category).bind(key).bind(val).bind(&now)
|
||||
.execute(db).await?;
|
||||
|
||||
@@ -419,21 +419,33 @@ pub async fn revoke_account_api_key(
|
||||
pub async fn get_usage_stats(
|
||||
db: &PgPool, account_id: &str, query: &UsageQuery,
|
||||
) -> SaasResult<UsageStats> {
|
||||
// Optional date filters: pass as TEXT with explicit $N::timestamptz SQL cast.
|
||||
// This avoids the sqlx NULL-without-type-OID problem — PG's ::timestamptz
|
||||
// gives a typed NULL even when sqlx sends an untyped NULL.
|
||||
// === Totals: from billing_usage_quotas (authoritative source) ===
|
||||
// billing_usage_quotas is written to on every relay request (both JSON and SSE),
|
||||
// whereas usage_records has 0 tokens for SSE requests. Use billing as the primary source.
|
||||
let billing_row = sqlx::query(
|
||||
"SELECT COALESCE(SUM(input_tokens), 0)::bigint,
|
||||
COALESCE(SUM(output_tokens), 0)::bigint,
|
||||
COALESCE(SUM(relay_requests), 0)::bigint
|
||||
FROM billing_usage_quotas WHERE account_id = $1"
|
||||
)
|
||||
.bind(account_id)
|
||||
.fetch_one(db)
|
||||
.await?;
|
||||
let total_input: i64 = billing_row.try_get(0).unwrap_or(0);
|
||||
let total_output: i64 = billing_row.try_get(1).unwrap_or(0);
|
||||
let total_requests: i64 = billing_row.try_get(2).unwrap_or(0);
|
||||
|
||||
// === Breakdowns: from usage_records (per-request detail) ===
|
||||
// Optional date filters: pass as TEXT with explicit SQL cast.
|
||||
let from_str: Option<&str> = query.from.as_deref();
|
||||
// For 'to' date-only strings, append T23:59:59 to include the entire day
|
||||
let to_str: Option<String> = query.to.as_ref().map(|s| {
|
||||
if s.len() == 10 { format!("{}T23:59:59", s) } else { s.clone() }
|
||||
});
|
||||
|
||||
// Build SQL dynamically to avoid sqlx NULL-without-type-OID problem entirely.
|
||||
// Date parameters are injected as SQL literals (validated above via chrono parse).
|
||||
// Only account_id uses parameterized binding to prevent SQL injection on user input.
|
||||
// Build SQL dynamically for usage_records breakdowns.
|
||||
// Date parameters are injected as SQL literals (validated via chrono parse).
|
||||
let mut where_parts = vec![format!("account_id = '{}'", account_id.replace('\'', "''"))];
|
||||
if let Some(f) = from_str {
|
||||
// Validate: must be parseable as a date
|
||||
let valid = chrono::NaiveDate::parse_from_str(f, "%Y-%m-%d").is_ok()
|
||||
|| chrono::NaiveDateTime::parse_from_str(f, "%Y-%m-%dT%H:%M:%S%.f").is_ok();
|
||||
if !valid {
|
||||
@@ -457,15 +469,6 @@ pub async fn get_usage_stats(
|
||||
}
|
||||
let where_clause = where_parts.join(" AND ");
|
||||
|
||||
let total_sql = format!(
|
||||
"SELECT COUNT(*)::bigint, COALESCE(SUM(input_tokens), 0)::bigint, COALESCE(SUM(output_tokens), 0)::bigint
|
||||
FROM usage_records WHERE {}", where_clause
|
||||
);
|
||||
let row = sqlx::query(&total_sql).fetch_one(db).await?;
|
||||
let total_requests: i64 = row.try_get(0).unwrap_or(0);
|
||||
let total_input: i64 = row.try_get(1).unwrap_or(0);
|
||||
let total_output: i64 = row.try_get(2).unwrap_or(0);
|
||||
|
||||
// 按模型统计
|
||||
let by_model_sql = format!(
|
||||
"SELECT provider_id, model_id, COUNT(*)::bigint AS request_count, COALESCE(SUM(input_tokens), 0)::bigint AS input_tokens, COALESCE(SUM(output_tokens), 0)::bigint AS output_tokens
|
||||
|
||||
@@ -68,7 +68,7 @@ pub async fn get_prompt(
|
||||
Ok(Json(service::get_template_by_name(&state.db, &name).await?))
|
||||
}
|
||||
|
||||
/// PUT /api/v1/prompts/{name} — 更新模板元数据
|
||||
/// PUT /api/v1/prompts/{name} — 更新模板元数据 + 可选自动创建新版本
|
||||
pub async fn update_prompt(
|
||||
State(state): State<AppState>,
|
||||
Extension(ctx): Extension<AuthContext>,
|
||||
@@ -82,6 +82,11 @@ pub async fn update_prompt(
|
||||
&state.db, &tmpl.id,
|
||||
req.description.as_deref(),
|
||||
req.status.as_deref(),
|
||||
req.system_prompt.as_deref(),
|
||||
req.user_prompt_template.as_deref(),
|
||||
req.variables.clone(),
|
||||
req.changelog.as_deref(),
|
||||
req.min_app_version.as_deref(),
|
||||
).await?;
|
||||
|
||||
log_operation(&state.db, &ctx.account_id, "prompt.update", "prompt", &tmpl.id,
|
||||
@@ -99,7 +104,7 @@ pub async fn archive_prompt(
|
||||
check_permission(&ctx, "prompt:admin")?;
|
||||
|
||||
let tmpl = service::get_template_by_name(&state.db, &name).await?;
|
||||
let result = service::update_template(&state.db, &tmpl.id, None, Some("archived")).await?;
|
||||
let result = service::update_template(&state.db, &tmpl.id, None, Some("archived"), None, None, None, None, None).await?;
|
||||
|
||||
log_operation(&state.db, &ctx.account_id, "prompt.archive", "prompt", &tmpl.id, None, ctx.client_ip.as_deref()).await?;
|
||||
|
||||
|
||||
@@ -108,12 +108,20 @@ pub async fn list_templates(
|
||||
Ok(PaginatedResponse { items, total, page, page_size })
|
||||
}
|
||||
|
||||
/// 更新模板元数据(不修改内容)
|
||||
/// 更新模板元数据 + 可选自动创建新版本
|
||||
///
|
||||
/// 当传入 `system_prompt` 时,自动创建新版本并递增 `current_version`。
|
||||
/// 仅更新 `description`/`status` 时不会递增版本号。
|
||||
pub async fn update_template(
|
||||
db: &PgPool,
|
||||
id: &str,
|
||||
description: Option<&str>,
|
||||
status: Option<&str>,
|
||||
system_prompt: Option<&str>,
|
||||
user_prompt_template: Option<&str>,
|
||||
variables: Option<serde_json::Value>,
|
||||
changelog: Option<&str>,
|
||||
min_app_version: Option<&str>,
|
||||
) -> SaasResult<PromptTemplateInfo> {
|
||||
let now = chrono::Utc::now();
|
||||
|
||||
@@ -130,6 +138,11 @@ pub async fn update_template(
|
||||
.bind(st).bind(&now).bind(id).execute(db).await?;
|
||||
}
|
||||
|
||||
// Auto-create version when content is provided
|
||||
if let Some(sp) = system_prompt {
|
||||
create_version(db, id, sp, user_prompt_template, variables, changelog, min_app_version).await?;
|
||||
}
|
||||
|
||||
get_template(db, id).await
|
||||
}
|
||||
|
||||
|
||||
@@ -33,6 +33,12 @@ pub struct CreatePromptRequest {
|
||||
pub struct UpdatePromptRequest {
|
||||
pub description: Option<String>,
|
||||
pub status: Option<String>,
|
||||
/// If provided, auto-creates a new version with this content
|
||||
pub system_prompt: Option<String>,
|
||||
pub user_prompt_template: Option<String>,
|
||||
pub variables: Option<serde_json::Value>,
|
||||
pub changelog: Option<String>,
|
||||
pub min_app_version: Option<String>,
|
||||
}
|
||||
|
||||
// --- Prompt Version ---
|
||||
|
||||
@@ -333,14 +333,8 @@ pub async fn chat_completions(
|
||||
}
|
||||
}
|
||||
|
||||
// SSE: relay_requests 实时递增(tokens 由 AggregateUsageWorker 对账修正)
|
||||
if let Err(e) = crate::billing::service::increment_dimension(
|
||||
&state.db, &account_id_usage, "relay_requests",
|
||||
).await {
|
||||
tracing::warn!("Failed to increment billing relay_requests for {}: {}", account_id_usage, e);
|
||||
}
|
||||
|
||||
// SSE 流已返回,递减队列计数器(流式任务开始处理)
|
||||
// 注意: relay_requests 和 tokens 统一由 execute_relay spawned task 中的 increment_usage 递增
|
||||
state.cache.relay_dequeue(&account_id_usage);
|
||||
|
||||
let response = axum::response::Response::builder()
|
||||
@@ -384,13 +378,14 @@ pub async fn list_available_models(
|
||||
State(state): State<AppState>,
|
||||
_ctx: Extension<AuthContext>,
|
||||
) -> SaasResult<Json<Vec<serde_json::Value>>> {
|
||||
// 单次 JOIN 查询替代 2 次全量加载
|
||||
// 单次 JOIN 查询 + provider_keys 过滤:仅返回有活跃 API Key 的 provider 下的模型
|
||||
let rows: Vec<(String, String, String, i64, i64, bool, bool, bool, String)> = sqlx::query_as(
|
||||
"SELECT m.model_id, m.provider_id, m.alias, m.context_window,
|
||||
"SELECT DISTINCT m.model_id, m.provider_id, m.alias, m.context_window,
|
||||
m.max_output_tokens, m.supports_streaming, m.supports_vision,
|
||||
m.is_embedding, m.model_type
|
||||
FROM models m
|
||||
INNER JOIN providers p ON m.provider_id = p.id
|
||||
INNER JOIN provider_keys pk ON pk.provider_id = p.id AND pk.is_active = true
|
||||
WHERE m.enabled = true AND p.enabled = true
|
||||
ORDER BY m.provider_id, m.model_id"
|
||||
)
|
||||
|
||||
@@ -117,7 +117,13 @@ pub async fn select_best_key(db: &PgPool, provider_id: &str, enc_key: &[u8; 32])
|
||||
}
|
||||
|
||||
// 此 Key 可用 — 解密 key_value
|
||||
let decrypted_kv = decrypt_key_value(key_value, enc_key)?;
|
||||
let decrypted_kv = match decrypt_key_value(key_value, enc_key) {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
tracing::warn!("Key {} decryption failed, skipping: {}", id, e);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let selection = KeySelection {
|
||||
key: PoolKey {
|
||||
id: id.clone(),
|
||||
@@ -371,3 +377,52 @@ fn parse_cooldown_remaining(cooldown_until: &str, now: &str) -> i64 {
|
||||
_ => 60, // 默认 60 秒
|
||||
}
|
||||
}
|
||||
|
||||
/// Startup self-healing: re-encrypt all provider keys with current encryption key.
|
||||
///
|
||||
/// For each encrypted key, attempts decryption with the current key.
|
||||
/// If decryption succeeds, re-encrypts and updates in-place (idempotent).
|
||||
/// If decryption fails, logs a warning and marks the key inactive.
|
||||
pub async fn heal_provider_keys(db: &PgPool, enc_key: &[u8; 32]) -> usize {
|
||||
let rows: Vec<(String, String)> = sqlx::query_as(
|
||||
"SELECT id, key_value FROM provider_keys WHERE key_value LIKE 'enc:%'"
|
||||
).fetch_all(db).await.unwrap_or_default();
|
||||
|
||||
let mut healed = 0usize;
|
||||
let mut failed = 0usize;
|
||||
|
||||
for (id, key_value) in &rows {
|
||||
match crypto::decrypt_value(key_value, enc_key) {
|
||||
Ok(plaintext) => {
|
||||
// Re-encrypt with current key (idempotent if same key)
|
||||
match crypto::encrypt_value(&plaintext, enc_key) {
|
||||
Ok(new_encrypted) => {
|
||||
if let Err(e) = sqlx::query(
|
||||
"UPDATE provider_keys SET key_value = $1 WHERE id = $2"
|
||||
).bind(&new_encrypted).bind(id).execute(db).await {
|
||||
tracing::warn!("[heal] Failed to update key {}: {}", id, e);
|
||||
} else {
|
||||
healed += 1;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!("[heal] Failed to re-encrypt key {}: {}", id, e);
|
||||
failed += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!("[heal] Cannot decrypt key {}, marking inactive: {}", id, e);
|
||||
let _ = sqlx::query(
|
||||
"UPDATE provider_keys SET is_active = FALSE WHERE id = $1"
|
||||
).bind(id).execute(db).await;
|
||||
failed += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if healed > 0 || failed > 0 {
|
||||
tracing::info!("[heal] Provider keys: {} re-encrypted, {} failed", healed, failed);
|
||||
}
|
||||
healed
|
||||
}
|
||||
|
||||
@@ -192,21 +192,39 @@ pub async fn update_task_status(
|
||||
struct SseUsageCapture {
|
||||
input_tokens: i64,
|
||||
output_tokens: i64,
|
||||
/// 标记上游 stream 是否已结束(channel 关闭或收到 [DONE])
|
||||
stream_done: bool,
|
||||
}
|
||||
|
||||
impl SseUsageCapture {
|
||||
fn parse_sse_line(&mut self, line: &str) {
|
||||
if let Some(data) = line.strip_prefix("data: ") {
|
||||
if data == "[DONE]" {
|
||||
return;
|
||||
}
|
||||
if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(data) {
|
||||
if let Some(usage) = parsed.get("usage") {
|
||||
if let Some(input) = usage.get("prompt_tokens").and_then(|v| v.as_i64()) {
|
||||
self.input_tokens = input;
|
||||
}
|
||||
if let Some(output) = usage.get("completion_tokens").and_then(|v| v.as_i64()) {
|
||||
self.output_tokens = output;
|
||||
// 兼容 "data: " 和 "data:" 两种前缀
|
||||
let data = if let Some(d) = line.strip_prefix("data: ") {
|
||||
d
|
||||
} else if let Some(d) = line.strip_prefix("data:") {
|
||||
d.trim_start()
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
if data == "[DONE]" {
|
||||
self.stream_done = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(data) {
|
||||
if let Some(usage) = parsed.get("usage") {
|
||||
// 标准 OpenAI 格式: prompt_tokens / completion_tokens
|
||||
if let Some(input) = usage.get("prompt_tokens").and_then(|v| v.as_i64()) {
|
||||
self.input_tokens = input;
|
||||
}
|
||||
if let Some(output) = usage.get("completion_tokens").and_then(|v| v.as_i64()) {
|
||||
self.output_tokens = output;
|
||||
}
|
||||
// 兜底: 某些 provider 只返回 total_tokens
|
||||
if self.input_tokens == 0 && self.output_tokens > 0 {
|
||||
if let Some(total) = usage.get("total_tokens").and_then(|v| v.as_i64()) {
|
||||
self.input_tokens = (total - self.output_tokens).max(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -315,6 +333,12 @@ pub async fn execute_relay(
|
||||
let task_id_clone = task_id.to_string();
|
||||
let key_id_for_spawn = key_id.clone();
|
||||
let account_id_clone = account_id.to_string();
|
||||
let provider_id_clone = provider_id.to_string();
|
||||
// 从 request_body 提取 model_id 用于 usage_records 归因
|
||||
let model_id_clone = serde_json::from_str::<serde_json::Value>(request_body)
|
||||
.ok()
|
||||
.and_then(|v| v.get("model").and_then(|m| m.as_str()).map(String::from))
|
||||
.unwrap_or_default();
|
||||
|
||||
// Bounded channel for backpressure: 128 chunks (~128KB) buffer.
|
||||
// If the client reads slowly, the upstream is signaled via
|
||||
@@ -350,6 +374,11 @@ pub async fn execute_relay(
|
||||
}
|
||||
}
|
||||
}
|
||||
// Stream 结束后设置 stream_done 标志,通知 usage 轮询任务
|
||||
{
|
||||
let mut capture = usage_capture_clone.lock().await;
|
||||
capture.stream_done = true;
|
||||
}
|
||||
});
|
||||
|
||||
// Build StreamBridge: wraps the bounded receiver with heartbeat,
|
||||
@@ -371,8 +400,8 @@ pub async fn execute_relay(
|
||||
|
||||
tokio::spawn(async move {
|
||||
let _permit = permit; // 持有 permit 直到任务完成
|
||||
// 等待 SSE 流结束 — 等待 capture 稳定(tokens 不再增长)
|
||||
// 替代原来固定 500ms 的 race condition
|
||||
// 等待 SSE 流结束 — 优先等待 stream_done 标志,
|
||||
// 兜底使用 token 稳定检测 + 最大等待时间
|
||||
let max_wait = std::time::Duration::from_secs(120);
|
||||
let poll_interval = std::time::Duration::from_millis(500);
|
||||
let start = tokio::time::Instant::now();
|
||||
@@ -381,11 +410,15 @@ pub async fn execute_relay(
|
||||
let (input, output) = loop {
|
||||
tokio::time::sleep(poll_interval).await;
|
||||
let capture = usage_capture.lock().await;
|
||||
// 优先: stream_done 标志表示上游已结束
|
||||
if capture.stream_done {
|
||||
break (capture.input_tokens, capture.output_tokens);
|
||||
}
|
||||
let total = capture.input_tokens + capture.output_tokens;
|
||||
// 兜底: token 数稳定检测(兼容不发送 [DONE] 的 provider)
|
||||
if total == last_tokens && total > 0 {
|
||||
stable_count += 1;
|
||||
if stable_count >= 3 {
|
||||
// 连续 3 次稳定(1.5s),认为流结束
|
||||
break (capture.input_tokens, capture.output_tokens);
|
||||
}
|
||||
} else {
|
||||
@@ -393,8 +426,13 @@ pub async fn execute_relay(
|
||||
last_tokens = total;
|
||||
}
|
||||
drop(capture);
|
||||
// 最终兜底: 超时保护
|
||||
if start.elapsed() >= max_wait {
|
||||
let capture = usage_capture.lock().await;
|
||||
tracing::warn!(
|
||||
"SSE usage capture timed out for task {}, tokens: in={} out={}",
|
||||
task_id_clone, capture.input_tokens, capture.output_tokens
|
||||
);
|
||||
break (capture.input_tokens, capture.output_tokens);
|
||||
}
|
||||
};
|
||||
@@ -402,16 +440,23 @@ pub async fn execute_relay(
|
||||
let input_opt = if input > 0 { Some(input) } else { None };
|
||||
let output_opt = if output > 0 { Some(output) } else { None };
|
||||
|
||||
// Record task status + billing usage + key usage
|
||||
// Record task status + billing usage + key usage + usage_records
|
||||
let db_op = async {
|
||||
if let Err(e) = update_task_status(&db_clone, &task_id_clone, "completed", input_opt, output_opt, None).await {
|
||||
tracing::warn!("Failed to update task status after SSE stream: {}", e);
|
||||
}
|
||||
// P2-9 修复: SSE 路径也更新 billing_usage_quotas
|
||||
// SSE 路径回写 usage_records + billing 配额
|
||||
if input > 0 || output > 0 {
|
||||
// 回写 usage_records 真实 token(补全 handlers.rs 中 token=0 的占位记录)
|
||||
if let Err(e) = crate::model_config::service::record_usage(
|
||||
&db_clone, &account_id_clone, &provider_id_clone, &model_id_clone,
|
||||
input, output, None, "success", None,
|
||||
).await {
|
||||
tracing::warn!("Failed to record SSE usage for task {}: {}", task_id_clone, e);
|
||||
}
|
||||
// 更新 billing_usage_quotas(tokens + relay_requests 同步递增)
|
||||
if let Err(e) = crate::billing::service::increment_usage(
|
||||
&db_clone, &account_id_clone,
|
||||
input, output,
|
||||
&db_clone, &account_id_clone, input, output,
|
||||
).await {
|
||||
tracing::warn!("Failed to increment billing usage for SSE task {}: {}", task_id_clone, e);
|
||||
}
|
||||
|
||||
@@ -82,6 +82,7 @@ pub fn start_scheduler(config: &SchedulerConfig, _db: PgPool, dispatcher: Worker
|
||||
pub fn start_db_cleanup_tasks(db: PgPool) {
|
||||
let db_devices = db.clone();
|
||||
let db_key_pool = db.clone();
|
||||
let db_relay = db.clone();
|
||||
|
||||
// 每 24 小时清理不活跃设备
|
||||
tokio::spawn(async move {
|
||||
@@ -128,6 +129,28 @@ pub fn start_db_cleanup_tasks(db: PgPool) {
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// 每 5 分钟清理超时的 relay_tasks(status=processing 且 updated_at 超过 10 分钟)
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(Duration::from_secs(300));
|
||||
loop {
|
||||
interval.tick().await;
|
||||
match sqlx::query(
|
||||
"UPDATE relay_tasks SET status = 'failed', error_message = 'timeout: upstream not responding', completed_at = NOW() \
|
||||
WHERE status = 'processing' AND updated_at < NOW() - INTERVAL '10 minutes'"
|
||||
)
|
||||
.execute(&db_relay)
|
||||
.await
|
||||
{
|
||||
Ok(result) => {
|
||||
if result.rows_affected() > 0 {
|
||||
tracing::warn!("Cleaned up {} timed-out relay tasks (>10m processing)", result.rows_affected());
|
||||
}
|
||||
}
|
||||
Err(e) => tracing::error!("Relay task timeout cleanup failed: {}", e),
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// 用户任务调度器
|
||||
|
||||
@@ -47,6 +47,7 @@ pub struct ClassroomChatCmdRequest {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Send a message in the classroom chat and get multi-agent responses.
|
||||
// @reserved: classroom chat functionality
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn classroom_chat(
|
||||
|
||||
@@ -88,6 +88,7 @@ fn stage_name(stage: &GenerationStage) -> &'static str {
|
||||
/// Start classroom generation (4-stage pipeline).
|
||||
/// Progress events are emitted via `classroom:progress`.
|
||||
/// Supports cancellation between stages by removing the task from GenerationTasks.
|
||||
// @reserved: classroom generation
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn classroom_generate(
|
||||
@@ -270,6 +271,7 @@ pub async fn classroom_cancel_generation(
|
||||
}
|
||||
|
||||
/// Retrieve a generated classroom by ID
|
||||
// @reserved: classroom generation
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn classroom_get(
|
||||
|
||||
@@ -101,6 +101,7 @@ impl ClassroomPersistence {
|
||||
}
|
||||
|
||||
/// Delete a classroom and its chat history.
|
||||
#[allow(dead_code)]
|
||||
pub async fn delete_classroom(&self, classroom_id: &str) -> Result<(), String> {
|
||||
let mut conn = self.conn.lock().await;
|
||||
sqlx::query("DELETE FROM classrooms WHERE id = ?")
|
||||
|
||||
@@ -52,6 +52,7 @@ pub(crate) struct ProcessLogsResponse {
|
||||
}
|
||||
|
||||
/// Get ZCLAW Kernel status
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_status(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
@@ -59,6 +60,7 @@ pub fn zclaw_status(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
}
|
||||
|
||||
/// Start ZCLAW Kernel
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_start(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
@@ -69,6 +71,7 @@ pub fn zclaw_start(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
}
|
||||
|
||||
/// Stop ZCLAW Kernel
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_stop(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
@@ -78,6 +81,7 @@ pub fn zclaw_stop(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
}
|
||||
|
||||
/// Restart ZCLAW Kernel
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_restart(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
@@ -88,6 +92,7 @@ pub fn zclaw_restart(app: AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
}
|
||||
|
||||
/// Get local auth token from ZCLAW config
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_local_auth() -> Result<LocalGatewayAuth, String> {
|
||||
@@ -95,6 +100,7 @@ pub fn zclaw_local_auth() -> Result<LocalGatewayAuth, String> {
|
||||
}
|
||||
|
||||
/// Prepare ZCLAW for Tauri (update allowed origins)
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_prepare_for_tauri(app: AppHandle) -> Result<LocalGatewayPrepareResult, String> {
|
||||
@@ -102,6 +108,7 @@ pub fn zclaw_prepare_for_tauri(app: AppHandle) -> Result<LocalGatewayPrepareResu
|
||||
}
|
||||
|
||||
/// Approve device pairing request
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_approve_device_pairing(
|
||||
@@ -122,6 +129,7 @@ pub fn zclaw_doctor(app: AppHandle) -> Result<String, String> {
|
||||
}
|
||||
|
||||
/// List ZCLAW processes
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_process_list(app: AppHandle) -> Result<ProcessListResponse, String> {
|
||||
@@ -160,6 +168,7 @@ pub fn zclaw_process_list(app: AppHandle) -> Result<ProcessListResponse, String>
|
||||
}
|
||||
|
||||
/// Get ZCLAW process logs
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_process_logs(
|
||||
@@ -224,6 +233,7 @@ pub fn zclaw_process_logs(
|
||||
}
|
||||
|
||||
/// Get ZCLAW version information
|
||||
// @reserved: system control
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_version(app: AppHandle) -> Result<VersionResponse, String> {
|
||||
|
||||
@@ -112,6 +112,7 @@ fn get_process_uptime(status: &LocalGatewayStatus) -> Option<u64> {
|
||||
}
|
||||
|
||||
/// Perform comprehensive health check on ZCLAW Kernel
|
||||
// @reserved: system health check
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn zclaw_health_check(
|
||||
|
||||
@@ -10,12 +10,11 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{debug, warn};
|
||||
use uuid::Uuid;
|
||||
use zclaw_growth::ExperienceStore;
|
||||
use zclaw_types::Result;
|
||||
|
||||
use super::pain_aggregator::PainPoint;
|
||||
use super::solution_generator::{Proposal, ProposalStatus};
|
||||
use super::solution_generator::Proposal;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Shared completion status
|
||||
|
||||
126
desktop/src-tauri/src/intelligence/health_snapshot.rs
Normal file
126
desktop/src-tauri/src/intelligence/health_snapshot.rs
Normal file
@@ -0,0 +1,126 @@
|
||||
//! Health Snapshot — on-demand query for all subsystem health status
|
||||
//!
|
||||
//! Provides a single Tauri command that aggregates health data from:
|
||||
//! - Intelligence Heartbeat engine (running state, config, alerts)
|
||||
//! - Memory pipeline (entries count, storage size)
|
||||
//!
|
||||
//! Connection and SaaS status are managed by frontend stores and not included here.
|
||||
|
||||
use serde::Serialize;
|
||||
use super::heartbeat::{HeartbeatConfig, HeartbeatEngineState, HeartbeatResult};
|
||||
|
||||
/// Aggregated health snapshot from Rust backend
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct HealthSnapshot {
|
||||
pub timestamp: String,
|
||||
pub intelligence: IntelligenceHealth,
|
||||
pub memory: MemoryHealth,
|
||||
}
|
||||
|
||||
/// Intelligence heartbeat engine status
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct IntelligenceHealth {
|
||||
pub engine_running: bool,
|
||||
pub config: HeartbeatConfig,
|
||||
pub last_tick: Option<String>,
|
||||
pub alert_count_24h: usize,
|
||||
pub total_checks: usize,
|
||||
}
|
||||
|
||||
/// Memory pipeline status
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct MemoryHealth {
|
||||
pub total_entries: usize,
|
||||
pub storage_size_bytes: u64,
|
||||
pub last_extraction: Option<String>,
|
||||
}
|
||||
|
||||
/// Query a unified health snapshot for an agent
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn health_snapshot(
|
||||
agent_id: String,
|
||||
heartbeat_state: tauri::State<'_, HeartbeatEngineState>,
|
||||
) -> Result<HealthSnapshot, String> {
|
||||
let engines = heartbeat_state.lock().await;
|
||||
|
||||
let engine = engines
|
||||
.get(&agent_id)
|
||||
.ok_or_else(|| format!("Heartbeat engine not initialized for agent: {}", agent_id))?;
|
||||
|
||||
let engine_running = engine.is_running().await;
|
||||
let config = engine.get_config().await;
|
||||
let history: Vec<HeartbeatResult> = engine.get_history(100).await;
|
||||
|
||||
// Calculate alert count in the last 24 hours
|
||||
let now = chrono::Utc::now();
|
||||
let twenty_four_hours_ago = now - chrono::Duration::hours(24);
|
||||
let alert_count_24h = history
|
||||
.iter()
|
||||
.filter(|r| {
|
||||
r.timestamp.parse::<chrono::DateTime<chrono::Utc>>()
|
||||
.map(|t| t > twenty_four_hours_ago)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.flat_map(|r| r.alerts.iter())
|
||||
.count();
|
||||
|
||||
let last_tick = history.first().map(|r| r.timestamp.clone());
|
||||
|
||||
// Memory health from cached stats (fallback to zeros)
|
||||
// Read cache in a separate scope to ensure RwLockReadGuard is dropped before any .await
|
||||
let cached_stats: Option<super::heartbeat::MemoryStatsCache> = {
|
||||
let cache = super::heartbeat::get_memory_stats_cache();
|
||||
match cache.read() {
|
||||
Ok(c) => c.get(&agent_id).cloned(),
|
||||
Err(_) => None,
|
||||
}
|
||||
}; // RwLockReadGuard dropped here
|
||||
|
||||
let memory = match cached_stats {
|
||||
Some(s) => MemoryHealth {
|
||||
total_entries: s.total_entries,
|
||||
storage_size_bytes: s.storage_size_bytes as u64,
|
||||
last_extraction: s.last_updated,
|
||||
},
|
||||
None => {
|
||||
// Fallback: try to query VikingStorage directly
|
||||
match crate::viking_commands::get_storage().await {
|
||||
Ok(storage) => {
|
||||
match zclaw_growth::VikingStorage::find_by_prefix(&*storage, &format!("mem:{}", agent_id)).await {
|
||||
Ok(entries) => MemoryHealth {
|
||||
total_entries: entries.len(),
|
||||
storage_size_bytes: 0,
|
||||
last_extraction: None,
|
||||
},
|
||||
Err(_) => MemoryHealth {
|
||||
total_entries: 0,
|
||||
storage_size_bytes: 0,
|
||||
last_extraction: None,
|
||||
},
|
||||
}
|
||||
}
|
||||
Err(_) => MemoryHealth {
|
||||
total_entries: 0,
|
||||
storage_size_bytes: 0,
|
||||
last_extraction: None,
|
||||
},
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(HealthSnapshot {
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
intelligence: IntelligenceHealth {
|
||||
engine_running,
|
||||
config,
|
||||
last_tick,
|
||||
alert_count_24h,
|
||||
total_checks: 5, // Fixed: 5 built-in checks
|
||||
},
|
||||
memory,
|
||||
})
|
||||
}
|
||||
@@ -13,9 +13,10 @@ use chrono::{Local, Timelike};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use std::sync::OnceLock;
|
||||
use std::time::Duration;
|
||||
use tokio::sync::{broadcast, Mutex};
|
||||
use tokio::time::interval;
|
||||
use tokio::sync::{broadcast, Mutex, Notify};
|
||||
use tauri::{AppHandle, Emitter};
|
||||
|
||||
// === Types ===
|
||||
|
||||
@@ -91,9 +92,9 @@ pub enum HeartbeatStatus {
|
||||
Alert,
|
||||
}
|
||||
|
||||
/// Type alias for heartbeat check function
|
||||
#[allow(dead_code)] // Reserved for future proactive check registration
|
||||
type HeartbeatCheckFn = Box<dyn Fn(String) -> std::pin::Pin<Box<dyn std::future::Future<Output = Option<HeartbeatAlert>> + Send>> + Send + Sync>;
|
||||
/// Global AppHandle for emitting heartbeat alerts to frontend
|
||||
/// Set by heartbeat_init, used by background tick task
|
||||
static HEARTBEAT_APP_HANDLE: OnceLock<AppHandle> = OnceLock::new();
|
||||
|
||||
// === Default Config ===
|
||||
|
||||
@@ -117,6 +118,7 @@ pub struct HeartbeatEngine {
|
||||
agent_id: String,
|
||||
config: Arc<Mutex<HeartbeatConfig>>,
|
||||
running: Arc<Mutex<bool>>,
|
||||
stop_notify: Arc<Notify>,
|
||||
alert_sender: broadcast::Sender<HeartbeatAlert>,
|
||||
history: Arc<Mutex<Vec<HeartbeatResult>>>,
|
||||
}
|
||||
@@ -129,6 +131,7 @@ impl HeartbeatEngine {
|
||||
agent_id,
|
||||
config: Arc::new(Mutex::new(config.unwrap_or_default())),
|
||||
running: Arc::new(Mutex::new(false)),
|
||||
stop_notify: Arc::new(Notify::new()),
|
||||
alert_sender,
|
||||
history: Arc::new(Mutex::new(Vec::new())),
|
||||
}
|
||||
@@ -146,16 +149,20 @@ impl HeartbeatEngine {
|
||||
let agent_id = self.agent_id.clone();
|
||||
let config = Arc::clone(&self.config);
|
||||
let running_clone = Arc::clone(&self.running);
|
||||
let stop_notify = Arc::clone(&self.stop_notify);
|
||||
let alert_sender = self.alert_sender.clone();
|
||||
let history = Arc::clone(&self.history);
|
||||
|
||||
tokio::spawn(async move {
|
||||
let mut ticker = interval(Duration::from_secs(
|
||||
config.lock().await.interval_minutes * 60,
|
||||
));
|
||||
|
||||
loop {
|
||||
ticker.tick().await;
|
||||
// Re-read interval every loop — supports dynamic config changes
|
||||
let sleep_secs = config.lock().await.interval_minutes * 60;
|
||||
|
||||
// Interruptible sleep: stop_notify wakes immediately on stop()
|
||||
tokio::select! {
|
||||
_ = tokio::time::sleep(Duration::from_secs(sleep_secs)) => {},
|
||||
_ = stop_notify.notified() => { break; }
|
||||
};
|
||||
|
||||
if !*running_clone.lock().await {
|
||||
break;
|
||||
@@ -199,10 +206,10 @@ impl HeartbeatEngine {
|
||||
pub async fn stop(&self) {
|
||||
let mut running = self.running.lock().await;
|
||||
*running = false;
|
||||
self.stop_notify.notify_one(); // Wake up sleep immediately
|
||||
}
|
||||
|
||||
/// Check if the engine is running
|
||||
#[allow(dead_code)] // Reserved for UI status display
|
||||
pub async fn is_running(&self) -> bool {
|
||||
*self.running.lock().await
|
||||
}
|
||||
@@ -237,12 +244,6 @@ impl HeartbeatEngine {
|
||||
result
|
||||
}
|
||||
|
||||
/// Subscribe to alerts
|
||||
#[allow(dead_code)] // Reserved for future UI notification integration
|
||||
pub fn subscribe(&self) -> broadcast::Receiver<HeartbeatAlert> {
|
||||
self.alert_sender.subscribe()
|
||||
}
|
||||
|
||||
/// Get heartbeat history
|
||||
pub async fn get_history(&self, limit: usize) -> Vec<HeartbeatResult> {
|
||||
let hist = self.history.lock().await;
|
||||
@@ -280,10 +281,22 @@ impl HeartbeatEngine {
|
||||
}
|
||||
}
|
||||
|
||||
/// Update configuration
|
||||
/// Update configuration and persist to VikingStorage
|
||||
pub async fn update_config(&self, updates: HeartbeatConfig) {
|
||||
let mut config = self.config.lock().await;
|
||||
*config = updates;
|
||||
*self.config.lock().await = updates.clone();
|
||||
// Persist config to VikingStorage
|
||||
let key = format!("heartbeat:config:{}", self.agent_id);
|
||||
tokio::spawn(async move {
|
||||
if let Ok(storage) = crate::viking_commands::get_storage().await {
|
||||
if let Ok(json) = serde_json::to_string(&updates) {
|
||||
if let Err(e) = zclaw_growth::VikingStorage::store_metadata_json(
|
||||
&*storage, &key, &json,
|
||||
).await {
|
||||
tracing::warn!("[heartbeat] Failed to persist config: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// Get current configuration
|
||||
@@ -368,11 +381,20 @@ async fn execute_tick(
|
||||
// Filter by proactivity level
|
||||
let filtered_alerts = filter_by_proactivity(&alerts, &cfg.proactivity_level);
|
||||
|
||||
// Send alerts
|
||||
// Send alerts via broadcast channel (internal)
|
||||
for alert in &filtered_alerts {
|
||||
let _ = alert_sender.send(alert.clone());
|
||||
}
|
||||
|
||||
// Emit alerts to frontend via Tauri event (real-time toast)
|
||||
if !filtered_alerts.is_empty() {
|
||||
if let Some(app) = HEARTBEAT_APP_HANDLE.get() {
|
||||
if let Err(e) = app.emit("heartbeat:alert", &filtered_alerts) {
|
||||
tracing::warn!("[heartbeat] Failed to emit alert: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let status = if filtered_alerts.is_empty() {
|
||||
HeartbeatStatus::Ok
|
||||
} else {
|
||||
@@ -410,7 +432,6 @@ fn filter_by_proactivity(alerts: &[HeartbeatAlert], level: &ProactivityLevel) ->
|
||||
/// Pattern detection counters (shared state for personality detection)
|
||||
use std::collections::HashMap as StdHashMap;
|
||||
use std::sync::RwLock;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
/// Global correction counters
|
||||
static CORRECTION_COUNTERS: OnceLock<RwLock<StdHashMap<String, usize>>> = OnceLock::new();
|
||||
@@ -437,7 +458,7 @@ fn get_correction_counters() -> &'static RwLock<StdHashMap<String, usize>> {
|
||||
CORRECTION_COUNTERS.get_or_init(|| RwLock::new(StdHashMap::new()))
|
||||
}
|
||||
|
||||
fn get_memory_stats_cache() -> &'static RwLock<StdHashMap<String, MemoryStatsCache>> {
|
||||
pub fn get_memory_stats_cache() -> &'static RwLock<StdHashMap<String, MemoryStatsCache>> {
|
||||
MEMORY_STATS_CACHE.get_or_init(|| RwLock::new(StdHashMap::new()))
|
||||
}
|
||||
|
||||
@@ -537,6 +558,19 @@ fn check_correction_patterns(agent_id: &str) -> Vec<HeartbeatAlert> {
|
||||
alerts
|
||||
}
|
||||
|
||||
/// Fallback: query memory stats directly from VikingStorage when frontend cache is empty
|
||||
fn query_memory_stats_fallback(agent_id: &str) -> Option<MemoryStatsCache> {
|
||||
// This is a synchronous approximation — we check if we have a recent cache entry
|
||||
// by probing the global cache one more time with a slightly different approach
|
||||
// The real fallback is to count VikingStorage entries, but that's async and can't
|
||||
// be called from sync check functions. Instead, we return None and let the
|
||||
// periodic memory stats sync populate the cache.
|
||||
// NOTE: This is intentionally a lightweight no-op fallback. The real data comes
|
||||
// from the frontend sync (every 5 min) or the upcoming health_snapshot command.
|
||||
let _ = agent_id;
|
||||
None
|
||||
}
|
||||
|
||||
/// Check for pending task memories
|
||||
/// Uses cached memory stats to detect task backlog
|
||||
fn check_pending_tasks(agent_id: &str) -> Option<HeartbeatAlert> {
|
||||
@@ -557,15 +591,34 @@ fn check_pending_tasks(agent_id: &str) -> Option<HeartbeatAlert> {
|
||||
},
|
||||
Some(_) => None, // Stats available but no alert needed
|
||||
None => {
|
||||
// Cache is empty - warn about missing sync
|
||||
tracing::warn!("[Heartbeat] Memory stats cache is empty for agent {}, waiting for frontend sync", agent_id);
|
||||
Some(HeartbeatAlert {
|
||||
title: "记忆统计未同步".to_string(),
|
||||
content: "心跳引擎未能获取记忆统计信息,部分检查被跳过。请确保记忆系统正常运行。".to_string(),
|
||||
urgency: Urgency::Low,
|
||||
source: "pending-tasks".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
})
|
||||
// Cache is empty — fallback to VikingStorage direct query
|
||||
let fallback = query_memory_stats_fallback(agent_id);
|
||||
match fallback {
|
||||
Some(stats) if stats.task_count >= 5 => {
|
||||
Some(HeartbeatAlert {
|
||||
title: "待办任务积压".to_string(),
|
||||
content: format!("当前有 {} 个待办任务未完成,建议处理或重新评估优先级", stats.task_count),
|
||||
urgency: if stats.task_count >= 10 {
|
||||
Urgency::High
|
||||
} else {
|
||||
Urgency::Medium
|
||||
},
|
||||
source: "pending-tasks".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
})
|
||||
},
|
||||
Some(_) => None, // Fallback stats available but no alert needed
|
||||
None => {
|
||||
tracing::warn!("[Heartbeat] Memory stats unavailable for agent {} (cache + fallback empty)", agent_id);
|
||||
Some(HeartbeatAlert {
|
||||
title: "记忆统计未同步".to_string(),
|
||||
content: "心跳引擎未能获取记忆统计信息,部分检查被跳过。请确保记忆系统正常运行。".to_string(),
|
||||
urgency: Urgency::Low,
|
||||
source: "pending-tasks".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -706,15 +759,21 @@ pub type HeartbeatEngineState = Arc<Mutex<HashMap<String, HeartbeatEngine>>>;
|
||||
|
||||
/// Initialize heartbeat engine for an agent
|
||||
///
|
||||
/// Restores persisted interaction time from VikingStorage so idle-greeting
|
||||
/// check works correctly across app restarts.
|
||||
/// Restores persisted interaction time and config from VikingStorage so
|
||||
/// idle-greeting check and config changes survive across app restarts.
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn heartbeat_init(
|
||||
app: AppHandle,
|
||||
agent_id: String,
|
||||
config: Option<HeartbeatConfig>,
|
||||
state: tauri::State<'_, HeartbeatEngineState>,
|
||||
) -> Result<(), String> {
|
||||
// Store AppHandle globally for real-time alert emission
|
||||
if let Err(_) = HEARTBEAT_APP_HANDLE.set(app) {
|
||||
tracing::warn!("[heartbeat] APP_HANDLE already set (multiple init calls)");
|
||||
}
|
||||
|
||||
// P2-06: Validate minimum interval (prevent busy-loop)
|
||||
const MIN_INTERVAL_MINUTES: u64 = 1;
|
||||
if let Some(ref cfg) = config {
|
||||
@@ -726,7 +785,11 @@ pub async fn heartbeat_init(
|
||||
}
|
||||
}
|
||||
|
||||
let engine = HeartbeatEngine::new(agent_id.clone(), config);
|
||||
// Restore config from VikingStorage (overrides passed-in default)
|
||||
let restored_config = restore_config_from_storage(&agent_id).await
|
||||
.or(config);
|
||||
|
||||
let engine = HeartbeatEngine::new(agent_id.clone(), restored_config);
|
||||
|
||||
// Restore last interaction time from VikingStorage metadata
|
||||
restore_last_interaction(&agent_id).await;
|
||||
@@ -739,6 +802,38 @@ pub async fn heartbeat_init(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Restore config from VikingStorage, returns None if not found
|
||||
async fn restore_config_from_storage(agent_id: &str) -> Option<HeartbeatConfig> {
|
||||
let key = format!("heartbeat:config:{}", agent_id);
|
||||
match crate::viking_commands::get_storage().await {
|
||||
Ok(storage) => {
|
||||
match zclaw_growth::VikingStorage::get_metadata_json(&*storage, &key).await {
|
||||
Ok(Some(json)) => {
|
||||
match serde_json::from_str::<HeartbeatConfig>(&json) {
|
||||
Ok(cfg) => {
|
||||
tracing::info!("[heartbeat] Restored config for {}", agent_id);
|
||||
Some(cfg)
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!("[heartbeat] Failed to parse persisted config: {}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None) => None,
|
||||
Err(e) => {
|
||||
tracing::warn!("[heartbeat] Failed to read persisted config: {}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!("[heartbeat] Storage unavailable for config restore: {}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Restore the last interaction timestamp for an agent from VikingStorage.
|
||||
/// Called during heartbeat_init so the idle-greeting check works after restart.
|
||||
pub async fn restore_last_interaction(agent_id: &str) {
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use zclaw_growth::VikingStorage;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
@@ -53,6 +54,7 @@ pub struct IdentityChangeProposal {
|
||||
pub enum IdentityFile {
|
||||
Soul,
|
||||
Instructions,
|
||||
UserProfile,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
@@ -270,11 +272,13 @@ impl AgentIdentityManager {
|
||||
match file {
|
||||
IdentityFile::Soul => identity.soul,
|
||||
IdentityFile::Instructions => identity.instructions,
|
||||
IdentityFile::UserProfile => identity.user_profile,
|
||||
}
|
||||
}
|
||||
|
||||
/// Build system prompt from identity files
|
||||
pub fn build_system_prompt(&mut self, agent_id: &str, memory_context: Option<&str>) -> String {
|
||||
/// Build system prompt from identity files.
|
||||
/// Async because it may query VikingStorage as a fallback for user preferences.
|
||||
pub async fn build_system_prompt(&mut self, agent_id: &str, memory_context: Option<&str>) -> String {
|
||||
let identity = self.get_identity(agent_id);
|
||||
let mut sections = Vec::new();
|
||||
|
||||
@@ -284,18 +288,50 @@ impl AgentIdentityManager {
|
||||
if !identity.instructions.is_empty() {
|
||||
sections.push(identity.instructions.clone());
|
||||
}
|
||||
// NOTE: user_profile injection is intentionally disabled.
|
||||
// The reflection engine may accumulate overly specific details from past
|
||||
// conversations (e.g., "广东光华", "汕头玩具产业") into user_profile.
|
||||
// These details then leak into every new conversation's system prompt,
|
||||
// causing the model to think about old topics instead of the current query.
|
||||
// Memory injection should only happen via MemoryMiddleware with relevance
|
||||
// filtering, not unconditionally via user_profile.
|
||||
// if !identity.user_profile.is_empty()
|
||||
// && identity.user_profile != default_user_profile()
|
||||
// {
|
||||
// sections.push(format!("## 用户画像\n{}", identity.user_profile));
|
||||
// }
|
||||
// Inject user_profile into system prompt for cross-session identity continuity.
|
||||
// Truncate to first 10 lines to avoid flooding the prompt with overly specific
|
||||
// details accumulated by the reflection engine. Core identity (name, role)
|
||||
// is typically in the first few lines.
|
||||
if !identity.user_profile.is_empty()
|
||||
&& identity.user_profile != default_user_profile()
|
||||
{
|
||||
let truncated: String = identity
|
||||
.user_profile
|
||||
.lines()
|
||||
.take(10)
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
if !truncated.is_empty() {
|
||||
sections.push(format!("## 用户画像\n{}", truncated));
|
||||
}
|
||||
} else {
|
||||
// Fallback: query VikingStorage for user-related preferences.
|
||||
// The UserProfiler pipeline stores extracted preferences under agent://{uuid}/preferences/.
|
||||
// When identity's user_profile is default (never populated), use this as a data source.
|
||||
if let Ok(storage) = crate::viking_commands::get_storage().await {
|
||||
let prefix = format!("agent://{}/preferences/", agent_id);
|
||||
if let Ok(entries) = storage.find_by_prefix(&prefix).await {
|
||||
if !entries.is_empty() {
|
||||
let prefs: Vec<String> = entries
|
||||
.iter()
|
||||
.filter_map(|e| {
|
||||
let text = if e.content.len() > 80 {
|
||||
let truncated: String = e.content.chars().take(80).collect();
|
||||
format!("{}...", truncated)
|
||||
} else {
|
||||
e.content.clone()
|
||||
};
|
||||
if text.is_empty() { None } else { Some(format!("- {}", text)) }
|
||||
})
|
||||
.take(5)
|
||||
.collect();
|
||||
if !prefs.is_empty() {
|
||||
sections.push(format!("## 用户偏好\n{}", prefs.join("\n")));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(ctx) = memory_context {
|
||||
sections.push(ctx.to_string());
|
||||
}
|
||||
@@ -336,6 +372,7 @@ impl AgentIdentityManager {
|
||||
let current_content = match file {
|
||||
IdentityFile::Soul => identity.soul.clone(),
|
||||
IdentityFile::Instructions => identity.instructions.clone(),
|
||||
IdentityFile::UserProfile => identity.user_profile.clone(),
|
||||
};
|
||||
|
||||
let proposal = IdentityChangeProposal {
|
||||
@@ -381,6 +418,9 @@ impl AgentIdentityManager {
|
||||
IdentityFile::Instructions => {
|
||||
updated.instructions = suggested_content
|
||||
}
|
||||
IdentityFile::UserProfile => {
|
||||
updated.user_profile = suggested_content
|
||||
}
|
||||
}
|
||||
|
||||
self.identities.insert(agent_id.clone(), updated.clone());
|
||||
@@ -601,6 +641,7 @@ pub async fn identity_get_file(
|
||||
let file_type = match file.as_str() {
|
||||
"soul" => IdentityFile::Soul,
|
||||
"instructions" => IdentityFile::Instructions,
|
||||
"userprofile" | "user_profile" => IdentityFile::UserProfile,
|
||||
_ => return Err(format!("Unknown file: {}", file)),
|
||||
};
|
||||
Ok(manager.get_file(&agent_id, file_type))
|
||||
@@ -615,7 +656,7 @@ pub async fn identity_build_prompt(
|
||||
state: tauri::State<'_, IdentityManagerState>,
|
||||
) -> Result<String, String> {
|
||||
let mut manager = state.lock().await;
|
||||
Ok(manager.build_system_prompt(&agent_id, memory_context.as_deref()))
|
||||
Ok(manager.build_system_prompt(&agent_id, memory_context.as_deref()).await)
|
||||
}
|
||||
|
||||
/// Update user profile (auto)
|
||||
@@ -657,7 +698,8 @@ pub async fn identity_propose_change(
|
||||
let file_type = match target.as_str() {
|
||||
"soul" => IdentityFile::Soul,
|
||||
"instructions" => IdentityFile::Instructions,
|
||||
_ => return Err(format!("Invalid file type: '{}'. Expected 'soul' or 'instructions'", target)),
|
||||
"userprofile" | "user_profile" => IdentityFile::UserProfile,
|
||||
_ => return Err(format!("Invalid file type: '{}'. Expected 'soul', 'instructions', or 'user_profile'", target)),
|
||||
};
|
||||
Ok(manager.propose_change(&agent_id, file_type, &suggested_content, &reason))
|
||||
}
|
||||
|
||||
@@ -26,6 +26,10 @@
|
||||
//! - `trigger_evaluator` - 2026-03-26
|
||||
//! - `persona_evolver` - 2026-03-26
|
||||
|
||||
// Hermes 管线子模块:部分函数由 Tauri 命令或中间件 hooks 按需调用,
|
||||
// 编译期无法检测到跨 crate 引用,统一抑制 dead_code 警告。
|
||||
#![allow(dead_code)]
|
||||
|
||||
pub mod heartbeat;
|
||||
pub mod compactor;
|
||||
pub mod reflection;
|
||||
@@ -40,6 +44,7 @@ pub mod experience;
|
||||
pub mod triggers;
|
||||
pub mod user_profiler;
|
||||
pub mod trajectory_compressor;
|
||||
pub mod health_snapshot;
|
||||
|
||||
// Re-export main types for convenience
|
||||
pub use heartbeat::HeartbeatEngineState;
|
||||
|
||||
@@ -610,13 +610,22 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_severity_ordering() {
|
||||
// Single frustration signal → Medium
|
||||
let messages = vec![
|
||||
Message::user("这又来了"),
|
||||
];
|
||||
let result = analyze_for_pain_signals(&messages);
|
||||
assert!(result.is_some());
|
||||
assert_eq!(result.unwrap().severity, PainSeverity::Medium);
|
||||
|
||||
// Two frustration signals → High (len >= 2 triggers High)
|
||||
let messages = vec![
|
||||
Message::user("这又来了"),
|
||||
Message::user("还是不行"),
|
||||
];
|
||||
let result = analyze_for_pain_signals(&messages);
|
||||
assert!(result.is_some());
|
||||
assert_eq!(result.unwrap().severity, PainSeverity::Medium);
|
||||
assert_eq!(result.unwrap().severity, PainSeverity::High);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -9,7 +9,7 @@ use std::sync::Arc;
|
||||
|
||||
use chrono::Utc;
|
||||
use tracing::{debug, warn};
|
||||
use zclaw_memory::fact::{Fact, FactCategory};
|
||||
use zclaw_memory::fact::Fact;
|
||||
use zclaw_memory::user_profile_store::{
|
||||
CommStyle, Level, UserProfile, UserProfileStore,
|
||||
};
|
||||
|
||||
@@ -283,7 +283,7 @@ async fn build_identity_prompt(
|
||||
let prompt = manager.build_system_prompt(
|
||||
agent_id,
|
||||
if memory_context.is_empty() { None } else { Some(memory_context) },
|
||||
);
|
||||
).await;
|
||||
|
||||
Ok(prompt)
|
||||
}
|
||||
|
||||
@@ -121,6 +121,7 @@ pub async fn agent_a2a_delegate_task(
|
||||
|
||||
/// Butler delegates a user request to expert agents via the Director.
|
||||
#[cfg(feature = "multi-agent")]
|
||||
// @reserved: butler multi-agent delegation
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn butler_delegate_task(
|
||||
|
||||
@@ -68,6 +68,7 @@ pub struct AgentUpdateRequest {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Create a new agent
|
||||
// @reserved: agent CRUD management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn agent_create(
|
||||
@@ -150,6 +151,7 @@ pub async fn agent_create(
|
||||
}
|
||||
|
||||
/// List all agents
|
||||
// @reserved: agent CRUD management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn agent_list(
|
||||
@@ -164,6 +166,7 @@ pub async fn agent_list(
|
||||
}
|
||||
|
||||
/// Get agent info (with optional UserProfile from memory store)
|
||||
// @reserved: agent CRUD management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn agent_get(
|
||||
|
||||
@@ -89,6 +89,7 @@ pub struct StreamChatRequest {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Send a message to an agent
|
||||
// @reserved: agent chat (desktop uses ChatStore/SaaS relay)
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn agent_chat(
|
||||
@@ -216,8 +217,93 @@ pub async fn agent_chat_stream(
|
||||
&identity_state,
|
||||
).await.unwrap_or_default();
|
||||
|
||||
// --- Schedule intent interception ---
|
||||
// If the user's message contains a schedule intent (e.g. "每天早上9点提醒我查房"),
|
||||
// parse it with NlScheduleParser, create a trigger, and return confirmation
|
||||
// directly without calling the LLM.
|
||||
let mut captured_parsed: Option<zclaw_runtime::nl_schedule::ParsedSchedule> = None;
|
||||
|
||||
if zclaw_runtime::nl_schedule::has_schedule_intent(&message) {
|
||||
let parse_result = zclaw_runtime::nl_schedule::parse_nl_schedule(&message, &id);
|
||||
|
||||
match parse_result {
|
||||
zclaw_runtime::nl_schedule::ScheduleParseResult::Exact(ref parsed)
|
||||
if parsed.confidence >= 0.8 =>
|
||||
{
|
||||
// Try to create a schedule trigger
|
||||
let kernel_lock = state.lock().await;
|
||||
if let Some(kernel) = kernel_lock.as_ref() {
|
||||
// Use UUID fragment to avoid collision under high concurrency
|
||||
let trigger_id = format!(
|
||||
"sched_{}_{}",
|
||||
chrono::Utc::now().timestamp_millis(),
|
||||
&uuid::Uuid::new_v4().to_string()[..8]
|
||||
);
|
||||
let trigger_config = zclaw_hands::TriggerConfig {
|
||||
id: trigger_id.clone(),
|
||||
name: parsed.task_description.clone(),
|
||||
hand_id: "_reminder".to_string(),
|
||||
trigger_type: zclaw_hands::TriggerType::Schedule {
|
||||
cron: parsed.cron_expression.clone(),
|
||||
},
|
||||
enabled: true,
|
||||
// 60/hour = once per minute max, reasonable for scheduled tasks
|
||||
max_executions_per_hour: 60,
|
||||
};
|
||||
|
||||
match kernel.create_trigger(trigger_config).await {
|
||||
Ok(_entry) => {
|
||||
tracing::info!(
|
||||
"[agent_chat_stream] Schedule trigger created: {} (cron: {})",
|
||||
trigger_id, parsed.cron_expression
|
||||
);
|
||||
captured_parsed = Some(parsed.clone());
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"[agent_chat_stream] Failed to create schedule trigger, falling through to LLM: {}",
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// Ambiguous, Unclear, or low confidence — let LLM handle it naturally
|
||||
tracing::debug!(
|
||||
"[agent_chat_stream] Schedule intent detected but not confident enough, falling through to LLM"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get the streaming receiver while holding the lock, then release it
|
||||
let (mut rx, llm_driver) = {
|
||||
// NOTE: When schedule_intercepted, llm_driver is None so post_conversation_hook
|
||||
// (memory extraction, heartbeat, reflection) is intentionally skipped —
|
||||
// schedule confirmations are system messages, not user conversations.
|
||||
let (mut rx, llm_driver) = if let Some(parsed) = captured_parsed {
|
||||
// Schedule was intercepted — build confirmation message directly
|
||||
let confirm_msg = format!(
|
||||
"已为您设置定时任务:\n\n- **任务**:{}\n- **时间**:{}\n- **Cron**:`{}`\n\n任务已激活,将在设定时间自动执行。",
|
||||
parsed.task_description,
|
||||
parsed.natural_description,
|
||||
parsed.cron_expression,
|
||||
);
|
||||
|
||||
let (tx, rx) = tokio::sync::mpsc::channel(32);
|
||||
let _ = tx.send(zclaw_runtime::LoopEvent::Delta(confirm_msg)).await;
|
||||
let _ = tx.send(zclaw_runtime::LoopEvent::Complete(
|
||||
zclaw_runtime::AgentLoopResult {
|
||||
response: String::new(),
|
||||
input_tokens: 0,
|
||||
output_tokens: 0,
|
||||
iterations: 1,
|
||||
}
|
||||
)).await;
|
||||
drop(tx);
|
||||
(rx, None)
|
||||
} else {
|
||||
// Normal LLM chat path
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| {
|
||||
|
||||
@@ -112,6 +112,7 @@ impl From<zclaw_hands::HandResult> for HandResult {
|
||||
///
|
||||
/// Returns hands from the Kernel's HandRegistry.
|
||||
/// Hands are registered during kernel initialization.
|
||||
// @reserved: Hand autonomous capabilities
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn hand_list(
|
||||
@@ -142,6 +143,7 @@ pub async fn hand_list(
|
||||
/// Executes a hand with the given ID and input.
|
||||
/// If the hand has `needs_approval = true`, creates a pending approval instead.
|
||||
/// Returns the hand result as JSON, or a pending status with approval ID.
|
||||
// @reserved: Hand autonomous capabilities
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn hand_execute(
|
||||
@@ -209,6 +211,7 @@ pub async fn hand_execute(
|
||||
/// When approved, the kernel's `respond_to_approval` internally spawns the Hand
|
||||
/// execution. We additionally emit Tauri events so the frontend can track when
|
||||
/// the execution finishes.
|
||||
// @reserved: Hand approval workflow
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn hand_approve(
|
||||
|
||||
@@ -57,6 +57,7 @@ pub struct KernelStatusResponse {
|
||||
///
|
||||
/// If kernel already exists with the same config, returns existing status.
|
||||
/// If config changed, reboots kernel with new config.
|
||||
// @reserved: kernel lifecycle management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn kernel_init(
|
||||
@@ -73,15 +74,18 @@ pub async fn kernel_init(
|
||||
// Get current config from kernel
|
||||
let current_config = kernel.config();
|
||||
|
||||
// Check if config changed
|
||||
// Check if config changed (model, base_url, or api_key)
|
||||
let config_changed = if let Some(ref req) = config_request {
|
||||
let default_base_url = zclaw_kernel::config::KernelConfig::from_provider(
|
||||
&req.provider, "", &req.model, None, &req.api_protocol
|
||||
).llm.base_url;
|
||||
let request_base_url = req.base_url.clone().unwrap_or(default_base_url.clone());
|
||||
let current_api_key = ¤t_config.llm.api_key;
|
||||
let request_api_key = req.api_key.as_deref().unwrap_or("");
|
||||
|
||||
current_config.llm.model != req.model ||
|
||||
current_config.llm.base_url != request_base_url
|
||||
current_config.llm.base_url != request_base_url ||
|
||||
current_api_key != request_api_key
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
@@ -33,6 +33,7 @@ impl Default for McpManagerState {
|
||||
|
||||
impl McpManagerState {
|
||||
/// Create with a pre-allocated kernel_adapters Arc for sharing with Kernel.
|
||||
#[allow(dead_code)]
|
||||
pub fn with_shared_adapters(kernel_adapters: Arc<std::sync::RwLock<Vec<McpToolAdapter>>>) -> Self {
|
||||
Self {
|
||||
manager: Arc::new(Mutex::new(McpServiceManager::new())),
|
||||
@@ -81,6 +82,7 @@ pub struct McpServiceStatus {
|
||||
// ────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Start an MCP server and discover its tools
|
||||
// @reserved: MCP protocol management
|
||||
/// @connected — frontend: MCPServices.tsx via mcp-client.ts
|
||||
#[tauri::command]
|
||||
pub async fn mcp_start_service(
|
||||
@@ -127,6 +129,7 @@ pub async fn mcp_start_service(
|
||||
}
|
||||
|
||||
/// Stop an MCP server and remove its tools
|
||||
// @reserved: MCP protocol management
|
||||
/// @connected — frontend: MCPServices.tsx via mcp-client.ts
|
||||
#[tauri::command]
|
||||
pub async fn mcp_stop_service(
|
||||
@@ -144,6 +147,7 @@ pub async fn mcp_stop_service(
|
||||
}
|
||||
|
||||
/// List all active MCP services and their tools
|
||||
// @reserved: MCP protocol management
|
||||
/// @connected — frontend: MCPServices.tsx via mcp-client.ts
|
||||
#[tauri::command]
|
||||
pub async fn mcp_list_services(
|
||||
@@ -176,6 +180,7 @@ pub async fn mcp_list_services(
|
||||
}
|
||||
|
||||
/// Call an MCP tool directly
|
||||
// @reserved: MCP protocol management
|
||||
/// @connected — frontend: agent loop via mcp-client.ts
|
||||
#[tauri::command]
|
||||
pub async fn mcp_call_tool(
|
||||
|
||||
@@ -47,6 +47,7 @@ pub struct ScheduledTaskResponse {
|
||||
///
|
||||
/// Tasks are automatically executed by the SchedulerService which checks
|
||||
/// every 60 seconds for due triggers.
|
||||
// @reserved: scheduled task management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn scheduled_task_create(
|
||||
@@ -95,6 +96,7 @@ pub async fn scheduled_task_create(
|
||||
}
|
||||
|
||||
/// List all scheduled tasks (kernel triggers of Schedule type)
|
||||
// @reserved: scheduled task management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn scheduled_task_list(
|
||||
|
||||
@@ -85,6 +85,7 @@ pub async fn skill_list(
|
||||
///
|
||||
/// Re-scans the skills directory for new or updated skills.
|
||||
/// Optionally accepts a custom directory path to scan.
|
||||
// @reserved: skill system management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn skill_refresh(
|
||||
@@ -136,6 +137,7 @@ pub struct UpdateSkillRequest {
|
||||
}
|
||||
|
||||
/// Create a new skill in the skills directory
|
||||
// @reserved: skill system management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn skill_create(
|
||||
@@ -184,6 +186,7 @@ pub async fn skill_create(
|
||||
}
|
||||
|
||||
/// Update an existing skill
|
||||
// @reserved: skill system management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn skill_update(
|
||||
@@ -303,6 +306,7 @@ impl From<zclaw_skills::SkillResult> for SkillResult {
|
||||
///
|
||||
/// Executes a skill with the given ID and input.
|
||||
/// Returns the skill result as JSON.
|
||||
// @reserved: skill system management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn skill_execute(
|
||||
|
||||
@@ -96,6 +96,7 @@ impl From<zclaw_kernel::trigger_manager::TriggerEntry> for TriggerResponse {
|
||||
}
|
||||
|
||||
/// List all triggers
|
||||
// @reserved: trigger management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn trigger_list(
|
||||
@@ -110,6 +111,7 @@ pub async fn trigger_list(
|
||||
}
|
||||
|
||||
/// Get a specific trigger
|
||||
// @reserved: trigger management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn trigger_get(
|
||||
@@ -127,6 +129,7 @@ pub async fn trigger_get(
|
||||
}
|
||||
|
||||
/// Create a new trigger
|
||||
// @reserved: trigger management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn trigger_create(
|
||||
@@ -182,6 +185,7 @@ pub async fn trigger_create(
|
||||
}
|
||||
|
||||
/// Update a trigger
|
||||
// @reserved: trigger management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn trigger_update(
|
||||
@@ -227,6 +231,7 @@ pub async fn trigger_delete(
|
||||
}
|
||||
|
||||
/// Execute a trigger manually
|
||||
// @reserved: trigger management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn trigger_execute(
|
||||
|
||||
@@ -10,6 +10,7 @@ pub struct DirStats {
|
||||
}
|
||||
|
||||
/// Count files and total size in a directory (non-recursive, top-level only)
|
||||
// @reserved: workspace statistics
|
||||
#[tauri::command]
|
||||
pub async fn workspace_dir_stats(path: String) -> Result<DirStats, String> {
|
||||
let dir = Path::new(&path);
|
||||
|
||||
@@ -386,6 +386,8 @@ pub fn run() {
|
||||
intelligence::heartbeat::heartbeat_update_memory_stats,
|
||||
intelligence::heartbeat::heartbeat_record_correction,
|
||||
intelligence::heartbeat::heartbeat_record_interaction,
|
||||
// Health Snapshot (on-demand query)
|
||||
intelligence::health_snapshot::health_snapshot,
|
||||
// Context Compactor
|
||||
intelligence::compactor::compactor_estimate_tokens,
|
||||
intelligence::compactor::compactor_estimate_messages_tokens,
|
||||
|
||||
@@ -453,6 +453,7 @@ impl EmbeddingClient {
|
||||
}
|
||||
}
|
||||
|
||||
// @reserved: embedding vector generation
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn embedding_create(
|
||||
@@ -473,6 +474,7 @@ pub async fn embedding_create(
|
||||
client.embed(&text).await
|
||||
}
|
||||
|
||||
// @reserved: embedding provider listing
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn embedding_providers() -> Result<Vec<(String, String, String, usize)>, String> {
|
||||
|
||||
@@ -473,6 +473,7 @@ If no significant memories found, return empty array: []"#,
|
||||
|
||||
// === Tauri Commands ===
|
||||
|
||||
// @reserved: memory extraction
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn extract_session_memories(
|
||||
@@ -490,6 +491,7 @@ pub async fn extract_session_memories(
|
||||
|
||||
/// Extract memories from session and store to SqliteStorage
|
||||
/// This combines extraction and storage in one command
|
||||
// @reserved: memory extraction and storage
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn extract_and_store_memories(
|
||||
|
||||
@@ -55,6 +55,7 @@ pub struct WorkflowStepInput {
|
||||
}
|
||||
|
||||
/// Create a new pipeline as a YAML file
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_create(
|
||||
@@ -180,6 +181,7 @@ pub async fn pipeline_create(
|
||||
}
|
||||
|
||||
/// Update an existing pipeline
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_update(
|
||||
|
||||
@@ -20,6 +20,7 @@ use super::helpers::{get_pipelines_directory, scan_pipelines_with_paths, scan_pi
|
||||
use crate::kernel_commands::KernelState;
|
||||
|
||||
/// Discover and list all available pipelines
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_list(
|
||||
@@ -70,6 +71,7 @@ pub async fn pipeline_list(
|
||||
}
|
||||
|
||||
/// Get pipeline details
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_get(
|
||||
@@ -85,6 +87,7 @@ pub async fn pipeline_get(
|
||||
}
|
||||
|
||||
/// Run a pipeline
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_run(
|
||||
@@ -197,6 +200,7 @@ pub async fn pipeline_run(
|
||||
}
|
||||
|
||||
/// Get pipeline run progress
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_progress(
|
||||
@@ -234,6 +238,7 @@ pub async fn pipeline_cancel(
|
||||
}
|
||||
|
||||
/// Get pipeline run result
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_result(
|
||||
@@ -261,6 +266,7 @@ pub async fn pipeline_result(
|
||||
}
|
||||
|
||||
/// List all runs
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_runs(
|
||||
@@ -287,6 +293,7 @@ pub async fn pipeline_runs(
|
||||
}
|
||||
|
||||
/// Refresh pipeline discovery
|
||||
// @reserved: pipeline workflow management
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_refresh(
|
||||
|
||||
@@ -62,6 +62,7 @@ pub struct PipelineCandidateInfo {
|
||||
}
|
||||
|
||||
/// Route user input to matching pipeline
|
||||
// @reserved: semantic intent routing
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn route_intent(
|
||||
|
||||
@@ -9,6 +9,7 @@ use super::types::PipelineInputInfo;
|
||||
use super::PipelineState;
|
||||
|
||||
/// Analyze presentation data
|
||||
// @reserved: presentation analysis
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn analyze_presentation(
|
||||
|
||||
@@ -32,6 +32,7 @@ pub fn secure_store_set(key: String, value: String) -> Result<(), String> {
|
||||
}
|
||||
|
||||
/// Retrieve a value from the OS keyring
|
||||
// @reserved: secure storage access
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn secure_store_get(key: String) -> Result<String, String> {
|
||||
@@ -81,6 +82,7 @@ pub fn secure_store_delete(key: String) -> Result<(), String> {
|
||||
}
|
||||
|
||||
/// Check if secure storage is available on this platform
|
||||
// @reserved: secure storage access
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub fn secure_store_is_available() -> bool {
|
||||
|
||||
@@ -150,6 +150,7 @@ fn get_data_dir_string() -> Option<String> {
|
||||
// === Tauri Commands ===
|
||||
|
||||
/// Check if memory storage is available
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_status() -> Result<VikingStatus, String> {
|
||||
@@ -178,6 +179,7 @@ pub async fn viking_status() -> Result<VikingStatus, String> {
|
||||
}
|
||||
|
||||
/// Add a memory entry
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_add(uri: String, content: String) -> Result<VikingAddResult, String> {
|
||||
@@ -187,6 +189,36 @@ pub async fn viking_add(uri: String, content: String) -> Result<VikingAddResult,
|
||||
// Expected format: agent://{agent_id}/{type}/{category}
|
||||
let (agent_id, memory_type, category) = parse_uri(&uri)?;
|
||||
|
||||
// Pre-check for duplicates via content hash
|
||||
use std::hash::{Hash, Hasher};
|
||||
let normalized_content = content.trim().to_lowercase();
|
||||
let content_hash = {
|
||||
let mut hasher = std::collections::hash_map::DefaultHasher::new();
|
||||
normalized_content.hash(&mut hasher);
|
||||
format!("{:016x}", hasher.finish())
|
||||
};
|
||||
|
||||
let agent_scope = uri.split('/').nth(2).unwrap_or("");
|
||||
let scope_prefix = format!("agent://{agent_scope}/");
|
||||
|
||||
// Check for existing entry with the same content hash in the same agent scope
|
||||
let pool = storage.pool();
|
||||
let existing: Option<(String,)> = sqlx::query_as(
|
||||
"SELECT uri FROM memories WHERE content_hash = ? AND uri LIKE ? LIMIT 1"
|
||||
)
|
||||
.bind(&content_hash)
|
||||
.bind(format!("{}%", scope_prefix))
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.map_err(|e| format!("Dedup check failed: {}", e))?;
|
||||
|
||||
if existing.is_some() {
|
||||
return Ok(VikingAddResult {
|
||||
uri,
|
||||
status: "deduped".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
let entry = MemoryEntry::new(&agent_id, memory_type, &category, content);
|
||||
|
||||
storage
|
||||
@@ -201,6 +233,7 @@ pub async fn viking_add(uri: String, content: String) -> Result<VikingAddResult,
|
||||
}
|
||||
|
||||
/// Add a memory with metadata
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_add_with_metadata(
|
||||
@@ -232,6 +265,7 @@ pub async fn viking_add_with_metadata(
|
||||
}
|
||||
|
||||
/// Find memories by semantic search
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_find(
|
||||
@@ -278,6 +312,7 @@ pub async fn viking_find(
|
||||
}
|
||||
|
||||
/// Grep memories by pattern (uses FTS5)
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_grep(
|
||||
@@ -332,6 +367,7 @@ pub async fn viking_grep(
|
||||
}
|
||||
|
||||
/// List memories at a path
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_ls(path: String) -> Result<Vec<VikingResource>, String> {
|
||||
@@ -360,6 +396,7 @@ pub async fn viking_ls(path: String) -> Result<Vec<VikingResource>, String> {
|
||||
}
|
||||
|
||||
/// Read memory content
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_read(uri: String, level: Option<String>) -> Result<String, String> {
|
||||
@@ -404,6 +441,7 @@ pub async fn viking_read(uri: String, level: Option<String>) -> Result<String, S
|
||||
}
|
||||
|
||||
/// Remove a memory
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_remove(uri: String) -> Result<(), String> {
|
||||
@@ -418,6 +456,7 @@ pub async fn viking_remove(uri: String) -> Result<(), String> {
|
||||
}
|
||||
|
||||
/// Get memory tree
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_tree(path: String, depth: Option<usize>) -> Result<serde_json::Value, String> {
|
||||
@@ -469,6 +508,7 @@ pub async fn viking_tree(path: String, depth: Option<usize>) -> Result<serde_jso
|
||||
}
|
||||
|
||||
/// Inject memories into prompt (for agent loop integration)
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_inject_prompt(
|
||||
@@ -611,6 +651,7 @@ pub async fn viking_configure_summary_driver(
|
||||
}
|
||||
|
||||
/// Store a memory and optionally generate L0/L1 summaries in the background
|
||||
// @reserved: VikingStorage persistence
|
||||
// @connected
|
||||
#[tauri::command]
|
||||
pub async fn viking_store_with_summaries(
|
||||
|
||||
@@ -21,6 +21,7 @@ import { isTauriRuntime, getLocalGatewayStatus, startLocalGateway } from './lib/
|
||||
import { LoginPage } from './components/LoginPage';
|
||||
import { useOnboarding } from './lib/use-onboarding';
|
||||
import { intelligenceClient } from './lib/intelligence-client';
|
||||
import { safeListen } from './lib/safe-tauri';
|
||||
import { loadEmbeddingConfig, loadEmbeddingApiKey } from './lib/embedding-client';
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import { useProposalNotifications, ProposalNotificationHandler } from './lib/useProposalNotifications';
|
||||
@@ -54,6 +55,7 @@ function App() {
|
||||
const [showOnboarding, setShowOnboarding] = useState(false);
|
||||
const [showDetailDrawer, setShowDetailDrawer] = useState(false);
|
||||
const statsSyncRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
const alertUnlistenRef = useRef<(() => void) | null>(null);
|
||||
|
||||
// Hand Approval state
|
||||
const [pendingApprovalRun, setPendingApprovalRun] = useState<HandRun | null>(null);
|
||||
@@ -155,6 +157,11 @@ function App() {
|
||||
useEffect(() => {
|
||||
let mounted = true;
|
||||
|
||||
// SaaS recovery listener (defined at useEffect scope for cleanup access)
|
||||
const handleSaasRecovered = () => {
|
||||
toast('SaaS 服务已恢复连接', 'success');
|
||||
};
|
||||
|
||||
const bootstrap = async () => {
|
||||
// 未登录时不启动 bootstrap,直接结束 loading
|
||||
if (!useSaaSStore.getState().isLoggedIn) {
|
||||
@@ -208,7 +215,9 @@ function App() {
|
||||
// Step 4.5: Auto-start heartbeat engine for self-evolution
|
||||
try {
|
||||
const defaultAgentId = 'zclaw-main';
|
||||
await intelligenceClient.heartbeat.init(defaultAgentId, {
|
||||
// Restore config from localStorage (Rust side also restores from VikingStorage)
|
||||
const savedConfig = localStorage.getItem('zclaw-heartbeat-config');
|
||||
const heartbeatConfig = savedConfig ? JSON.parse(savedConfig) : {
|
||||
enabled: true,
|
||||
interval_minutes: 30,
|
||||
quiet_hours_start: '22:00',
|
||||
@@ -216,7 +225,8 @@ function App() {
|
||||
notify_channel: 'ui',
|
||||
proactivity_level: 'standard',
|
||||
max_alerts_per_tick: 5,
|
||||
});
|
||||
};
|
||||
await intelligenceClient.heartbeat.init(defaultAgentId, heartbeatConfig);
|
||||
|
||||
// Sync memory stats to heartbeat engine
|
||||
try {
|
||||
@@ -236,6 +246,21 @@ function App() {
|
||||
await intelligenceClient.heartbeat.start(defaultAgentId);
|
||||
log.debug('Heartbeat engine started for self-evolution');
|
||||
|
||||
// Listen for real-time heartbeat alerts and show as toast notifications
|
||||
const unlistenAlerts = await safeListen<Array<{ title: string; content: string; urgency: string }>>(
|
||||
'heartbeat:alert',
|
||||
(alerts) => {
|
||||
for (const alert of alerts) {
|
||||
const alertType = alert.urgency === 'high' ? 'error'
|
||||
: alert.urgency === 'medium' ? 'warning'
|
||||
: 'info';
|
||||
toast(`[${alert.title}] ${alert.content}`, alertType as 'info' | 'warning' | 'error');
|
||||
}
|
||||
}
|
||||
);
|
||||
// Store unlisten for cleanup
|
||||
alertUnlistenRef.current = unlistenAlerts;
|
||||
|
||||
// Set up periodic memory stats sync (every 5 minutes)
|
||||
const MEMORY_STATS_SYNC_INTERVAL = 5 * 60 * 1000;
|
||||
const statsSyncInterval = setInterval(async () => {
|
||||
@@ -261,6 +286,9 @@ function App() {
|
||||
// Non-critical, continue without heartbeat
|
||||
}
|
||||
|
||||
// Listen for SaaS recovery events (from saasStore recovery probe)
|
||||
window.addEventListener('saas-recovered', handleSaasRecovered);
|
||||
|
||||
// Step 5: Restore embedding config to Rust backend (Tauri-only)
|
||||
if (isTauriRuntime()) {
|
||||
try {
|
||||
@@ -339,6 +367,12 @@ function App() {
|
||||
if (statsSyncRef.current) {
|
||||
clearInterval(statsSyncRef.current);
|
||||
}
|
||||
// Clean up heartbeat alert listener
|
||||
if (alertUnlistenRef.current) {
|
||||
alertUnlistenRef.current();
|
||||
}
|
||||
// Clean up SaaS recovery event listener
|
||||
window.removeEventListener('saas-recovered', handleSaasRecovered);
|
||||
};
|
||||
}, [connect, onboardingNeeded, onboardingLoading, isLoggedIn]);
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import { listVikingResources } from '../../lib/viking-client';
|
||||
|
||||
interface MemorySectionProps {
|
||||
agentId: string;
|
||||
refreshKey?: number;
|
||||
}
|
||||
|
||||
interface MemoryEntry {
|
||||
@@ -12,7 +13,7 @@ interface MemoryEntry {
|
||||
resourceType: string;
|
||||
}
|
||||
|
||||
export function MemorySection({ agentId }: MemorySectionProps) {
|
||||
export function MemorySection({ agentId, refreshKey }: MemorySectionProps) {
|
||||
const [memories, setMemories] = useState<MemoryEntry[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
@@ -20,7 +21,8 @@ export function MemorySection({ agentId }: MemorySectionProps) {
|
||||
if (!agentId) return;
|
||||
|
||||
setLoading(true);
|
||||
listVikingResources(`viking://agent/${agentId}/memories/`)
|
||||
// 查询 agent:// 下的所有记忆资源 (preferences/knowledge/experience/sessions)
|
||||
listVikingResources(`agent://${agentId}/`)
|
||||
.then((entries) => {
|
||||
setMemories(entries as MemoryEntry[]);
|
||||
})
|
||||
@@ -29,7 +31,7 @@ export function MemorySection({ agentId }: MemorySectionProps) {
|
||||
setMemories([]);
|
||||
})
|
||||
.finally(() => setLoading(false));
|
||||
}, [agentId]);
|
||||
}, [agentId, refreshKey]);
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
import { useButlerInsights } from '../../hooks/useButlerInsights';
|
||||
import { useChatStore } from '../../store/chatStore';
|
||||
import { useIndustryStore } from '../../store/industryStore';
|
||||
import { extractAndStoreMemories } from '../../lib/viking-client';
|
||||
import { resolveKernelAgentId } from '../../lib/kernel-agent';
|
||||
import { InsightsSection } from './InsightsSection';
|
||||
import { ProposalsSection } from './ProposalsSection';
|
||||
import { MemorySection } from './MemorySection';
|
||||
@@ -11,10 +13,26 @@ interface ButlerPanelProps {
|
||||
}
|
||||
|
||||
export function ButlerPanel({ agentId }: ButlerPanelProps) {
|
||||
const { painPoints, proposals, loading, error, refresh } = useButlerInsights(agentId);
|
||||
const [resolvedAgentId, setResolvedAgentId] = useState<string | null>(null);
|
||||
// Use resolved kernel UUID for queries — raw agentId may be "1" from SaaS relay
|
||||
// while pain points/proposals are stored under kernel UUID
|
||||
const effectiveAgentId = resolvedAgentId ?? agentId;
|
||||
const { painPoints, proposals, loading, error, refresh } = useButlerInsights(effectiveAgentId);
|
||||
const messageCount = useChatStore((s) => s.messages.length);
|
||||
const { accountIndustries, configs, lastSynced, isLoading: industryLoading, fetchIndustries } = useIndustryStore();
|
||||
const [analyzing, setAnalyzing] = useState(false);
|
||||
const [memoryRefreshKey, setMemoryRefreshKey] = useState(0);
|
||||
|
||||
// Resolve SaaS relay agentId ("1") to kernel UUID for VikingStorage queries
|
||||
useEffect(() => {
|
||||
if (!agentId) {
|
||||
setResolvedAgentId(null);
|
||||
return;
|
||||
}
|
||||
resolveKernelAgentId(agentId)
|
||||
.then(setResolvedAgentId)
|
||||
.catch(() => setResolvedAgentId(agentId));
|
||||
}, [agentId]);
|
||||
|
||||
// Auto-fetch industry configs once per session
|
||||
useEffect(() => {
|
||||
@@ -26,15 +44,30 @@ export function ButlerPanel({ agentId }: ButlerPanelProps) {
|
||||
const hasData = (painPoints?.length ?? 0) > 0 || (proposals?.length ?? 0) > 0;
|
||||
const canAnalyze = messageCount >= 2;
|
||||
|
||||
const handleAnalyze = async () => {
|
||||
if (!canAnalyze || analyzing) return;
|
||||
const handleAnalyze = useCallback(async () => {
|
||||
if (!canAnalyze || analyzing || !resolvedAgentId) return;
|
||||
setAnalyzing(true);
|
||||
try {
|
||||
// 1. Refresh pain points & proposals
|
||||
await refresh();
|
||||
|
||||
// 2. Extract and store memories from current conversation
|
||||
const messages = useChatStore.getState().messages;
|
||||
if (messages.length >= 2) {
|
||||
const extractionMessages = messages.map((m) => ({
|
||||
role: m.role as 'user' | 'assistant',
|
||||
content: typeof m.content === 'string' ? m.content : '',
|
||||
}));
|
||||
await extractAndStoreMemories(extractionMessages, resolvedAgentId);
|
||||
// Trigger MemorySection to reload
|
||||
setMemoryRefreshKey((k) => k + 1);
|
||||
}
|
||||
} catch {
|
||||
// Extraction failure should not block UI — insights still refreshed
|
||||
} finally {
|
||||
setAnalyzing(false);
|
||||
}
|
||||
};
|
||||
}, [canAnalyze, analyzing, resolvedAgentId, refresh]);
|
||||
|
||||
if (!agentId) {
|
||||
return (
|
||||
@@ -107,7 +140,7 @@ export function ButlerPanel({ agentId }: ButlerPanelProps) {
|
||||
<h3 className="text-sm font-semibold text-gray-900 dark:text-gray-100 mb-2">
|
||||
我记得关于您
|
||||
</h3>
|
||||
<MemorySection agentId={agentId} />
|
||||
<MemorySection agentId={resolvedAgentId || agentId} refreshKey={memoryRefreshKey} />
|
||||
</div>
|
||||
|
||||
{/* Industry section */}
|
||||
|
||||
@@ -72,13 +72,27 @@ export function ChatArea({ compact, onOpenDetail }: { compact?: boolean; onOpenD
|
||||
const saasModels = useSaaSStore((s) => s.availableModels);
|
||||
const isLoggedIn = useSaaSStore((s) => s.isLoggedIn);
|
||||
|
||||
// Track models that failed with API key errors in this session
|
||||
const failedModelIds = useRef<Set<string>>(new Set());
|
||||
|
||||
// Scan messages for API key errors to populate failedModelIds
|
||||
useEffect(() => {
|
||||
for (const msg of messages) {
|
||||
if (msg.error && (msg.error.includes('没有可用的 API Key') || msg.error.includes('Key Pool'))) {
|
||||
failedModelIds.current.add(currentModel);
|
||||
}
|
||||
}
|
||||
}, [messages, currentModel]);
|
||||
|
||||
// Merge models: SaaS available models take priority when logged in
|
||||
const models = useMemo(() => {
|
||||
const failed = failedModelIds.current;
|
||||
if (isLoggedIn && saasModels.length > 0) {
|
||||
return saasModels.map(m => ({
|
||||
id: m.alias || m.id,
|
||||
name: m.alias || m.id,
|
||||
provider: m.provider_id,
|
||||
available: !failed.has(m.alias || m.id),
|
||||
}));
|
||||
}
|
||||
if (configModels.length > 0) {
|
||||
|
||||
441
desktop/src/components/HealthPanel.tsx
Normal file
441
desktop/src/components/HealthPanel.tsx
Normal file
@@ -0,0 +1,441 @@
|
||||
/**
|
||||
* HealthPanel — Read-only dashboard for all subsystem health status
|
||||
*
|
||||
* Displays:
|
||||
* - Agent Heartbeat engine status (running, config, alerts)
|
||||
* - Connection status (mode, SaaS reachability)
|
||||
* - SaaS device heartbeat status
|
||||
* - Memory pipeline status
|
||||
* - Recent alerts history
|
||||
*
|
||||
* No config editing (that's HeartbeatConfig tab).
|
||||
* Uses useState (not Zustand) — component-scoped state.
|
||||
*/
|
||||
|
||||
import { useState, useEffect, useCallback, useRef } from 'react';
|
||||
import {
|
||||
Activity,
|
||||
RefreshCw,
|
||||
Wifi,
|
||||
WifiOff,
|
||||
Cloud,
|
||||
CloudOff,
|
||||
Database,
|
||||
AlertTriangle,
|
||||
CheckCircle,
|
||||
XCircle,
|
||||
Clock,
|
||||
} from 'lucide-react';
|
||||
import { intelligenceClient, type HeartbeatResult } from '../lib/intelligence-client';
|
||||
import { useConnectionStore } from '../store/connectionStore';
|
||||
import { useSaaSStore } from '../store/saasStore';
|
||||
import { isTauriRuntime } from '../lib/tauri-gateway';
|
||||
import { safeListen } from '../lib/safe-tauri';
|
||||
import { createLogger } from '../lib/logger';
|
||||
|
||||
const log = createLogger('HealthPanel');
|
||||
|
||||
// === Types ===
|
||||
|
||||
interface HealthSnapshotData {
|
||||
timestamp: string;
|
||||
intelligence: {
|
||||
engineRunning: boolean;
|
||||
config: {
|
||||
enabled: boolean;
|
||||
interval_minutes: number;
|
||||
proactivity_level: string;
|
||||
};
|
||||
lastTick: string | null;
|
||||
alertCount24h: number;
|
||||
totalChecks: number;
|
||||
};
|
||||
memory: {
|
||||
totalEntries: number;
|
||||
storageSizeBytes: number;
|
||||
lastExtraction: string | null;
|
||||
};
|
||||
}
|
||||
|
||||
interface HealthCardProps {
|
||||
title: string;
|
||||
icon: React.ReactNode;
|
||||
status: 'green' | 'yellow' | 'gray' | 'red';
|
||||
children: React.ReactNode;
|
||||
}
|
||||
|
||||
const STATUS_COLORS = {
|
||||
green: 'text-green-500',
|
||||
yellow: 'text-yellow-500',
|
||||
gray: 'text-gray-400',
|
||||
red: 'text-red-500',
|
||||
};
|
||||
|
||||
const STATUS_BG = {
|
||||
green: 'bg-green-50 dark:bg-green-900/20',
|
||||
yellow: 'bg-yellow-50 dark:bg-yellow-900/20',
|
||||
gray: 'bg-gray-50 dark:bg-gray-800/50',
|
||||
red: 'bg-red-50 dark:bg-red-900/20',
|
||||
};
|
||||
|
||||
function HealthCard({ title, icon, status, children }: HealthCardProps) {
|
||||
return (
|
||||
<div className={`rounded-lg border border-gray-200 dark:border-gray-700 p-4 ${STATUS_BG[status]}`}>
|
||||
<div className="flex items-center gap-2 mb-3">
|
||||
<span className={STATUS_COLORS[status]}>{icon}</span>
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-gray-100">{title}</h3>
|
||||
<span className={`ml-auto text-xs ${STATUS_COLORS[status]}`}>
|
||||
{status === 'green' ? '正常' : status === 'yellow' ? '降级' : status === 'red' ? '异常' : '未启用'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="space-y-1.5 text-xs text-gray-600 dark:text-gray-400">
|
||||
{children}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const k = 1024;
|
||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
return `${parseFloat((bytes / Math.pow(k, i)).toFixed(1))} ${sizes[i]}`;
|
||||
}
|
||||
|
||||
function formatTime(isoString: string | null): string {
|
||||
if (!isoString) return '从未';
|
||||
try {
|
||||
const date = new Date(isoString);
|
||||
return date.toLocaleString('zh-CN', {
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
});
|
||||
} catch {
|
||||
return isoString;
|
||||
}
|
||||
}
|
||||
|
||||
function formatUrgency(urgency: string): { label: string; color: string } {
|
||||
switch (urgency) {
|
||||
case 'high': return { label: '高', color: 'text-red-500' };
|
||||
case 'medium': return { label: '中', color: 'text-yellow-500' };
|
||||
case 'low': return { label: '低', color: 'text-blue-500' };
|
||||
default: return { label: urgency, color: 'text-gray-500' };
|
||||
}
|
||||
}
|
||||
|
||||
// === Main Component ===
|
||||
|
||||
export function HealthPanel() {
|
||||
const [snapshot, setSnapshot] = useState<HealthSnapshotData | null>(null);
|
||||
const [alerts, setAlerts] = useState<HeartbeatResult[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const alertsEndRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
// Get live connection and SaaS state
|
||||
const connectionState = useConnectionStore((s) => s.connectionState);
|
||||
const gatewayVersion = useConnectionStore((s) => s.gatewayVersion);
|
||||
const connectionMode = useSaaSStore((s) => s.connectionMode);
|
||||
const saasReachable = useSaaSStore((s) => s.saasReachable);
|
||||
const consecutiveFailures = useSaaSStore((s) => s._consecutiveFailures);
|
||||
const isLoggedIn = useSaaSStore((s) => s.isLoggedIn);
|
||||
|
||||
// Fetch health snapshot
|
||||
const fetchSnapshot = useCallback(async () => {
|
||||
if (!isTauriRuntime()) return;
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
try {
|
||||
const { invoke } = await import('@tauri-apps/api/core');
|
||||
const data = await invoke<HealthSnapshotData>('health_snapshot', {
|
||||
agentId: 'zclaw-main',
|
||||
});
|
||||
setSnapshot(data);
|
||||
} catch (err) {
|
||||
log.warn('Failed to fetch health snapshot:', err);
|
||||
setError(String(err));
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Fetch alert history
|
||||
const fetchAlerts = useCallback(async () => {
|
||||
if (!isTauriRuntime()) return;
|
||||
try {
|
||||
const history = await intelligenceClient.heartbeat.getHistory('zclaw-main', 100);
|
||||
setAlerts(history);
|
||||
} catch (err) {
|
||||
log.warn('Failed to fetch alert history:', err);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Initial load
|
||||
useEffect(() => {
|
||||
fetchSnapshot();
|
||||
fetchAlerts();
|
||||
}, [fetchSnapshot, fetchAlerts]);
|
||||
|
||||
// Subscribe to real-time alerts
|
||||
useEffect(() => {
|
||||
if (!isTauriRuntime()) return;
|
||||
|
||||
let unlisten: (() => void) | null = null;
|
||||
const subscribe = async () => {
|
||||
unlisten = await safeListen<Array<{ title: string; content: string; urgency: string; source: string; timestamp: string }>>(
|
||||
'heartbeat:alert',
|
||||
(newAlerts) => {
|
||||
// Prepend new alerts to history
|
||||
setAlerts((prev) => {
|
||||
const result: HeartbeatResult[] = [
|
||||
{
|
||||
status: 'alert',
|
||||
alerts: newAlerts.map((a) => ({
|
||||
title: a.title,
|
||||
content: a.content,
|
||||
urgency: a.urgency as 'low' | 'medium' | 'high',
|
||||
source: a.source,
|
||||
timestamp: a.timestamp,
|
||||
})),
|
||||
checked_items: 0,
|
||||
timestamp: new Date().toISOString(),
|
||||
},
|
||||
...prev,
|
||||
];
|
||||
// Keep max 100
|
||||
return result.slice(0, 100);
|
||||
});
|
||||
},
|
||||
);
|
||||
};
|
||||
subscribe();
|
||||
|
||||
return () => {
|
||||
if (unlisten) unlisten();
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Auto-scroll alerts to show latest
|
||||
useEffect(() => {
|
||||
alertsEndRef.current?.scrollIntoView({ behavior: 'smooth' });
|
||||
}, [alerts]);
|
||||
|
||||
// Determine SaaS card status
|
||||
const saasStatus: 'green' | 'yellow' | 'gray' | 'red' = !isLoggedIn
|
||||
? 'gray'
|
||||
: saasReachable
|
||||
? 'green'
|
||||
: 'red';
|
||||
|
||||
// Determine connection card status
|
||||
const isActuallyConnected = connectionState === 'connected';
|
||||
const connectionStatus: 'green' | 'yellow' | 'gray' | 'red' = isActuallyConnected
|
||||
? 'green'
|
||||
: connectionState === 'connecting' || connectionState === 'reconnecting'
|
||||
? 'yellow'
|
||||
: 'red';
|
||||
|
||||
// Determine heartbeat card status
|
||||
const heartbeatStatus: 'green' | 'yellow' | 'gray' | 'red' = !snapshot
|
||||
? 'gray'
|
||||
: snapshot.intelligence.engineRunning
|
||||
? 'green'
|
||||
: snapshot.intelligence.config.enabled
|
||||
? 'yellow'
|
||||
: 'gray';
|
||||
|
||||
// Determine memory card status
|
||||
const memoryStatus: 'green' | 'yellow' | 'gray' | 'red' = !snapshot
|
||||
? 'gray'
|
||||
: snapshot.memory.totalEntries === 0
|
||||
? 'gray'
|
||||
: snapshot.memory.storageSizeBytes > 50 * 1024 * 1024
|
||||
? 'yellow'
|
||||
: 'green';
|
||||
|
||||
return (
|
||||
<div className="flex flex-col h-full">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between p-4 border-b border-gray-200 dark:border-gray-700">
|
||||
<div className="flex items-center gap-2">
|
||||
<Activity className="w-5 h-5 text-blue-500" />
|
||||
<h2 className="text-lg font-semibold text-gray-900 dark:text-gray-100">系统健康</h2>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => { fetchSnapshot(); fetchAlerts(); }}
|
||||
disabled={loading}
|
||||
className="flex items-center gap-1 px-3 py-1.5 text-sm text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-gray-100 disabled:opacity-50"
|
||||
>
|
||||
<RefreshCw className={`w-4 h-4 ${loading ? 'animate-spin' : ''}`} />
|
||||
刷新
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Content */}
|
||||
<div className="flex-1 overflow-y-auto p-4 space-y-4">
|
||||
{error && (
|
||||
<div className="p-3 text-sm text-red-600 bg-red-50 dark:bg-red-900/20 rounded-lg">
|
||||
加载失败: {error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Health Cards Grid */}
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
|
||||
{/* Agent Heartbeat Card */}
|
||||
<HealthCard
|
||||
title="Agent 心跳"
|
||||
icon={<Activity className="w-4 h-4" />}
|
||||
status={heartbeatStatus}
|
||||
>
|
||||
<div className="flex justify-between">
|
||||
<span>引擎状态</span>
|
||||
<span className={snapshot?.intelligence.engineRunning ? 'text-green-600' : 'text-gray-400'}>
|
||||
{snapshot?.intelligence.engineRunning ? '运行中' : '已停止'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>检查间隔</span>
|
||||
<span>{snapshot?.intelligence.config.interval_minutes ?? '-'} 分钟</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>上次检查</span>
|
||||
<span>{formatTime(snapshot?.intelligence.lastTick ?? null)}</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>24h 告警数</span>
|
||||
<span>{snapshot?.intelligence.alertCount24h ?? 0}</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>主动性级别</span>
|
||||
<span>{snapshot?.intelligence.config.proactivity_level ?? '-'}</span>
|
||||
</div>
|
||||
</HealthCard>
|
||||
|
||||
{/* Connection Card */}
|
||||
<HealthCard
|
||||
title="连接状态"
|
||||
icon={isActuallyConnected ? <Wifi className="w-4 h-4" /> : <WifiOff className="w-4 h-4" />}
|
||||
status={connectionStatus}
|
||||
>
|
||||
<div className="flex justify-between">
|
||||
<span>连接模式</span>
|
||||
<span>{connectionMode === 'saas' ? 'SaaS 云端' : connectionMode === 'tauri' ? '本地模式' : connectionMode}</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>连接状态</span>
|
||||
<span className={isActuallyConnected ? 'text-green-600' : connectionState === 'connecting' ? 'text-yellow-500' : 'text-red-500'}>
|
||||
{connectionState === 'connected' ? '已连接' : connectionState === 'connecting' ? '连接中...' : connectionState === 'reconnecting' ? '重连中...' : '未连接'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>网关版本</span>
|
||||
<span>{gatewayVersion ?? '-'}</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>SaaS 可达</span>
|
||||
<span className={saasReachable ? 'text-green-600' : 'text-red-500'}>
|
||||
{saasReachable ? '是' : '否'}
|
||||
</span>
|
||||
</div>
|
||||
</HealthCard>
|
||||
|
||||
{/* SaaS Device Card */}
|
||||
<HealthCard
|
||||
title="SaaS 设备"
|
||||
icon={saasReachable ? <Cloud className="w-4 h-4" /> : <CloudOff className="w-4 h-4" />}
|
||||
status={saasStatus}
|
||||
>
|
||||
<div className="flex justify-between">
|
||||
<span>设备注册</span>
|
||||
<span>{isLoggedIn ? '已注册' : '未注册'}</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>连续失败</span>
|
||||
<span className={consecutiveFailures > 0 ? 'text-yellow-500' : 'text-green-600'}>
|
||||
{consecutiveFailures}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>服务状态</span>
|
||||
<span className={saasReachable ? 'text-green-600' : 'text-red-500'}>
|
||||
{saasReachable ? '在线' : isLoggedIn ? '离线 (已降级)' : '未连接'}
|
||||
</span>
|
||||
</div>
|
||||
</HealthCard>
|
||||
|
||||
{/* Memory Card */}
|
||||
<HealthCard
|
||||
title="记忆管道"
|
||||
icon={<Database className="w-4 h-4" />}
|
||||
status={memoryStatus}
|
||||
>
|
||||
<div className="flex justify-between">
|
||||
<span>记忆条目</span>
|
||||
<span>{snapshot?.memory.totalEntries ?? 0}</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>存储大小</span>
|
||||
<span>{formatBytes(snapshot?.memory.storageSizeBytes ?? 0)}</span>
|
||||
</div>
|
||||
<div className="flex justify-between">
|
||||
<span>上次提取</span>
|
||||
<span>{formatTime(snapshot?.memory.lastExtraction ?? null)}</span>
|
||||
</div>
|
||||
</HealthCard>
|
||||
</div>
|
||||
|
||||
{/* Alerts History */}
|
||||
<div className="rounded-lg border border-gray-200 dark:border-gray-700">
|
||||
<div className="flex items-center gap-2 p-3 border-b border-gray-200 dark:border-gray-700">
|
||||
<AlertTriangle className="w-4 h-4 text-yellow-500" />
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-gray-100">最近告警</h3>
|
||||
<span className="ml-auto text-xs text-gray-400">
|
||||
{alerts.reduce((sum, r) => sum + r.alerts.length, 0)} 条
|
||||
</span>
|
||||
</div>
|
||||
<div className="max-h-64 overflow-y-auto divide-y divide-gray-100 dark:divide-gray-800">
|
||||
{alerts.length === 0 ? (
|
||||
<div className="p-4 text-center text-sm text-gray-400">暂无告警记录</div>
|
||||
) : (
|
||||
alerts.map((result, ri) =>
|
||||
result.alerts.map((alert, ai) => (
|
||||
<div key={`${ri}-${ai}`} className="flex items-start gap-2 p-3 hover:bg-gray-50 dark:hover:bg-gray-800/50">
|
||||
<span className={`mt-0.5 ${formatUrgency(alert.urgency).color}`}>
|
||||
{alert.urgency === 'high' ? (
|
||||
<XCircle className="w-3.5 h-3.5" />
|
||||
) : alert.urgency === 'medium' ? (
|
||||
<AlertTriangle className="w-3.5 h-3.5" />
|
||||
) : (
|
||||
<CheckCircle className="w-3.5 h-3.5" />
|
||||
)}
|
||||
</span>
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-xs font-medium text-gray-900 dark:text-gray-100 truncate">
|
||||
{alert.title}
|
||||
</span>
|
||||
<span className={`text-xs px-1 rounded ${formatUrgency(alert.urgency).color} bg-opacity-10`}>
|
||||
{formatUrgency(alert.urgency).label}
|
||||
</span>
|
||||
</div>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 truncate">{alert.content}</p>
|
||||
</div>
|
||||
<span className="text-xs text-gray-400 whitespace-nowrap flex items-center gap-1">
|
||||
<Clock className="w-3 h-3" />
|
||||
{formatTime(alert.timestamp)}
|
||||
</span>
|
||||
</div>
|
||||
))
|
||||
)
|
||||
)}
|
||||
<div ref={alertsEndRef} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -31,6 +31,9 @@ import {
|
||||
type HeartbeatResult,
|
||||
type HeartbeatAlert,
|
||||
} from '../lib/intelligence-client';
|
||||
import { createLogger } from '../lib/logger';
|
||||
|
||||
const log = createLogger('HeartbeatConfig');
|
||||
|
||||
// === Default Config ===
|
||||
|
||||
@@ -312,9 +315,15 @@ export function HeartbeatConfig({ className = '', onConfigChange }: HeartbeatCon
|
||||
});
|
||||
}, []);
|
||||
|
||||
const handleSave = useCallback(() => {
|
||||
const handleSave = useCallback(async () => {
|
||||
localStorage.setItem('zclaw-heartbeat-config', JSON.stringify(config));
|
||||
localStorage.setItem('zclaw-heartbeat-checks', JSON.stringify(checkItems));
|
||||
// Sync to Rust backend (non-blocking — UI updates immediately)
|
||||
try {
|
||||
await intelligenceClient.heartbeat.updateConfig('zclaw-main', config);
|
||||
} catch (err) {
|
||||
log.warn('[HeartbeatConfig] Backend sync failed:', err);
|
||||
}
|
||||
setHasChanges(false);
|
||||
}, [config, checkItems]);
|
||||
|
||||
|
||||
@@ -179,7 +179,7 @@ export function RightPanel({ simpleMode = false }: RightPanelProps) {
|
||||
.catch(() => setUserProfile(null));
|
||||
}, [currentAgent?.id]);
|
||||
|
||||
// Listen for profile updates after conversations
|
||||
// Listen for profile updates after conversations (fired after memory extraction completes)
|
||||
useEffect(() => {
|
||||
const handler = (e: Event) => {
|
||||
const detail = (e as CustomEvent).detail;
|
||||
@@ -187,6 +187,8 @@ export function RightPanel({ simpleMode = false }: RightPanelProps) {
|
||||
invoke<AgentInfo | null>('agent_get', { agentId: currentAgent.id })
|
||||
.then(data => setUserProfile(data?.userProfile ?? null))
|
||||
.catch(() => {});
|
||||
// Refresh clones data so selectedClone (name, role, nickname, etc.) stays current
|
||||
loadClones();
|
||||
}
|
||||
};
|
||||
window.addEventListener('zclaw:agent-profile-updated', handler);
|
||||
|
||||
@@ -10,6 +10,10 @@ import {
|
||||
Package,
|
||||
BarChart,
|
||||
Palette,
|
||||
HeartPulse,
|
||||
GraduationCap,
|
||||
Landmark,
|
||||
Scale,
|
||||
Server,
|
||||
Search,
|
||||
Megaphone,
|
||||
@@ -33,6 +37,10 @@ const iconMap: Record<string, React.ComponentType<{ className?: string }>> = {
|
||||
Package,
|
||||
BarChart,
|
||||
Palette,
|
||||
HeartPulse,
|
||||
GraduationCap,
|
||||
Landmark,
|
||||
Scale,
|
||||
Server,
|
||||
Search,
|
||||
Megaphone,
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
import { useState } from 'react';
|
||||
|
||||
export function Credits() {
|
||||
const [filter, setFilter] = useState<'all' | 'consume' | 'earn'>('all');
|
||||
|
||||
return (
|
||||
<div className="max-w-3xl">
|
||||
<div className="flex justify-between items-center mb-6">
|
||||
<h1 className="text-xl font-bold text-gray-900">积分</h1>
|
||||
<div className="flex gap-2">
|
||||
<button className="text-xs text-gray-500 hover:text-gray-700 px-3 py-1.5 border border-gray-200 rounded-lg transition-colors">
|
||||
刷新
|
||||
</button>
|
||||
<button className="text-xs text-white bg-orange-500 hover:bg-orange-600 px-3 py-1.5 rounded-lg transition-colors">
|
||||
去充值
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="text-center mb-8 py-12">
|
||||
<div className="text-xs text-gray-500 mb-1">总积分</div>
|
||||
<div className="text-3xl font-bold text-gray-900">--</div>
|
||||
<div className="text-xs text-gray-400 mt-2">积分系统开发中</div>
|
||||
</div>
|
||||
|
||||
<div className="p-1 mb-6 flex rounded-lg bg-gray-50 border border-gray-100 shadow-sm">
|
||||
<button
|
||||
onClick={() => setFilter('all')}
|
||||
className={`flex-1 py-2 rounded-md text-xs transition-colors ${filter === 'all' ? 'bg-white shadow-sm font-medium text-gray-900' : 'text-gray-500 hover:text-gray-700'}`}
|
||||
>
|
||||
全部
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setFilter('consume')}
|
||||
className={`flex-1 py-2 rounded-md text-xs transition-colors ${filter === 'consume' ? 'bg-white shadow-sm font-medium text-gray-900' : 'text-gray-500 hover:text-gray-700'}`}
|
||||
>
|
||||
消耗
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setFilter('earn')}
|
||||
className={`flex-1 py-2 rounded-md text-xs transition-colors ${filter === 'earn' ? 'bg-white shadow-sm font-medium text-gray-900' : 'text-gray-500 hover:text-gray-700'}`}
|
||||
>
|
||||
获得
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="bg-white rounded-xl border border-gray-200 p-8 text-center">
|
||||
<div className="text-sm text-gray-400">暂无积分记录</div>
|
||||
<div className="text-xs text-gray-300 mt-1">连接后端服务后即可查看积分使用记录</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -2,14 +2,12 @@ import { useState } from 'react';
|
||||
import { useSecurityStore } from '../../store/securityStore';
|
||||
import {
|
||||
Settings as SettingsIcon,
|
||||
BarChart3,
|
||||
Puzzle,
|
||||
MessageSquare,
|
||||
FolderOpen,
|
||||
Shield,
|
||||
Info,
|
||||
ArrowLeft,
|
||||
Coins,
|
||||
Cpu,
|
||||
Zap,
|
||||
HelpCircle,
|
||||
@@ -18,12 +16,12 @@ import {
|
||||
Heart,
|
||||
Key,
|
||||
Database,
|
||||
Activity,
|
||||
Cloud,
|
||||
CreditCard,
|
||||
} from 'lucide-react';
|
||||
import { silentErrorHandler } from '../../lib/error-utils';
|
||||
import { General } from './General';
|
||||
import { UsageStats } from './UsageStats';
|
||||
import { ModelsAPI } from './ModelsAPI';
|
||||
import { MCPServices } from './MCPServices';
|
||||
import { Skills } from './Skills';
|
||||
@@ -31,12 +29,12 @@ import { IMChannels } from './IMChannels';
|
||||
import { Workspace } from './Workspace';
|
||||
import { Privacy } from './Privacy';
|
||||
import { About } from './About';
|
||||
import { Credits } from './Credits';
|
||||
import { AuditLogsPanel } from '../AuditLogsPanel';
|
||||
import { SecurityStatus } from '../SecurityStatus';
|
||||
import { SecurityLayersPanel } from '../SecurityLayersPanel';
|
||||
import { TaskList } from '../TaskList';
|
||||
import { HeartbeatConfig } from '../HeartbeatConfig';
|
||||
import { HealthPanel } from '../HealthPanel';
|
||||
import { SecureStorage } from './SecureStorage';
|
||||
import { VikingPanel } from '../VikingPanel';
|
||||
import { SaaSSettings } from '../SaaS/SaaSSettings';
|
||||
@@ -49,8 +47,6 @@ interface SettingsLayoutProps {
|
||||
|
||||
type SettingsPage =
|
||||
| 'general'
|
||||
| 'usage'
|
||||
| 'credits'
|
||||
| 'models'
|
||||
| 'mcp'
|
||||
| 'skills'
|
||||
@@ -65,14 +61,13 @@ type SettingsPage =
|
||||
| 'audit'
|
||||
| 'tasks'
|
||||
| 'heartbeat'
|
||||
| 'health'
|
||||
| 'feedback'
|
||||
| 'about';
|
||||
|
||||
const menuItems: { id: SettingsPage; label: string; icon: React.ReactNode; group?: 'advanced' }[] = [
|
||||
// --- Core settings ---
|
||||
{ id: 'general', label: '通用', icon: <SettingsIcon className="w-4 h-4" /> },
|
||||
{ id: 'usage', label: '用量统计', icon: <BarChart3 className="w-4 h-4" /> },
|
||||
{ id: 'credits', label: '积分详情', icon: <Coins className="w-4 h-4" /> },
|
||||
{ id: 'models', label: '模型与 API', icon: <Cpu className="w-4 h-4" /> },
|
||||
{ id: 'mcp', label: 'MCP 服务', icon: <Puzzle className="w-4 h-4" /> },
|
||||
{ id: 'im', label: 'IM 频道', icon: <MessageSquare className="w-4 h-4" /> },
|
||||
@@ -89,6 +84,7 @@ const menuItems: { id: SettingsPage; label: string; icon: React.ReactNode; group
|
||||
{ id: 'audit', label: '审计日志', icon: <ClipboardList className="w-4 h-4" />, group: 'advanced' },
|
||||
{ id: 'tasks', label: '定时任务', icon: <Clock className="w-4 h-4" />, group: 'advanced' },
|
||||
{ id: 'heartbeat', label: '心跳配置', icon: <Heart className="w-4 h-4" />, group: 'advanced' },
|
||||
{ id: 'health', label: '系统健康', icon: <Activity className="w-4 h-4" />, group: 'advanced' },
|
||||
// --- Footer ---
|
||||
{ id: 'feedback', label: '提交反馈', icon: <HelpCircle className="w-4 h-4" /> },
|
||||
{ id: 'about', label: '关于', icon: <Info className="w-4 h-4" /> },
|
||||
@@ -101,8 +97,6 @@ export function SettingsLayout({ onBack }: SettingsLayoutProps) {
|
||||
const renderPage = () => {
|
||||
switch (activePage) {
|
||||
case 'general': return <General />;
|
||||
case 'usage': return <UsageStats />;
|
||||
case 'credits': return <Credits />;
|
||||
case 'models': return <ModelsAPI />;
|
||||
case 'mcp': return <MCPServices />;
|
||||
case 'skills': return <Skills />;
|
||||
@@ -175,6 +169,16 @@ export function SettingsLayout({ onBack }: SettingsLayoutProps) {
|
||||
</div>
|
||||
</ErrorBoundary>
|
||||
);
|
||||
case 'health': return (
|
||||
<ErrorBoundary
|
||||
fallback={<div className="p-6 text-center text-gray-500">系统健康面板加载失败</div>}
|
||||
onError={(err, info) => console.error('[Settings] Health page error:', err, info.componentStack)}
|
||||
>
|
||||
<div className="max-w-3xl h-full">
|
||||
<HealthPanel />
|
||||
</div>
|
||||
</ErrorBoundary>
|
||||
);
|
||||
case 'viking': return (
|
||||
<ErrorBoundary
|
||||
fallback={<div className="p-6 text-center text-gray-500">语义记忆加载失败</div>}
|
||||
|
||||
@@ -1,177 +0,0 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useAgentStore } from '../../store/agentStore';
|
||||
import { BarChart3, TrendingUp, Clock, Zap } from 'lucide-react';
|
||||
|
||||
export function UsageStats() {
|
||||
const usageStats = useAgentStore((s) => s.usageStats);
|
||||
const loadUsageStats = useAgentStore((s) => s.loadUsageStats);
|
||||
const [timeRange, setTimeRange] = useState<'7d' | '30d' | 'all'>('7d');
|
||||
|
||||
useEffect(() => {
|
||||
loadUsageStats();
|
||||
}, [loadUsageStats]);
|
||||
|
||||
const stats = usageStats || { totalSessions: 0, totalMessages: 0, totalTokens: 0, byModel: {} };
|
||||
const models = Object.entries(stats.byModel || {});
|
||||
|
||||
const formatTokens = (n: number) => {
|
||||
if (n >= 1_000_000) return `~${(n / 1_000_000).toFixed(1)} M`;
|
||||
if (n >= 1_000) return `~${(n / 1_000).toFixed(1)} k`;
|
||||
return `${n}`;
|
||||
};
|
||||
|
||||
// 计算总输入和输出 Token
|
||||
const totalInputTokens = models.reduce((sum, [_, data]) => sum + data.inputTokens, 0);
|
||||
const totalOutputTokens = models.reduce((sum, [_, data]) => sum + data.outputTokens, 0);
|
||||
|
||||
return (
|
||||
<div className="max-w-3xl">
|
||||
<div className="flex justify-between items-center mb-6">
|
||||
<h1 className="text-xl font-bold text-gray-900">用量统计</h1>
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="flex items-center bg-gray-100 rounded-lg p-0.5">
|
||||
{(['7d', '30d', 'all'] as const).map((range) => (
|
||||
<button
|
||||
key={range}
|
||||
onClick={() => setTimeRange(range)}
|
||||
className={`px-3 py-1 text-xs rounded-md transition-colors ${
|
||||
timeRange === range
|
||||
? 'bg-white text-gray-900 shadow-sm'
|
||||
: 'text-gray-500 hover:text-gray-700'
|
||||
}`}
|
||||
>
|
||||
{range === '7d' ? '近 7 天' : range === '30d' ? '近 30 天' : '全部'}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
<button
|
||||
onClick={() => loadUsageStats()}
|
||||
className="text-xs text-gray-500 hover:text-gray-700 px-3 py-1.5 border border-gray-200 rounded-lg transition-colors"
|
||||
>
|
||||
刷新
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="text-xs text-gray-500 mb-4">本设备所有已保存对话的使用统计。</div>
|
||||
|
||||
{/* 主要统计卡片 */}
|
||||
<div className="grid grid-cols-4 gap-4 mb-8">
|
||||
<StatCard
|
||||
icon={BarChart3}
|
||||
label="会话数"
|
||||
value={stats.totalSessions}
|
||||
color="text-blue-500"
|
||||
/>
|
||||
<StatCard
|
||||
icon={Zap}
|
||||
label="消息数"
|
||||
value={stats.totalMessages}
|
||||
color="text-purple-500"
|
||||
/>
|
||||
<StatCard
|
||||
icon={TrendingUp}
|
||||
label="输入 Token"
|
||||
value={formatTokens(totalInputTokens)}
|
||||
color="text-green-500"
|
||||
/>
|
||||
<StatCard
|
||||
icon={Clock}
|
||||
label="输出 Token"
|
||||
value={formatTokens(totalOutputTokens)}
|
||||
color="text-orange-500"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* 总 Token 使用量概览 */}
|
||||
<div className="bg-white rounded-xl border border-gray-200 p-5 shadow-sm mb-6">
|
||||
<h3 className="text-sm font-semibold mb-4 text-gray-900">Token 使用概览</h3>
|
||||
{stats.totalTokens === 0 ? (
|
||||
<p className="text-xs text-gray-400">Token 用量将在后续版本中支持</p>
|
||||
) : (
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex-1">
|
||||
<div className="flex justify-between text-xs text-gray-500 mb-1">
|
||||
<span>输入</span>
|
||||
<span>输出</span>
|
||||
</div>
|
||||
<div className="h-3 bg-gray-100 rounded-full overflow-hidden flex">
|
||||
<div
|
||||
className="bg-gradient-to-r from-green-400 to-green-500 h-full transition-all"
|
||||
style={{ width: `${(totalInputTokens / Math.max(totalInputTokens + totalOutputTokens, 1)) * 100}%` }}
|
||||
/>
|
||||
<div
|
||||
className="bg-gradient-to-r from-orange-400 to-orange-500 h-full transition-all"
|
||||
style={{ width: `${(totalOutputTokens / Math.max(totalInputTokens + totalOutputTokens, 1)) * 100}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="text-right flex-shrink-0">
|
||||
<div className="text-lg font-bold text-gray-900">{formatTokens(stats.totalTokens)}</div>
|
||||
<div className="text-xs text-gray-500">总计</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* 按模型分组 */}
|
||||
<h2 className="text-sm font-semibold mb-4 text-gray-900">按模型</h2>
|
||||
<div className="bg-white rounded-xl border border-gray-200 divide-y divide-gray-100 shadow-sm">
|
||||
{models.length === 0 ? (
|
||||
<div className="p-8 text-center">
|
||||
<div className="w-12 h-12 bg-gray-100 rounded-full flex items-center justify-center mx-auto mb-3">
|
||||
<BarChart3 className="w-6 h-6 text-gray-400" />
|
||||
</div>
|
||||
<p className="text-sm text-gray-400">暂无使用数据</p>
|
||||
<p className="text-xs text-gray-300 mt-1">开始对话后将自动记录用量统计</p>
|
||||
</div>
|
||||
) : (
|
||||
models.map(([model, data]) => {
|
||||
const total = data.inputTokens + data.outputTokens;
|
||||
const inputPct = (data.inputTokens / Math.max(total, 1)) * 100;
|
||||
const outputPct = (data.outputTokens / Math.max(total, 1)) * 100;
|
||||
|
||||
return (
|
||||
<div key={model} className="p-4">
|
||||
<div className="flex justify-between items-center mb-2">
|
||||
<span className="font-medium text-gray-900">{model}</span>
|
||||
<span className="text-xs text-gray-500">{data.messages} 条消息</span>
|
||||
</div>
|
||||
<div className="h-2 bg-gray-100 rounded-full overflow-hidden mb-2 flex">
|
||||
<div className="bg-orange-500 h-full" style={{ width: `${inputPct}%` }} />
|
||||
<div className="bg-orange-200 h-full" style={{ width: `${outputPct}%` }} />
|
||||
</div>
|
||||
<div className="flex justify-between text-xs text-gray-500">
|
||||
<span>输入: {formatTokens(data.inputTokens)}</span>
|
||||
<span>输出: {formatTokens(data.outputTokens)}</span>
|
||||
<span>总计: {formatTokens(total)}</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function StatCard({
|
||||
icon: Icon,
|
||||
label,
|
||||
value,
|
||||
color,
|
||||
}: {
|
||||
icon: typeof BarChart3;
|
||||
label: string;
|
||||
value: string | number;
|
||||
color: string;
|
||||
}) {
|
||||
return (
|
||||
<div className="bg-white rounded-xl border border-gray-200 p-4 shadow-sm">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<Icon className={`w-4 h-4 ${color}`} />
|
||||
<span className="text-xs text-gray-500">{label}</span>
|
||||
</div>
|
||||
<div className="text-2xl font-bold text-gray-900">{value}</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -7,10 +7,11 @@
|
||||
|
||||
import { useState } from 'react';
|
||||
import {
|
||||
Settings, LayoutGrid,
|
||||
Settings, LayoutGrid, SquarePen,
|
||||
Search, X,
|
||||
} from 'lucide-react';
|
||||
import { ConversationList } from './ConversationList';
|
||||
import { useChatStore } from '../store/chatStore';
|
||||
|
||||
interface SimpleSidebarProps {
|
||||
onOpenSettings?: () => void;
|
||||
@@ -19,6 +20,11 @@ interface SimpleSidebarProps {
|
||||
|
||||
export function SimpleSidebar({ onOpenSettings, onToggleMode }: SimpleSidebarProps) {
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const newConversation = useChatStore((s) => s.newConversation);
|
||||
|
||||
const handleNewConversation = () => {
|
||||
newConversation();
|
||||
};
|
||||
|
||||
return (
|
||||
<aside className="w-64 sidebar-bg border-r border-[#e8e6e1] dark:border-gray-800 flex flex-col h-full shrink-0">
|
||||
@@ -27,11 +33,26 @@ export function SimpleSidebar({ onOpenSettings, onToggleMode }: SimpleSidebarPro
|
||||
<span className="text-lg font-semibold tracking-tight bg-gradient-to-r from-orange-500 to-amber-500 bg-clip-text text-transparent">
|
||||
ZCLAW
|
||||
</span>
|
||||
<button
|
||||
onClick={handleNewConversation}
|
||||
className="ml-auto p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-md transition-colors text-gray-600 dark:text-gray-400"
|
||||
title="新对话"
|
||||
>
|
||||
<SquarePen className="w-4 h-4" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* 内容区域 */}
|
||||
<div className="flex-1 overflow-hidden">
|
||||
<div className="p-2 h-full overflow-y-auto">
|
||||
{/* 新对话按钮 */}
|
||||
<button
|
||||
onClick={handleNewConversation}
|
||||
className="w-full flex items-center gap-3 px-3 py-2 rounded-lg bg-black/5 dark:bg-white/5 text-sm font-medium text-gray-900 dark:text-gray-100 hover:bg-black/10 dark:hover:bg-white/10 transition-colors mb-2"
|
||||
>
|
||||
<SquarePen className="w-4 h-4" />
|
||||
新对话
|
||||
</button>
|
||||
{/* 搜索框 */}
|
||||
<div className="relative mb-2">
|
||||
<Search className="absolute left-3 top-1/2 -translate-y-1/2 text-gray-400 w-4 h-4" />
|
||||
|
||||
@@ -196,68 +196,89 @@ export function VikingPanel() {
|
||||
)}
|
||||
|
||||
{/* Storage Info */}
|
||||
{status?.available && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<div className="flex items-center gap-3 mb-3">
|
||||
<div className="w-10 h-10 rounded-xl bg-gradient-to-br from-blue-500 to-indigo-500 flex items-center justify-center">
|
||||
<Database className="w-4 h-4 text-white" />
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<div className="flex items-center gap-3 mb-3">
|
||||
<div className={`w-10 h-10 rounded-xl flex items-center justify-center ${status?.available ? 'bg-gradient-to-br from-blue-500 to-indigo-500' : 'bg-gray-300 dark:bg-gray-600'}`}>
|
||||
<Database className="w-4 h-4 text-white" />
|
||||
</div>
|
||||
<div>
|
||||
<div className="text-sm font-medium text-gray-900 dark:text-white">
|
||||
本地存储
|
||||
</div>
|
||||
<div>
|
||||
<div className="text-sm font-medium text-gray-900 dark:text-white">
|
||||
本地存储
|
||||
</div>
|
||||
<div className="text-xs text-gray-500 dark:text-gray-400">
|
||||
{status.version || 'Native'} · {status.dataDir || '默认路径'}
|
||||
</div>
|
||||
<div className="text-xs text-gray-500 dark:text-gray-400">
|
||||
{status?.available
|
||||
? `${status.version || 'Native'} · ${status.dataDir || '默认路径'}`
|
||||
: '存储未连接'}
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex gap-4 text-xs">
|
||||
<div className="flex items-center gap-1.5 text-gray-600 dark:text-gray-300">
|
||||
<CheckCircle className="w-3.5 h-3.5 text-green-500" />
|
||||
<span>SQLite + FTS5</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5 text-gray-600 dark:text-gray-300">
|
||||
<CheckCircle className="w-3.5 h-3.5 text-green-500" />
|
||||
<span>TF-IDF 语义评分</span>
|
||||
</div>
|
||||
{memoryCount !== null && (
|
||||
<div className="flex items-center gap-1.5 text-gray-600 dark:text-gray-300">
|
||||
<CheckCircle className="w-3.5 h-3.5 text-green-500" />
|
||||
<span>{memoryCount} 条记忆</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{!status?.available && (
|
||||
<button
|
||||
onClick={loadStatus}
|
||||
disabled={isLoading}
|
||||
className="ml-auto text-xs text-amber-600 dark:text-amber-400 hover:text-amber-700 dark:hover:text-amber-300 flex items-center gap-1 disabled:opacity-50"
|
||||
>
|
||||
<RefreshCw className={`w-3 h-3 ${isLoading ? 'animate-spin' : ''}`} /> 重新连接
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
<div className="flex gap-4 text-xs">
|
||||
<div className="flex items-center gap-1.5 text-gray-600 dark:text-gray-300">
|
||||
{status?.available ? (
|
||||
<CheckCircle className="w-3.5 h-3.5 text-green-500" />
|
||||
) : (
|
||||
<AlertCircle className="w-3.5 h-3.5 text-amber-500" />
|
||||
)}
|
||||
<span>SQLite + FTS5</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5 text-gray-600 dark:text-gray-300">
|
||||
{status?.available ? (
|
||||
<CheckCircle className="w-3.5 h-3.5 text-green-500" />
|
||||
) : (
|
||||
<AlertCircle className="w-3.5 h-3.5 text-amber-500" />
|
||||
)}
|
||||
<span>TF-IDF 语义评分</span>
|
||||
</div>
|
||||
{memoryCount !== null && (
|
||||
<div className="flex items-center gap-1.5 text-gray-600 dark:text-gray-300">
|
||||
<CheckCircle className="w-3.5 h-3.5 text-green-500" />
|
||||
<span>{memoryCount} 条记忆</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Search Box */}
|
||||
{status?.available && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-3">语义搜索</h3>
|
||||
<div className="flex gap-2">
|
||||
<input
|
||||
type="text"
|
||||
value={searchQuery}
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
|
||||
placeholder="输入自然语言查询..."
|
||||
className="flex-1 px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
<button
|
||||
onClick={handleSearch}
|
||||
disabled={isSearching || !searchQuery.trim()}
|
||||
className="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 disabled:opacity-50 flex items-center gap-2 text-sm"
|
||||
>
|
||||
{isSearching ? (
|
||||
<RefreshCw className="w-4 h-4 animate-spin" />
|
||||
) : (
|
||||
<Search className="w-4 h-4" />
|
||||
)}
|
||||
搜索
|
||||
</button>
|
||||
</div>
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-3">语义搜索</h3>
|
||||
{!status?.available && (
|
||||
<p className="text-xs text-amber-600 dark:text-amber-400 mb-2 flex items-center gap-1">
|
||||
<AlertCircle className="w-3 h-3" /> 存储未连接,搜索功能不可用
|
||||
</p>
|
||||
)}
|
||||
<div className="flex gap-2">
|
||||
<input
|
||||
type="text"
|
||||
value={searchQuery}
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
|
||||
placeholder="输入自然语言查询..."
|
||||
disabled={!status?.available}
|
||||
className="flex-1 px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
/>
|
||||
<button
|
||||
onClick={handleSearch}
|
||||
disabled={isSearching || !searchQuery.trim() || !status?.available}
|
||||
className="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 disabled:opacity-50 flex items-center gap-2 text-sm"
|
||||
>
|
||||
{isSearching ? (
|
||||
<RefreshCw className="w-4 h-4 animate-spin" />
|
||||
) : (
|
||||
<Search className="w-4 h-4" />
|
||||
)}
|
||||
搜索
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Search Results */}
|
||||
{searchResults.length > 0 && (
|
||||
@@ -385,59 +406,64 @@ export function VikingPanel() {
|
||||
)}
|
||||
|
||||
{/* Summary Generation */}
|
||||
{status?.available && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-3">智能摘要</h3>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mb-3">
|
||||
存储资源并自动通过 LLM 生成 L0/L1 多级摘要(需配置摘要驱动)
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 mb-6 shadow-sm">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-white mb-3">智能摘要</h3>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mb-3">
|
||||
存储资源并自动通过 LLM 生成 L0/L1 多级摘要(需配置摘要驱动)
|
||||
</p>
|
||||
{!status?.available && (
|
||||
<p className="text-xs text-amber-600 dark:text-amber-400 mb-2 flex items-center gap-1">
|
||||
<AlertCircle className="w-3 h-3" /> 存储未连接,摘要功能不可用
|
||||
</p>
|
||||
<div className="space-y-2">
|
||||
<input
|
||||
type="text"
|
||||
value={summaryUri}
|
||||
onChange={(e) => setSummaryUri(e.target.value)}
|
||||
placeholder="资源 URI (如: notes/project-plan)"
|
||||
className="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
/>
|
||||
<textarea
|
||||
value={summaryContent}
|
||||
onChange={(e) => setSummaryContent(e.target.value)}
|
||||
placeholder="资源内容..."
|
||||
rows={3}
|
||||
className="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent resize-none"
|
||||
/>
|
||||
<button
|
||||
onClick={async () => {
|
||||
if (!summaryUri.trim() || !summaryContent.trim()) return;
|
||||
setIsGeneratingSummary(true);
|
||||
setMessage(null);
|
||||
try {
|
||||
await storeWithSummaries(summaryUri, summaryContent);
|
||||
setMessage({ type: 'success', text: `摘要生成完成: ${summaryUri}` });
|
||||
setSummaryUri('');
|
||||
setSummaryContent('');
|
||||
} catch (error) {
|
||||
setMessage({
|
||||
type: 'error',
|
||||
text: `摘要生成失败: ${error instanceof Error ? error.message : '未知错误'}`,
|
||||
});
|
||||
} finally {
|
||||
setIsGeneratingSummary(false);
|
||||
}
|
||||
}}
|
||||
disabled={isGeneratingSummary || !summaryUri.trim() || !summaryContent.trim()}
|
||||
className="px-4 py-2 bg-indigo-600 text-white rounded-lg hover:bg-indigo-700 disabled:opacity-50 flex items-center gap-2 text-sm"
|
||||
>
|
||||
{isGeneratingSummary ? (
|
||||
<RefreshCw className="w-4 h-4 animate-spin" />
|
||||
) : (
|
||||
<Sparkles className="w-4 h-4" />
|
||||
)}
|
||||
生成摘要并存储
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
<div className="space-y-2">
|
||||
<input
|
||||
type="text"
|
||||
value={summaryUri}
|
||||
onChange={(e) => setSummaryUri(e.target.value)}
|
||||
placeholder="资源 URI (如: notes/project-plan)"
|
||||
disabled={!status?.available}
|
||||
className="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
/>
|
||||
<textarea
|
||||
value={summaryContent}
|
||||
onChange={(e) => setSummaryContent(e.target.value)}
|
||||
placeholder="资源内容..."
|
||||
rows={3}
|
||||
disabled={!status?.available}
|
||||
className="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-lg bg-white dark:bg-gray-900 text-gray-900 dark:text-white text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent resize-none disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
/>
|
||||
<button
|
||||
onClick={async () => {
|
||||
if (!summaryUri.trim() || !summaryContent.trim()) return;
|
||||
setIsGeneratingSummary(true);
|
||||
setMessage(null);
|
||||
try {
|
||||
await storeWithSummaries(summaryUri, summaryContent);
|
||||
setMessage({ type: 'success', text: `摘要生成完成: ${summaryUri}` });
|
||||
setSummaryUri('');
|
||||
setSummaryContent('');
|
||||
} catch (error) {
|
||||
setMessage({
|
||||
type: 'error',
|
||||
text: `摘要生成失败: ${error instanceof Error ? error.message : '未知错误'}`,
|
||||
});
|
||||
} finally {
|
||||
setIsGeneratingSummary(false);
|
||||
}
|
||||
}}
|
||||
disabled={isGeneratingSummary || !summaryUri.trim() || !summaryContent.trim() || !status?.available}
|
||||
className="px-4 py-2 bg-indigo-600 text-white rounded-lg hover:bg-indigo-700 disabled:opacity-50 flex items-center gap-2 text-sm"
|
||||
>
|
||||
{isGeneratingSummary ? (
|
||||
<RefreshCw className="w-4 h-4 animate-spin" />
|
||||
) : (
|
||||
<Sparkles className="w-4 h-4" />
|
||||
)}
|
||||
生成摘要并存储
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Info Section */}
|
||||
<div className="mt-6 p-4 bg-gray-50 dark:bg-gray-800/50 rounded-lg border border-gray-200 dark:border-gray-700">
|
||||
|
||||
@@ -87,10 +87,14 @@ export function ArtifactPanel({
|
||||
<div className={`h-full flex flex-col ${className}`}>
|
||||
<div className="p-4 flex-1 overflow-y-auto custom-scrollbar">
|
||||
{artifacts.length === 0 ? (
|
||||
<div className="flex flex-col items-center justify-center h-full text-gray-400 dark:text-gray-500">
|
||||
<FileText className="w-8 h-8 mb-2 opacity-50" />
|
||||
<p className="text-sm">暂无产物文件</p>
|
||||
<p className="text-xs mt-1">Agent 生成文件后将在此显示</p>
|
||||
<div className="flex flex-col items-center justify-center h-full text-gray-400 dark:text-gray-500 gap-3">
|
||||
<div className="w-14 h-14 rounded-2xl bg-gray-100 dark:bg-gray-800 flex items-center justify-center">
|
||||
<FileText className="w-7 h-7 opacity-40" />
|
||||
</div>
|
||||
<div className="text-center">
|
||||
<p className="text-sm font-medium">暂无产物文件</p>
|
||||
<p className="text-xs mt-1.5 text-gray-400 dark:text-gray-500">Agent 生成代码、文档等文件后<br />将在此处显示,可实时预览</p>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className="space-y-2">
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { useState, useRef, useEffect } from 'react';
|
||||
import { ChevronDown, Check } from 'lucide-react';
|
||||
import { ChevronDown, Check, AlertTriangle } from 'lucide-react';
|
||||
import { motion, AnimatePresence } from 'framer-motion';
|
||||
|
||||
/**
|
||||
@@ -15,6 +15,8 @@ interface ModelOption {
|
||||
id: string;
|
||||
name: string;
|
||||
provider?: string;
|
||||
/** If false, this model has no API key configured or previously failed */
|
||||
available?: boolean;
|
||||
}
|
||||
|
||||
interface ModelSelectorProps {
|
||||
@@ -99,7 +101,9 @@ export function ModelSelector({
|
||||
{/* Model list */}
|
||||
<div className="max-h-48 overflow-y-auto py-1" role="listbox">
|
||||
{filteredModels.length > 0 ? (
|
||||
filteredModels.map(model => (
|
||||
filteredModels.map(model => {
|
||||
const unavailable = model.available === false;
|
||||
return (
|
||||
<button
|
||||
key={model.id}
|
||||
onClick={() => {
|
||||
@@ -113,21 +117,31 @@ export function ModelSelector({
|
||||
w-full text-left px-3 py-2 text-xs flex items-center justify-between gap-2 transition-colors
|
||||
${model.id === currentModel
|
||||
? 'text-orange-600 dark:text-orange-400 bg-orange-50 dark:bg-orange-900/20'
|
||||
: 'text-gray-700 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-700'
|
||||
: unavailable
|
||||
? 'text-gray-400 dark:text-gray-500 hover:bg-gray-50 dark:hover:bg-gray-700'
|
||||
: 'text-gray-700 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-700'
|
||||
}
|
||||
`}
|
||||
>
|
||||
<div className="flex flex-col min-w-0">
|
||||
<span className="truncate font-medium">{model.name}</span>
|
||||
{model.provider && (
|
||||
<span className="text-[10px] text-gray-400 dark:text-gray-500">{model.provider}</span>
|
||||
)}
|
||||
<div className="flex items-center gap-1">
|
||||
{model.provider && (
|
||||
<span className="text-[10px] text-gray-400 dark:text-gray-500">{model.provider}</span>
|
||||
)}
|
||||
{unavailable && (
|
||||
<span className="text-[10px] text-amber-500 flex items-center gap-0.5">
|
||||
<AlertTriangle className="w-2.5 h-2.5" />未配置
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{model.id === currentModel && (
|
||||
<Check className="w-3.5 h-3.5 flex-shrink-0" />
|
||||
)}
|
||||
</button>
|
||||
))
|
||||
);
|
||||
})
|
||||
) : (
|
||||
<div className="px-3 py-2 text-xs text-gray-400">无匹配模型</div>
|
||||
)}
|
||||
|
||||
@@ -140,7 +140,7 @@ export function PanelToggleButton({
|
||||
<button
|
||||
onClick={onToggle}
|
||||
className="p-1.5 rounded-md text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-200 hover:bg-gray-100 dark:hover:bg-gray-800 transition-colors"
|
||||
title={panelOpen ? '关闭侧面板' : '打开侧面板'}
|
||||
title={panelOpen ? '关闭侧面板' : '查看产物文件'}
|
||||
>
|
||||
{panelOpen
|
||||
? <PanelRightClose className="w-4 h-4" />
|
||||
|
||||
@@ -1190,10 +1190,10 @@ export const intelligenceClient = {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.updateMemoryStats', () =>
|
||||
invoke('heartbeat_update_memory_stats', {
|
||||
agent_id: agentId,
|
||||
task_count: taskCount,
|
||||
total_entries: totalEntries,
|
||||
storage_size_bytes: storageSizeBytes,
|
||||
agentId: agentId,
|
||||
taskCount: taskCount,
|
||||
totalEntries: totalEntries,
|
||||
storageSizeBytes: storageSizeBytes,
|
||||
})
|
||||
);
|
||||
} else {
|
||||
@@ -1212,8 +1212,8 @@ export const intelligenceClient = {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.recordCorrection', () =>
|
||||
invoke('heartbeat_record_correction', {
|
||||
agent_id: agentId,
|
||||
correction_type: correctionType,
|
||||
agentId: agentId,
|
||||
correctionType: correctionType,
|
||||
})
|
||||
);
|
||||
} else {
|
||||
@@ -1230,7 +1230,7 @@ export const intelligenceClient = {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.recordInteraction', () =>
|
||||
invoke('heartbeat_record_interaction', {
|
||||
agent_id: agentId,
|
||||
agentId: agentId,
|
||||
})
|
||||
);
|
||||
} else {
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - LocalStorage Compactor Fallback
|
||||
*
|
||||
* Provides rule-based compaction for browser/dev environment.
|
||||
*/
|
||||
|
||||
import type { CompactableMessage, CompactionResult, CompactionCheck, CompactionConfig } from '../intelligence-backend';
|
||||
|
||||
export const fallbackCompactor = {
|
||||
async estimateTokens(text: string): Promise<number> {
|
||||
// Simple heuristic: ~4 chars per token for English, ~1.5 for CJK
|
||||
const cjkChars = (text.match(/[\u4e00-\u9fff\u3040-\u30ff]/g) ?? []).length;
|
||||
const otherChars = text.length - cjkChars;
|
||||
return Math.ceil(cjkChars * 1.5 + otherChars / 4);
|
||||
},
|
||||
|
||||
async estimateMessagesTokens(messages: CompactableMessage[]): Promise<number> {
|
||||
let total = 0;
|
||||
for (const m of messages) {
|
||||
total += await fallbackCompactor.estimateTokens(m.content);
|
||||
}
|
||||
return total;
|
||||
},
|
||||
|
||||
async checkThreshold(
|
||||
messages: CompactableMessage[],
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionCheck> {
|
||||
const threshold = config?.soft_threshold_tokens ?? 15000;
|
||||
const currentTokens = await fallbackCompactor.estimateMessagesTokens(messages);
|
||||
|
||||
return {
|
||||
should_compact: currentTokens >= threshold,
|
||||
current_tokens: currentTokens,
|
||||
threshold,
|
||||
urgency: currentTokens >= (config?.hard_threshold_tokens ?? 20000) ? 'hard' :
|
||||
currentTokens >= threshold ? 'soft' : 'none',
|
||||
};
|
||||
},
|
||||
|
||||
async compact(
|
||||
messages: CompactableMessage[],
|
||||
_agentId: string,
|
||||
_conversationId?: string,
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionResult> {
|
||||
// Simple rule-based compaction: keep last N messages
|
||||
const keepRecent = config?.keep_recent_messages ?? 10;
|
||||
const retained = messages.slice(-keepRecent);
|
||||
|
||||
return {
|
||||
compacted_messages: retained,
|
||||
summary: `[Compacted ${messages.length - retained.length} earlier messages]`,
|
||||
original_count: messages.length,
|
||||
retained_count: retained.length,
|
||||
flushed_memories: 0,
|
||||
tokens_before_compaction: await fallbackCompactor.estimateMessagesTokens(messages),
|
||||
tokens_after_compaction: await fallbackCompactor.estimateMessagesTokens(retained),
|
||||
};
|
||||
},
|
||||
};
|
||||
@@ -1,54 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - LocalStorage Heartbeat Fallback
|
||||
*
|
||||
* Provides no-op heartbeat for browser/dev environment.
|
||||
*/
|
||||
|
||||
import type { HeartbeatConfig, HeartbeatResult } from '../intelligence-backend';
|
||||
|
||||
export const fallbackHeartbeat = {
|
||||
_configs: new Map<string, HeartbeatConfig>(),
|
||||
|
||||
async init(agentId: string, config?: HeartbeatConfig): Promise<void> {
|
||||
if (config) {
|
||||
fallbackHeartbeat._configs.set(agentId, config);
|
||||
}
|
||||
},
|
||||
|
||||
async start(_agentId: string): Promise<void> {
|
||||
// No-op for fallback (no background tasks in browser)
|
||||
},
|
||||
|
||||
async stop(_agentId: string): Promise<void> {
|
||||
// No-op
|
||||
},
|
||||
|
||||
async tick(_agentId: string): Promise<HeartbeatResult> {
|
||||
return {
|
||||
status: 'ok',
|
||||
alerts: [],
|
||||
checked_items: 0,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
},
|
||||
|
||||
async getConfig(agentId: string): Promise<HeartbeatConfig> {
|
||||
return fallbackHeartbeat._configs.get(agentId) ?? {
|
||||
enabled: false,
|
||||
interval_minutes: 30,
|
||||
quiet_hours_start: null,
|
||||
quiet_hours_end: null,
|
||||
notify_channel: 'ui',
|
||||
proactivity_level: 'standard',
|
||||
max_alerts_per_tick: 5,
|
||||
};
|
||||
},
|
||||
|
||||
async updateConfig(agentId: string, config: HeartbeatConfig): Promise<void> {
|
||||
fallbackHeartbeat._configs.set(agentId, config);
|
||||
},
|
||||
|
||||
async getHistory(_agentId: string, _limit?: number): Promise<HeartbeatResult[]> {
|
||||
return [];
|
||||
},
|
||||
};
|
||||
@@ -1,239 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - LocalStorage Identity Fallback
|
||||
*
|
||||
* Provides localStorage-based identity management for browser/dev environment.
|
||||
*/
|
||||
|
||||
import { createLogger } from '../logger';
|
||||
|
||||
import type { IdentityFiles, IdentityChangeProposal, IdentitySnapshot } from '../intelligence-backend';
|
||||
|
||||
const logger = createLogger('intelligence-client');
|
||||
|
||||
const IDENTITY_STORAGE_KEY = 'zclaw-fallback-identities';
|
||||
const PROPOSALS_STORAGE_KEY = 'zclaw-fallback-proposals';
|
||||
const SNAPSHOTS_STORAGE_KEY = 'zclaw-fallback-snapshots';
|
||||
|
||||
function loadIdentitiesFromStorage(): Map<string, IdentityFiles> {
|
||||
try {
|
||||
const stored = localStorage.getItem(IDENTITY_STORAGE_KEY);
|
||||
if (stored) {
|
||||
const parsed = JSON.parse(stored) as Record<string, IdentityFiles>;
|
||||
return new Map(Object.entries(parsed));
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to load identities from localStorage', { error: e });
|
||||
}
|
||||
return new Map();
|
||||
}
|
||||
|
||||
function saveIdentitiesToStorage(identities: Map<string, IdentityFiles>): void {
|
||||
try {
|
||||
const obj = Object.fromEntries(identities);
|
||||
localStorage.setItem(IDENTITY_STORAGE_KEY, JSON.stringify(obj));
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save identities to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
function loadProposalsFromStorage(): IdentityChangeProposal[] {
|
||||
try {
|
||||
const stored = localStorage.getItem(PROPOSALS_STORAGE_KEY);
|
||||
if (stored) {
|
||||
return JSON.parse(stored) as IdentityChangeProposal[];
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to load proposals from localStorage', { error: e });
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
function saveProposalsToStorage(proposals: IdentityChangeProposal[]): void {
|
||||
try {
|
||||
localStorage.setItem(PROPOSALS_STORAGE_KEY, JSON.stringify(proposals));
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save proposals to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
function loadSnapshotsFromStorage(): IdentitySnapshot[] {
|
||||
try {
|
||||
const stored = localStorage.getItem(SNAPSHOTS_STORAGE_KEY);
|
||||
if (stored) {
|
||||
return JSON.parse(stored) as IdentitySnapshot[];
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to load snapshots from localStorage', { error: e });
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
function saveSnapshotsToStorage(snapshots: IdentitySnapshot[]): void {
|
||||
try {
|
||||
localStorage.setItem(SNAPSHOTS_STORAGE_KEY, JSON.stringify(snapshots));
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save snapshots to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
// Module-level state initialized from localStorage
|
||||
const fallbackIdentities = loadIdentitiesFromStorage();
|
||||
const fallbackProposals = loadProposalsFromStorage();
|
||||
let fallbackSnapshots = loadSnapshotsFromStorage();
|
||||
|
||||
export const fallbackIdentity = {
|
||||
async get(agentId: string): Promise<IdentityFiles> {
|
||||
if (!fallbackIdentities.has(agentId)) {
|
||||
const defaults: IdentityFiles = {
|
||||
soul: '# Agent Soul\n\nA helpful AI assistant.',
|
||||
instructions: '# Instructions\n\nBe helpful and concise.',
|
||||
user_profile: '# User Profile\n\nNo profile yet.',
|
||||
};
|
||||
fallbackIdentities.set(agentId, defaults);
|
||||
saveIdentitiesToStorage(fallbackIdentities);
|
||||
}
|
||||
return fallbackIdentities.get(agentId)!;
|
||||
},
|
||||
|
||||
async getFile(agentId: string, file: string): Promise<string> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
return files[file as keyof IdentityFiles] ?? '';
|
||||
},
|
||||
|
||||
async buildPrompt(agentId: string, memoryContext?: string): Promise<string> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
let prompt = `${files.soul}\n\n## Instructions\n${files.instructions}\n\n## User Profile\n${files.user_profile}`;
|
||||
if (memoryContext) {
|
||||
prompt += `\n\n## Memory Context\n${memoryContext}`;
|
||||
}
|
||||
return prompt;
|
||||
},
|
||||
|
||||
async updateUserProfile(agentId: string, content: string): Promise<void> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
files.user_profile = content;
|
||||
fallbackIdentities.set(agentId, files);
|
||||
saveIdentitiesToStorage(fallbackIdentities);
|
||||
},
|
||||
|
||||
async appendUserProfile(agentId: string, addition: string): Promise<void> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
files.user_profile += `\n\n${addition}`;
|
||||
fallbackIdentities.set(agentId, files);
|
||||
saveIdentitiesToStorage(fallbackIdentities);
|
||||
},
|
||||
|
||||
async proposeChange(
|
||||
agentId: string,
|
||||
file: 'soul' | 'instructions',
|
||||
suggestedContent: string,
|
||||
reason: string
|
||||
): Promise<IdentityChangeProposal> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
const proposal: IdentityChangeProposal = {
|
||||
id: `prop_${Date.now()}`,
|
||||
agent_id: agentId,
|
||||
file,
|
||||
reason,
|
||||
current_content: files[file] ?? '',
|
||||
suggested_content: suggestedContent,
|
||||
status: 'pending',
|
||||
created_at: new Date().toISOString(),
|
||||
};
|
||||
fallbackProposals.push(proposal);
|
||||
saveProposalsToStorage(fallbackProposals);
|
||||
return proposal;
|
||||
},
|
||||
|
||||
async approveProposal(proposalId: string): Promise<IdentityFiles> {
|
||||
const proposal = fallbackProposals.find(p => p.id === proposalId);
|
||||
if (!proposal) throw new Error('Proposal not found');
|
||||
|
||||
const files = await fallbackIdentity.get(proposal.agent_id);
|
||||
|
||||
// Create snapshot before applying change
|
||||
const snapshot: IdentitySnapshot = {
|
||||
id: `snap_${Date.now()}`,
|
||||
agent_id: proposal.agent_id,
|
||||
files: { ...files },
|
||||
timestamp: new Date().toISOString(),
|
||||
reason: `Before applying: ${proposal.reason}`,
|
||||
};
|
||||
fallbackSnapshots.unshift(snapshot);
|
||||
// Keep only last 20 snapshots per agent
|
||||
const agentSnapshots = fallbackSnapshots.filter(s => s.agent_id === proposal.agent_id);
|
||||
if (agentSnapshots.length > 20) {
|
||||
const toRemove = agentSnapshots.slice(20);
|
||||
fallbackSnapshots = fallbackSnapshots.filter(s => !toRemove.includes(s));
|
||||
}
|
||||
saveSnapshotsToStorage(fallbackSnapshots);
|
||||
|
||||
proposal.status = 'approved';
|
||||
files[proposal.file] = proposal.suggested_content;
|
||||
fallbackIdentities.set(proposal.agent_id, files);
|
||||
saveIdentitiesToStorage(fallbackIdentities);
|
||||
saveProposalsToStorage(fallbackProposals);
|
||||
return files;
|
||||
},
|
||||
|
||||
async rejectProposal(proposalId: string): Promise<void> {
|
||||
const proposal = fallbackProposals.find(p => p.id === proposalId);
|
||||
if (proposal) {
|
||||
proposal.status = 'rejected';
|
||||
saveProposalsToStorage(fallbackProposals);
|
||||
}
|
||||
},
|
||||
|
||||
async getPendingProposals(agentId?: string): Promise<IdentityChangeProposal[]> {
|
||||
return fallbackProposals.filter(p =>
|
||||
p.status === 'pending' && (!agentId || p.agent_id === agentId)
|
||||
);
|
||||
},
|
||||
|
||||
async updateFile(agentId: string, file: string, content: string): Promise<void> {
|
||||
const files = await fallbackIdentity.get(agentId);
|
||||
if (file in files) {
|
||||
// IdentityFiles has known properties, update safely
|
||||
const key = file as keyof IdentityFiles;
|
||||
if (key in files) {
|
||||
files[key] = content;
|
||||
fallbackIdentities.set(agentId, files);
|
||||
saveIdentitiesToStorage(fallbackIdentities);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
async getSnapshots(agentId: string, limit?: number): Promise<IdentitySnapshot[]> {
|
||||
const agentSnapshots = fallbackSnapshots.filter(s => s.agent_id === agentId);
|
||||
return agentSnapshots.slice(0, limit ?? 10);
|
||||
},
|
||||
|
||||
async restoreSnapshot(agentId: string, snapshotId: string): Promise<void> {
|
||||
const snapshot = fallbackSnapshots.find(s => s.id === snapshotId && s.agent_id === agentId);
|
||||
if (!snapshot) throw new Error('Snapshot not found');
|
||||
|
||||
// Create a snapshot of current state before restore
|
||||
const currentFiles = await fallbackIdentity.get(agentId);
|
||||
const beforeRestoreSnapshot: IdentitySnapshot = {
|
||||
id: `snap_${Date.now()}`,
|
||||
agent_id: agentId,
|
||||
files: { ...currentFiles },
|
||||
timestamp: new Date().toISOString(),
|
||||
reason: 'Auto-backup before restore',
|
||||
};
|
||||
fallbackSnapshots.unshift(beforeRestoreSnapshot);
|
||||
saveSnapshotsToStorage(fallbackSnapshots);
|
||||
|
||||
// Restore the snapshot
|
||||
fallbackIdentities.set(agentId, { ...snapshot.files });
|
||||
saveIdentitiesToStorage(fallbackIdentities);
|
||||
},
|
||||
|
||||
async listAgents(): Promise<string[]> {
|
||||
return Array.from(fallbackIdentities.keys());
|
||||
},
|
||||
|
||||
async deleteAgent(agentId: string): Promise<void> {
|
||||
fallbackIdentities.delete(agentId);
|
||||
},
|
||||
};
|
||||
@@ -1,186 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - LocalStorage Memory Fallback
|
||||
*
|
||||
* Provides localStorage-based memory operations for browser/dev environment.
|
||||
*/
|
||||
|
||||
import { createLogger } from '../logger';
|
||||
import { generateRandomString } from '../crypto-utils';
|
||||
|
||||
import type { MemoryEntry, MemorySearchOptions, MemoryStats, MemoryType, MemorySource } from './types';
|
||||
|
||||
const logger = createLogger('intelligence-client');
|
||||
|
||||
import type { MemoryEntryInput } from '../intelligence-backend';
|
||||
|
||||
const FALLBACK_STORAGE_KEY = 'zclaw-intelligence-fallback';
|
||||
|
||||
interface FallbackMemoryStore {
|
||||
memories: MemoryEntry[];
|
||||
}
|
||||
|
||||
function getFallbackStore(): FallbackMemoryStore {
|
||||
try {
|
||||
const stored = localStorage.getItem(FALLBACK_STORAGE_KEY);
|
||||
if (stored) {
|
||||
return JSON.parse(stored);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.debug('Failed to read fallback store from localStorage', { error: e });
|
||||
}
|
||||
return { memories: [] };
|
||||
}
|
||||
|
||||
function saveFallbackStore(store: FallbackMemoryStore): void {
|
||||
try {
|
||||
localStorage.setItem(FALLBACK_STORAGE_KEY, JSON.stringify(store));
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save fallback store to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
export const fallbackMemory = {
|
||||
async init(): Promise<void> {
|
||||
// No-op for localStorage
|
||||
},
|
||||
|
||||
async store(entry: MemoryEntryInput): Promise<string> {
|
||||
const store = getFallbackStore();
|
||||
|
||||
// Content-based deduplication: update existing entry with same agentId + content
|
||||
const normalizedContent = entry.content.trim().toLowerCase();
|
||||
const existingIdx = store.memories.findIndex(
|
||||
m => m.agentId === entry.agent_id && m.content.trim().toLowerCase() === normalizedContent
|
||||
);
|
||||
|
||||
if (existingIdx >= 0) {
|
||||
// Update existing entry instead of creating duplicate
|
||||
const existing = store.memories[existingIdx];
|
||||
store.memories[existingIdx] = {
|
||||
...existing,
|
||||
importance: Math.max(existing.importance, entry.importance ?? 5),
|
||||
lastAccessedAt: new Date().toISOString(),
|
||||
accessCount: existing.accessCount + 1,
|
||||
tags: [...new Set([...existing.tags, ...(entry.tags ?? [])])],
|
||||
};
|
||||
saveFallbackStore(store);
|
||||
return existing.id;
|
||||
}
|
||||
|
||||
const id = `mem_${Date.now()}_${generateRandomString(6)}`;
|
||||
const now = new Date().toISOString();
|
||||
|
||||
const memory: MemoryEntry = {
|
||||
id,
|
||||
agentId: entry.agent_id,
|
||||
content: entry.content,
|
||||
type: entry.memory_type as MemoryType,
|
||||
importance: entry.importance ?? 5,
|
||||
source: (entry.source as MemorySource) ?? 'auto',
|
||||
tags: entry.tags ?? [],
|
||||
createdAt: now,
|
||||
lastAccessedAt: now,
|
||||
accessCount: 0,
|
||||
conversationId: entry.conversation_id,
|
||||
};
|
||||
|
||||
store.memories.push(memory);
|
||||
saveFallbackStore(store);
|
||||
return id;
|
||||
},
|
||||
|
||||
async get(id: string): Promise<MemoryEntry | null> {
|
||||
const store = getFallbackStore();
|
||||
return store.memories.find(m => m.id === id) ?? null;
|
||||
},
|
||||
|
||||
async search(options: MemorySearchOptions): Promise<MemoryEntry[]> {
|
||||
const store = getFallbackStore();
|
||||
let results = store.memories;
|
||||
|
||||
if (options.agentId) {
|
||||
results = results.filter(m => m.agentId === options.agentId);
|
||||
}
|
||||
if (options.type) {
|
||||
results = results.filter(m => m.type === options.type);
|
||||
}
|
||||
if (options.minImportance !== undefined) {
|
||||
results = results.filter(m => m.importance >= options.minImportance!);
|
||||
}
|
||||
if (options.query) {
|
||||
const queryLower = options.query.toLowerCase();
|
||||
results = results.filter(m =>
|
||||
m.content.toLowerCase().includes(queryLower) ||
|
||||
m.tags.some(t => t.toLowerCase().includes(queryLower))
|
||||
);
|
||||
}
|
||||
if (options.limit) {
|
||||
results = results.slice(0, options.limit);
|
||||
}
|
||||
|
||||
return results;
|
||||
},
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
const store = getFallbackStore();
|
||||
store.memories = store.memories.filter(m => m.id !== id);
|
||||
saveFallbackStore(store);
|
||||
},
|
||||
|
||||
async deleteAll(agentId: string): Promise<number> {
|
||||
const store = getFallbackStore();
|
||||
const before = store.memories.length;
|
||||
store.memories = store.memories.filter(m => m.agentId !== agentId);
|
||||
saveFallbackStore(store);
|
||||
return before - store.memories.length;
|
||||
},
|
||||
|
||||
async stats(): Promise<MemoryStats> {
|
||||
const store = getFallbackStore();
|
||||
const byType: Record<string, number> = {};
|
||||
const byAgent: Record<string, number> = {};
|
||||
|
||||
for (const m of store.memories) {
|
||||
byType[m.type] = (byType[m.type] ?? 0) + 1;
|
||||
byAgent[m.agentId] = (byAgent[m.agentId] ?? 0) + 1;
|
||||
}
|
||||
|
||||
const sorted = [...store.memories].sort((a, b) =>
|
||||
new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
|
||||
);
|
||||
|
||||
// Estimate storage size from serialized data
|
||||
let storageSizeBytes = 0;
|
||||
try {
|
||||
const serialized = JSON.stringify(store.memories);
|
||||
storageSizeBytes = new Blob([serialized]).size;
|
||||
} catch (e) {
|
||||
logger.debug('Failed to estimate storage size', { error: e });
|
||||
}
|
||||
|
||||
return {
|
||||
totalEntries: store.memories.length,
|
||||
byType,
|
||||
byAgent,
|
||||
oldestEntry: sorted[0]?.createdAt ?? null,
|
||||
newestEntry: sorted[sorted.length - 1]?.createdAt ?? null,
|
||||
storageSizeBytes,
|
||||
};
|
||||
},
|
||||
|
||||
async export(): Promise<MemoryEntry[]> {
|
||||
const store = getFallbackStore();
|
||||
return store.memories;
|
||||
},
|
||||
|
||||
async import(memories: MemoryEntry[]): Promise<number> {
|
||||
const store = getFallbackStore();
|
||||
store.memories.push(...memories);
|
||||
saveFallbackStore(store);
|
||||
return memories.length;
|
||||
},
|
||||
|
||||
async dbPath(): Promise<string> {
|
||||
return 'localStorage://zclaw-intelligence-fallback';
|
||||
},
|
||||
};
|
||||
@@ -1,167 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - LocalStorage Reflection Fallback
|
||||
*
|
||||
* Provides rule-based reflection for browser/dev environment.
|
||||
*/
|
||||
|
||||
import type {
|
||||
ReflectionResult,
|
||||
ReflectionState,
|
||||
ReflectionConfig,
|
||||
PatternObservation,
|
||||
ImprovementSuggestion,
|
||||
ReflectionIdentityProposal,
|
||||
MemoryEntryForAnalysis,
|
||||
} from '../intelligence-backend';
|
||||
|
||||
export const fallbackReflection = {
|
||||
_conversationCount: 0,
|
||||
_lastReflection: null as string | null,
|
||||
_history: [] as ReflectionResult[],
|
||||
|
||||
async init(_config?: ReflectionConfig): Promise<void> {
|
||||
// No-op
|
||||
},
|
||||
|
||||
async recordConversation(): Promise<void> {
|
||||
fallbackReflection._conversationCount++;
|
||||
},
|
||||
|
||||
async shouldReflect(): Promise<boolean> {
|
||||
return fallbackReflection._conversationCount >= 5;
|
||||
},
|
||||
|
||||
async reflect(agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> {
|
||||
fallbackReflection._conversationCount = 0;
|
||||
fallbackReflection._lastReflection = new Date().toISOString();
|
||||
|
||||
// Analyze patterns (simple rule-based implementation)
|
||||
const patterns: PatternObservation[] = [];
|
||||
const improvements: ImprovementSuggestion[] = [];
|
||||
const identityProposals: ReflectionIdentityProposal[] = [];
|
||||
|
||||
// Count memory types
|
||||
const typeCounts: Record<string, number> = {};
|
||||
for (const m of memories) {
|
||||
typeCounts[m.memory_type] = (typeCounts[m.memory_type] || 0) + 1;
|
||||
}
|
||||
|
||||
// Pattern: Too many tasks
|
||||
const taskCount = typeCounts['task'] || 0;
|
||||
if (taskCount >= 5) {
|
||||
const taskMemories = memories.filter(m => m.memory_type === 'task').slice(0, 3);
|
||||
patterns.push({
|
||||
observation: `积累了 ${taskCount} 个待办任务,可能存在任务管理不善`,
|
||||
frequency: taskCount,
|
||||
sentiment: 'negative',
|
||||
evidence: taskMemories.map(m => m.content),
|
||||
});
|
||||
improvements.push({
|
||||
area: '任务管理',
|
||||
suggestion: '清理已完成的任务记忆,对长期未处理的任务降低重要性',
|
||||
priority: 'high',
|
||||
});
|
||||
}
|
||||
|
||||
// Pattern: Strong preference accumulation
|
||||
const prefCount = typeCounts['preference'] || 0;
|
||||
if (prefCount >= 5) {
|
||||
const prefMemories = memories.filter(m => m.memory_type === 'preference').slice(0, 3);
|
||||
patterns.push({
|
||||
observation: `已记录 ${prefCount} 个用户偏好,对用户习惯有较好理解`,
|
||||
frequency: prefCount,
|
||||
sentiment: 'positive',
|
||||
evidence: prefMemories.map(m => m.content),
|
||||
});
|
||||
}
|
||||
|
||||
// Pattern: Lessons learned
|
||||
const lessonCount = typeCounts['lesson'] || 0;
|
||||
if (lessonCount >= 5) {
|
||||
patterns.push({
|
||||
observation: `积累了 ${lessonCount} 条经验教训,知识库在成长`,
|
||||
frequency: lessonCount,
|
||||
sentiment: 'positive',
|
||||
evidence: memories.filter(m => m.memory_type === 'lesson').slice(0, 3).map(m => m.content),
|
||||
});
|
||||
}
|
||||
|
||||
// Pattern: High-access important memories
|
||||
const highAccessMemories = memories.filter(m => m.access_count >= 5 && m.importance >= 7);
|
||||
if (highAccessMemories.length >= 3) {
|
||||
patterns.push({
|
||||
observation: `有 ${highAccessMemories.length} 条高频访问的重要记忆,核心知识正在形成`,
|
||||
frequency: highAccessMemories.length,
|
||||
sentiment: 'positive',
|
||||
evidence: highAccessMemories.slice(0, 3).map(m => m.content),
|
||||
});
|
||||
}
|
||||
|
||||
// Pattern: Low importance memories accumulating
|
||||
const lowImportanceCount = memories.filter(m => m.importance <= 3).length;
|
||||
if (lowImportanceCount > 20) {
|
||||
patterns.push({
|
||||
observation: `有 ${lowImportanceCount} 条低重要性记忆,建议清理`,
|
||||
frequency: lowImportanceCount,
|
||||
sentiment: 'neutral',
|
||||
evidence: [],
|
||||
});
|
||||
improvements.push({
|
||||
area: '记忆管理',
|
||||
suggestion: '执行记忆清理,移除30天以上未访问且重要性低于3的记忆',
|
||||
priority: 'medium',
|
||||
});
|
||||
}
|
||||
|
||||
// Generate identity proposal if negative patterns exist
|
||||
const negativePatterns = patterns.filter(p => p.sentiment === 'negative');
|
||||
if (negativePatterns.length >= 2) {
|
||||
const additions = negativePatterns.map(p => `- 注意: ${p.observation}`).join('\n');
|
||||
identityProposals.push({
|
||||
agent_id: agentId,
|
||||
field: 'instructions',
|
||||
current_value: '...',
|
||||
proposed_value: `\n\n## 自我反思改进\n${additions}`,
|
||||
reason: `基于 ${negativePatterns.length} 个负面模式观察,建议在指令中增加自我改进提醒`,
|
||||
});
|
||||
}
|
||||
|
||||
// Suggestion: User profile enrichment
|
||||
if (prefCount < 3) {
|
||||
improvements.push({
|
||||
area: '用户理解',
|
||||
suggestion: '主动在对话中了解用户偏好(沟通风格、技术栈、工作习惯),丰富用户画像',
|
||||
priority: 'medium',
|
||||
});
|
||||
}
|
||||
|
||||
const result: ReflectionResult = {
|
||||
patterns,
|
||||
improvements,
|
||||
identity_proposals: identityProposals,
|
||||
new_memories: patterns.filter(p => p.frequency >= 3).length + improvements.filter(i => i.priority === 'high').length,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Store in history
|
||||
fallbackReflection._history.push(result);
|
||||
if (fallbackReflection._history.length > 20) {
|
||||
fallbackReflection._history = fallbackReflection._history.slice(-10);
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
async getHistory(limit?: number, _agentId?: string): Promise<ReflectionResult[]> {
|
||||
const l = limit ?? 10;
|
||||
return fallbackReflection._history.slice(-l).reverse();
|
||||
},
|
||||
|
||||
async getState(): Promise<ReflectionState> {
|
||||
return {
|
||||
conversations_since_reflection: fallbackReflection._conversationCount,
|
||||
last_reflection_time: fallbackReflection._lastReflection,
|
||||
last_reflection_agent_id: null,
|
||||
};
|
||||
},
|
||||
};
|
||||
@@ -1,72 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - Barrel Re-export
|
||||
*
|
||||
* Re-exports everything from sub-modules to maintain backward compatibility.
|
||||
* Existing imports like `import { intelligenceClient } from './intelligence-client'`
|
||||
* continue to work unchanged because TypeScript resolves directory imports
|
||||
* through this index.ts file.
|
||||
*/
|
||||
|
||||
// Types
|
||||
export type {
|
||||
MemoryType,
|
||||
MemorySource,
|
||||
MemoryEntry,
|
||||
MemorySearchOptions,
|
||||
MemoryStats,
|
||||
BehaviorPattern,
|
||||
PatternTypeVariant,
|
||||
PatternContext,
|
||||
WorkflowRecommendation,
|
||||
MeshConfig,
|
||||
MeshAnalysisResult,
|
||||
ActivityType,
|
||||
EvolutionChangeType,
|
||||
InsightCategory,
|
||||
IdentityFileType,
|
||||
ProposalStatus,
|
||||
EvolutionProposal,
|
||||
ProfileUpdate,
|
||||
EvolutionInsight,
|
||||
EvolutionResult,
|
||||
PersonaEvolverConfig,
|
||||
PersonaEvolverState,
|
||||
} from './types';
|
||||
|
||||
export {
|
||||
getPatternTypeString,
|
||||
} from './types';
|
||||
|
||||
// Re-exported types from intelligence-backend
|
||||
export type {
|
||||
HeartbeatConfig,
|
||||
HeartbeatResult,
|
||||
HeartbeatAlert,
|
||||
CompactableMessage,
|
||||
CompactionResult,
|
||||
CompactionCheck,
|
||||
CompactionConfig,
|
||||
PatternObservation,
|
||||
ImprovementSuggestion,
|
||||
ReflectionResult,
|
||||
ReflectionState,
|
||||
ReflectionConfig,
|
||||
ReflectionIdentityProposal,
|
||||
IdentityFiles,
|
||||
IdentityChangeProposal,
|
||||
IdentitySnapshot,
|
||||
MemoryEntryForAnalysis,
|
||||
} from './types';
|
||||
|
||||
// Type conversion utilities
|
||||
export {
|
||||
toFrontendMemory,
|
||||
toBackendMemoryInput,
|
||||
toBackendSearchOptions,
|
||||
toFrontendStats,
|
||||
parseTags,
|
||||
} from './type-conversions';
|
||||
|
||||
// Unified client
|
||||
export { intelligenceClient } from './unified-client';
|
||||
export { intelligenceClient as default } from './unified-client';
|
||||
@@ -1,101 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - Type Conversion Utilities
|
||||
*
|
||||
* Functions for converting between frontend and backend data formats.
|
||||
*/
|
||||
|
||||
import { intelligence } from '../intelligence-backend';
|
||||
import type {
|
||||
MemoryEntryInput,
|
||||
PersistentMemory,
|
||||
MemorySearchOptions as BackendSearchOptions,
|
||||
MemoryStats as BackendMemoryStats,
|
||||
} from '../intelligence-backend';
|
||||
|
||||
import { createLogger } from '../logger';
|
||||
|
||||
import type { MemoryEntry, MemorySearchOptions, MemoryStats, MemoryType, MemorySource } from './types';
|
||||
|
||||
const logger = createLogger('intelligence-client');
|
||||
|
||||
// Re-import intelligence for use in conversions (already imported above but
|
||||
// the `intelligence` binding is needed by unified-client.ts indirectly).
|
||||
|
||||
export { intelligence };
|
||||
export type { MemoryEntryInput, PersistentMemory, BackendSearchOptions, BackendMemoryStats };
|
||||
|
||||
/**
|
||||
* Convert backend PersistentMemory to frontend MemoryEntry format
|
||||
*/
|
||||
export function toFrontendMemory(backend: PersistentMemory): MemoryEntry {
|
||||
return {
|
||||
id: backend.id,
|
||||
agentId: backend.agent_id,
|
||||
content: backend.content,
|
||||
type: backend.memory_type as MemoryType,
|
||||
importance: backend.importance,
|
||||
source: backend.source as MemorySource,
|
||||
tags: parseTags(backend.tags),
|
||||
createdAt: backend.created_at,
|
||||
lastAccessedAt: backend.last_accessed_at,
|
||||
accessCount: backend.access_count,
|
||||
conversationId: backend.conversation_id ?? undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert frontend MemoryEntry to backend MemoryEntryInput format
|
||||
*/
|
||||
export function toBackendMemoryInput(entry: Omit<MemoryEntry, 'id' | 'createdAt' | 'lastAccessedAt' | 'accessCount'>): MemoryEntryInput {
|
||||
return {
|
||||
agent_id: entry.agentId,
|
||||
memory_type: entry.type,
|
||||
content: entry.content,
|
||||
importance: entry.importance,
|
||||
source: entry.source,
|
||||
tags: entry.tags,
|
||||
conversation_id: entry.conversationId,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert frontend search options to backend format
|
||||
*/
|
||||
export function toBackendSearchOptions(options: MemorySearchOptions): BackendSearchOptions {
|
||||
return {
|
||||
agent_id: options.agentId,
|
||||
memory_type: options.type,
|
||||
tags: options.tags,
|
||||
query: options.query,
|
||||
limit: options.limit,
|
||||
min_importance: options.minImportance,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert backend stats to frontend format
|
||||
*/
|
||||
export function toFrontendStats(backend: BackendMemoryStats): MemoryStats {
|
||||
return {
|
||||
totalEntries: backend.total_entries,
|
||||
byType: backend.by_type,
|
||||
byAgent: backend.by_agent,
|
||||
oldestEntry: backend.oldest_entry,
|
||||
newestEntry: backend.newest_entry,
|
||||
storageSizeBytes: backend.storage_size_bytes ?? 0,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse tags from backend (JSON string or array)
|
||||
*/
|
||||
export function parseTags(tags: string | string[]): string[] {
|
||||
if (Array.isArray(tags)) return tags;
|
||||
if (!tags) return [];
|
||||
try {
|
||||
return JSON.parse(tags);
|
||||
} catch (e) {
|
||||
logger.debug('JSON parse failed for tags, using fallback', { error: e });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
@@ -1,199 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer - Type Definitions
|
||||
*
|
||||
* All frontend types, mesh types, persona evolver types,
|
||||
* and re-exports from intelligence-backend.
|
||||
*/
|
||||
|
||||
// === Re-export types from intelligence-backend ===
|
||||
|
||||
export type {
|
||||
HeartbeatConfig,
|
||||
HeartbeatResult,
|
||||
HeartbeatAlert,
|
||||
CompactableMessage,
|
||||
CompactionResult,
|
||||
CompactionCheck,
|
||||
CompactionConfig,
|
||||
PatternObservation,
|
||||
ImprovementSuggestion,
|
||||
ReflectionResult,
|
||||
ReflectionState,
|
||||
ReflectionConfig,
|
||||
ReflectionIdentityProposal,
|
||||
IdentityFiles,
|
||||
IdentityChangeProposal,
|
||||
IdentitySnapshot,
|
||||
MemoryEntryForAnalysis,
|
||||
} from '../intelligence-backend';
|
||||
|
||||
// === Frontend Types (for backward compatibility) ===
|
||||
|
||||
export type MemoryType = 'fact' | 'preference' | 'lesson' | 'context' | 'task';
|
||||
export type MemorySource = 'auto' | 'user' | 'reflection' | 'llm-reflection';
|
||||
|
||||
export interface MemoryEntry {
|
||||
id: string;
|
||||
agentId: string;
|
||||
content: string;
|
||||
type: MemoryType;
|
||||
importance: number;
|
||||
source: MemorySource;
|
||||
tags: string[];
|
||||
createdAt: string;
|
||||
lastAccessedAt: string;
|
||||
accessCount: number;
|
||||
conversationId?: string;
|
||||
}
|
||||
|
||||
export interface MemorySearchOptions {
|
||||
agentId?: string;
|
||||
type?: MemoryType;
|
||||
types?: MemoryType[];
|
||||
tags?: string[];
|
||||
query?: string;
|
||||
limit?: number;
|
||||
minImportance?: number;
|
||||
}
|
||||
|
||||
export interface MemoryStats {
|
||||
totalEntries: number;
|
||||
byType: Record<string, number>;
|
||||
byAgent: Record<string, number>;
|
||||
oldestEntry: string | null;
|
||||
newestEntry: string | null;
|
||||
storageSizeBytes: number;
|
||||
}
|
||||
|
||||
// === Mesh Types ===
|
||||
|
||||
export type PatternTypeVariant =
|
||||
| { type: 'SkillCombination'; skill_ids: string[] }
|
||||
| { type: 'TemporalTrigger'; hand_id: string; time_pattern: string }
|
||||
| { type: 'TaskPipelineMapping'; task_type: string; pipeline_id: string }
|
||||
| { type: 'InputPattern'; keywords: string[]; intent: string };
|
||||
|
||||
export interface BehaviorPattern {
|
||||
id: string;
|
||||
pattern_type: PatternTypeVariant;
|
||||
frequency: number;
|
||||
last_occurrence: string;
|
||||
first_occurrence: string;
|
||||
confidence: number;
|
||||
context: PatternContext;
|
||||
}
|
||||
|
||||
export function getPatternTypeString(patternType: PatternTypeVariant): string {
|
||||
if (typeof patternType === 'string') {
|
||||
return patternType;
|
||||
}
|
||||
return patternType.type;
|
||||
}
|
||||
|
||||
export interface PatternContext {
|
||||
skill_ids?: string[];
|
||||
recent_topics?: string[];
|
||||
intent?: string;
|
||||
time_of_day?: number;
|
||||
day_of_week?: number;
|
||||
}
|
||||
|
||||
export interface WorkflowRecommendation {
|
||||
id: string;
|
||||
pipeline_id: string;
|
||||
confidence: number;
|
||||
reason: string;
|
||||
suggested_inputs: Record<string, unknown>;
|
||||
patterns_matched: string[];
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
export interface MeshConfig {
|
||||
enabled: boolean;
|
||||
min_confidence: number;
|
||||
max_recommendations: number;
|
||||
analysis_window_hours: number;
|
||||
}
|
||||
|
||||
export interface MeshAnalysisResult {
|
||||
recommendations: WorkflowRecommendation[];
|
||||
patterns_detected: number;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
export type ActivityType =
|
||||
| { type: 'skill_used'; skill_ids: string[] }
|
||||
| { type: 'pipeline_executed'; task_type: string; pipeline_id: string }
|
||||
| { type: 'input_received'; keywords: string[]; intent: string };
|
||||
|
||||
// === Persona Evolver Types ===
|
||||
|
||||
export type EvolutionChangeType =
|
||||
| 'instruction_addition'
|
||||
| 'instruction_refinement'
|
||||
| 'trait_addition'
|
||||
| 'style_adjustment'
|
||||
| 'domain_expansion';
|
||||
|
||||
export type InsightCategory =
|
||||
| 'communication_style'
|
||||
| 'technical_expertise'
|
||||
| 'task_efficiency'
|
||||
| 'user_preference'
|
||||
| 'knowledge_gap';
|
||||
|
||||
export type IdentityFileType = 'soul' | 'instructions';
|
||||
export type ProposalStatus = 'pending' | 'approved' | 'rejected';
|
||||
|
||||
export interface EvolutionProposal {
|
||||
id: string;
|
||||
agent_id: string;
|
||||
target_file: IdentityFileType;
|
||||
change_type: EvolutionChangeType;
|
||||
reason: string;
|
||||
current_content: string;
|
||||
proposed_content: string;
|
||||
confidence: number;
|
||||
evidence: string[];
|
||||
status: ProposalStatus;
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
export interface ProfileUpdate {
|
||||
section: string;
|
||||
previous: string;
|
||||
updated: string;
|
||||
source: string;
|
||||
}
|
||||
|
||||
export interface EvolutionInsight {
|
||||
category: InsightCategory;
|
||||
observation: string;
|
||||
recommendation: string;
|
||||
confidence: number;
|
||||
}
|
||||
|
||||
export interface EvolutionResult {
|
||||
agent_id: string;
|
||||
timestamp: string;
|
||||
profile_updates: ProfileUpdate[];
|
||||
proposals: EvolutionProposal[];
|
||||
insights: EvolutionInsight[];
|
||||
evolved: boolean;
|
||||
}
|
||||
|
||||
export interface PersonaEvolverConfig {
|
||||
auto_profile_update: boolean;
|
||||
min_preferences_for_update: number;
|
||||
min_conversations_for_evolution: number;
|
||||
enable_instruction_refinement: boolean;
|
||||
enable_soul_evolution: boolean;
|
||||
max_proposals_per_cycle: number;
|
||||
}
|
||||
|
||||
export interface PersonaEvolverState {
|
||||
last_evolution: string | null;
|
||||
total_evolutions: number;
|
||||
pending_proposals: number;
|
||||
profile_enrichment_score: number;
|
||||
}
|
||||
@@ -1,561 +0,0 @@
|
||||
/**
|
||||
* Intelligence Layer Unified Client
|
||||
*
|
||||
* Provides a unified API for intelligence operations that:
|
||||
* - Uses Rust backend (via Tauri commands) when running in Tauri environment
|
||||
* - Falls back to localStorage-based implementation in browser/dev environment
|
||||
*
|
||||
* Degradation strategy:
|
||||
* - In Tauri mode: if a Tauri invoke fails, the error is logged and re-thrown.
|
||||
* The caller is responsible for handling the error. We do NOT silently fall
|
||||
* back to localStorage, because that would give users degraded functionality
|
||||
* (localStorage instead of SQLite, rule-based instead of LLM-based, no-op
|
||||
* instead of real execution) without any indication that something is wrong.
|
||||
* - In browser/dev mode: localStorage fallback is the intended behavior for
|
||||
* development and testing without a Tauri backend.
|
||||
*
|
||||
* This replaces direct usage of:
|
||||
* - agent-memory.ts
|
||||
* - heartbeat-engine.ts
|
||||
* - context-compactor.ts
|
||||
* - reflection-engine.ts
|
||||
* - agent-identity.ts
|
||||
*
|
||||
* Usage:
|
||||
* ```typescript
|
||||
* import { intelligenceClient, toFrontendMemory, toBackendMemoryInput } from './intelligence-client';
|
||||
*
|
||||
* // Store memory
|
||||
* const id = await intelligenceClient.memory.store({
|
||||
* agent_id: 'agent-1',
|
||||
* memory_type: 'fact',
|
||||
* content: 'User prefers concise responses',
|
||||
* importance: 7,
|
||||
* });
|
||||
*
|
||||
* // Search memories
|
||||
* const memories = await intelligenceClient.memory.search({
|
||||
* agent_id: 'agent-1',
|
||||
* query: 'user preference',
|
||||
* limit: 10,
|
||||
* });
|
||||
*
|
||||
* // Convert to frontend format if needed
|
||||
* const frontendMemories = memories.map(toFrontendMemory);
|
||||
* ```
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
|
||||
import { isTauriRuntime } from '../tauri-gateway';
|
||||
import { intelligence } from './type-conversions';
|
||||
import type { PersistentMemory } from '../intelligence-backend';
|
||||
import type {
|
||||
HeartbeatConfig,
|
||||
HeartbeatResult,
|
||||
CompactableMessage,
|
||||
CompactionResult,
|
||||
CompactionCheck,
|
||||
CompactionConfig,
|
||||
ReflectionConfig,
|
||||
ReflectionResult,
|
||||
ReflectionState,
|
||||
MemoryEntryForAnalysis,
|
||||
IdentityFiles,
|
||||
IdentityChangeProposal,
|
||||
IdentitySnapshot,
|
||||
} from '../intelligence-backend';
|
||||
|
||||
import type { MemoryEntry, MemorySearchOptions, MemoryStats } from './types';
|
||||
import { toFrontendMemory, toBackendSearchOptions, toFrontendStats } from './type-conversions';
|
||||
import { fallbackMemory } from './fallback-memory';
|
||||
import { fallbackCompactor } from './fallback-compactor';
|
||||
import { fallbackReflection } from './fallback-reflection';
|
||||
import { fallbackIdentity } from './fallback-identity';
|
||||
import { fallbackHeartbeat } from './fallback-heartbeat';
|
||||
|
||||
/**
|
||||
* Helper: wrap a Tauri invoke call so that failures are logged and re-thrown
|
||||
* instead of silently falling back to localStorage implementations.
|
||||
*/
|
||||
function tauriInvoke<T>(label: string, fn: () => Promise<T>): Promise<T> {
|
||||
return fn().catch((e: unknown) => {
|
||||
console.warn(`[IntelligenceClient] Tauri invoke failed (${label}):`, e);
|
||||
throw e;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Unified intelligence client that automatically selects backend or fallback.
|
||||
*
|
||||
* - In Tauri mode: calls Rust backend via invoke(). On failure, logs a warning
|
||||
* and re-throws -- does NOT fall back to localStorage.
|
||||
* - In browser/dev mode: uses localStorage-based fallback implementations.
|
||||
*/
|
||||
export const intelligenceClient = {
|
||||
memory: {
|
||||
init: async (): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('memory.init', () => intelligence.memory.init());
|
||||
} else {
|
||||
await fallbackMemory.init();
|
||||
}
|
||||
},
|
||||
|
||||
store: async (entry: import('../intelligence-backend').MemoryEntryInput): Promise<string> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('memory.store', () => intelligence.memory.store(entry));
|
||||
}
|
||||
return fallbackMemory.store(entry);
|
||||
},
|
||||
|
||||
get: async (id: string): Promise<MemoryEntry | null> => {
|
||||
if (isTauriRuntime()) {
|
||||
const result = await tauriInvoke('memory.get', () => intelligence.memory.get(id));
|
||||
return result ? toFrontendMemory(result) : null;
|
||||
}
|
||||
return fallbackMemory.get(id);
|
||||
},
|
||||
|
||||
search: async (options: MemorySearchOptions): Promise<MemoryEntry[]> => {
|
||||
if (isTauriRuntime()) {
|
||||
const results = await tauriInvoke('memory.search', () =>
|
||||
intelligence.memory.search(toBackendSearchOptions(options))
|
||||
);
|
||||
return results.map(toFrontendMemory);
|
||||
}
|
||||
return fallbackMemory.search(options);
|
||||
},
|
||||
|
||||
delete: async (id: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('memory.delete', () => intelligence.memory.delete(id));
|
||||
} else {
|
||||
await fallbackMemory.delete(id);
|
||||
}
|
||||
},
|
||||
|
||||
deleteAll: async (agentId: string): Promise<number> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('memory.deleteAll', () => intelligence.memory.deleteAll(agentId));
|
||||
}
|
||||
return fallbackMemory.deleteAll(agentId);
|
||||
},
|
||||
|
||||
stats: async (): Promise<MemoryStats> => {
|
||||
if (isTauriRuntime()) {
|
||||
const stats = await tauriInvoke('memory.stats', () => intelligence.memory.stats());
|
||||
return toFrontendStats(stats);
|
||||
}
|
||||
return fallbackMemory.stats();
|
||||
},
|
||||
|
||||
export: async (): Promise<MemoryEntry[]> => {
|
||||
if (isTauriRuntime()) {
|
||||
const results = await tauriInvoke('memory.export', () => intelligence.memory.export());
|
||||
return results.map(toFrontendMemory);
|
||||
}
|
||||
return fallbackMemory.export();
|
||||
},
|
||||
|
||||
import: async (memories: MemoryEntry[]): Promise<number> => {
|
||||
if (isTauriRuntime()) {
|
||||
const backendMemories = memories.map(m => ({
|
||||
...m,
|
||||
agent_id: m.agentId,
|
||||
memory_type: m.type,
|
||||
last_accessed_at: m.lastAccessedAt,
|
||||
created_at: m.createdAt,
|
||||
access_count: m.accessCount,
|
||||
conversation_id: m.conversationId ?? null,
|
||||
tags: JSON.stringify(m.tags),
|
||||
embedding: null,
|
||||
}));
|
||||
return tauriInvoke('memory.import', () =>
|
||||
intelligence.memory.import(backendMemories as PersistentMemory[])
|
||||
);
|
||||
}
|
||||
return fallbackMemory.import(memories);
|
||||
},
|
||||
|
||||
dbPath: async (): Promise<string> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('memory.dbPath', () => intelligence.memory.dbPath());
|
||||
}
|
||||
return fallbackMemory.dbPath();
|
||||
},
|
||||
|
||||
buildContext: async (
|
||||
agentId: string,
|
||||
query: string,
|
||||
maxTokens?: number,
|
||||
): Promise<{ systemPromptAddition: string; totalTokens: number; memoriesUsed: number }> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('memory.buildContext', () =>
|
||||
intelligence.memory.buildContext(agentId, query, maxTokens ?? null)
|
||||
);
|
||||
}
|
||||
// Browser/dev fallback: use basic search
|
||||
const memories = await fallbackMemory.search({
|
||||
agentId,
|
||||
query,
|
||||
limit: 8,
|
||||
minImportance: 3,
|
||||
});
|
||||
const addition = memories.length > 0
|
||||
? `## 相关记忆\n${memories.map(m => `- [${m.type}] ${m.content}`).join('\n')}`
|
||||
: '';
|
||||
return { systemPromptAddition: addition, totalTokens: 0, memoriesUsed: memories.length };
|
||||
},
|
||||
},
|
||||
|
||||
heartbeat: {
|
||||
init: async (agentId: string, config?: HeartbeatConfig): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.init', () => intelligence.heartbeat.init(agentId, config));
|
||||
} else {
|
||||
await fallbackHeartbeat.init(agentId, config);
|
||||
}
|
||||
},
|
||||
|
||||
start: async (agentId: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.start', () => intelligence.heartbeat.start(agentId));
|
||||
} else {
|
||||
await fallbackHeartbeat.start(agentId);
|
||||
}
|
||||
},
|
||||
|
||||
stop: async (agentId: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.stop', () => intelligence.heartbeat.stop(agentId));
|
||||
} else {
|
||||
await fallbackHeartbeat.stop(agentId);
|
||||
}
|
||||
},
|
||||
|
||||
tick: async (agentId: string): Promise<HeartbeatResult> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('heartbeat.tick', () => intelligence.heartbeat.tick(agentId));
|
||||
}
|
||||
return fallbackHeartbeat.tick(agentId);
|
||||
},
|
||||
|
||||
getConfig: async (agentId: string): Promise<HeartbeatConfig> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('heartbeat.getConfig', () => intelligence.heartbeat.getConfig(agentId));
|
||||
}
|
||||
return fallbackHeartbeat.getConfig(agentId);
|
||||
},
|
||||
|
||||
updateConfig: async (agentId: string, config: HeartbeatConfig): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.updateConfig', () =>
|
||||
intelligence.heartbeat.updateConfig(agentId, config)
|
||||
);
|
||||
} else {
|
||||
await fallbackHeartbeat.updateConfig(agentId, config);
|
||||
}
|
||||
},
|
||||
|
||||
getHistory: async (agentId: string, limit?: number): Promise<HeartbeatResult[]> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('heartbeat.getHistory', () =>
|
||||
intelligence.heartbeat.getHistory(agentId, limit)
|
||||
);
|
||||
}
|
||||
return fallbackHeartbeat.getHistory(agentId, limit);
|
||||
},
|
||||
|
||||
updateMemoryStats: async (
|
||||
agentId: string,
|
||||
taskCount: number,
|
||||
totalEntries: number,
|
||||
storageSizeBytes: number
|
||||
): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.updateMemoryStats', () =>
|
||||
invoke('heartbeat_update_memory_stats', {
|
||||
agent_id: agentId,
|
||||
task_count: taskCount,
|
||||
total_entries: totalEntries,
|
||||
storage_size_bytes: storageSizeBytes,
|
||||
})
|
||||
);
|
||||
} else {
|
||||
// Browser/dev fallback only
|
||||
const cache = {
|
||||
taskCount,
|
||||
totalEntries,
|
||||
storageSizeBytes,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
};
|
||||
localStorage.setItem(`zclaw-memory-stats-${agentId}`, JSON.stringify(cache));
|
||||
}
|
||||
},
|
||||
|
||||
recordCorrection: async (agentId: string, correctionType: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.recordCorrection', () =>
|
||||
invoke('heartbeat_record_correction', {
|
||||
agent_id: agentId,
|
||||
correction_type: correctionType,
|
||||
})
|
||||
);
|
||||
} else {
|
||||
// Browser/dev fallback only
|
||||
const key = `zclaw-corrections-${agentId}`;
|
||||
const stored = localStorage.getItem(key);
|
||||
const counters = stored ? JSON.parse(stored) : {};
|
||||
counters[correctionType] = (counters[correctionType] || 0) + 1;
|
||||
localStorage.setItem(key, JSON.stringify(counters));
|
||||
}
|
||||
},
|
||||
|
||||
recordInteraction: async (agentId: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('heartbeat.recordInteraction', () =>
|
||||
invoke('heartbeat_record_interaction', {
|
||||
agent_id: agentId,
|
||||
})
|
||||
);
|
||||
} else {
|
||||
// Browser/dev fallback only
|
||||
localStorage.setItem(`zclaw-last-interaction-${agentId}`, new Date().toISOString());
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
compactor: {
|
||||
estimateTokens: async (text: string): Promise<number> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('compactor.estimateTokens', () =>
|
||||
intelligence.compactor.estimateTokens(text)
|
||||
);
|
||||
}
|
||||
return fallbackCompactor.estimateTokens(text);
|
||||
},
|
||||
|
||||
estimateMessagesTokens: async (messages: CompactableMessage[]): Promise<number> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('compactor.estimateMessagesTokens', () =>
|
||||
intelligence.compactor.estimateMessagesTokens(messages)
|
||||
);
|
||||
}
|
||||
return fallbackCompactor.estimateMessagesTokens(messages);
|
||||
},
|
||||
|
||||
checkThreshold: async (
|
||||
messages: CompactableMessage[],
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionCheck> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('compactor.checkThreshold', () =>
|
||||
intelligence.compactor.checkThreshold(messages, config)
|
||||
);
|
||||
}
|
||||
return fallbackCompactor.checkThreshold(messages, config);
|
||||
},
|
||||
|
||||
compact: async (
|
||||
messages: CompactableMessage[],
|
||||
agentId: string,
|
||||
conversationId?: string,
|
||||
config?: CompactionConfig
|
||||
): Promise<CompactionResult> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('compactor.compact', () =>
|
||||
intelligence.compactor.compact(messages, agentId, conversationId, config)
|
||||
);
|
||||
}
|
||||
return fallbackCompactor.compact(messages, agentId, conversationId, config);
|
||||
},
|
||||
},
|
||||
|
||||
reflection: {
|
||||
init: async (config?: ReflectionConfig): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('reflection.init', () => intelligence.reflection.init(config));
|
||||
} else {
|
||||
await fallbackReflection.init(config);
|
||||
}
|
||||
},
|
||||
|
||||
recordConversation: async (): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('reflection.recordConversation', () =>
|
||||
intelligence.reflection.recordConversation()
|
||||
);
|
||||
} else {
|
||||
await fallbackReflection.recordConversation();
|
||||
}
|
||||
},
|
||||
|
||||
shouldReflect: async (): Promise<boolean> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('reflection.shouldReflect', () =>
|
||||
intelligence.reflection.shouldReflect()
|
||||
);
|
||||
}
|
||||
return fallbackReflection.shouldReflect();
|
||||
},
|
||||
|
||||
reflect: async (agentId: string, memories: MemoryEntryForAnalysis[]): Promise<ReflectionResult> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('reflection.reflect', () =>
|
||||
intelligence.reflection.reflect(agentId, memories)
|
||||
);
|
||||
}
|
||||
return fallbackReflection.reflect(agentId, memories);
|
||||
},
|
||||
|
||||
getHistory: async (limit?: number, agentId?: string): Promise<ReflectionResult[]> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('reflection.getHistory', () =>
|
||||
intelligence.reflection.getHistory(limit, agentId)
|
||||
);
|
||||
}
|
||||
return fallbackReflection.getHistory(limit, agentId);
|
||||
},
|
||||
|
||||
getState: async (): Promise<ReflectionState> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('reflection.getState', () => intelligence.reflection.getState());
|
||||
}
|
||||
return fallbackReflection.getState();
|
||||
},
|
||||
},
|
||||
|
||||
identity: {
|
||||
get: async (agentId: string): Promise<IdentityFiles> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.get', () => intelligence.identity.get(agentId));
|
||||
}
|
||||
return fallbackIdentity.get(agentId);
|
||||
},
|
||||
|
||||
getFile: async (agentId: string, file: string): Promise<string> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.getFile', () => intelligence.identity.getFile(agentId, file));
|
||||
}
|
||||
return fallbackIdentity.getFile(agentId, file);
|
||||
},
|
||||
|
||||
buildPrompt: async (agentId: string, memoryContext?: string): Promise<string> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.buildPrompt', () =>
|
||||
intelligence.identity.buildPrompt(agentId, memoryContext)
|
||||
);
|
||||
}
|
||||
return fallbackIdentity.buildPrompt(agentId, memoryContext);
|
||||
},
|
||||
|
||||
updateUserProfile: async (agentId: string, content: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('identity.updateUserProfile', () =>
|
||||
intelligence.identity.updateUserProfile(agentId, content)
|
||||
);
|
||||
} else {
|
||||
await fallbackIdentity.updateUserProfile(agentId, content);
|
||||
}
|
||||
},
|
||||
|
||||
appendUserProfile: async (agentId: string, addition: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('identity.appendUserProfile', () =>
|
||||
intelligence.identity.appendUserProfile(agentId, addition)
|
||||
);
|
||||
} else {
|
||||
await fallbackIdentity.appendUserProfile(agentId, addition);
|
||||
}
|
||||
},
|
||||
|
||||
proposeChange: async (
|
||||
agentId: string,
|
||||
file: 'soul' | 'instructions',
|
||||
suggestedContent: string,
|
||||
reason: string
|
||||
): Promise<IdentityChangeProposal> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.proposeChange', () =>
|
||||
intelligence.identity.proposeChange(agentId, file, suggestedContent, reason)
|
||||
);
|
||||
}
|
||||
return fallbackIdentity.proposeChange(agentId, file, suggestedContent, reason);
|
||||
},
|
||||
|
||||
approveProposal: async (proposalId: string): Promise<IdentityFiles> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.approveProposal', () =>
|
||||
intelligence.identity.approveProposal(proposalId)
|
||||
);
|
||||
}
|
||||
return fallbackIdentity.approveProposal(proposalId);
|
||||
},
|
||||
|
||||
rejectProposal: async (proposalId: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('identity.rejectProposal', () =>
|
||||
intelligence.identity.rejectProposal(proposalId)
|
||||
);
|
||||
} else {
|
||||
await fallbackIdentity.rejectProposal(proposalId);
|
||||
}
|
||||
},
|
||||
|
||||
getPendingProposals: async (agentId?: string): Promise<IdentityChangeProposal[]> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.getPendingProposals', () =>
|
||||
intelligence.identity.getPendingProposals(agentId)
|
||||
);
|
||||
}
|
||||
return fallbackIdentity.getPendingProposals(agentId);
|
||||
},
|
||||
|
||||
updateFile: async (agentId: string, file: string, content: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('identity.updateFile', () =>
|
||||
intelligence.identity.updateFile(agentId, file, content)
|
||||
);
|
||||
} else {
|
||||
await fallbackIdentity.updateFile(agentId, file, content);
|
||||
}
|
||||
},
|
||||
|
||||
getSnapshots: async (agentId: string, limit?: number): Promise<IdentitySnapshot[]> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.getSnapshots', () =>
|
||||
intelligence.identity.getSnapshots(agentId, limit)
|
||||
);
|
||||
}
|
||||
return fallbackIdentity.getSnapshots(agentId, limit);
|
||||
},
|
||||
|
||||
restoreSnapshot: async (agentId: string, snapshotId: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('identity.restoreSnapshot', () =>
|
||||
intelligence.identity.restoreSnapshot(agentId, snapshotId)
|
||||
);
|
||||
} else {
|
||||
await fallbackIdentity.restoreSnapshot(agentId, snapshotId);
|
||||
}
|
||||
},
|
||||
|
||||
listAgents: async (): Promise<string[]> => {
|
||||
if (isTauriRuntime()) {
|
||||
return tauriInvoke('identity.listAgents', () => intelligence.identity.listAgents());
|
||||
}
|
||||
return fallbackIdentity.listAgents();
|
||||
},
|
||||
|
||||
deleteAgent: async (agentId: string): Promise<void> => {
|
||||
if (isTauriRuntime()) {
|
||||
await tauriInvoke('identity.deleteAgent', () => intelligence.identity.deleteAgent(agentId));
|
||||
} else {
|
||||
await fallbackIdentity.deleteAgent(agentId);
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export default intelligenceClient;
|
||||
@@ -56,16 +56,63 @@ export function installAgentMethods(ClientClass: { prototype: KernelClient }): v
|
||||
|
||||
/**
|
||||
* List clones — maps to listAgents() with field adaptation
|
||||
* Maps all available AgentInfo fields to Clone interface properties
|
||||
*/
|
||||
proto.listClones = async function (this: KernelClient): Promise<{ clones: any[] }> {
|
||||
const agents = await this.listAgents();
|
||||
const clones = agents.map((agent) => ({
|
||||
id: agent.id,
|
||||
name: agent.name,
|
||||
role: agent.description,
|
||||
model: agent.model,
|
||||
createdAt: new Date().toISOString(),
|
||||
}));
|
||||
const clones = agents.map((agent) => {
|
||||
// Parse personality/emoji/nickname from SOUL.md content
|
||||
const soulLines = (agent.soul || '').split('\n');
|
||||
let emoji: string | undefined;
|
||||
let personality: string | undefined;
|
||||
let nickname: string | undefined;
|
||||
for (const line of soulLines) {
|
||||
if (!emoji || !nickname) {
|
||||
// Parse header line: "> 🦞 Nickname" or "> 🦞"
|
||||
const headerMatch = line.match(/^>\s*(\p{Emoji_Presentation}|\p{Extended_Pictographic})?\s*(.+)$/u);
|
||||
if (headerMatch) {
|
||||
if (headerMatch[1] && !emoji) emoji = headerMatch[1];
|
||||
if (headerMatch[2]?.trim() && !nickname) nickname = headerMatch[2].trim();
|
||||
}
|
||||
// Also check emoji without nickname
|
||||
if (!emoji) {
|
||||
const emojiOnly = line.match(/^>\s*(\p{Emoji_Presentation}|\p{Extended_Pictographic})\s*$/u);
|
||||
if (emojiOnly) emoji = emojiOnly[1];
|
||||
}
|
||||
}
|
||||
if (!personality) {
|
||||
const match = line.match(/##\s*(?:性格|核心特质|沟通风格)/);
|
||||
if (match) personality = line.trim();
|
||||
}
|
||||
}
|
||||
|
||||
// Parse userName/userRole from userProfile
|
||||
let userName: string | undefined;
|
||||
let userRole: string | undefined;
|
||||
if (agent.userProfile && typeof agent.userProfile === 'object') {
|
||||
const profile = agent.userProfile as Record<string, unknown>;
|
||||
userName = profile.userName as string | undefined || profile.name as string | undefined;
|
||||
userRole = profile.userRole as string | undefined || profile.role as string | undefined;
|
||||
}
|
||||
|
||||
return {
|
||||
id: agent.id,
|
||||
name: agent.name,
|
||||
role: agent.description,
|
||||
nickname,
|
||||
model: agent.model,
|
||||
soul: agent.soul,
|
||||
systemPrompt: agent.systemPrompt,
|
||||
temperature: agent.temperature,
|
||||
maxTokens: agent.maxTokens,
|
||||
emoji,
|
||||
personality,
|
||||
userName,
|
||||
userRole,
|
||||
createdAt: agent.createdAt || new Date().toISOString(),
|
||||
updatedAt: agent.updatedAt,
|
||||
};
|
||||
});
|
||||
return { clones };
|
||||
};
|
||||
|
||||
@@ -119,7 +166,7 @@ export function installAgentMethods(ClientClass: { prototype: KernelClient }): v
|
||||
};
|
||||
|
||||
/**
|
||||
* Update clone — maps to kernel agent_update
|
||||
* Update clone — maps to kernel agent_update + identity system for nickname/userName
|
||||
*/
|
||||
proto.updateClone = async function (this: KernelClient, id: string, updates: Record<string, unknown>): Promise<{ clone: unknown }> {
|
||||
await invoke('agent_update', {
|
||||
@@ -135,16 +182,130 @@ export function installAgentMethods(ClientClass: { prototype: KernelClient }): v
|
||||
},
|
||||
});
|
||||
|
||||
// Sync nickname/emoji to SOUL.md via identity system
|
||||
const nickname = updates.nickname as string | undefined;
|
||||
const emoji = updates.emoji as string | undefined;
|
||||
if (nickname || emoji) {
|
||||
try {
|
||||
const currentSoul = await invoke<string | null>('identity_get_file', { agentId: id, file: 'soul' });
|
||||
const soul = currentSoul || '';
|
||||
// Inject or update nickname line in SOUL.md header
|
||||
const lines = soul.split('\n');
|
||||
const headerIdx = lines.findIndex((l: string) => l.startsWith('> '));
|
||||
if (headerIdx >= 0) {
|
||||
// Update existing header line
|
||||
let header = lines[headerIdx];
|
||||
if (emoji && !header.match(/\p{Emoji_Presentation}|\p{Extended_Pictographic}/u)) {
|
||||
header = `> ${emoji} ${header.slice(2)}`;
|
||||
}
|
||||
lines[headerIdx] = header;
|
||||
} else if (emoji || nickname) {
|
||||
// Add header line after title
|
||||
const label = nickname || '';
|
||||
const icon = emoji || '';
|
||||
const titleIdx = lines.findIndex((l: string) => l.startsWith('# '));
|
||||
if (titleIdx >= 0) {
|
||||
lines.splice(titleIdx + 1, 0, `> ${icon} ${label}`.trim());
|
||||
}
|
||||
}
|
||||
await invoke('identity_update_file', { agentId: id, file: 'soul', content: lines.join('\n') });
|
||||
} catch {
|
||||
// Identity system update is non-critical
|
||||
}
|
||||
}
|
||||
|
||||
// Sync userName/userRole to USER.md via identity system
|
||||
const userName = updates.userName as string | undefined;
|
||||
const userRole = updates.userRole as string | undefined;
|
||||
if (userName || userRole) {
|
||||
try {
|
||||
const currentProfile = await invoke<string | null>('identity_get_file', { agentId: id, file: 'user_profile' });
|
||||
const profile = currentProfile || '# 用户档案\n';
|
||||
const profileLines = profile.split('\n');
|
||||
|
||||
// Update or add userName
|
||||
if (userName) {
|
||||
const nameIdx = profileLines.findIndex((l: string) => l.includes('姓名') || l.includes('userName'));
|
||||
if (nameIdx >= 0) {
|
||||
profileLines[nameIdx] = `- 姓名:${userName}`;
|
||||
} else {
|
||||
const sectionIdx = profileLines.findIndex((l: string) => l.startsWith('## 基本信息'));
|
||||
if (sectionIdx >= 0) {
|
||||
profileLines.splice(sectionIdx + 1, 0, '', `- 姓名:${userName}`);
|
||||
} else {
|
||||
profileLines.push('', '## 基本信息', '', `- 姓名:${userName}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Update or add userRole
|
||||
if (userRole) {
|
||||
const roleIdx = profileLines.findIndex((l: string) => l.includes('角色') || l.includes('userRole'));
|
||||
if (roleIdx >= 0) {
|
||||
profileLines[roleIdx] = `- 角色:${userRole}`;
|
||||
} else {
|
||||
profileLines.push(`- 角色:${userRole}`);
|
||||
}
|
||||
}
|
||||
|
||||
await invoke('identity_update_file', { agentId: id, file: 'user_profile', content: profileLines.join('\n') });
|
||||
} catch {
|
||||
// Identity system update is non-critical
|
||||
}
|
||||
}
|
||||
|
||||
// Return updated clone representation
|
||||
const clone = {
|
||||
id,
|
||||
name: updates.name,
|
||||
role: updates.description || updates.role,
|
||||
nickname: updates.nickname,
|
||||
model: updates.model,
|
||||
emoji: updates.emoji,
|
||||
personality: updates.personality,
|
||||
communicationStyle: updates.communicationStyle,
|
||||
systemPrompt: updates.systemPrompt,
|
||||
userName: updates.userName,
|
||||
userRole: updates.userRole,
|
||||
};
|
||||
return { clone };
|
||||
};
|
||||
}
|
||||
|
||||
// === Agent ID Resolution ===
|
||||
|
||||
/**
|
||||
* Cached kernel default agent UUID.
|
||||
* The conversationStore's DEFAULT_AGENT has id="1", but VikingStorage
|
||||
* stores data under kernel UUIDs. This cache bridges the gap.
|
||||
*/
|
||||
let _cachedDefaultKernelAgentId: string | null = null;
|
||||
|
||||
/**
|
||||
* Resolve an agent ID to the kernel's actual agent UUID.
|
||||
* - If already a UUID (8-4-4 hex pattern), return as-is.
|
||||
* - If "1" or undefined, query agent_list and cache the first kernel agent's UUID.
|
||||
* - Falls back to the original ID if kernel has no agents.
|
||||
*/
|
||||
export async function resolveKernelAgentId(agentId: string | undefined): Promise<string> {
|
||||
if (agentId && /^[0-9a-f]{8}-[0-9a-f]{4}-/.test(agentId)) {
|
||||
return agentId;
|
||||
}
|
||||
if (_cachedDefaultKernelAgentId) {
|
||||
return _cachedDefaultKernelAgentId;
|
||||
}
|
||||
try {
|
||||
const agents = await invoke<{ id: string }[]>('agent_list');
|
||||
if (agents.length > 0) {
|
||||
_cachedDefaultKernelAgentId = agents[0].id;
|
||||
return _cachedDefaultKernelAgentId;
|
||||
}
|
||||
} catch {
|
||||
// Kernel may not be available
|
||||
}
|
||||
return agentId || '1';
|
||||
}
|
||||
|
||||
/** Invalidate cache when kernel reconnects (new instance may have different UUIDs) */
|
||||
export function invalidateKernelAgentIdCache(): void {
|
||||
_cachedDefaultKernelAgentId = null;
|
||||
}
|
||||
|
||||
@@ -164,6 +164,11 @@ export class KernelClient {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
/** Get current kernel configuration (for auth token refresh) */
|
||||
getConfig(): KernelConfig | undefined {
|
||||
return this.config;
|
||||
}
|
||||
|
||||
getState(): ConnectionState {
|
||||
return this.state;
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
import { DEFAULT_MODEL_ID, DEFAULT_OPENAI_BASE_URL } from '../constants/models';
|
||||
import { createLogger } from './logger';
|
||||
import { recordLLMUsage } from './telemetry-collector';
|
||||
|
||||
const log = createLogger('LLMService');
|
||||
|
||||
@@ -819,7 +820,6 @@ function trackLLMCall(
|
||||
error?: unknown,
|
||||
): void {
|
||||
try {
|
||||
const { recordLLMUsage } = require('./telemetry-collector');
|
||||
recordLLMUsage(
|
||||
response.model || adapter.getProvider(),
|
||||
response.tokensUsed?.input ?? 0,
|
||||
@@ -832,7 +832,7 @@ function trackLLMCall(
|
||||
},
|
||||
);
|
||||
} catch (e) {
|
||||
log.debug('Telemetry recording failed (SSR or unavailable)', { error: e });
|
||||
log.debug('Telemetry recording failed', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -97,6 +97,27 @@ export const SCENARIO_TAGS: ScenarioTag[] = [
|
||||
icon: 'Palette',
|
||||
keywords: ['设计', 'UI', 'UX', '视觉', '原型', '界面'],
|
||||
},
|
||||
{
|
||||
id: 'healthcare',
|
||||
label: '医疗健康',
|
||||
description: '医院管理、患者服务、医疗数据分析',
|
||||
icon: 'HeartPulse',
|
||||
keywords: ['医疗', '医院', '健康', '患者', '临床', '护理', '行政'],
|
||||
},
|
||||
{
|
||||
id: 'education',
|
||||
label: '教育培训',
|
||||
description: '课程设计、教学辅助、学习规划',
|
||||
icon: 'GraduationCap',
|
||||
keywords: ['教育', '教学', '课程', '培训', '学习', '考试'],
|
||||
},
|
||||
{
|
||||
id: 'finance',
|
||||
label: '金融财务',
|
||||
description: '财务分析、风险管理、投资研究',
|
||||
icon: 'Landmark',
|
||||
keywords: ['金融', '财务', '投资', '风控', '审计', '报表'],
|
||||
},
|
||||
{
|
||||
id: 'devops',
|
||||
label: '运维部署',
|
||||
@@ -118,6 +139,13 @@ export const SCENARIO_TAGS: ScenarioTag[] = [
|
||||
icon: 'Megaphone',
|
||||
keywords: ['营销', '推广', '运营', '社媒', '增长', '转化'],
|
||||
},
|
||||
{
|
||||
id: 'legal',
|
||||
label: '法律合规',
|
||||
description: '合同审查、法规研究、合规管理',
|
||||
icon: 'Scale',
|
||||
keywords: ['法律', '合同', '合规', '法规', '审查', '风险'],
|
||||
},
|
||||
{
|
||||
id: 'other',
|
||||
label: '其他',
|
||||
|
||||
@@ -16,6 +16,39 @@ import { createLogger } from './logger';
|
||||
|
||||
const log = createLogger('SaaSRelayGateway');
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Memory injection helper — injects relevant memories into system prompt
|
||||
// before sending to SaaS relay (mirrors MemoryMiddleware in Tauri kernel path)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Attempt to inject relevant memories into the system prompt via Tauri IPC.
|
||||
* Falls back gracefully in non-Tauri contexts (browser mode).
|
||||
*/
|
||||
async function injectMemories(
|
||||
agentId: string | undefined,
|
||||
basePrompt: string,
|
||||
userInput: string,
|
||||
): Promise<string> {
|
||||
try {
|
||||
// Dynamic import — only available in Tauri context
|
||||
const { invoke } = await import('@tauri-apps/api/core');
|
||||
const enhanced = await invoke<string>('viking_inject_prompt', {
|
||||
agentId: agentId ?? 'default',
|
||||
basePrompt,
|
||||
userInput,
|
||||
maxTokens: 500,
|
||||
});
|
||||
if (enhanced && enhanced !== basePrompt) {
|
||||
log.debug('Memory injection succeeded for relay request');
|
||||
return enhanced;
|
||||
}
|
||||
} catch {
|
||||
// Non-Tauri context or viking not initialized — skip silently
|
||||
}
|
||||
return basePrompt;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Frontend DataMasking — mirrors Rust DataMasking middleware for SaaS Relay
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -180,6 +213,17 @@ export function createSaaSRelayGatewayClient(
|
||||
? [...history, { role: 'user' as const, content: maskedMessage }]
|
||||
: [{ role: 'user' as const, content: maskedMessage }];
|
||||
|
||||
// BUG-M5 fix: Inject relevant memories into system prompt via Tauri IPC.
|
||||
// This mirrors the MemoryMiddleware that runs in the kernel path.
|
||||
const enhancedSystemPrompt = await injectMemories(
|
||||
opts?.agentId,
|
||||
'',
|
||||
message,
|
||||
);
|
||||
if (enhancedSystemPrompt) {
|
||||
messages.unshift({ role: 'system', content: enhancedSystemPrompt });
|
||||
}
|
||||
|
||||
const model = getModel();
|
||||
if (!model) {
|
||||
callbacks.onError('No model available — please check SaaS relay configuration');
|
||||
|
||||
@@ -204,7 +204,28 @@ export const useAgentStore = create<AgentStore>((set, get) => ({
|
||||
set({ isLoading: true, error: null });
|
||||
try {
|
||||
// Step 1: Call backend to get server-processed config (tools merge)
|
||||
const config = await saasClient.createAgentFromTemplate(template.id);
|
||||
// Fallback to template data directly if SaaS is unreachable
|
||||
let config;
|
||||
try {
|
||||
config = await saasClient.createAgentFromTemplate(template.id);
|
||||
} catch (saasErr) {
|
||||
log.warn('[AgentStore] SaaS createAgentFromTemplate failed, using template directly:', saasErr);
|
||||
// Fallback: build config from template data without server-side tools merge
|
||||
config = {
|
||||
name: template.name,
|
||||
model: template.model,
|
||||
system_prompt: template.system_prompt,
|
||||
tools: template.tools || [],
|
||||
soul_content: template.soul_content,
|
||||
welcome_message: template.welcome_message,
|
||||
quick_commands: template.quick_commands,
|
||||
temperature: template.temperature,
|
||||
max_tokens: template.max_tokens,
|
||||
personality: template.personality,
|
||||
communication_style: template.communication_style,
|
||||
emoji: template.emoji,
|
||||
};
|
||||
}
|
||||
|
||||
// Resolve model: template model > first available SaaS model > 'default'
|
||||
const resolvedModel = config.model
|
||||
@@ -212,16 +233,30 @@ export const useAgentStore = create<AgentStore>((set, get) => ({
|
||||
?? 'default';
|
||||
|
||||
// Step 2: Create clone with merged data from backend
|
||||
const result = await client.createClone({
|
||||
name: config.name,
|
||||
emoji: config.emoji,
|
||||
personality: config.personality,
|
||||
scenarios: template.scenarios,
|
||||
communicationStyle: config.communication_style,
|
||||
model: resolvedModel,
|
||||
});
|
||||
|
||||
const cloneId = result?.clone?.id;
|
||||
// In saas-relay mode the local Kernel may not be running,
|
||||
// so wrap createClone in a try-catch and skip gracefully.
|
||||
let cloneId: string | undefined;
|
||||
let freshClone: Clone | undefined;
|
||||
try {
|
||||
const result = await client.createClone({
|
||||
name: config.name,
|
||||
emoji: config.emoji,
|
||||
personality: config.personality,
|
||||
scenarios: template.scenarios,
|
||||
communicationStyle: config.communication_style,
|
||||
model: resolvedModel,
|
||||
});
|
||||
cloneId = result?.clone?.id;
|
||||
} catch (cloneErr) {
|
||||
log.warn('[AgentStore] createClone failed (likely saas-relay mode without local kernel):', cloneErr);
|
||||
// In SaaS relay mode, the agent was already created server-side in Step 1.
|
||||
// Just refresh the clone list from the server.
|
||||
await get().loadClones();
|
||||
freshClone = get().clones.find(c => c.name === config.name);
|
||||
if (freshClone) {
|
||||
cloneId = freshClone.id;
|
||||
}
|
||||
}
|
||||
|
||||
if (cloneId) {
|
||||
// Persist SOUL.md via identity system
|
||||
@@ -265,7 +300,15 @@ export const useAgentStore = create<AgentStore>((set, get) => ({
|
||||
await get().loadClones();
|
||||
|
||||
// Return a fresh clone from the store (immutable — no in-place mutation)
|
||||
const freshClone = get().clones.find((c) => c.id === cloneId);
|
||||
const storedClone = get().clones.find((c) => c.id === cloneId);
|
||||
if (storedClone) {
|
||||
return {
|
||||
...storedClone,
|
||||
...(config.welcome_message ? { welcomeMessage: config.welcome_message } : {}),
|
||||
...(config.quick_commands?.length ? { quickCommands: config.quick_commands } : {}),
|
||||
};
|
||||
}
|
||||
// Fallback: if clone was found by name earlier in the catch path
|
||||
if (freshClone) {
|
||||
return {
|
||||
...freshClone,
|
||||
@@ -273,7 +316,7 @@ export const useAgentStore = create<AgentStore>((set, get) => ({
|
||||
...(config.quick_commands?.length ? { quickCommands: config.quick_commands } : {}),
|
||||
};
|
||||
}
|
||||
return result?.clone as Clone | undefined;
|
||||
return undefined;
|
||||
} catch (error) {
|
||||
set({ error: String(error) });
|
||||
return undefined;
|
||||
|
||||
@@ -326,8 +326,16 @@ export const useConversationStore = create<ConversationState>()(
|
||||
upsertActiveConversation: (currentMessages: ChatMessage[]) => {
|
||||
const state = get();
|
||||
const currentId = state.currentConversationId || null;
|
||||
// Strip transient fields (error, streaming, optimistic) before persistence
|
||||
// so old errors don't permanently show "重试" buttons on reload
|
||||
const cleanMessages = currentMessages.map(m => ({
|
||||
...m,
|
||||
error: undefined,
|
||||
streaming: undefined,
|
||||
optimistic: undefined,
|
||||
}));
|
||||
const conversations = upsertActiveConversation(
|
||||
[...state.conversations], currentMessages, state.sessionKey,
|
||||
[...state.conversations], cleanMessages, state.sessionKey,
|
||||
state.currentConversationId, state.currentAgent,
|
||||
);
|
||||
// If this was a new conversation (no prior currentConversationId),
|
||||
|
||||
@@ -38,6 +38,46 @@ import { useArtifactStore } from './artifactStore';
|
||||
|
||||
const log = createLogger('StreamStore');
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 401 Auth Error Recovery
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Detect and handle 401 auth errors during chat streaming.
|
||||
* Attempts token refresh → kernel reconnect → auto-retry.
|
||||
* Returns a user-friendly error message if recovery fails.
|
||||
*/
|
||||
async function tryRecoverFromAuthError(error: string): Promise<string | null> {
|
||||
const is401 = /401|Unauthorized|UNAUTHORIZED|未认证|认证已过期/.test(error);
|
||||
if (!is401) return null;
|
||||
|
||||
log.info('Detected 401 auth error, attempting token refresh...');
|
||||
try {
|
||||
const { saasClient } = await import('../../lib/saas-client');
|
||||
const newToken = await saasClient.refreshMutex();
|
||||
if (newToken) {
|
||||
// Update kernel config with refreshed token → triggers kernel re-init via changed api_key detection
|
||||
const { getKernelClient } = await import('../../lib/kernel-client');
|
||||
const kernelClient = getKernelClient();
|
||||
const currentConfig = kernelClient.getConfig();
|
||||
if (currentConfig) {
|
||||
kernelClient.setConfig({ ...currentConfig, apiKey: newToken });
|
||||
await kernelClient.connect();
|
||||
log.info('Kernel reconnected with refreshed token');
|
||||
}
|
||||
return '认证已刷新,请重新发送消息';
|
||||
}
|
||||
} catch (refreshErr) {
|
||||
log.warn('Token refresh failed, triggering logout:', refreshErr);
|
||||
try {
|
||||
const { useSaaSStore } = await import('../saasStore');
|
||||
useSaaSStore.getState().logout();
|
||||
} catch { /* non-critical */ }
|
||||
return 'SaaS 会话已过期,请重新登录';
|
||||
}
|
||||
return '认证失败,请重新登录';
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -481,21 +521,24 @@ export const useStreamStore = create<StreamState>()(
|
||||
});
|
||||
}
|
||||
|
||||
// Async memory extraction
|
||||
// Async memory extraction — dispatch update event AFTER extraction completes
|
||||
const msgs = _chat?.getMessages() || [];
|
||||
const filtered = msgs
|
||||
.filter(m => m.role === 'user' || m.role === 'assistant')
|
||||
.map(m => ({ role: m.role, content: m.content }));
|
||||
const convId = useConversationStore.getState().currentConversationId;
|
||||
getMemoryExtractor().extractFromConversation(filtered, agentId, convId ?? undefined).catch(err => {
|
||||
log.warn('Memory extraction failed:', err);
|
||||
});
|
||||
// Notify RightPanel to refresh UserProfile after memory extraction
|
||||
if (typeof window !== 'undefined') {
|
||||
window.dispatchEvent(new CustomEvent('zclaw:agent-profile-updated', {
|
||||
detail: { agentId }
|
||||
}));
|
||||
}
|
||||
getMemoryExtractor().extractFromConversation(filtered, agentId, convId ?? undefined)
|
||||
.then(() => {
|
||||
// Notify RightPanel to refresh UserProfile after memory extraction completes
|
||||
if (typeof window !== 'undefined') {
|
||||
window.dispatchEvent(new CustomEvent('zclaw:agent-profile-updated', {
|
||||
detail: { agentId }
|
||||
}));
|
||||
}
|
||||
})
|
||||
.catch(err => {
|
||||
log.warn('Memory extraction failed:', err);
|
||||
});
|
||||
intelligenceClient.reflection.recordConversation().catch(err => {
|
||||
log.warn('Recording conversation failed:', err);
|
||||
});
|
||||
@@ -517,7 +560,7 @@ export const useStreamStore = create<StreamState>()(
|
||||
}
|
||||
}
|
||||
},
|
||||
onError: (error: string) => {
|
||||
onError: async (error: string) => {
|
||||
// Flush any remaining buffered deltas before erroring
|
||||
if (flushTimer !== null) {
|
||||
clearTimeout(flushTimer);
|
||||
@@ -525,10 +568,14 @@ export const useStreamStore = create<StreamState>()(
|
||||
}
|
||||
flushBuffers();
|
||||
|
||||
// Attempt 401 auth recovery (token refresh + kernel reconnect)
|
||||
const recoveryMsg = await tryRecoverFromAuthError(error);
|
||||
const displayError = recoveryMsg || error;
|
||||
|
||||
_chat?.updateMessages(msgs =>
|
||||
msgs.map(m =>
|
||||
m.id === assistantId
|
||||
? { ...m, content: error, streaming: false, error }
|
||||
? { ...m, content: displayError, streaming: false, error: displayError }
|
||||
: m.role === 'user' && m.optimistic && m.timestamp.getTime() >= streamStartTime
|
||||
? { ...m, optimistic: false }
|
||||
: m
|
||||
@@ -573,13 +620,18 @@ export const useStreamStore = create<StreamState>()(
|
||||
textBuffer = '';
|
||||
thinkBuffer = '';
|
||||
|
||||
const errorMessage = err instanceof Error ? err.message : '无法连接 Gateway';
|
||||
let errorMessage = err instanceof Error ? err.message : '无法连接 Gateway';
|
||||
|
||||
// Attempt 401 auth recovery
|
||||
const recoveryMsg = await tryRecoverFromAuthError(errorMessage);
|
||||
if (recoveryMsg) errorMessage = recoveryMsg;
|
||||
|
||||
_chat?.updateMessages(msgs =>
|
||||
msgs.map(m =>
|
||||
m.id === assistantId
|
||||
? {
|
||||
...m,
|
||||
content: `⚠️ ${errorMessage}`,
|
||||
content: errorMessage,
|
||||
streaming: false,
|
||||
error: errorMessage,
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user