refactor(crates): kernel/generation module split + DeerFlow optimizations + middleware + dead code cleanup

- Split zclaw-kernel/kernel.rs (1486 lines) into 9 domain modules
- Split zclaw-kernel/generation.rs (1080 lines) into 3 modules
- Add DeerFlow-inspired middleware: DanglingTool, SubagentLimit, ToolError, ToolOutputGuard
- Add PromptBuilder for structured system prompt assembly
- Add FactStore (zclaw-memory) for persistent fact extraction
- Add task builtin tool for agent task management
- Driver improvements: Anthropic/OpenAI extended thinking, Gemini safety settings
- Replace `let _ =` with proper `tracing::warn!` logging across SaaS handlers
- Remove unused dependency (url) from zclaw-hands
This commit is contained in:
iven
2026-04-03 00:28:03 +08:00
parent 0a04b260a4
commit 52bdafa633
55 changed files with 4130 additions and 1959 deletions

View File

@@ -185,8 +185,8 @@ pub async fn increment_usage(
input_tokens: i64,
output_tokens: i64,
) -> SaasResult<()> {
// 确保 quota 行存在(幂等)
let _ = get_or_create_usage(pool, account_id).await?;
// 确保 quota 行存在(幂等)— 返回值仅用于确认行存在,无需绑定
get_or_create_usage(pool, account_id).await?;
// 直接用 account_id + period 原子更新,无需 SELECT 获取 ID
let now = chrono::Utc::now();

View File

@@ -887,7 +887,7 @@ async fn fix_seed_data(pool: &PgPool) -> SaasResult<()> {
}
// 也更新 api_tokens 表的 account_id
let _ = sqlx::query("UPDATE api_tokens SET account_id = $1 WHERE account_id != $1")
sqlx::query("UPDATE api_tokens SET account_id = $1 WHERE account_id != $1")
.bind(primary_admin).execute(pool).await?;
tracing::info!("Seed data fix completed");

View File

@@ -231,13 +231,12 @@ pub async fn batch_create_items(
}
match service::create_item(&state.db, &ctx.account_id, req).await {
Ok(item) => {
let _ = state.worker_dispatcher.dispatch(
if let Err(e) = state.worker_dispatcher.dispatch(
"generate_embedding",
serde_json::json!({ "item_id": item.id }),
).await.map_err(|e| {
).await {
tracing::warn!("[Knowledge] Failed to dispatch embedding for item {}: {}", item.id, e);
e
});
}
created.push(item.id);
}
Err(e) => {
@@ -563,13 +562,12 @@ pub async fn import_items(
match service::create_item(&state.db, &ctx.account_id, &item_req).await {
Ok(item) => {
let _ = state.worker_dispatcher.dispatch(
if let Err(e) = state.worker_dispatcher.dispatch(
"generate_embedding",
serde_json::json!({ "item_id": item.id }),
).await.map_err(|e| {
).await {
tracing::warn!("[Knowledge] Failed to dispatch embedding for item {}: {}", item.id, e);
e
});
}
created.push(item.id);
}
Err(e) => {

View File

@@ -259,7 +259,9 @@ pub async fn execute_relay(
}
}
let key_id = current_key_id.as_ref().unwrap().clone();
let key_id = current_key_id.as_ref()
.ok_or_else(|| SaasError::Internal("Key pool selection failed: no key_id".into()))?
.clone();
let api_key = current_api_key.clone();
let mut req_builder = client.post(&url)
@@ -309,7 +311,10 @@ pub async fn execute_relay(
}
}
Err(e) => {
let _ = tx.send(Err(std::io::Error::other(e))).await;
let err_msg = e.to_string();
if tx.send(Err(std::io::Error::other(e))).await.is_err() {
tracing::debug!("SSE relay: client disconnected before error sent: {}", err_msg);
}
break;
}
}
@@ -372,12 +377,12 @@ pub async fn execute_relay(
let (input_tokens, output_tokens) = extract_token_usage(&body);
update_task_status(db, task_id, "completed",
Some(input_tokens), Some(output_tokens), None).await?;
// 记录 Key 使用量
let _ = super::key_pool::record_key_usage(
// 记录 Key 使用量(失败仅记录,不阻塞响应)
if let Err(e) = super::key_pool::record_key_usage(
db, &key_id, Some(input_tokens + output_tokens),
).await.map_err(|e| {
).await {
tracing::warn!("[Relay] Failed to record key usage for billing: {}", e);
});
}
return Ok(RelayResponse::Json(body));
}
}
@@ -557,7 +562,10 @@ fn hash_request(body: &str) -> String {
fn extract_token_usage(body: &str) -> (i64, i64) {
let parsed: serde_json::Value = match serde_json::from_str(body) {
Ok(v) => v,
Err(_) => return (0, 0),
Err(e) => {
tracing::debug!("extract_token_usage: JSON parse failed (body len={}): {}", body.len(), e);
return (0, 0);
}
};
let usage = parsed.get("usage");