fix(kernel,desktop): Core Chain Hardening 穷尽审计 7 项修复
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

审计发现 1 CRITICAL + 4 HIGH + 4 MEDIUM + 4 LOW, 本次提交修复其中以下各项:

CRITICAL:
- TS seam 测试改为 JSON round-trip 验证 (12 测试覆盖 10 事件类型)

HIGH:
- post_conversation_hook 拦截路径 driver=None 加 debug 日志
- schedule intercept channel send 失败回退 LLM (return Ok(None))

MEDIUM:
- DeltaBuffer.flush() 先 mutation 再 clear, 防止异常丢数据
- ModelsAPI.tsx 去重: 改用 model-config.ts 导出 (消除 2 函数+1 接口+2 常量)
- boot_with_driver docstring 记录跳过 agent 恢复

TypeScript 0 错误, Rust 76 kernel 测试通过, TS 12 seam 测试通过
This commit is contained in:
iven
2026-04-21 23:30:08 +08:00
parent 27006157da
commit 58ff0bdde7
6 changed files with 148 additions and 93 deletions

View File

@@ -380,6 +380,9 @@ pub async fn agent_chat_stream(
let hb = hb_state.clone();
let rf = rf_state.clone();
let driver = llm_driver.clone();
if driver.is_none() {
tracing::debug!("[agent_chat_stream] Post-hook firing without LLM driver (schedule intercept path)");
}
tokio::spawn(async move {
crate::intelligence_hooks::post_conversation_hook(
&agent_id_hook, &message_hook, &hb, &rf, driver,

View File

@@ -7,20 +7,16 @@ import { useConversationStore } from '../../store/chat/conversationStore';
import { silentErrorHandler } from '../../lib/error-utils';
import { secureStorage } from '../../lib/secure-storage';
import { LLM_PROVIDER_URLS } from '../../constants/api-urls';
import {
type CustomModel,
loadCustomModels as loadCustomModelsBase,
saveCustomModels as saveCustomModelsBase,
getCustomModelApiKey,
saveCustomModelApiKey,
deleteCustomModelApiKey,
} from '../../lib/model-config';
import { Plus, Pencil, Trash2, Star, Eye, EyeOff, AlertCircle, X, Zap, Check } from 'lucide-react';
// 自定义模型数据结构
interface CustomModel {
id: string;
name: string;
provider: string;
apiKey?: string;
apiProtocol: 'openai' | 'anthropic' | 'custom';
baseUrl?: string;
isDefault?: boolean;
createdAt: string;
}
// Embedding 配置数据结构
interface EmbeddingConfig {
provider: string;
@@ -56,8 +52,6 @@ const AVAILABLE_PROVIDERS = [
{ id: 'custom', name: '自定义', baseUrl: '' },
];
const STORAGE_KEY = 'zclaw-custom-models';
const MODEL_KEY_SECURE_PREFIX = 'zclaw-secure-model-key:';
const EMBEDDING_STORAGE_KEY = 'zclaw-embedding-config';
const EMBEDDING_KEY_SECURE = 'zclaw-secure-embedding-apikey';
@@ -123,32 +117,6 @@ async function loadEmbeddingApiKey(): Promise<string | null> {
return secureStorage.get(EMBEDDING_KEY_SECURE);
}
// 从 localStorage 加载自定义模型 (apiKeys are stripped from localStorage)
function loadCustomModelsBase(): CustomModel[] {
try {
const stored = localStorage.getItem(STORAGE_KEY);
if (stored) {
return JSON.parse(stored);
}
} catch (e) {
console.warn('[ModelsAPI] Failed to load model config:', e);
}
return [];
}
// 保存自定义模型到 localStorage (apiKeys are stripped before saving)
function saveCustomModelsBase(models: CustomModel[]): void {
try {
const sanitized = models.map(m => {
const { apiKey: _, ...rest } = m;
return rest;
});
localStorage.setItem(STORAGE_KEY, JSON.stringify(sanitized));
} catch (e) {
console.warn('[ModelsAPI] Failed to save model config:', e);
}
}
/**
* Async load: fetches models from localStorage and merges apiKeys from secure storage.
*/
@@ -156,7 +124,7 @@ async function loadCustomModelsWithKeys(): Promise<CustomModel[]> {
const models = loadCustomModelsBase();
const modelsWithKeys = await Promise.all(
models.map(async (model) => {
const apiKey = await secureStorage.get(MODEL_KEY_SECURE_PREFIX + model.id);
const apiKey = await getCustomModelApiKey(model.id);
return { ...model, apiKey: apiKey || undefined };
})
);
@@ -281,9 +249,9 @@ export function ModelsAPI() {
// Save apiKey to secure storage
if (newModel.apiKey) {
await secureStorage.set(MODEL_KEY_SECURE_PREFIX + newModel.id, newModel.apiKey);
await saveCustomModelApiKey(newModel.id, newModel.apiKey);
} else {
await secureStorage.delete(MODEL_KEY_SECURE_PREFIX + newModel.id);
await deleteCustomModelApiKey(newModel.id);
}
setCustomModels(updatedModels);
@@ -301,7 +269,7 @@ export function ModelsAPI() {
setCustomModels(updatedModels);
saveCustomModelsBase(updatedModels);
// Also remove apiKey from secure storage
await secureStorage.delete(MODEL_KEY_SECURE_PREFIX + modelId);
await deleteCustomModelApiKey(modelId);
};
// 设为默认模型

View File

@@ -172,8 +172,6 @@ class DeltaBuffer {
this.timer = null;
const text = this.text;
const think = this.think;
this.text = '';
this.think = '';
if (text || think) {
this.chat.updateMessages(msgs =>
msgs.map(m => {
@@ -186,6 +184,8 @@ class DeltaBuffer {
})
);
}
this.text = '';
this.think = '';
}
clear() {