fix(kernel,desktop): Core Chain Hardening 穷尽审计 7 项修复
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

审计发现 1 CRITICAL + 4 HIGH + 4 MEDIUM + 4 LOW (共 13 项), 修复如下:

CRITICAL:
- TS seam 测试改为 JSON round-trip 验证 (12 测试覆盖 10 事件类型)

HIGH:
- post_conversation_hook 拦截路径 driver=None 加 debug 日志
- schedule intercept channel send 失败回退 LLM (return Ok(None))

MEDIUM:
- DeltaBuffer.flush() 先 mutation 再 clear, 防止异常丢数据
- ModelsAPI.tsx 去重: 改用 model-config.ts 导出 (消除 2 函数+1 接口+2 常量)
- boot_with_driver docstring 记录跳过 agent 恢复

TypeScript 0 错误, Rust 76 kernel 测试通过, TS 12 seam 测试通过
This commit is contained in:
iven
2026-04-21 23:30:08 +08:00
parent 27006157da
commit 58ff0bdde7
6 changed files with 148 additions and 93 deletions

View File

@@ -77,7 +77,8 @@ impl Kernel {
let (tx, rx) = mpsc::channel(32);
if tx.send(zclaw_runtime::LoopEvent::Delta(confirm_msg)).await.is_err() {
tracing::warn!("[Kernel] Failed to send confirm msg to channel");
tracing::warn!("[Kernel] Failed to send confirm msg to channel — falling through to LLM");
return Ok(None);
}
if tx.send(zclaw_runtime::LoopEvent::Complete(
zclaw_runtime::AgentLoopResult {

View File

@@ -181,8 +181,12 @@ impl Kernel {
/// Boot the kernel with a pre-configured driver (for testing).
///
/// Skips `config.create_driver()` and uses the provided driver directly.
/// Uses an in-memory SQLite database to avoid filesystem side effects.
/// **TEST ONLY.** Do not call from production code.
///
/// Differences from `boot()`:
/// - Uses the provided `driver` instead of `config.create_driver()`
/// - Uses an in-memory SQLite database (no filesystem side effects)
/// - Skips agent recovery from persistent storage (`memory.list_agents_with_runtime()`)
pub async fn boot_with_driver(
config: KernelConfig,
driver: Arc<dyn LlmDriver>,

View File

@@ -380,6 +380,9 @@ pub async fn agent_chat_stream(
let hb = hb_state.clone();
let rf = rf_state.clone();
let driver = llm_driver.clone();
if driver.is_none() {
tracing::debug!("[agent_chat_stream] Post-hook firing without LLM driver (schedule intercept path)");
}
tokio::spawn(async move {
crate::intelligence_hooks::post_conversation_hook(
&agent_id_hook, &message_hook, &hb, &rf, driver,

View File

@@ -7,20 +7,16 @@ import { useConversationStore } from '../../store/chat/conversationStore';
import { silentErrorHandler } from '../../lib/error-utils';
import { secureStorage } from '../../lib/secure-storage';
import { LLM_PROVIDER_URLS } from '../../constants/api-urls';
import {
type CustomModel,
loadCustomModels as loadCustomModelsBase,
saveCustomModels as saveCustomModelsBase,
getCustomModelApiKey,
saveCustomModelApiKey,
deleteCustomModelApiKey,
} from '../../lib/model-config';
import { Plus, Pencil, Trash2, Star, Eye, EyeOff, AlertCircle, X, Zap, Check } from 'lucide-react';
// 自定义模型数据结构
interface CustomModel {
id: string;
name: string;
provider: string;
apiKey?: string;
apiProtocol: 'openai' | 'anthropic' | 'custom';
baseUrl?: string;
isDefault?: boolean;
createdAt: string;
}
// Embedding 配置数据结构
interface EmbeddingConfig {
provider: string;
@@ -56,8 +52,6 @@ const AVAILABLE_PROVIDERS = [
{ id: 'custom', name: '自定义', baseUrl: '' },
];
const STORAGE_KEY = 'zclaw-custom-models';
const MODEL_KEY_SECURE_PREFIX = 'zclaw-secure-model-key:';
const EMBEDDING_STORAGE_KEY = 'zclaw-embedding-config';
const EMBEDDING_KEY_SECURE = 'zclaw-secure-embedding-apikey';
@@ -123,32 +117,6 @@ async function loadEmbeddingApiKey(): Promise<string | null> {
return secureStorage.get(EMBEDDING_KEY_SECURE);
}
// 从 localStorage 加载自定义模型 (apiKeys are stripped from localStorage)
function loadCustomModelsBase(): CustomModel[] {
try {
const stored = localStorage.getItem(STORAGE_KEY);
if (stored) {
return JSON.parse(stored);
}
} catch (e) {
console.warn('[ModelsAPI] Failed to load model config:', e);
}
return [];
}
// 保存自定义模型到 localStorage (apiKeys are stripped before saving)
function saveCustomModelsBase(models: CustomModel[]): void {
try {
const sanitized = models.map(m => {
const { apiKey: _, ...rest } = m;
return rest;
});
localStorage.setItem(STORAGE_KEY, JSON.stringify(sanitized));
} catch (e) {
console.warn('[ModelsAPI] Failed to save model config:', e);
}
}
/**
* Async load: fetches models from localStorage and merges apiKeys from secure storage.
*/
@@ -156,7 +124,7 @@ async function loadCustomModelsWithKeys(): Promise<CustomModel[]> {
const models = loadCustomModelsBase();
const modelsWithKeys = await Promise.all(
models.map(async (model) => {
const apiKey = await secureStorage.get(MODEL_KEY_SECURE_PREFIX + model.id);
const apiKey = await getCustomModelApiKey(model.id);
return { ...model, apiKey: apiKey || undefined };
})
);
@@ -281,9 +249,9 @@ export function ModelsAPI() {
// Save apiKey to secure storage
if (newModel.apiKey) {
await secureStorage.set(MODEL_KEY_SECURE_PREFIX + newModel.id, newModel.apiKey);
await saveCustomModelApiKey(newModel.id, newModel.apiKey);
} else {
await secureStorage.delete(MODEL_KEY_SECURE_PREFIX + newModel.id);
await deleteCustomModelApiKey(newModel.id);
}
setCustomModels(updatedModels);
@@ -301,7 +269,7 @@ export function ModelsAPI() {
setCustomModels(updatedModels);
saveCustomModelsBase(updatedModels);
// Also remove apiKey from secure storage
await secureStorage.delete(MODEL_KEY_SECURE_PREFIX + modelId);
await deleteCustomModelApiKey(modelId);
};
// 设为默认模型

View File

@@ -172,8 +172,6 @@ class DeltaBuffer {
this.timer = null;
const text = this.text;
const think = this.think;
this.text = '';
this.think = '';
if (text || think) {
this.chat.updateMessages(msgs =>
msgs.map(m => {
@@ -186,6 +184,8 @@ class DeltaBuffer {
})
);
}
this.text = '';
this.think = '';
}
clear() {

View File

@@ -1,8 +1,9 @@
/**
* Chat seam tests — verify request/response type contracts
*
* Tests that the TypeScript types match the Rust serde-serialized format.
* These are pure type contract tests — no Tauri dependency needed.
* Validates that TypeScript types match the Rust serde-serialized format.
* Uses round-trip JSON serialization to catch field name mismatches
* (e.g., if Rust changes `rename_all = "camelCase"` or adds/removes fields).
*/
import { describe, it, expect } from 'vitest';
@@ -51,16 +52,49 @@ type StreamChatEvent =
| { type: 'complete'; inputTokens: number; outputTokens: number }
| { type: 'error'; message: string };
describe('Chat Seam: request format contract', () => {
it('StreamChatRequest has required camelCase fields', () => {
const req: StreamChatRequest = {
agentId: 'test-agent',
sessionId: 'session-123',
message: 'Hello',
};
expect(req.agentId).toBe('test-agent');
expect(req.sessionId).toBe('session-123');
// ---------------------------------------------------------------------------
// Simulated Rust serde output — these strings represent what Rust would emit.
// If a field name changes in Rust, the JSON.parse round-trip will fail here.
// ---------------------------------------------------------------------------
const RUST_STREAM_CHAT_REQUEST = `{
"agentId": "agent-1",
"sessionId": "sess-abc",
"message": "Hello",
"thinkingEnabled": true,
"reasoningEffort": "high",
"planMode": false,
"subagentEnabled": true,
"model": "gpt-4o"
}`;
const RUST_CHAT_RESPONSE = `{
"content": "Hello back!",
"inputTokens": 10,
"outputTokens": 5
}`;
const RUST_EVENT_DELTA = `{"type":"delta","delta":"Hello world"}`;
const RUST_EVENT_THINKING = `{"type":"thinkingDelta","delta":"thinking..."}`;
const RUST_EVENT_TOOL_START = `{"type":"toolStart","name":"web_search","input":{"query":"test"}}`;
const RUST_EVENT_TOOL_END = `{"type":"toolEnd","name":"web_search","output":{"results":[]}}`;
const RUST_EVENT_HAND_START = `{"type":"handStart","name":"hand_quiz","params":{"topic":"math"}}`;
const RUST_EVENT_HAND_END = `{"type":"handEnd","name":"hand_quiz","result":{"questions":[]}}`;
const RUST_EVENT_SUBTASK = `{"type":"subtaskStatus","taskId":"t1","description":"Research","status":"running","detail":"Searching"}`;
const RUST_EVENT_ITERATION = `{"type":"iterationStart","iteration":2,"maxIterations":10}`;
const RUST_EVENT_COMPLETE = `{"type":"complete","inputTokens":100,"outputTokens":50}`;
const RUST_EVENT_ERROR = `{"type":"error","message":"已取消"}`;
describe('Chat Seam: request format contract (JSON round-trip)', () => {
it('StreamChatRequest parses from simulated Rust output', () => {
const req: StreamChatRequest = JSON.parse(RUST_STREAM_CHAT_REQUEST);
expect(req.agentId).toBe('agent-1');
expect(req.sessionId).toBe('sess-abc');
expect(req.message).toBe('Hello');
expect(req.thinkingEnabled).toBe(true);
expect(req.reasoningEffort).toBe('high');
expect(req.planMode).toBe(false);
expect(req.subagentEnabled).toBe(true);
expect(req.model).toBe('gpt-4o');
});
it('StreamChatRequest optional fields are camelCase', () => {
@@ -74,55 +108,56 @@ describe('Chat Seam: request format contract', () => {
subagentEnabled: true,
model: 'gpt-4o',
};
expect(req.thinkingEnabled).toBe(true);
expect(req.reasoningEffort).toBe('high');
expect(req.planMode).toBe(false);
expect(req.subagentEnabled).toBe(true);
expect(req.model).toBe('gpt-4o');
// Verify camelCase naming by serializing and checking no snake_case
const json = JSON.stringify(req);
expect(json).not.toContain('thinking_enabled');
expect(json).not.toContain('reasoning_effort');
expect(json).not.toContain('plan_mode');
expect(json).not.toContain('subagent_enabled');
});
it('ChatRequest format for non-streaming', () => {
const req: ChatRequest = {
agentId: 'test-agent',
message: 'Hello',
model: 'gpt-4o',
};
expect(req.agentId).toBe('test-agent');
expect(req.message).toBe('Hello');
});
it('ChatResponse has expected fields', () => {
const resp: ChatResponse = {
content: 'Hello back!',
inputTokens: 10,
outputTokens: 5,
};
it('ChatResponse parses from simulated Rust output', () => {
const resp: ChatResponse = JSON.parse(RUST_CHAT_RESPONSE);
expect(resp.content).toBe('Hello back!');
expect(resp.inputTokens).toBe(10);
expect(resp.outputTokens).toBe(5);
});
});
describe('Chat Seam: StreamChatEvent format contract', () => {
it('delta event matches Rust StreamChatEvent::Delta', () => {
const event: StreamChatEvent = { type: 'delta', delta: 'Hello' };
describe('Chat Seam: StreamChatEvent format contract (JSON round-trip)', () => {
it('delta event parses from simulated Rust output', () => {
const event: StreamChatEvent = JSON.parse(RUST_EVENT_DELTA);
expect(event.type).toBe('delta');
if (event.type === 'delta') {
expect(typeof event.delta).toBe('string');
expect(event.delta).toBe('Hello world');
}
});
it('complete event has token counts', () => {
const event: StreamChatEvent = { type: 'complete', inputTokens: 10, outputTokens: 5 };
if (event.type === 'complete') {
expect(event.inputTokens).toBeGreaterThanOrEqual(0);
expect(event.outputTokens).toBeGreaterThanOrEqual(0);
it('thinkingDelta event parses correctly', () => {
const event: StreamChatEvent = JSON.parse(RUST_EVENT_THINKING);
expect(event.type).toBe('thinkingDelta');
if (event.type === 'thinkingDelta') {
expect(event.delta).toBe('thinking...');
}
});
it('toolStart/toolEnd events parse with correct fields', () => {
const start: StreamChatEvent = JSON.parse(RUST_EVENT_TOOL_START);
const end: StreamChatEvent = JSON.parse(RUST_EVENT_TOOL_END);
if (start.type === 'toolStart') {
expect(start.name).toBe('web_search');
expect(start.input).toBeDefined();
}
if (end.type === 'toolEnd') {
expect(end.name).toBe('web_search');
expect(end.output).toBeDefined();
}
});
it('handStart/handEnd events have correct structure', () => {
const start: StreamChatEvent = { type: 'handStart', name: 'hand_quiz', params: { topic: 'math' } };
const end: StreamChatEvent = { type: 'handEnd', name: 'hand_quiz', result: { questions: [] } };
const start: StreamChatEvent = JSON.parse(RUST_EVENT_HAND_START);
const end: StreamChatEvent = JSON.parse(RUST_EVENT_HAND_END);
if (start.type === 'handStart') {
expect(start.name).toMatch(/^hand_/);
@@ -134,10 +169,54 @@ describe('Chat Seam: StreamChatEvent format contract', () => {
}
});
it('subtaskStatus event parses all fields including optional detail', () => {
const event: StreamChatEvent = JSON.parse(RUST_EVENT_SUBTASK);
if (event.type === 'subtaskStatus') {
expect(event.taskId).toBe('t1');
expect(event.description).toBe('Research');
expect(event.status).toBe('running');
expect(event.detail).toBe('Searching');
}
});
it('iterationStart event parses iteration and maxIterations', () => {
const event: StreamChatEvent = JSON.parse(RUST_EVENT_ITERATION);
if (event.type === 'iterationStart') {
expect(event.iteration).toBe(2);
expect(event.maxIterations).toBe(10);
}
});
it('complete event has token counts', () => {
const event: StreamChatEvent = JSON.parse(RUST_EVENT_COMPLETE);
if (event.type === 'complete') {
expect(event.inputTokens).toBeGreaterThanOrEqual(0);
expect(event.outputTokens).toBeGreaterThanOrEqual(0);
}
});
it('error event has message field', () => {
const event: StreamChatEvent = { type: 'error', message: '已取消' };
const event: StreamChatEvent = JSON.parse(RUST_EVENT_ERROR);
if (event.type === 'error') {
expect(event.message).toBeTruthy();
}
});
it('all 10 StreamChatEvent variants are represented in Rust output', () => {
const variants = [
RUST_EVENT_DELTA, RUST_EVENT_THINKING,
RUST_EVENT_TOOL_START, RUST_EVENT_TOOL_END,
RUST_EVENT_SUBTASK, RUST_EVENT_ITERATION,
RUST_EVENT_HAND_START, RUST_EVENT_HAND_END,
RUST_EVENT_COMPLETE, RUST_EVENT_ERROR,
];
const types = variants.map(v => JSON.parse(v).type);
expect(types).toEqual([
'delta', 'thinkingDelta',
'toolStart', 'toolEnd',
'subtaskStatus', 'iterationStart',
'handStart', 'handEnd',
'complete', 'error',
]);
});
});