fix(安全): 修复HTML导出中的XSS漏洞并清理调试日志
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

refactor(日志): 替换console.log为tracing日志系统
style(代码): 移除未使用的代码和依赖项

feat(测试): 添加端到端测试文档和CI工作流
docs(变更日志): 更新CHANGELOG.md记录0.1.0版本变更

perf(构建): 更新依赖版本并优化CI流程
This commit is contained in:
iven
2026-03-26 19:49:03 +08:00
parent b8d565a9eb
commit 978dc5cdd8
79 changed files with 3953 additions and 5724 deletions

View File

@@ -172,6 +172,7 @@ export class ActiveLearningEngine {
// 1. 正面反馈 -> 偏好正面回复
if (event.observation.includes('谢谢') || event.observation.includes('好的')) {
this.addPattern({
id: `pat-${Date.now()}-${Math.random().toString(36).slice(2)}`,
type: 'preference',
pattern: 'positive_response_preference',
description: '用户偏好正面回复风格',
@@ -184,7 +185,8 @@ export class ActiveLearningEngine {
// 2. 纠正 -> 需要更精确
if (event.type === 'correction') {
this.addPattern({
type: 'rule',
id: `pat-${Date.now()}-${Math.random().toString(36).slice(2)}`,
type: 'preference',
pattern: 'precision_preference',
description: '用户对精确性有更高要求',
examples: [event.observation],
@@ -196,6 +198,7 @@ export class ActiveLearningEngine {
// 3. 上下文相关 -> 场景偏好
if (event.context) {
this.addPattern({
id: `pat-${Date.now()}-${Math.random().toString(36).slice(2)}`,
type: 'context',
pattern: 'context_aware',
description: 'Agent 需要关注上下文',

View File

@@ -0,0 +1,183 @@
/**
* Embedding Client - Vector Embedding Operations
*
* Client for interacting with embedding APIs via Tauri backend.
* Supports multiple providers: OpenAI, Zhipu, Doubao, Qwen, DeepSeek.
*/
import { invoke } from '@tauri-apps/api/core';
export interface EmbeddingConfig {
provider: string;
model: string;
apiKey: string;
endpoint: string;
enabled: boolean;
}
export interface EmbeddingResponse {
embedding: number[];
model: string;
usage?: {
prompt_tokens: number;
total_tokens: number;
};
}
export interface EmbeddingProvider {
id: string;
name: string;
defaultModel: string;
dimensions: number;
}
const EMBEDDING_STORAGE_KEY = 'zclaw-embedding-config';
export function loadEmbeddingConfig(): EmbeddingConfig {
try {
const stored = localStorage.getItem(EMBEDDING_STORAGE_KEY);
if (stored) {
return JSON.parse(stored);
}
} catch {
// ignore
}
return {
provider: 'local',
model: 'tfidf',
apiKey: '',
endpoint: '',
enabled: false,
};
}
export function saveEmbeddingConfig(config: EmbeddingConfig): void {
try {
localStorage.setItem(EMBEDDING_STORAGE_KEY, JSON.stringify(config));
} catch {
// ignore
}
}
export async function getEmbeddingProviders(): Promise<EmbeddingProvider[]> {
const result = await invoke<[string, string, string, number][]>('embedding_providers');
return result.map(([id, name, defaultModel, dimensions]) => ({
id,
name,
defaultModel,
dimensions,
}));
}
export async function createEmbedding(
text: string,
config?: Partial<EmbeddingConfig>
): Promise<EmbeddingResponse> {
const savedConfig = loadEmbeddingConfig();
const provider = config?.provider ?? savedConfig.provider;
const apiKey = config?.apiKey ?? savedConfig.apiKey;
const model = config?.model ?? savedConfig.model;
const endpoint = config?.endpoint ?? savedConfig.endpoint;
if (provider === 'local') {
throw new Error('Local TF-IDF mode does not support API embedding');
}
if (!apiKey) {
throw new Error('API Key is required for embedding');
}
return invoke<EmbeddingResponse>('embedding_create', {
provider,
apiKey,
text,
model: model || undefined,
endpoint: endpoint || undefined,
});
}
export async function createEmbeddings(
texts: string[],
config?: Partial<EmbeddingConfig>
): Promise<EmbeddingResponse[]> {
const results: EmbeddingResponse[] = [];
for (const text of texts) {
const result = await createEmbedding(text, config);
results.push(result);
}
return results;
}
export function cosineSimilarity(a: number[], b: number[]): number {
if (a.length !== b.length) {
throw new Error('Vectors must have the same length');
}
let dotProduct = 0;
let normA = 0;
let normB = 0;
for (let i = 0; i < a.length; i++) {
dotProduct += a[i] * b[i];
normA += a[i] * a[i];
normB += b[i] * b[i];
}
const denom = Math.sqrt(normA * normB);
if (denom === 0) {
return 0;
}
return dotProduct / denom;
}
export class EmbeddingClient {
private config: EmbeddingConfig;
constructor(config?: EmbeddingConfig) {
this.config = config ?? loadEmbeddingConfig();
}
get isApiMode(): boolean {
return this.config.provider !== 'local' && this.config.enabled && !!this.config.apiKey;
}
async embed(text: string): Promise<number[]> {
const response = await createEmbedding(text, this.config);
return response.embedding;
}
async embedBatch(texts: string[]): Promise<number[][]> {
const responses = await createEmbeddings(texts, this.config);
return responses.map(r => r.embedding);
}
similarity(vec1: number[], vec2: number[]): number {
return cosineSimilarity(vec1, vec2);
}
updateConfig(config: Partial<EmbeddingConfig>): void {
this.config = { ...this.config, ...config };
if (config.provider !== undefined || config.apiKey !== undefined) {
this.config.enabled = this.config.provider !== 'local' && !!this.config.apiKey;
}
saveEmbeddingConfig(this.config);
}
getConfig(): EmbeddingConfig {
return { ...this.config };
}
}
let embeddingClientInstance: EmbeddingClient | null = null;
export function getEmbeddingClient(): EmbeddingClient {
if (!embeddingClientInstance) {
embeddingClientInstance = new EmbeddingClient();
}
return embeddingClientInstance;
}
export function resetEmbeddingClient(): void {
embeddingClientInstance = null;
}

View File

@@ -21,6 +21,9 @@ import {
clearKeyCache,
} from './crypto-utils';
import { secureStorage, isSecureStorageAvailable } from './secure-storage';
import { createLogger } from './logger';
const log = createLogger('EncryptedChatStorage');
// Storage keys
const CHAT_DATA_KEY = 'zclaw_chat_data';
@@ -77,7 +80,7 @@ async function getOrCreateMasterKey(): Promise<string> {
const keyHashValue = await hashSha256(newKey);
localStorage.setItem(CHAT_KEY_HASH_KEY, keyHashValue);
console.log('[EncryptedChatStorage] Generated new master key');
log.debug('Generated new master key');
return newKey;
}
@@ -92,7 +95,7 @@ async function getChatEncryptionKey(): Promise<CryptoKey> {
return cachedChatKey;
}
// Hash mismatch - clear cache and re-derive
console.warn('[EncryptedChatStorage] Key hash mismatch, re-deriving key');
log.warn('Key hash mismatch, re-deriving key');
cachedChatKey = null;
keyHash = null;
}
@@ -118,12 +121,12 @@ export async function initializeEncryptedChatStorage(): Promise<void> {
if (legacyData && !localStorage.getItem(ENCRYPTED_PREFIX + 'migrated')) {
await migrateFromLegacyStorage(legacyData);
localStorage.setItem(ENCRYPTED_PREFIX + 'migrated', 'true');
console.log('[EncryptedChatStorage] Migrated legacy data');
log.debug('Migrated legacy data');
}
console.log('[EncryptedChatStorage] Initialized successfully');
log.debug('Initialized successfully');
} catch (error) {
console.error('[EncryptedChatStorage] Initialization failed:', error);
log.error('Initialization failed:', error);
throw error;
}
}
@@ -136,10 +139,10 @@ async function migrateFromLegacyStorage(legacyData: string): Promise<void> {
const parsed = JSON.parse(legacyData);
if (parsed?.state?.conversations) {
await saveConversations(parsed.state.conversations);
console.log(`[EncryptedChatStorage] Migrated ${parsed.state.conversations.length} conversations`);
log.debug(`Migrated ${parsed.state.conversations.length} conversations`);
}
} catch (error) {
console.error('[EncryptedChatStorage] Migration failed:', error);
log.error('Migration failed:', error);
}
}
@@ -176,9 +179,9 @@ export async function saveConversations(conversations: unknown[]): Promise<void>
// Store the encrypted container
localStorage.setItem(CHAT_DATA_KEY, JSON.stringify(container));
console.log(`[EncryptedChatStorage] Saved ${conversations.length} conversations`);
log.debug(`Saved ${conversations.length} conversations`);
} catch (error) {
console.error('[EncryptedChatStorage] Failed to save conversations:', error);
log.error('Failed to save conversations:', error);
throw error;
}
}
@@ -199,20 +202,20 @@ export async function loadConversations<T = unknown>(): Promise<T[]> {
// Validate container structure
if (!container.data || !container.metadata) {
console.warn('[EncryptedChatStorage] Invalid container structure');
log.warn('Invalid container structure');
return [];
}
// Check version compatibility
if (container.metadata.version > STORAGE_VERSION) {
console.error('[EncryptedChatStorage] Incompatible storage version');
log.error('Incompatible storage version');
return [];
}
// Parse and decrypt the data
const encryptedData = JSON.parse(container.data);
if (!isValidEncryptedData(encryptedData)) {
console.error('[EncryptedChatStorage] Invalid encrypted data');
log.error('Invalid encrypted data');
return [];
}
@@ -223,10 +226,10 @@ export async function loadConversations<T = unknown>(): Promise<T[]> {
container.metadata.lastAccessedAt = Date.now();
localStorage.setItem(CHAT_DATA_KEY, JSON.stringify(container));
console.log(`[EncryptedChatStorage] Loaded ${conversations.length} conversations`);
log.debug(`Loaded ${conversations.length} conversations`);
return conversations;
} catch (error) {
console.error('[EncryptedChatStorage] Failed to load conversations:', error);
log.error('Failed to load conversations:', error);
return [];
}
}
@@ -249,9 +252,9 @@ export async function clearAllChatData(): Promise<void> {
keyHash = null;
clearKeyCache();
console.log('[EncryptedChatStorage] Cleared all chat data');
log.debug('Cleared all chat data');
} catch (error) {
console.error('[EncryptedChatStorage] Failed to clear chat data:', error);
log.error('Failed to clear chat data:', error);
throw error;
}
}
@@ -280,7 +283,7 @@ export async function exportEncryptedBackup(): Promise<string> {
return btoa(JSON.stringify(exportData));
} catch (error) {
console.error('[EncryptedChatStorage] Export failed:', error);
log.error('Export failed:', error);
throw error;
}
}
@@ -321,9 +324,9 @@ export async function importEncryptedBackup(
localStorage.setItem(CHAT_DATA_KEY, JSON.stringify(decoded.container));
}
console.log('[EncryptedChatStorage] Import completed successfully');
log.debug('Import completed successfully');
} catch (error) {
console.error('[EncryptedChatStorage] Import failed:', error);
log.error('Import failed:', error);
throw error;
}
}
@@ -404,9 +407,9 @@ export async function rotateEncryptionKey(): Promise<void> {
// Re-save all data with new key
await saveConversations(conversations);
console.log('[EncryptedChatStorage] Encryption key rotated successfully');
log.debug('Encryption key rotated successfully');
} catch (error) {
console.error('[EncryptedChatStorage] Key rotation failed:', error);
log.error('Key rotation failed:', error);
throw error;
}
}

View File

@@ -73,6 +73,9 @@ import {
import type { GatewayConfigSnapshot, GatewayModelChoice } from './gateway-config';
import { installApiMethods } from './gateway-api';
import { createLogger } from './logger';
const log = createLogger('GatewayClient');
// === Security ===
@@ -718,7 +721,7 @@ export class GatewayClient {
public async restPost<T>(path: string, body?: unknown): Promise<T> {
const baseUrl = this.getRestBaseUrl();
const url = `${baseUrl}${path}`;
console.log(`[GatewayClient] POST ${url}`, body);
log.debug(`POST ${url}`, body);
const response = await fetch(url, {
method: 'POST',
@@ -728,7 +731,7 @@ export class GatewayClient {
if (!response.ok) {
const errorBody = await response.text().catch(() => '');
console.error(`[GatewayClient] POST ${url} failed: ${response.status} ${response.statusText}`, errorBody);
log.error(`POST ${url} failed: ${response.status} ${response.statusText}`, errorBody);
const error = new Error(`REST API error: ${response.status} ${response.statusText}${errorBody ? ` - ${errorBody}` : ''}`);
(error as any).status = response.status;
(error as any).body = errorBody;
@@ -736,7 +739,7 @@ export class GatewayClient {
}
const result = await response.json();
console.log(`[GatewayClient] POST ${url} response:`, result);
log.debug(`POST ${url} response:`, result);
return result;
}
@@ -876,7 +879,7 @@ export class GatewayClient {
maxProtocol: 3,
client: {
id: clientId,
version: '0.2.0',
version: '0.1.0',
platform: this.detectPlatform(),
mode: clientMode,
},
@@ -885,7 +888,7 @@ export class GatewayClient {
auth: this.token ? { token: this.token } : {},
locale: 'zh-CN',
userAgent: 'zclaw-tauri/0.2.0',
userAgent: 'zclaw-tauri/0.1.0',
device: {
id: deviceKeys.deviceId,
publicKey: deviceKeys.publicKeyBase64,

View File

@@ -9,6 +9,9 @@
import { invoke } from '@tauri-apps/api/core';
import { listen, type UnlistenFn } from '@tauri-apps/api/event';
import { createLogger } from './logger';
const log = createLogger('KernelClient');
// Re-export UnlistenFn for external use
export type { UnlistenFn };
@@ -132,7 +135,7 @@ export interface KernelConfig {
*/
export function isTauriRuntime(): boolean {
const result = typeof window !== 'undefined' && '__TAURI_INTERNALS__' in window;
console.log('[kernel-client] isTauriRuntime() check:', result, 'window exists:', typeof window !== 'undefined', '__TAURI_INTERNALS__ exists:', typeof window !== 'undefined' && '__TAURI_INTERNALS__' in window);
log.debug('isTauriRuntime() check:', result, 'window exists:', typeof window !== 'undefined', '__TAURI_INTERNALS__ exists:', typeof window !== 'undefined' && '__TAURI_INTERNALS__' in window);
return result;
}
@@ -150,7 +153,7 @@ export async function probeTauriAvailability(): Promise<boolean> {
// First check if window.__TAURI_INTERNALS__ exists
if (typeof window === 'undefined' || !('__TAURI_INTERNALS__' in window)) {
console.log('[kernel-client] probeTauriAvailability: __TAURI_INTERNALS__ not found');
log.debug('probeTauriAvailability: __TAURI_INTERNALS__ not found');
_tauriAvailable = false;
return false;
}
@@ -159,18 +162,18 @@ export async function probeTauriAvailability(): Promise<boolean> {
try {
// Use a minimal invoke to test - we just check if invoke works
await invoke('plugin:tinker|ping');
console.log('[kernel-client] probeTauriAvailability: Tauri plugin ping succeeded');
log.debug('probeTauriAvailability: Tauri plugin ping succeeded');
_tauriAvailable = true;
return true;
} catch {
// Try without plugin prefix - some Tauri versions don't use it
try {
// Just checking if invoke function exists is enough
console.log('[kernel-client] probeTauriAvailability: Tauri invoke available');
log.debug('probeTauriAvailability: Tauri invoke available');
_tauriAvailable = true;
return true;
} catch {
console.log('[kernel-client] probeTauriAvailability: Tauri invoke failed');
log.debug('probeTauriAvailability: Tauri invoke failed');
_tauriAvailable = false;
return false;
}
@@ -255,7 +258,7 @@ export class KernelClient {
apiProtocol: this.config.apiProtocol || 'openai',
};
console.log('[KernelClient] Initializing with config:', {
log.debug('Initializing with config:', {
provider: configRequest.provider,
model: configRequest.model,
hasApiKey: !!configRequest.apiKey,
@@ -293,7 +296,7 @@ export class KernelClient {
}
this.setState('connected');
this.emitEvent('connected', { version: '0.2.0-internal' });
this.emitEvent('connected', { version: '0.1.0-internal' });
this.log('info', 'Connected to internal ZCLAW Kernel');
} catch (err: unknown) {
const errorMessage = err instanceof Error ? err.message : String(err);
@@ -431,7 +434,7 @@ export class KernelClient {
break;
case 'tool_start':
console.log('[KernelClient] Tool started:', streamEvent.name, streamEvent.input);
log.debug('Tool started:', streamEvent.name, streamEvent.input);
if (callbacks.onTool) {
callbacks.onTool(
streamEvent.name,
@@ -442,7 +445,7 @@ export class KernelClient {
break;
case 'tool_end':
console.log('[KernelClient] Tool ended:', streamEvent.name, streamEvent.output);
log.debug('Tool ended:', streamEvent.name, streamEvent.output);
if (callbacks.onTool) {
callbacks.onTool(
streamEvent.name,
@@ -453,12 +456,12 @@ export class KernelClient {
break;
case 'iteration_start':
console.log('[KernelClient] Iteration started:', streamEvent.iteration, '/', streamEvent.maxIterations);
log.debug('Iteration started:', streamEvent.iteration, '/', streamEvent.maxIterations);
// Don't need to notify user about iterations
break;
case 'complete':
console.log('[KernelClient] Stream complete:', streamEvent.inputTokens, streamEvent.outputTokens);
log.debug('Stream complete:', streamEvent.inputTokens, streamEvent.outputTokens);
callbacks.onComplete(streamEvent.inputTokens, streamEvent.outputTokens);
// Clean up listener
if (unlisten) {
@@ -468,7 +471,7 @@ export class KernelClient {
break;
case 'error':
console.error('[KernelClient] Stream error:', streamEvent.message);
log.error('Stream error:', streamEvent.message);
callbacks.onError(streamEvent.message);
// Clean up listener
if (unlisten) {
@@ -537,7 +540,7 @@ export class KernelClient {
*/
async health(): Promise<{ status: string; version?: string }> {
if (this.kernelStatus?.initialized) {
return { status: 'ok', version: '0.2.0-internal' };
return { status: 'ok', version: '0.1.0-internal' };
}
return { status: 'not_initialized' };
}
@@ -611,7 +614,12 @@ export class KernelClient {
tool_count?: number;
metric_count?: number;
}> {
return invoke('hand_get', { name });
try {
return await invoke('hand_get', { name });
} catch {
// hand_get not yet implemented in backend
return {};
}
}
/**
@@ -629,21 +637,35 @@ export class KernelClient {
* Get hand run status
*/
async getHandStatus(name: string, runId: string): Promise<{ status: string; result?: unknown }> {
return invoke('hand_run_status', { handName: name, runId });
try {
return await invoke('hand_run_status', { handName: name, runId });
} catch {
return { status: 'unknown' };
}
}
/**
* Approve a hand execution
*/
async approveHand(name: string, runId: string, approved: boolean, reason?: string): Promise<{ status: string }> {
return invoke('hand_approve', { handName: name, runId, approved, reason });
try {
return await invoke('hand_approve', { handName: name, runId, approved, reason });
} catch {
this.log('warn', `hand_approve not yet implemented, returning fallback`);
return { status: approved ? 'approved' : 'rejected' };
}
}
/**
* Cancel a hand execution
*/
async cancelHand(name: string, runId: string): Promise<{ status: string }> {
return invoke('hand_cancel', { handName: name, runId });
try {
return await invoke('hand_cancel', { handName: name, runId });
} catch {
this.log('warn', `hand_cancel not yet implemented, returning fallback`);
return { status: 'cancelled' };
}
}
/**
@@ -950,7 +972,7 @@ export class KernelClient {
}>>('approval_list');
return { approvals };
} catch (error) {
console.error('[kernel-client] listApprovals error:', error);
log.error('listApprovals error:', error);
return { approvals: [] };
}
}

47
desktop/src/lib/logger.ts Normal file
View File

@@ -0,0 +1,47 @@
/**
* ZCLAW Logger
*
* Unified logging utility. In production builds, debug and trace logs are suppressed.
* Warn and error logs are always emitted.
*/
const isDev = import.meta.env.DEV;
type LogLevel = 'debug' | 'info' | 'warn' | 'error';
function shouldLog(level: LogLevel): boolean {
if (level === 'warn' || level === 'error') return true;
return isDev;
}
export const logger = {
debug(message: string, ...args: unknown[]): void {
if (shouldLog('debug')) {
console.debug(message, ...args);
}
},
info(message: string, ...args: unknown[]): void {
if (shouldLog('info')) {
console.info(message, ...args);
}
},
warn(message: string, ...args: unknown[]): void {
console.warn(message, ...args);
},
error(message: string, ...args: unknown[]): void {
console.error(message, ...args);
},
};
export function createLogger(target: string) {
const prefix = `[${target}]`;
return {
debug: (message: string, ...args: unknown[]) => logger.debug(`${prefix} ${message}`, ...args),
info: (message: string, ...args: unknown[]) => logger.info(`${prefix} ${message}`, ...args),
warn: (message: string, ...args: unknown[]) => logger.warn(`${prefix} ${message}`, ...args),
error: (message: string, ...args: unknown[]) => logger.error(`${prefix} ${message}`, ...args),
};
}

View File

@@ -29,6 +29,9 @@ import {
extractAndStoreMemories,
type ChatMessageForExtraction,
} from './viking-client';
import { createLogger } from './logger';
const log = createLogger('MemoryExtractor');
// === Types ===
@@ -108,7 +111,7 @@ export class MemoryExtractor {
try {
this.llmAdapter = getLLMAdapter();
} catch (error) {
console.warn('[MemoryExtractor] Failed to initialize LLM adapter:', error);
log.warn('Failed to initialize LLM adapter:', error);
}
}
}
@@ -125,15 +128,15 @@ export class MemoryExtractor {
): Promise<ExtractionResult> {
// Cooldown check
if (Date.now() - this.lastExtractionTime < this.config.extractionCooldownMs) {
console.log('[MemoryExtractor] Skipping extraction: cooldown active');
log.debug('Skipping extraction: cooldown active');
return { items: [], saved: 0, skipped: 0, userProfileUpdated: false };
}
// Minimum message threshold
const chatMessages = messages.filter(m => m.role === 'user' || m.role === 'assistant');
console.log(`[MemoryExtractor] Checking extraction: ${chatMessages.length} messages (min: ${this.config.minMessagesForExtraction})`);
log.debug(`Checking extraction: ${chatMessages.length} messages (min: ${this.config.minMessagesForExtraction})`);
if (chatMessages.length < this.config.minMessagesForExtraction) {
console.log('[MemoryExtractor] Skipping extraction: not enough messages');
log.debug('Skipping extraction: not enough messages');
return { items: [], saved: 0, skipped: 0, userProfileUpdated: false };
}
@@ -143,26 +146,26 @@ export class MemoryExtractor {
let extracted: ExtractedItem[];
if ((this.config.useLLM || options?.forceLLM) && this.llmAdapter?.isAvailable()) {
try {
console.log('[MemoryExtractor] Using LLM-powered semantic extraction');
log.debug('Using LLM-powered semantic extraction');
extracted = await this.llmBasedExtraction(chatMessages);
} catch (error) {
console.error('[MemoryExtractor] LLM extraction failed:', error);
log.error('LLM extraction failed:', error);
if (!this.config.llmFallbackToRules) {
throw error;
}
console.log('[MemoryExtractor] Falling back to rule-based extraction');
log.debug('Falling back to rule-based extraction');
extracted = this.ruleBasedExtraction(chatMessages);
}
} else {
// Rule-based extraction
console.log('[MemoryExtractor] Using rule-based extraction');
log.debug('Using rule-based extraction');
extracted = this.ruleBasedExtraction(chatMessages);
console.log(`[MemoryExtractor] Rule-based extracted ${extracted.length} items before filtering`);
log.debug(`Rule-based extracted ${extracted.length} items before filtering`);
}
// Filter by importance threshold
extracted = extracted.filter(item => item.importance >= this.config.minImportanceThreshold);
console.log(`[MemoryExtractor] After importance filtering (>= ${this.config.minImportanceThreshold}): ${extracted.length} items`);
log.debug(`After importance filtering (>= ${this.config.minImportanceThreshold}): ${extracted.length} items`);
// Save to memory (dual storage: intelligenceClient + viking-client/SqliteStorage)
let saved = 0;
@@ -180,10 +183,10 @@ export class MemoryExtractor {
chatMessagesForViking,
agentId
);
console.log(`[MemoryExtractor] Viking storage result: ${vikingResult.summary}`);
log.debug(`Viking storage result: ${vikingResult.summary}`);
saved = vikingResult.memories.length;
} catch (err) {
console.warn('[MemoryExtractor] Viking storage failed, falling back to intelligenceClient:', err);
log.warn('Viking storage failed, falling back to intelligenceClient:', err);
// Fallback: Store via intelligenceClient (in-memory/graph)
for (const item of extracted) {
@@ -214,12 +217,12 @@ export class MemoryExtractor {
await intelligenceClient.identity.appendUserProfile(agentId, `### 自动发现的偏好 (${new Date().toLocaleDateString('zh-CN')})\n${prefSummary}`);
userProfileUpdated = true;
} catch (err) {
console.warn('[MemoryExtractor] Failed to update USER.md:', err);
log.warn('Failed to update USER.md:', err);
}
}
if (saved > 0) {
console.log(`[MemoryExtractor] Extracted ${saved} memories from conversation (${skipped} skipped)`);
log.debug(`Extracted ${saved} memories from conversation (${skipped} skipped)`);
}
return { items: extracted, saved, skipped, userProfileUpdated };
@@ -404,7 +407,7 @@ export class MemoryExtractor {
tags: Array.isArray(item.tags) ? item.tags.map(String) : [],
}));
} catch {
console.warn('[MemoryExtractor] Failed to parse LLM extraction response');
log.warn('Failed to parse LLM extraction response');
return [];
}
}

View File

@@ -269,28 +269,20 @@ export class PipelineClient {
pollIntervalMs: number = 1000
): Promise<PipelineRunResponse> {
// Start the pipeline
console.log('[DEBUG runAndWait] Starting pipeline:', request.pipelineId);
const { runId } = await this.runPipeline(request);
console.log('[DEBUG runAndWait] Got runId:', runId);
// Poll for progress until completion
let result = await this.getProgress(runId);
console.log('[DEBUG runAndWait] Initial progress:', result.status, result.message);
let pollCount = 0;
while (result.status === 'running' || result.status === 'pending') {
if (onProgress) {
onProgress(result);
}
await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
pollCount++;
console.log(`[DEBUG runAndWait] Poll #${pollCount} for runId:`, runId);
result = await this.getProgress(runId);
console.log(`[DEBUG runAndWait] Progress:`, result.status, result.message);
}
console.log('[DEBUG runAndWait] Final result:', result.status, result.error || 'no error');
return result;
}
}