docs(guide): rewrite CLAUDE.md with ZCLAW-first perspective

Major changes:
- Shift from "OpenFang desktop client" to "independent AI Agent desktop app"
- Add decision principle: "Is this useful for ZCLAW? Does it affect ZCLAW?"
- Simplify project structure and tech stack sections
- Replace OpenClaw vs OpenFang comparison with unified backend approach
- Consolidate troubleshooting from scattered sections into organized FAQ
- Update Hands system documentation with 8 capabilities and status
- Streamline remaining sections (bullet truncated in original message)
This commit is contained in:
iven
2026-03-20 19:30:09 +08:00
parent 3518fc8ece
commit 6f72442531
63 changed files with 8920 additions and 857 deletions

View File

@@ -104,6 +104,8 @@ interface QuickConfig {
personality?: string;
communicationStyle?: string;
notes?: string;
// 启用的 Provider 列表
enabledProviders?: string[];
}
interface WorkspaceInfo {
@@ -779,6 +781,8 @@ export const useGatewayStore = create<GatewayStore>((set, get) => {
get().loadWorkflows(),
get().loadTriggers(),
get().loadSecurityStatus(),
// Load available models
get().loadModels(),
]);
await get().loadChannels();
} catch (err: unknown) {
@@ -852,8 +856,64 @@ export const useGatewayStore = create<GatewayStore>((set, get) => {
loadUsageStats: async () => {
try {
const stats = await get().client.getUsageStats();
set({ usageStats: stats });
} catch { /* ignore */ }
// 如果 API 返回了有效数据,使用它
if (stats && (stats.totalMessages > 0 || stats.totalTokens > 0 || Object.keys(stats.byModel || {}).length > 0)) {
set({ usageStats: stats });
return;
}
} catch { /* ignore API error, fallback to local */ }
// Fallback: 从本地聊天存储计算统计数据
try {
const stored = localStorage.getItem('zclaw-chat-storage');
if (!stored) {
set({ usageStats: { totalSessions: 0, totalMessages: 0, totalTokens: 0, byModel: {} } });
return;
}
const parsed = JSON.parse(stored);
// 处理 persist 中间件格式
const state = parsed?.state || parsed;
const conversations = state?.conversations || [];
// 计算统计数据
const usageStats: UsageStats = {
totalSessions: conversations.length,
totalMessages: 0,
totalTokens: 0,
byModel: {},
};
for (const conv of conversations) {
const messages = conv.messages || [];
usageStats.totalMessages += messages.length;
// 估算 token 数量 (粗略估算: 中文约 1.5 字符/token, 英文约 4 字符/token)
for (const msg of messages) {
const content = msg.content || '';
// 简单估算: 每个字符约 0.3 token (混合中英文的平均值)
const estimatedTokens = Math.ceil(content.length * 0.3);
usageStats.totalTokens += estimatedTokens;
// 按模型分组 (使用 currentModel 或默认)
const model = state.currentModel || 'default';
if (!usageStats.byModel[model]) {
usageStats.byModel[model] = { messages: 0, inputTokens: 0, outputTokens: 0 };
}
usageStats.byModel[model].messages++;
if (msg.role === 'user') {
usageStats.byModel[model].inputTokens += estimatedTokens;
} else {
usageStats.byModel[model].outputTokens += estimatedTokens;
}
}
}
set({ usageStats });
} catch (error) {
console.error('[GatewayStore] Failed to calculate local usage stats:', error);
set({ usageStats: { totalSessions: 0, totalMessages: 0, totalTokens: 0, byModel: {} } });
}
},
loadPluginStatus: async () => {
@@ -1191,9 +1251,15 @@ export const useGatewayStore = create<GatewayStore>((set, get) => {
// === OpenFang Actions ===
loadHands: async () => {
const client = get().client;
if (!client) {
console.warn('[GatewayStore] No client available, skipping loadHands');
return;
}
set({ isLoading: true });
try {
const result = await get().client.listHands();
const result = await client.listHands();
// Map API response to Hand interface
const validStatuses = ['idle', 'running', 'needs_approval', 'error', 'unavailable', 'setup_needed'] as const;
const hands: Hand[] = (result?.hands || []).map(h => {
@@ -1213,8 +1279,10 @@ export const useGatewayStore = create<GatewayStore>((set, get) => {
};
});
set({ hands, isLoading: false });
} catch {
set({ isLoading: false });
} catch (error) {
const errorMsg = error instanceof Error ? error.message : String(error);
console.warn('[GatewayStore] Failed to load hands:', errorMsg);
set({ hands: [], isLoading: false });
/* ignore if hands API not available */
}
},
@@ -1330,12 +1398,20 @@ export const useGatewayStore = create<GatewayStore>((set, get) => {
},
loadWorkflows: async () => {
  // Loads the workflow list from the gateway; clears the list on failure.
  const client = get().client;
  if (!client) {
    console.warn('[GatewayStore] No client available, skipping loadWorkflows');
    return;
  }
  set({ isLoading: true });
  try {
    const result = await client.listWorkflows();
    set({ workflows: result?.workflows || [], isLoading: false });
  } catch (error) {
    // The workflows API may not be available on every backend; log and
    // degrade gracefully to an empty list instead of surfacing an error.
    const errorMsg = error instanceof Error ? error.message : String(error);
    console.warn('[GatewayStore] Failed to load workflows:', errorMsg);
    set({ workflows: [], isLoading: false });
  }
},
@@ -1681,7 +1757,28 @@ export const useGatewayStore = create<GatewayStore>((set, get) => {
try {
set({ modelsLoading: true, modelsError: null });
const result = await get().client.listModels();
const models: GatewayModelChoice[] = result?.models || [];
const rawModels: GatewayModelChoice[] = result?.models || [];
// 获取用户启用的 provider 列表
const enabledProviders = get().quickConfig.enabledProviders as string[] | undefined;
// 去重:基于 id 去重,保留第一个出现的
const seen = new Set<string>();
const models: GatewayModelChoice[] = rawModels.filter(model => {
if (seen.has(model.id)) {
return false;
}
seen.add(model.id);
// 如果用户配置了 enabledProviders只显示启用的 provider 的模型
if (enabledProviders && enabledProviders.length > 0) {
// 从模型 ID 中提取 provider格式provider/model-id
const provider = model.id.split('/')[0];
return enabledProviders.includes(provider);
}
return true;
});
set({ models, modelsLoading: false });
} catch (err: unknown) {
const message = err instanceof Error ? err.message : 'Failed to load models';
@@ -1712,7 +1809,9 @@ export const useGatewayStore = create<GatewayStore>((set, get) => {
};
});
// Dev-only: register the gateway store on a window-level namespace so
// E2E tests can reach into application state. Never runs in production
// builds or in non-browser (SSR/worker) contexts.
if (import.meta.env.DEV && typeof window !== 'undefined') {
  const devGlobal = window as any;
  devGlobal.__ZCLAW_STORES__ = devGlobal.__ZCLAW_STORES__ || {};
  devGlobal.__ZCLAW_STORES__.gateway = useGatewayStore;
}

View File

@@ -206,11 +206,17 @@ export const useHandStore = create<HandStore>((set, get) => ({
loadHands: async () => {
const client = get().client;
if (!client) return;
console.log('[HandStore] loadHands called, client:', !!client);
if (!client) {
console.warn('[HandStore] No client available, skipping loadHands');
return;
}
set({ isLoading: true });
try {
console.log('[HandStore] Calling client.listHands()...');
const result = await client.listHands();
console.log('[HandStore] listHands result:', result);
const validStatuses = ['idle', 'running', 'needs_approval', 'error', 'unavailable', 'setup_needed'] as const;
const hands: Hand[] = (result?.hands || []).map((h: Record<string, unknown>) => {
const status = validStatuses.includes(h.status as Hand['status'])
@@ -228,8 +234,10 @@ export const useHandStore = create<HandStore>((set, get) => ({
metricCount: (h.metric_count as number) || ((h.metrics as unknown[])?.length),
};
});
console.log('[HandStore] Mapped hands:', hands.length, 'items');
set({ hands, isLoading: false });
} catch {
} catch (err) {
console.error('[HandStore] loadHands error:', err);
set({ isLoading: false });
}
},

View File

@@ -139,11 +139,25 @@ export const useTeamStore = create<TeamStoreState>()(
set({ isLoading: true, error: null });
try {
// For now, load from localStorage until API is available
// Note: persist middleware stores data as { state: { teams: [...] }, version: ... }
const stored = localStorage.getItem('zclaw-teams');
const teams: Team[] = stored ? parseJsonOrDefault<Team[]>(stored, []) : [];
let teams: Team[] = [];
if (stored) {
const parsed = JSON.parse(stored);
// Handle persist middleware format
if (parsed?.state?.teams && Array.isArray(parsed.state.teams)) {
teams = parsed.state.teams;
} else if (Array.isArray(parsed)) {
// Direct array format (legacy)
teams = parsed;
}
}
set({ teams, isLoading: false });
} catch (error) {
set({ error: (error as Error).message, isLoading: false });
console.error('[TeamStore] Failed to load teams:', error);
set({ teams: [], isLoading: false });
}
},