Compare commits
10 Commits
eb956d0dce
...
3e5d64484e
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3e5d64484e | ||
|
|
ee51d5abcd | ||
|
|
f79560a911 | ||
|
|
d0ae7d2770 | ||
|
|
8e6abc91e1 | ||
|
|
1d9283f335 | ||
|
|
49abd0fe89 | ||
|
|
c9b9c5231b | ||
|
|
9fb9c3204c | ||
|
|
3e57fadfc9 |
106
Dockerfile
Normal file
106
Dockerfile
Normal file
@@ -0,0 +1,106 @@
|
||||
# syntax=docker/dockerfile:1
# ============================================================
# ZCLAW SaaS Backend - Multi-stage Docker Build
# ============================================================
# Build: docker build -t zclaw-saas .
# Run:   docker run --env-file saas-env.example zclaw-saas
# ============================================================
#
# .dockerignore recommended contents:
#   target/
#   node_modules/
#   desktop/*
#   !desktop/src-tauri
#   # NOTE: desktop/src-tauri is a cargo workspace member and is COPY'd
#   # below — ignoring all of desktop/ would break those COPY steps.
#   admin/
#   admin-v2/
#   docs/
#   .git/
#   .claude/
#   *.md
#   *.pen
#   plans/
#   dist/
#   pencil-new.pen
# ============================================================

# ---- Stage 1: Build ----
FROM rust:1.85-bookworm AS builder

WORKDIR /usr/src/zclaw

# Cache dependency builds by copying workspace manifests first
COPY Cargo.toml Cargo.lock ./

# Create dummy sources so cargo can resolve the whole workspace.
# One RUN layer: empty lib.rs for library crates, a stub main for the
# zclaw-saas binary, and a stub lib for the tauri workspace member.
RUN mkdir -p crates/zclaw-types/src crates/zclaw-memory/src \
             crates/zclaw-runtime/src crates/zclaw-kernel/src \
             crates/zclaw-skills/src crates/zclaw-hands/src \
             crates/zclaw-protocols/src crates/zclaw-pipeline/src \
             crates/zclaw-growth/src crates/zclaw-saas/src \
             desktop/src-tauri/src \
    && for c in types memory runtime kernel skills hands protocols pipeline growth; do \
         echo "" > "crates/zclaw-$c/src/lib.rs"; \
       done \
    && echo "fn main() {}" > crates/zclaw-saas/src/main.rs \
    && echo "" > desktop/src-tauri/src/lib.rs

# Copy all crate Cargo.toml files for dependency resolution
COPY crates/zclaw-types/Cargo.toml crates/zclaw-types/Cargo.toml
COPY crates/zclaw-memory/Cargo.toml crates/zclaw-memory/Cargo.toml
COPY crates/zclaw-runtime/Cargo.toml crates/zclaw-runtime/Cargo.toml
COPY crates/zclaw-kernel/Cargo.toml crates/zclaw-kernel/Cargo.toml
COPY crates/zclaw-skills/Cargo.toml crates/zclaw-skills/Cargo.toml
COPY crates/zclaw-hands/Cargo.toml crates/zclaw-hands/Cargo.toml
COPY crates/zclaw-protocols/Cargo.toml crates/zclaw-protocols/Cargo.toml
COPY crates/zclaw-pipeline/Cargo.toml crates/zclaw-pipeline/Cargo.toml
COPY crates/zclaw-growth/Cargo.toml crates/zclaw-growth/Cargo.toml
COPY crates/zclaw-saas/Cargo.toml crates/zclaw-saas/Cargo.toml
COPY desktop/src-tauri/Cargo.toml desktop/src-tauri/Cargo.toml

# Build dependencies only (cached layer). The dummy build may legitimately
# fail before real sources exist, hence `|| true` — but stderr is kept
# visible (no 2>/dev/null) so genuine dependency errors show in the log.
RUN cargo build --release -p zclaw-saas || true

# Now copy the actual source code
COPY crates/ crates/
COPY desktop/src-tauri/src/ desktop/src-tauri/src/

# Touch ALL copied .rs sources so their mtimes are newer than the
# dummy-build artifacts. Touching only crates/zclaw-saas/src (as a naive
# version would) can let cargo reuse the stale empty-lib rlibs of the
# sibling workspace crates in the final link.
RUN find crates desktop/src-tauri/src -type f -name '*.rs' -exec touch {} +

# Build the final binary
RUN cargo build --release -p zclaw-saas

# ---- Stage 2: Runtime ----
FROM debian:bookworm-slim

# Install runtime dependencies; curl is required by the HEALTHCHECK below
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
    && rm -rf /var/lib/apt/lists/*

# Create non-root user for security. Fixed numeric UID/GID (1000) so
# orchestrators enforcing runAsNonRoot can verify it.
RUN groupadd --gid 1000 zclaw && \
    useradd --uid 1000 --gid zclaw --shell /bin/bash --create-home zclaw

WORKDIR /app

# Copy binary from builder; --chown avoids a separate chown layer that
# would duplicate the file in the image
COPY --from=builder --chown=zclaw:zclaw /usr/src/zclaw/target/release/zclaw-saas ./zclaw-saas

# Copy default config (can be overridden by env vars)
COPY --chown=zclaw:zclaw saas-config.toml ./saas-config.toml

# Switch to non-root user before any runtime instruction
USER zclaw

# Expose SaaS backend port (documentation only — publish with -p at run time)
EXPOSE 8080

# Health check against the app's own /health endpoint
HEALTHCHECK --interval=30s --timeout=5s --start-period=15s --retries=3 \
    CMD curl -f http://localhost:8080/health || exit 1

# Exec form: zclaw-saas runs as PID 1 and receives SIGTERM from docker stop
ENTRYPOINT ["./zclaw-saas"]
|
||||
@@ -7,10 +7,11 @@
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
"preview": "vite preview",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ant-design/charts": "^2.6.7",
|
||||
"@ant-design/icons": "^6.1.1",
|
||||
"@ant-design/pro-components": "^2.8.10",
|
||||
"@ant-design/pro-layout": "^7.22.7",
|
||||
@@ -24,6 +25,9 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.4",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
"@types/node": "^24.12.0",
|
||||
"@types/react": "^19.2.14",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
@@ -32,8 +36,11 @@
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
"eslint-plugin-react-refresh": "^0.5.2",
|
||||
"globals": "^17.4.0",
|
||||
"jsdom": "^29.0.1",
|
||||
"msw": "^2.12.14",
|
||||
"typescript": "~5.9.3",
|
||||
"typescript-eslint": "^8.57.0",
|
||||
"vite": "^8.0.1"
|
||||
"vite": "^8.0.1",
|
||||
"vitest": "^4.1.2"
|
||||
}
|
||||
}
|
||||
|
||||
2232
admin-v2/pnpm-lock.yaml
generated
2232
admin-v2/pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -2,6 +2,7 @@
|
||||
// 账号管理
|
||||
// ============================================================
|
||||
|
||||
import { useState } from 'react'
|
||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
|
||||
import { Button, message, Tag, Modal, Form, Input, Select, Popconfirm, Space } from 'antd'
|
||||
import { PlusOutlined } from '@ant-design/icons'
|
||||
@@ -68,35 +69,51 @@ export default function Accounts() {
|
||||
|
||||
const columns: ProColumns<AccountPublic>[] = [
|
||||
{ title: '用户名', dataIndex: 'username', width: 120 },
|
||||
{ title: '显示名', dataIndex: 'display_name', width: 120 },
|
||||
{ title: '显示名', dataIndex: 'display_name', width: 120, hideInSearch: true },
|
||||
{ title: '邮箱', dataIndex: 'email', width: 180 },
|
||||
{
|
||||
title: '角色',
|
||||
dataIndex: 'role',
|
||||
width: 120,
|
||||
hideInSearch: true,
|
||||
render: (_, record) => <Tag color={roleColors[record.role]}>{roleLabels[record.role] || record.role}</Tag>,
|
||||
},
|
||||
{
|
||||
title: '状态',
|
||||
dataIndex: 'status',
|
||||
width: 100,
|
||||
hideInSearch: true,
|
||||
render: (_, record) => <Tag color={statusColors[record.status]}>{statusLabels[record.status] || record.status}</Tag>,
|
||||
},
|
||||
{
|
||||
title: '2FA',
|
||||
dataIndex: 'totp_enabled',
|
||||
width: 80,
|
||||
hideInSearch: true,
|
||||
render: (_, record) => record.totp_enabled ? <Tag color="green">已启用</Tag> : <Tag>未启用</Tag>,
|
||||
},
|
||||
{
|
||||
title: 'LLM 路由',
|
||||
dataIndex: 'llm_routing',
|
||||
width: 120,
|
||||
hideInSearch: true,
|
||||
valueType: 'select',
|
||||
valueEnum: {
|
||||
relay: { text: 'SaaS 中转', status: 'Success' },
|
||||
local: { text: '本地直连', status: 'Default' },
|
||||
},
|
||||
},
|
||||
{
|
||||
title: '最后登录',
|
||||
dataIndex: 'last_login_at',
|
||||
width: 180,
|
||||
hideInSearch: true,
|
||||
render: (_, record) => record.last_login_at ? new Date(record.last_login_at).toLocaleString('zh-CN') : '-',
|
||||
},
|
||||
{
|
||||
title: '操作',
|
||||
width: 200,
|
||||
hideInSearch: true,
|
||||
render: (_, record) => (
|
||||
<Space>
|
||||
<Button size="small" onClick={() => { setEditingId(record.id); form.setFieldsValue(record); setModalOpen(true) }}>
|
||||
@@ -130,7 +147,7 @@ export default function Accounts() {
|
||||
dataSource={data?.items ?? []}
|
||||
loading={isLoading}
|
||||
rowKey="id"
|
||||
search={false}
|
||||
search={{}}
|
||||
toolBarRender={() => []}
|
||||
pagination={{
|
||||
total: data?.total ?? 0,
|
||||
@@ -161,10 +178,14 @@ export default function Accounts() {
|
||||
{ value: 'user', label: '用户' },
|
||||
]} />
|
||||
</Form.Item>
|
||||
<Form.Item name="llm_routing" label="LLM 路由模式">
|
||||
<Select options={[
|
||||
{ value: 'local', label: '本地直连' },
|
||||
{ value: 'relay', label: 'SaaS 中转 (Token 池)' },
|
||||
]} />
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Modal>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
import { useState } from 'react'
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
|
||||
import { useState } from 'react'
|
||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
|
||||
import { Button, message, Tag, Modal, Form, Input, Select, InputNumber, Space, Popconfirm, Descriptions } from 'antd'
|
||||
import { Button, message, Tag, Modal, Form, Input, Select, InputNumber, Space, Popconfirm, Descriptions, MinusCircleOutlined } from 'antd'
|
||||
import { PlusOutlined } from '@ant-design/icons'
|
||||
import type { ProColumns } from '@ant-design/pro-components'
|
||||
import { ProTable } from '@ant-design/pro-components'
|
||||
@@ -51,6 +51,7 @@ export default function AgentTemplates() {
|
||||
})
|
||||
|
||||
const columns: ProColumns<AgentTemplate>[] = [
|
||||
{ title: '图标', dataIndex: 'emoji', width: 60 },
|
||||
{ title: '名称', dataIndex: 'name', width: 160 },
|
||||
{ title: '分类', dataIndex: 'category', width: 100 },
|
||||
{ title: '模型', dataIndex: 'model', width: 140, render: (_, r) => r.model || '-' },
|
||||
@@ -152,6 +153,53 @@ export default function AgentTemplates() {
|
||||
{ value: 'private', label: '私有' },
|
||||
]} />
|
||||
</Form.Item>
|
||||
<Form.Item name="emoji" label="图标">
|
||||
<Input placeholder="如 🏥" />
|
||||
</Form.Item>
|
||||
<Form.Item name="personality" label="人格预设">
|
||||
<Select options={[
|
||||
{ value: 'professional', label: '专业' },
|
||||
{ value: 'friendly', label: '友好' },
|
||||
{ value: 'creative', label: '创意' },
|
||||
{ value: 'concise', label: '简洁' },
|
||||
]} allowClear placeholder="选择人格预设" />
|
||||
</Form.Item>
|
||||
<Form.Item name="soul_content" label="SOUL.md 人格配置">
|
||||
<TextArea rows={8} />
|
||||
</Form.Item>
|
||||
<Form.Item name="welcome_message" label="欢迎语">
|
||||
<TextArea rows={2} />
|
||||
</Form.Item>
|
||||
<Form.Item name="communication_style" label="沟通风格">
|
||||
<TextArea rows={2} />
|
||||
</Form.Item>
|
||||
<Form.Item name="source_id" label="模板标识">
|
||||
<Input placeholder="如 medical-assistant-v1" />
|
||||
</Form.Item>
|
||||
<Form.Item name="scenarios" label="使用场景">
|
||||
<Select mode="tags" placeholder="输入场景标签后按回车" />
|
||||
</Form.Item>
|
||||
<Form.List name="quick_commands">
|
||||
{(fields, { add, remove }) => (
|
||||
<>
|
||||
<div style={{ marginBottom: 8, fontWeight: 500 }}>快捷命令</div>
|
||||
{fields.map(({ key, name, ...restField }) => (
|
||||
<Space key={key} style={{ display: 'flex', marginBottom: 8 }} align="baseline">
|
||||
<Form.Item {...restField} name={[name, 'label']} rules={[{ required: true, message: '请输入标签' }]}>
|
||||
<Input placeholder="标签" style={{ width: 140 }} />
|
||||
</Form.Item>
|
||||
<Form.Item {...restField} name={[name, 'command']} rules={[{ required: true, message: '请输入命令' }]}>
|
||||
<Input placeholder="命令/提示词" style={{ width: 280 }} />
|
||||
</Form.Item>
|
||||
<MinusCircleOutlined onClick={() => remove(name)} />
|
||||
</Space>
|
||||
))}
|
||||
<Button type="dashed" onClick={() => add()} block icon={<PlusOutlined />}>
|
||||
添加快捷命令
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</Form.List>
|
||||
</Form>
|
||||
</Modal>
|
||||
|
||||
@@ -164,18 +212,38 @@ export default function AgentTemplates() {
|
||||
>
|
||||
{detailRecord && (
|
||||
<Descriptions column={2} bordered size="small">
|
||||
<Descriptions.Item label="图标">{detailRecord.emoji || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="名称">{detailRecord.name}</Descriptions.Item>
|
||||
<Descriptions.Item label="分类">{detailRecord.category}</Descriptions.Item>
|
||||
<Descriptions.Item label="模型">{detailRecord.model || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="来源">{sourceLabels[detailRecord.source]}</Descriptions.Item>
|
||||
<Descriptions.Item label="可见性">{visibilityLabels[detailRecord.visibility]}</Descriptions.Item>
|
||||
<Descriptions.Item label="状态">{statusLabels[detailRecord.status]}</Descriptions.Item>
|
||||
<Descriptions.Item label="版本">{detailRecord.version ?? detailRecord.current_version}</Descriptions.Item>
|
||||
<Descriptions.Item label="描述" span={2}>{detailRecord.description || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="人格预设">{detailRecord.personality || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="沟通风格">{detailRecord.communication_style || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="模板标识" span={2}>{detailRecord.source_id || '-'}</Descriptions.Item>
|
||||
{detailRecord.welcome_message && (
|
||||
<Descriptions.Item label="欢迎语" span={2}>{detailRecord.welcome_message}</Descriptions.Item>
|
||||
)}
|
||||
{detailRecord.scenarios && detailRecord.scenarios.length > 0 && (
|
||||
<Descriptions.Item label="使用场景" span={2}>
|
||||
{detailRecord.scenarios.map((s) => <Tag key={s}>{s}</Tag>)}
|
||||
</Descriptions.Item>
|
||||
)}
|
||||
<Descriptions.Item label="系统提示词" span={2}>
|
||||
<div style={{ whiteSpace: 'pre-wrap', maxHeight: 200, overflow: 'auto' }}>
|
||||
{detailRecord.system_prompt || '-'}
|
||||
</div>
|
||||
</Descriptions.Item>
|
||||
{detailRecord.soul_content && (
|
||||
<Descriptions.Item label="SOUL.md 人格配置" span={2}>
|
||||
<div style={{ whiteSpace: 'pre-wrap', maxHeight: 200, overflow: 'auto' }}>
|
||||
{detailRecord.soul_content}
|
||||
</div>
|
||||
</Descriptions.Item>
|
||||
)}
|
||||
<Descriptions.Item label="工具" span={2}>
|
||||
{detailRecord.tools?.map((t) => <Tag key={t}>{t}</Tag>) || '-'}
|
||||
</Descriptions.Item>
|
||||
|
||||
@@ -66,34 +66,39 @@ export default function Models() {
|
||||
title: '服务商',
|
||||
dataIndex: 'provider_id',
|
||||
width: 140,
|
||||
hideInSearch: true,
|
||||
render: (_, r) => {
|
||||
const provider = providersData?.items?.find((p) => p.id === r.provider_id)
|
||||
return provider?.display_name || r.provider_id.substring(0, 8)
|
||||
},
|
||||
},
|
||||
{ title: '上下文窗口', dataIndex: 'context_window', width: 110, render: (_, r) => r.context_window?.toLocaleString() },
|
||||
{ title: '最大输出', dataIndex: 'max_output_tokens', width: 100, render: (_, r) => r.max_output_tokens?.toLocaleString() },
|
||||
{ title: '上下文窗口', dataIndex: 'context_window', width: 110, hideInSearch: true, render: (_, r) => r.context_window?.toLocaleString() },
|
||||
{ title: '最大输出', dataIndex: 'max_output_tokens', width: 100, hideInSearch: true, render: (_, r) => r.max_output_tokens?.toLocaleString() },
|
||||
{
|
||||
title: '流式',
|
||||
dataIndex: 'supports_streaming',
|
||||
width: 70,
|
||||
hideInSearch: true,
|
||||
render: (_, r) => r.supports_streaming ? <Tag color="green">是</Tag> : <Tag>否</Tag>,
|
||||
},
|
||||
{
|
||||
title: '视觉',
|
||||
dataIndex: 'supports_vision',
|
||||
width: 70,
|
||||
hideInSearch: true,
|
||||
render: (_, r) => r.supports_vision ? <Tag color="blue">是</Tag> : <Tag>否</Tag>,
|
||||
},
|
||||
{
|
||||
title: '状态',
|
||||
dataIndex: 'enabled',
|
||||
width: 70,
|
||||
hideInSearch: true,
|
||||
render: (_, r) => r.enabled ? <Tag color="green">启用</Tag> : <Tag>禁用</Tag>,
|
||||
},
|
||||
{
|
||||
title: '操作',
|
||||
width: 160,
|
||||
hideInSearch: true,
|
||||
render: (_, record) => (
|
||||
<Space>
|
||||
<Button size="small" onClick={() => { setEditingId(record.id); form.setFieldsValue(record); setModalOpen(true) }}>
|
||||
@@ -123,7 +128,7 @@ export default function Models() {
|
||||
dataSource={data?.items ?? []}
|
||||
loading={isLoading}
|
||||
rowKey="id"
|
||||
search={false}
|
||||
search={{}}
|
||||
toolBarRender={() => [
|
||||
<Button key="add" type="primary" icon={<PlusOutlined />} onClick={() => { setEditingId(null); form.resetFields(); setModalOpen(true) }}>
|
||||
新建模型
|
||||
|
||||
@@ -19,6 +19,8 @@ export default function Providers() {
|
||||
const [modalOpen, setModalOpen] = useState(false)
|
||||
const [editingId, setEditingId] = useState<string | null>(null)
|
||||
const [keyModalProviderId, setKeyModalProviderId] = useState<string | null>(null)
|
||||
const [addKeyOpen, setAddKeyOpen] = useState(false)
|
||||
const [addKeyForm] = Form.useForm()
|
||||
|
||||
const { data, isLoading } = useQuery({
|
||||
queryKey: ['providers'],
|
||||
@@ -63,20 +65,54 @@ export default function Providers() {
|
||||
onError: (err: Error) => message.error(err.message || '删除失败'),
|
||||
})
|
||||
|
||||
const addKeyMutation = useMutation({
|
||||
mutationFn: ({ providerId, data }: { providerId: string; data: { key_label: string; key_value: string; priority?: number; max_rpm?: number; max_tpm?: number } }) =>
|
||||
providerService.addKey(providerId, data),
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: ['provider-keys', keyModalProviderId] })
|
||||
message.success('密钥已添加')
|
||||
setAddKeyOpen(false)
|
||||
addKeyForm.resetFields()
|
||||
},
|
||||
onError: () => message.error('添加失败'),
|
||||
})
|
||||
|
||||
const toggleKeyMutation = useMutation({
|
||||
mutationFn: ({ providerId, keyId, active }: { providerId: string; keyId: string; active: boolean }) =>
|
||||
providerService.toggleKey(providerId, keyId, active),
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: ['provider-keys', keyModalProviderId] })
|
||||
message.success('状态已切换')
|
||||
},
|
||||
onError: () => message.error('切换失败'),
|
||||
})
|
||||
|
||||
const deleteKeyMutation = useMutation({
|
||||
mutationFn: ({ providerId, keyId }: { providerId: string; keyId: string }) =>
|
||||
providerService.deleteKey(providerId, keyId),
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: ['provider-keys', keyModalProviderId] })
|
||||
message.success('密钥已删除')
|
||||
},
|
||||
onError: () => message.error('删除失败'),
|
||||
})
|
||||
|
||||
const columns: ProColumns<Provider>[] = [
|
||||
{ title: '名称', dataIndex: 'display_name', width: 140 },
|
||||
{ title: '标识', dataIndex: 'name', width: 120, render: (_, r) => <Text code>{r.name}</Text> },
|
||||
{ title: '协议', dataIndex: 'api_protocol', width: 100 },
|
||||
{ title: 'RPM 限制', dataIndex: 'rate_limit_rpm', width: 100, render: (_, r) => r.rate_limit_rpm ?? '-' },
|
||||
{ title: '协议', dataIndex: 'api_protocol', width: 100, hideInSearch: true },
|
||||
{ title: 'RPM 限制', dataIndex: 'rate_limit_rpm', width: 100, hideInSearch: true, render: (_, r) => r.rate_limit_rpm ?? '-' },
|
||||
{
|
||||
title: '状态',
|
||||
dataIndex: 'enabled',
|
||||
width: 80,
|
||||
hideInSearch: true,
|
||||
render: (_, r) => r.enabled ? <Tag color="green">启用</Tag> : <Tag>禁用</Tag>,
|
||||
},
|
||||
{
|
||||
title: '操作',
|
||||
width: 260,
|
||||
hideInSearch: true,
|
||||
render: (_, record) => (
|
||||
<Space>
|
||||
<Button size="small" onClick={() => { setEditingId(record.id); form.setFieldsValue(record); setModalOpen(true) }}>
|
||||
@@ -104,6 +140,35 @@ export default function Providers() {
|
||||
width: 80,
|
||||
render: (_, r) => r.is_active ? <Tag color="green">活跃</Tag> : <Tag>冷却</Tag>,
|
||||
},
|
||||
{
|
||||
title: '操作',
|
||||
width: 160,
|
||||
render: (_, record) => (
|
||||
<Space>
|
||||
<Popconfirm
|
||||
title={record.is_active ? '确定禁用此密钥?' : '确定启用此密钥?'}
|
||||
onConfirm={() => toggleKeyMutation.mutate({
|
||||
providerId: keyModalProviderId!,
|
||||
keyId: record.id,
|
||||
active: !record.is_active,
|
||||
})}
|
||||
>
|
||||
<Button size="small" type={record.is_active ? 'default' : 'primary'}>
|
||||
{record.is_active ? '禁用' : '启用'}
|
||||
</Button>
|
||||
</Popconfirm>
|
||||
<Popconfirm
|
||||
title="确定删除此密钥?此操作不可恢复。"
|
||||
onConfirm={() => deleteKeyMutation.mutate({
|
||||
providerId: keyModalProviderId!,
|
||||
keyId: record.id,
|
||||
})}
|
||||
>
|
||||
<Button size="small" danger>删除</Button>
|
||||
</Popconfirm>
|
||||
</Space>
|
||||
),
|
||||
},
|
||||
]
|
||||
|
||||
const handleSave = async () => {
|
||||
@@ -122,7 +187,7 @@ export default function Providers() {
|
||||
dataSource={data?.items ?? []}
|
||||
loading={isLoading}
|
||||
rowKey="id"
|
||||
search={false}
|
||||
search={{}}
|
||||
toolBarRender={() => [
|
||||
<Button key="add" type="primary" icon={<PlusOutlined />} onClick={() => { setEditingId(null); form.resetFields(); setModalOpen(true) }}>
|
||||
新建服务商
|
||||
@@ -169,7 +234,14 @@ export default function Providers() {
|
||||
title="Key Pool"
|
||||
open={!!keyModalProviderId}
|
||||
onCancel={() => setKeyModalProviderId(null)}
|
||||
footer={null}
|
||||
footer={(_, { OkBtn, CancelBtn }) => (
|
||||
<Space>
|
||||
<CancelBtn />
|
||||
<Button type="primary" onClick={() => { addKeyForm.resetFields(); setAddKeyOpen(true) }}>
|
||||
添加密钥
|
||||
</Button>
|
||||
</Space>
|
||||
)}
|
||||
width={700}
|
||||
>
|
||||
<ProTable<ProviderKey>
|
||||
@@ -183,6 +255,36 @@ export default function Providers() {
|
||||
size="small"
|
||||
/>
|
||||
</Modal>
|
||||
|
||||
<Modal
|
||||
title="添加密钥"
|
||||
open={addKeyOpen}
|
||||
onOk={() => {
|
||||
addKeyForm.validateFields().then((v) =>
|
||||
addKeyMutation.mutate({ providerId: keyModalProviderId!, data: v })
|
||||
)
|
||||
}}
|
||||
onCancel={() => setAddKeyOpen(false)}
|
||||
confirmLoading={addKeyMutation.isPending}
|
||||
>
|
||||
<Form form={addKeyForm} layout="vertical">
|
||||
<Form.Item name="key_label" label="标签" rules={[{ required: true }]}>
|
||||
<Input />
|
||||
</Form.Item>
|
||||
<Form.Item name="key_value" label="API Key" rules={[{ required: true }]}>
|
||||
<Input.Password />
|
||||
</Form.Item>
|
||||
<Form.Item name="priority" label="优先级" initialValue={0}>
|
||||
<InputNumber min={0} style={{ width: '100%' }} />
|
||||
</Form.Item>
|
||||
<Form.Item name="max_rpm" label="最大 RPM (可选)">
|
||||
<InputNumber min={0} style={{ width: '100%' }} />
|
||||
</Form.Item>
|
||||
<Form.Item name="max_tpm" label="最大 TPM (可选)">
|
||||
<InputNumber min={0} style={{ width: '100%' }} />
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Modal>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,14 +1,61 @@
|
||||
// ============================================================
|
||||
// 路由守卫 — 未登录重定向到 /login
|
||||
// ZCLAW Admin V2 — Auth Guard with session restore
|
||||
// ============================================================
|
||||
//
|
||||
// Auth strategy:
|
||||
// 1. If Zustand has token (normal flow after login) → authenticated
|
||||
// 2. If no token but account in localStorage → call GET /auth/me
|
||||
// to validate HttpOnly cookie and restore session
|
||||
// 3. If cookie invalid → clean up and redirect to /login
|
||||
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { Navigate, useLocation } from 'react-router-dom'
|
||||
import { Spin } from 'antd'
|
||||
import { useAuthStore } from '@/stores/authStore'
|
||||
import { authService } from '@/services/auth'
|
||||
|
||||
export function AuthGuard({ children }: { children: React.ReactNode }) {
|
||||
const token = useAuthStore((s) => s.token)
|
||||
const account = useAuthStore((s) => s.account)
|
||||
const login = useAuthStore((s) => s.login)
|
||||
const logout = useAuthStore((s) => s.logout)
|
||||
const location = useLocation()
|
||||
|
||||
// Track restore attempt to avoid double-calling
|
||||
const restoreAttempted = useRef(false)
|
||||
const [restoring, setRestoring] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
if (restoreAttempted.current) return
|
||||
restoreAttempted.current = true
|
||||
|
||||
// If no in-memory token but account exists in localStorage,
|
||||
// try to validate the HttpOnly cookie via /auth/me
|
||||
if (!token && account) {
|
||||
setRestoring(true)
|
||||
authService.me()
|
||||
.then((meAccount) => {
|
||||
// Cookie is valid — restore session
|
||||
// Use sentinel token since real auth is via HttpOnly cookie
|
||||
login('cookie-session', '', meAccount)
|
||||
setRestoring(false)
|
||||
})
|
||||
.catch(() => {
|
||||
// Cookie expired or invalid — clean up stale data
|
||||
logout()
|
||||
setRestoring(false)
|
||||
})
|
||||
}
|
||||
}, []) // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
if (restoring) {
|
||||
return (
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100vh' }}>
|
||||
<Spin size="large" />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (!token) {
|
||||
return <Navigate to="/login" state={{ from: location }} replace />
|
||||
}
|
||||
|
||||
@@ -8,11 +8,18 @@ export const agentTemplateService = {
|
||||
get: (id: string, signal?: AbortSignal) =>
|
||||
request.get<AgentTemplate>(`/agent-templates/${id}`, withSignal({}, signal)).then((r) => r.data),
|
||||
|
||||
getFull: (id: string, signal?: AbortSignal) =>
|
||||
request.get<AgentTemplate>(`/agent-templates/${id}/full`, withSignal({}, signal)).then((r) => r.data),
|
||||
|
||||
create: (data: {
|
||||
name: string; description?: string; category?: string; source?: string
|
||||
model?: string; system_prompt?: string; tools?: string[]
|
||||
capabilities?: string[]; temperature?: number; max_tokens?: number
|
||||
visibility?: string
|
||||
visibility?: string; emoji?: string; personality?: string
|
||||
soul_content?: string; welcome_message?: string
|
||||
communication_style?: string; source_id?: string
|
||||
scenarios?: string[]
|
||||
quick_commands?: Array<{ label: string; command: string }>
|
||||
}, signal?: AbortSignal) =>
|
||||
request.post<AgentTemplate>('/agent-templates', data, withSignal({}, signal)).then((r) => r.data),
|
||||
|
||||
|
||||
@@ -45,10 +45,18 @@ request.interceptors.request.use((config: InternalAxiosRequestConfig) => {
|
||||
// ── 响应拦截器:401 自动刷新 ──────────────────────────────
|
||||
|
||||
let isRefreshing = false
|
||||
let pendingRequests: Array<(token: string) => void> = []
|
||||
let pendingRequests: Array<{
|
||||
resolve: (token: string) => void
|
||||
reject: (error: unknown) => void
|
||||
}> = []
|
||||
|
||||
function onTokenRefreshed(newToken: string) {
|
||||
pendingRequests.forEach((cb) => cb(newToken))
|
||||
pendingRequests.forEach(({ resolve }) => resolve(newToken))
|
||||
pendingRequests = []
|
||||
}
|
||||
|
||||
function onTokenRefreshFailed(error: unknown) {
|
||||
pendingRequests.forEach(({ reject }) => reject(error))
|
||||
pendingRequests = []
|
||||
}
|
||||
|
||||
@@ -67,10 +75,13 @@ request.interceptors.response.use(
|
||||
}
|
||||
|
||||
if (isRefreshing) {
|
||||
return new Promise((resolve) => {
|
||||
pendingRequests.push((newToken: string) => {
|
||||
originalRequest.headers.Authorization = `Bearer ${newToken}`
|
||||
resolve(request(originalRequest))
|
||||
return new Promise((resolve, reject) => {
|
||||
pendingRequests.push({
|
||||
resolve: (newToken: string) => {
|
||||
originalRequest.headers.Authorization = `Bearer ${newToken}`
|
||||
resolve(request(originalRequest))
|
||||
},
|
||||
reject,
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -93,10 +104,12 @@ request.interceptors.response.use(
|
||||
onTokenRefreshed(newToken)
|
||||
originalRequest.headers.Authorization = `Bearer ${newToken}`
|
||||
return request(originalRequest)
|
||||
} catch {
|
||||
} catch (refreshError) {
|
||||
// 关键修复:刷新失败时 reject 所有等待中的请求,避免它们永远 hang
|
||||
onTokenRefreshFailed(refreshError)
|
||||
store.logout()
|
||||
window.location.href = '/login'
|
||||
return Promise.reject(error)
|
||||
return Promise.reject(refreshError)
|
||||
} finally {
|
||||
isRefreshing = false
|
||||
}
|
||||
@@ -112,7 +125,14 @@ request.interceptors.response.use(
|
||||
return Promise.reject(new ApiRequestError(error.response.status, body))
|
||||
}
|
||||
|
||||
return Promise.reject(error)
|
||||
// 网络错误统一包装为 ApiRequestError
|
||||
return Promise.reject(
|
||||
new ApiRequestError(0, {
|
||||
error: 'network_error',
|
||||
message: error.message || '网络连接失败,请检查网络后重试',
|
||||
status: 0,
|
||||
})
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ export interface AccountPublic {
|
||||
totp_enabled: boolean
|
||||
last_login_at: string | null
|
||||
created_at: string
|
||||
llm_routing: 'relay' | 'local'
|
||||
}
|
||||
|
||||
/** 登录请求 */
|
||||
@@ -227,6 +228,25 @@ export interface AgentTemplate {
|
||||
current_version: number
|
||||
created_at: string
|
||||
updated_at: string
|
||||
soul_content?: string
|
||||
scenarios: string[]
|
||||
welcome_message?: string
|
||||
quick_commands: Array<{ label: string; command: string }>
|
||||
personality?: string
|
||||
communication_style?: string
|
||||
emoji?: string
|
||||
version: number
|
||||
source_id?: string
|
||||
}
|
||||
|
||||
/** Agent 模板可用列表(轻量) */
|
||||
export interface AgentTemplateAvailable {
|
||||
id: string
|
||||
name: string
|
||||
category: string
|
||||
emoji?: string
|
||||
description?: string
|
||||
source_id?: string
|
||||
}
|
||||
|
||||
/** Provider Key */
|
||||
@@ -237,7 +257,6 @@ export interface ProviderKey {
|
||||
priority: number
|
||||
max_rpm?: number
|
||||
max_tpm?: number
|
||||
quota_reset_interval?: string
|
||||
is_active: boolean
|
||||
last_429_at?: string
|
||||
cooldown_until?: string
|
||||
|
||||
179
admin-v2/tests/services/request.test.ts
Normal file
179
admin-v2/tests/services/request.test.ts
Normal file
@@ -0,0 +1,179 @@
|
||||
// ============================================================
|
||||
// request.ts 拦截器测试
|
||||
// ============================================================
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
|
||||
import { http, HttpResponse } from 'msw'
|
||||
import { setupServer } from 'msw/node'
|
||||
|
||||
// ── Hoisted: mock functions + store (accessible in vi.mock factory) ──
|
||||
const { mockSetToken, mockSetRefreshToken, mockLogout, _store } = vi.hoisted(() => {
|
||||
const mockSetToken = vi.fn()
|
||||
const mockSetRefreshToken = vi.fn()
|
||||
const mockLogout = vi.fn()
|
||||
const _store = {
|
||||
token: null as string | null,
|
||||
refreshToken: null as string | null,
|
||||
setToken: mockSetToken,
|
||||
setRefreshToken: mockSetRefreshToken,
|
||||
logout: mockLogout,
|
||||
}
|
||||
return { mockSetToken, mockSetRefreshToken, mockLogout, _store }
|
||||
})
|
||||
|
||||
vi.mock('@/stores/authStore', () => ({
|
||||
useAuthStore: {
|
||||
getState: () => _store,
|
||||
},
|
||||
}))
|
||||
|
||||
import request, { ApiRequestError } from '@/services/request'
|
||||
|
||||
function setStoreState(overrides: Partial<typeof _store>) {
|
||||
Object.assign(_store, overrides)
|
||||
}
|
||||
|
||||
// ── MSW server ──────────────────────────────────────────────
|
||||
const server = setupServer()
|
||||
|
||||
beforeEach(() => {
|
||||
server.listen({ onUnhandledRequest: 'bypass' })
|
||||
mockSetToken.mockClear()
|
||||
mockSetRefreshToken.mockClear()
|
||||
mockLogout.mockClear()
|
||||
_store.token = null
|
||||
_store.refreshToken = null
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
server.close()
|
||||
})
|
||||
|
||||
describe('request interceptor', () => {
|
||||
it('attaches Authorization header when token exists', async () => {
|
||||
let capturedAuth: string | null = null
|
||||
server.use(
|
||||
http.get('*/api/v1/test', ({ request }) => {
|
||||
capturedAuth = request.headers.get('Authorization')
|
||||
return HttpResponse.json({ ok: true })
|
||||
}),
|
||||
)
|
||||
|
||||
setStoreState({ token: 'test-jwt-token' })
|
||||
await request.get('/test')
|
||||
|
||||
expect(capturedAuth).toBe('Bearer test-jwt-token')
|
||||
})
|
||||
|
||||
it('does not attach Authorization header when no token', async () => {
|
||||
let capturedAuth: string | null = null
|
||||
server.use(
|
||||
http.get('*/api/v1/test', ({ request }) => {
|
||||
capturedAuth = request.headers.get('Authorization')
|
||||
return HttpResponse.json({ ok: true })
|
||||
}),
|
||||
)
|
||||
|
||||
setStoreState({ token: null })
|
||||
await request.get('/test')
|
||||
|
||||
expect(capturedAuth).toBeNull()
|
||||
})
|
||||
|
||||
it('wraps non-401 errors as ApiRequestError', async () => {
|
||||
server.use(
|
||||
http.get('*/api/v1/test', () => {
|
||||
return HttpResponse.json(
|
||||
{ error: 'not_found', message: 'Resource not found' },
|
||||
{ status: 404 },
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
try {
|
||||
await request.get('/test')
|
||||
expect.fail('Should have thrown')
|
||||
} catch (err) {
|
||||
expect(err).toBeInstanceOf(ApiRequestError)
|
||||
expect((err as ApiRequestError).status).toBe(404)
|
||||
expect((err as ApiRequestError).body.message).toBe('Resource not found')
|
||||
}
|
||||
})
|
||||
|
||||
it('wraps network errors as ApiRequestError with status 0', async () => {
|
||||
server.use(
|
||||
http.get('*/api/v1/test', () => {
|
||||
return HttpResponse.error()
|
||||
}),
|
||||
)
|
||||
|
||||
try {
|
||||
await request.get('/test')
|
||||
expect.fail('Should have thrown')
|
||||
} catch (err) {
|
||||
expect(err).toBeInstanceOf(ApiRequestError)
|
||||
expect((err as ApiRequestError).status).toBe(0)
|
||||
}
|
||||
})
|
||||
|
||||
it('handles 401 with refresh token success', async () => {
|
||||
let callCount = 0
|
||||
|
||||
server.use(
|
||||
http.get('*/api/v1/protected', () => {
|
||||
callCount++
|
||||
if (callCount === 1) {
|
||||
return HttpResponse.json({ error: 'unauthorized' }, { status: 401 })
|
||||
}
|
||||
return HttpResponse.json({ data: 'success' })
|
||||
}),
|
||||
http.post('*/api/v1/auth/refresh', () => {
|
||||
return HttpResponse.json({ token: 'new-jwt', refresh_token: 'new-refresh' })
|
||||
}),
|
||||
)
|
||||
|
||||
setStoreState({ token: 'old-jwt', refreshToken: 'old-refresh' })
|
||||
const res = await request.get('/protected')
|
||||
|
||||
expect(res.data).toEqual({ data: 'success' })
|
||||
expect(mockSetToken).toHaveBeenCalledWith('new-jwt')
|
||||
expect(mockSetRefreshToken).toHaveBeenCalledWith('new-refresh')
|
||||
})
|
||||
|
||||
it('handles 401 with no refresh token — calls logout immediately', async () => {
|
||||
server.use(
|
||||
http.get('*/api/v1/norefresh', () => {
|
||||
return HttpResponse.json({ error: 'unauthorized' }, { status: 401 })
|
||||
}),
|
||||
)
|
||||
|
||||
setStoreState({ token: 'old-jwt', refreshToken: null })
|
||||
|
||||
try {
|
||||
await request.get('/norefresh')
|
||||
expect.fail('Should have thrown')
|
||||
} catch {
|
||||
expect(mockLogout).toHaveBeenCalled()
|
||||
}
|
||||
})
|
||||
|
||||
it('handles 401 with refresh failure — calls logout', async () => {
|
||||
server.use(
|
||||
http.get('*/api/v1/refreshfail', () => {
|
||||
return HttpResponse.json({ error: 'unauthorized' }, { status: 401 })
|
||||
}),
|
||||
http.post('*/api/v1/auth/refresh', () => {
|
||||
return HttpResponse.json({ error: 'invalid' }, { status: 401 })
|
||||
}),
|
||||
)
|
||||
|
||||
setStoreState({ token: 'old-jwt', refreshToken: 'old-refresh' })
|
||||
|
||||
try {
|
||||
await request.get('/refreshfail')
|
||||
expect.fail('Should have thrown')
|
||||
} catch {
|
||||
expect(mockLogout).toHaveBeenCalled()
|
||||
}
|
||||
})
|
||||
})
|
||||
43
admin-v2/tests/setup.ts
Normal file
43
admin-v2/tests/setup.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
// ============================================================
|
||||
// Test setup: globals, jsdom polyfills, localStorage mock
|
||||
// ============================================================
|
||||
|
||||
import { beforeAll, beforeEach, vi } from 'vitest'
|
||||
import '@testing-library/jest-dom/vitest'
|
||||
|
||||
// ── localStorage mock (jsdom provides one but we ensure clean state) ──────
|
||||
|
||||
beforeEach(() => {
|
||||
localStorage.clear()
|
||||
})
|
||||
|
||||
// ── Ant Design / rc-util requires matchMedia ──────────────────────────────
|
||||
|
||||
beforeAll(() => {
|
||||
Object.defineProperty(window, 'matchMedia', {
|
||||
writable: true,
|
||||
value: vi.fn().mockImplementation((query: string) => ({
|
||||
matches: false,
|
||||
media: query,
|
||||
onchange: null,
|
||||
addListener: vi.fn(),
|
||||
removeListener: vi.fn(),
|
||||
addEventListener: vi.fn(),
|
||||
removeEventListener: vi.fn(),
|
||||
dispatchEvent: vi.fn(),
|
||||
})),
|
||||
})
|
||||
|
||||
// Ant Design's scrollTo polyfill
|
||||
window.scrollTo = vi.fn()
|
||||
|
||||
// React 19 + jsdom: ensure getComputedStyle returns something useful
|
||||
const originalGetComputedStyle = window.getComputedStyle
|
||||
window.getComputedStyle = (elt: Element, pseudoElt?: string | null) => {
|
||||
try {
|
||||
return originalGetComputedStyle(elt, pseudoElt)
|
||||
} catch {
|
||||
return {} as CSSStyleDeclaration
|
||||
}
|
||||
}
|
||||
})
|
||||
115
admin-v2/tests/stores/authStore.test.ts
Normal file
115
admin-v2/tests/stores/authStore.test.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
// ============================================================
|
||||
// authStore 测试
|
||||
// ============================================================
|
||||
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest'
|
||||
import { useAuthStore } from '@/stores/authStore'
|
||||
import type { AccountPublic } from '@/types'
|
||||
|
||||
// Mock fetch for logout
|
||||
const mockFetch = vi.fn().mockResolvedValue({ ok: true })
|
||||
vi.stubGlobal('fetch', mockFetch)
|
||||
|
||||
const mockAccount: AccountPublic = {
|
||||
id: 'test-id',
|
||||
username: 'testuser',
|
||||
display_name: 'Test User',
|
||||
email: 'test@example.com',
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
totp_enabled: false,
|
||||
llm_routing: 'relay',
|
||||
created_at: '2026-01-01T00:00:00Z',
|
||||
updated_at: '2026-01-01T00:00:00Z',
|
||||
}
|
||||
|
||||
const superAdminAccount: AccountPublic = {
|
||||
...mockAccount,
|
||||
id: 'super-id',
|
||||
username: 'superadmin',
|
||||
role: 'super_admin',
|
||||
}
|
||||
|
||||
describe('authStore', () => {
|
||||
beforeEach(() => {
|
||||
localStorage.clear()
|
||||
mockFetch.mockClear()
|
||||
// Reset store state
|
||||
useAuthStore.setState({
|
||||
token: null,
|
||||
refreshToken: null,
|
||||
account: null,
|
||||
permissions: [],
|
||||
})
|
||||
})
|
||||
|
||||
it('login sets token, refreshToken, account and permissions', () => {
|
||||
const store = useAuthStore.getState()
|
||||
store.login('jwt-token', 'refresh-token', mockAccount)
|
||||
|
||||
const state = useAuthStore.getState()
|
||||
expect(state.token).toBe('jwt-token')
|
||||
expect(state.refreshToken).toBe('refresh-token')
|
||||
expect(state.account).toEqual(mockAccount)
|
||||
expect(state.permissions).toContain('provider:manage')
|
||||
})
|
||||
|
||||
it('super_admin gets admin:full + all permissions', () => {
|
||||
const store = useAuthStore.getState()
|
||||
store.login('jwt', 'refresh', superAdminAccount)
|
||||
|
||||
const state = useAuthStore.getState()
|
||||
expect(state.permissions).toContain('admin:full')
|
||||
expect(state.permissions).toContain('account:admin')
|
||||
expect(state.permissions).toContain('prompt:admin')
|
||||
})
|
||||
|
||||
it('user role gets only basic permissions', () => {
|
||||
const userAccount: AccountPublic = { ...mockAccount, role: 'user' }
|
||||
const store = useAuthStore.getState()
|
||||
store.login('jwt', 'refresh', userAccount)
|
||||
|
||||
const state = useAuthStore.getState()
|
||||
expect(state.permissions).toContain('model:read')
|
||||
expect(state.permissions).toContain('relay:use')
|
||||
expect(state.permissions).not.toContain('provider:manage')
|
||||
})
|
||||
|
||||
it('logout clears all state', () => {
|
||||
useAuthStore.getState().login('jwt', 'refresh', mockAccount)
|
||||
|
||||
useAuthStore.getState().logout()
|
||||
|
||||
const state = useAuthStore.getState()
|
||||
expect(state.token).toBeNull()
|
||||
expect(state.refreshToken).toBeNull()
|
||||
expect(state.account).toBeNull()
|
||||
expect(state.permissions).toEqual([])
|
||||
expect(localStorage.getItem('zclaw_admin_account')).toBeNull()
|
||||
})
|
||||
|
||||
it('hasPermission returns true for matching permission', () => {
|
||||
useAuthStore.getState().login('jwt', 'refresh', mockAccount)
|
||||
expect(useAuthStore.getState().hasPermission('provider:manage')).toBe(true)
|
||||
expect(useAuthStore.getState().hasPermission('config:write')).toBe(true)
|
||||
})
|
||||
|
||||
it('hasPermission returns false for non-matching permission', () => {
|
||||
useAuthStore.getState().login('jwt', 'refresh', mockAccount)
|
||||
expect(useAuthStore.getState().hasPermission('admin:full')).toBe(false)
|
||||
})
|
||||
|
||||
it('admin:full grants all permissions via wildcard', () => {
|
||||
useAuthStore.getState().login('jwt', 'refresh', superAdminAccount)
|
||||
expect(useAuthStore.getState().hasPermission('anything:here')).toBe(true)
|
||||
expect(useAuthStore.getState().hasPermission('made:up')).toBe(true)
|
||||
})
|
||||
|
||||
it('persists account to localStorage on login', () => {
|
||||
useAuthStore.getState().login('jwt', 'refresh', mockAccount)
|
||||
|
||||
const stored = localStorage.getItem('zclaw_admin_account')
|
||||
expect(stored).not.toBeNull()
|
||||
expect(JSON.parse(stored!).username).toBe('testuser')
|
||||
})
|
||||
})
|
||||
@@ -5,7 +5,7 @@
|
||||
"useDefineForClassFields": true,
|
||||
"lib": ["ES2023", "DOM", "DOM.Iterable"],
|
||||
"module": "ESNext",
|
||||
"types": ["vite/client"],
|
||||
"types": ["vite/client", "vitest/globals"],
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
|
||||
18
admin-v2/vitest.config.ts
Normal file
18
admin-v2/vitest.config.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { defineConfig } from 'vitest/config'
|
||||
import react from '@vitejs/plugin-react'
|
||||
import path from 'path'
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'jsdom',
|
||||
setupFiles: ['./tests/setup.ts'],
|
||||
include: ['tests/**/*.test.{ts,tsx}'],
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'@': path.resolve(__dirname, './src'),
|
||||
},
|
||||
},
|
||||
})
|
||||
@@ -0,0 +1,6 @@
|
||||
-- 20260331000001_accounts_llm_routing.sql
|
||||
-- 账号级 LLM 路由模式: relay=SaaS中转(Token池), local=本地直连
|
||||
ALTER TABLE accounts ADD COLUMN IF NOT EXISTS llm_routing TEXT NOT NULL DEFAULT 'local'
|
||||
CHECK (llm_routing IN ('relay', 'local'));
|
||||
|
||||
COMMENT ON COLUMN accounts.llm_routing IS 'LLM路由模式: relay=SaaS中转, local=本地直连';
|
||||
@@ -0,0 +1,16 @@
|
||||
-- 20260331000002_agent_templates_extensions.sql
|
||||
-- 行业 Agent 模板扩展: soul_content, scenarios, welcome_message, quick_commands 等
|
||||
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS soul_content TEXT;
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS scenarios TEXT NOT NULL DEFAULT '[]';
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS welcome_message TEXT;
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS quick_commands TEXT NOT NULL DEFAULT '[]';
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS personality TEXT;
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS communication_style TEXT;
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS emoji TEXT;
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS version INTEGER NOT NULL DEFAULT 1;
|
||||
ALTER TABLE agent_templates ADD COLUMN IF NOT EXISTS source_id TEXT;
|
||||
|
||||
-- source_id 唯一约束(仅非 NULL 值)
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_agent_templates_source_id
|
||||
ON agent_templates(source_id) WHERE source_id IS NOT NULL;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- 20260401000001_provider_keys_last_used.sql
|
||||
-- Key Pool LRU: 记录每个 key 最后使用时间
|
||||
ALTER TABLE provider_keys ADD COLUMN IF NOT EXISTS last_used_at TIMESTAMPTZ;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- 20260401000002_remove_quota_reset_interval.sql
|
||||
-- 移除未使用的 quota_reset_interval 字段 (RPM/TPM 限流已足够)
|
||||
ALTER TABLE provider_keys DROP COLUMN IF EXISTS quota_reset_interval;
|
||||
@@ -23,7 +23,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts WHERE role = $1 AND status = $2 AND (username LIKE $3 OR email LIKE $3 OR display_name LIKE $3)"
|
||||
).bind(role).bind(status).bind(&pattern).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE role = $1 AND status = $2 AND (username LIKE $3 OR email LIKE $3 OR display_name LIKE $3)
|
||||
ORDER BY created_at DESC LIMIT $4 OFFSET $5"
|
||||
).bind(role).bind(status).bind(&pattern).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
@@ -35,7 +35,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts WHERE role = $1 AND status = $2"
|
||||
).bind(role).bind(status).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE role = $1 AND status = $2
|
||||
ORDER BY created_at DESC LIMIT $3 OFFSET $4"
|
||||
).bind(role).bind(status).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
@@ -48,7 +48,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts WHERE role = $1 AND (username LIKE $2 OR email LIKE $2 OR display_name LIKE $2)"
|
||||
).bind(role).bind(&pattern).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE role = $1 AND (username LIKE $2 OR email LIKE $2 OR display_name LIKE $2)
|
||||
ORDER BY created_at DESC LIMIT $3 OFFSET $4"
|
||||
).bind(role).bind(&pattern).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
@@ -61,7 +61,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts WHERE status = $1 AND (username LIKE $2 OR email LIKE $2 OR display_name LIKE $2)"
|
||||
).bind(status).bind(&pattern).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE status = $1 AND (username LIKE $2 OR email LIKE $2 OR display_name LIKE $2)
|
||||
ORDER BY created_at DESC LIMIT $3 OFFSET $4"
|
||||
).bind(status).bind(&pattern).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
@@ -73,7 +73,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts WHERE role = $1"
|
||||
).bind(role).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE role = $1
|
||||
ORDER BY created_at DESC LIMIT $2 OFFSET $3"
|
||||
).bind(role).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
@@ -85,7 +85,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts WHERE status = $1"
|
||||
).bind(status).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE status = $1
|
||||
ORDER BY created_at DESC LIMIT $2 OFFSET $3"
|
||||
).bind(status).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
@@ -98,7 +98,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts WHERE (username LIKE $1 OR email LIKE $1 OR display_name LIKE $1)"
|
||||
).bind(&pattern).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE (username LIKE $1 OR email LIKE $1 OR display_name LIKE $1)
|
||||
ORDER BY created_at DESC LIMIT $2 OFFSET $3"
|
||||
).bind(&pattern).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
@@ -110,7 +110,7 @@ pub async fn list_accounts(
|
||||
"SELECT COUNT(*) FROM accounts"
|
||||
).fetch_one(db).await?;
|
||||
let rows = sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts ORDER BY created_at DESC LIMIT $1 OFFSET $2"
|
||||
).bind(page_size as i64).bind(offset as i64).fetch_all(db).await?;
|
||||
(total, rows)
|
||||
@@ -123,7 +123,7 @@ pub async fn list_accounts(
|
||||
serde_json::json!({
|
||||
"id": r.id, "username": r.username, "email": r.email, "display_name": r.display_name,
|
||||
"role": r.role, "status": r.status, "totp_enabled": r.totp_enabled,
|
||||
"last_login_at": r.last_login_at, "created_at": r.created_at,
|
||||
"last_login_at": r.last_login_at, "created_at": r.created_at, "llm_routing": r.llm_routing,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
@@ -134,7 +134,7 @@ pub async fn list_accounts(
|
||||
pub async fn get_account(db: &PgPool, account_id: &str) -> SaasResult<serde_json::Value> {
|
||||
let row: Option<AccountRow> =
|
||||
sqlx::query_as(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, last_login_at, created_at, llm_routing
|
||||
FROM accounts WHERE id = $1"
|
||||
)
|
||||
.bind(account_id)
|
||||
@@ -146,7 +146,7 @@ pub async fn get_account(db: &PgPool, account_id: &str) -> SaasResult<serde_json
|
||||
Ok(serde_json::json!({
|
||||
"id": r.id, "username": r.username, "email": r.email, "display_name": r.display_name,
|
||||
"role": r.role, "status": r.status, "totp_enabled": r.totp_enabled,
|
||||
"last_login_at": r.last_login_at, "created_at": r.created_at,
|
||||
"last_login_at": r.last_login_at, "created_at": r.created_at, "llm_routing": r.llm_routing,
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -165,13 +165,15 @@ pub async fn update_account(
|
||||
email = COALESCE($2, email),
|
||||
role = COALESCE($3, role),
|
||||
avatar_url = COALESCE($4, avatar_url),
|
||||
updated_at = $5
|
||||
WHERE id = $6"
|
||||
llm_routing = COALESCE($5, llm_routing),
|
||||
updated_at = $6
|
||||
WHERE id = $7"
|
||||
)
|
||||
.bind(req.display_name.as_deref())
|
||||
.bind(req.email.as_deref())
|
||||
.bind(req.role.as_deref())
|
||||
.bind(req.avatar_url.as_deref())
|
||||
.bind(req.llm_routing.as_deref())
|
||||
.bind(&now)
|
||||
.bind(account_id)
|
||||
.execute(db).await?;
|
||||
|
||||
@@ -11,6 +11,7 @@ pub struct UpdateAccountRequest {
|
||||
pub email: Option<String>,
|
||||
pub role: Option<String>,
|
||||
pub avatar_url: Option<String>,
|
||||
pub llm_routing: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
|
||||
@@ -34,6 +34,8 @@ pub async fn create_template(
|
||||
let visibility = req.visibility.as_deref().unwrap_or("public");
|
||||
let tools = req.tools.as_deref().unwrap_or(&[]);
|
||||
let capabilities = req.capabilities.as_deref().unwrap_or(&[]);
|
||||
let scenarios = req.scenarios.as_deref().unwrap_or(&[]);
|
||||
let quick_commands = req.quick_commands.as_deref().unwrap_or(&[]);
|
||||
|
||||
let result = service::create_template(
|
||||
&state.db, &req.name, req.description.as_deref(),
|
||||
@@ -41,6 +43,14 @@ pub async fn create_template(
|
||||
req.system_prompt.as_deref(),
|
||||
tools, capabilities,
|
||||
req.temperature, req.max_tokens, visibility,
|
||||
req.soul_content.as_deref(),
|
||||
Some(scenarios),
|
||||
req.welcome_message.as_deref(),
|
||||
Some(quick_commands),
|
||||
req.personality.as_deref(),
|
||||
req.communication_style.as_deref(),
|
||||
req.emoji.as_deref(),
|
||||
req.source_id.as_deref(),
|
||||
).await?;
|
||||
|
||||
log_operation(&state.db, &ctx.account_id, "agent_template.create", "agent_template", &result.id,
|
||||
@@ -59,6 +69,26 @@ pub async fn get_template(
|
||||
Ok(Json(service::get_template(&state.db, &id).await?))
|
||||
}
|
||||
|
||||
/// GET /api/v1/agent-templates/:id/full — 获取完整 Agent 模板(含扩展字段)
|
||||
pub async fn get_full_template(
|
||||
State(state): State<AppState>,
|
||||
Extension(ctx): Extension<AuthContext>,
|
||||
Path(id): Path<String>,
|
||||
) -> SaasResult<Json<AgentTemplateInfo>> {
|
||||
check_permission(&ctx, "model:read")?;
|
||||
// Reuses the same get_template service which already returns all fields
|
||||
Ok(Json(service::get_template(&state.db, &id).await?))
|
||||
}
|
||||
|
||||
/// GET /api/v1/agent-templates/available — 列出公开可用模板(轻量级)
|
||||
pub async fn list_available(
|
||||
State(state): State<AppState>,
|
||||
Extension(ctx): Extension<AuthContext>,
|
||||
) -> SaasResult<Json<Vec<AvailableAgentTemplateInfo>>> {
|
||||
check_permission(&ctx, "model:read")?;
|
||||
Ok(Json(service::list_available(&state.db).await?))
|
||||
}
|
||||
|
||||
/// POST /api/v1/agent-templates/:id — 更新 Agent 模板
|
||||
pub async fn update_template(
|
||||
State(state): State<AppState>,
|
||||
@@ -79,6 +109,13 @@ pub async fn update_template(
|
||||
req.max_tokens,
|
||||
req.visibility.as_deref(),
|
||||
req.status.as_deref(),
|
||||
req.soul_content.as_deref(),
|
||||
req.scenarios.as_deref(),
|
||||
req.welcome_message.as_deref(),
|
||||
req.quick_commands.as_deref(),
|
||||
req.personality.as_deref(),
|
||||
req.communication_style.as_deref(),
|
||||
req.emoji.as_deref(),
|
||||
).await?;
|
||||
|
||||
log_operation(&state.db, &ctx.account_id, "agent_template.update", "agent_template", &id,
|
||||
|
||||
@@ -11,7 +11,9 @@ use crate::state::AppState;
|
||||
pub fn routes() -> axum::Router<AppState> {
|
||||
axum::Router::new()
|
||||
.route("/api/v1/agent-templates", get(handlers::list_templates).post(handlers::create_template))
|
||||
.route("/api/v1/agent-templates/available", get(handlers::list_available))
|
||||
.route("/api/v1/agent-templates/:id", get(handlers::get_template))
|
||||
.route("/api/v1/agent-templates/:id", post(handlers::update_template))
|
||||
.route("/api/v1/agent-templates/:id", delete(handlers::archive_template))
|
||||
.route("/api/v1/agent-templates/:id/full", get(handlers::get_full_template))
|
||||
}
|
||||
|
||||
@@ -1,26 +1,50 @@
|
||||
//! Agent 配置模板业务逻辑
|
||||
|
||||
use sqlx::PgPool;
|
||||
use sqlx::{PgPool, Row};
|
||||
use crate::error::{SaasError, SaasResult};
|
||||
use super::types::*;
|
||||
|
||||
fn row_to_template(
|
||||
row: (String, String, Option<String>, String, String, Option<String>, Option<String>,
|
||||
String, String, Option<f64>, Option<i32>, String, String, i32, String, String),
|
||||
) -> AgentTemplateInfo {
|
||||
/// Shared SELECT column list.
|
||||
const SELECT_COLUMNS: &str = "\
|
||||
id, name, description, category, source, model, system_prompt, \
|
||||
tools, capabilities, temperature, max_tokens, visibility, status, \
|
||||
current_version, created_at, updated_at, \
|
||||
soul_content, scenarios, welcome_message, quick_commands, \
|
||||
personality, communication_style, emoji, version, source_id";
|
||||
|
||||
fn row_to_template(row: &sqlx::postgres::PgRow) -> AgentTemplateInfo {
|
||||
AgentTemplateInfo {
|
||||
id: row.0, name: row.1, description: row.2, category: row.3, source: row.4,
|
||||
model: row.5, system_prompt: row.6, tools: serde_json::from_str(&row.7).unwrap_or_default(),
|
||||
capabilities: serde_json::from_str(&row.8).unwrap_or_default(),
|
||||
temperature: row.9, max_tokens: row.10, visibility: row.11, status: row.12,
|
||||
current_version: row.13, created_at: row.14, updated_at: row.15,
|
||||
id: row.get("id"),
|
||||
name: row.get("name"),
|
||||
description: row.get("description"),
|
||||
category: row.get("category"),
|
||||
source: row.get("source"),
|
||||
model: row.get("model"),
|
||||
system_prompt: row.get("system_prompt"),
|
||||
tools: serde_json::from_str(&row.get::<String, _>("tools")).unwrap_or_default(),
|
||||
capabilities: serde_json::from_str(&row.get::<String, _>("capabilities")).unwrap_or_default(),
|
||||
temperature: row.get("temperature"),
|
||||
max_tokens: row.get("max_tokens"),
|
||||
visibility: row.get("visibility"),
|
||||
status: row.get("status"),
|
||||
current_version: row.get("current_version"),
|
||||
created_at: row.get("created_at"),
|
||||
updated_at: row.get("updated_at"),
|
||||
// Extended fields
|
||||
soul_content: row.get("soul_content"),
|
||||
scenarios: serde_json::from_str(&row.get::<String, _>("scenarios")).unwrap_or_default(),
|
||||
welcome_message: row.get("welcome_message"),
|
||||
quick_commands: serde_json::from_str(&row.get::<String, _>("quick_commands")).unwrap_or_default(),
|
||||
personality: row.get("personality"),
|
||||
communication_style: row.get("communication_style"),
|
||||
emoji: row.get("emoji"),
|
||||
version: row.get("version"),
|
||||
source_id: row.get("source_id"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Row type for agent_template queries (avoids multi-line turbofish parsing issues)
|
||||
type AgentTemplateRow = (String, String, Option<String>, String, String, Option<String>, Option<String>, String, String, Option<f64>, Option<i32>, String, String, i32, String, String);
|
||||
|
||||
/// 创建 Agent 模板
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn create_template(
|
||||
db: &PgPool,
|
||||
name: &str,
|
||||
@@ -34,20 +58,47 @@ pub async fn create_template(
|
||||
temperature: Option<f64>,
|
||||
max_tokens: Option<i32>,
|
||||
visibility: &str,
|
||||
// Extended fields
|
||||
soul_content: Option<&str>,
|
||||
scenarios: Option<&[String]>,
|
||||
welcome_message: Option<&str>,
|
||||
quick_commands: Option<&[serde_json::Value]>,
|
||||
personality: Option<&str>,
|
||||
communication_style: Option<&str>,
|
||||
emoji: Option<&str>,
|
||||
source_id: Option<&str>,
|
||||
) -> SaasResult<AgentTemplateInfo> {
|
||||
let id = uuid::Uuid::new_v4().to_string();
|
||||
let now = chrono::Utc::now().to_rfc3339();
|
||||
let tools_json = serde_json::to_string(tools).unwrap_or_else(|_| "[]".to_string());
|
||||
let caps_json = serde_json::to_string(capabilities).unwrap_or_else(|_| "[]".to_string());
|
||||
let scenarios_json = serde_json::to_string(&scenarios.unwrap_or(&[])).unwrap_or_else(|_| "[]".to_string());
|
||||
let quick_commands_json = serde_json::to_string(&quick_commands.unwrap_or(&[])).unwrap_or_else(|_| "[]".to_string());
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO agent_templates (id, name, description, category, source, model, system_prompt,
|
||||
tools, capabilities, temperature, max_tokens, visibility, status, current_version, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, 'active', 1, $13, $13)"
|
||||
&format!("INSERT INTO agent_templates ({}) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,'active',1,$13,$13,$14,$15,$16,$17,$18,$19,$20,1,$21)", SELECT_COLUMNS)
|
||||
)
|
||||
.bind(&id).bind(name).bind(description).bind(category).bind(source)
|
||||
.bind(model).bind(system_prompt).bind(&tools_json).bind(&caps_json)
|
||||
.bind(temperature).bind(max_tokens).bind(visibility).bind(&now)
|
||||
.bind(&id) // $1 id
|
||||
.bind(name) // $2 name
|
||||
.bind(description) // $3 description
|
||||
.bind(category) // $4 category
|
||||
.bind(source) // $5 source
|
||||
.bind(model) // $6 model
|
||||
.bind(system_prompt) // $7 system_prompt
|
||||
.bind(&tools_json) // $8 tools
|
||||
.bind(&caps_json) // $9 capabilities
|
||||
.bind(temperature) // $10 temperature
|
||||
.bind(max_tokens) // $11 max_tokens
|
||||
.bind(visibility) // $12 visibility
|
||||
.bind(&now) // $13 created_at / updated_at
|
||||
.bind(soul_content) // $14 soul_content
|
||||
.bind(&scenarios_json) // $15 scenarios
|
||||
.bind(welcome_message) // $16 welcome_message
|
||||
.bind(&quick_commands_json) // $17 quick_commands
|
||||
.bind(personality) // $18 personality
|
||||
.bind(communication_style) // $19 communication_style
|
||||
.bind(emoji) // $20 emoji
|
||||
.bind(source_id) // $21 source_id
|
||||
.execute(db).await.map_err(|e| {
|
||||
if e.to_string().contains("unique") {
|
||||
SaasError::AlreadyExists(format!("Agent 模板 '{}' 已存在", name))
|
||||
@@ -61,14 +112,12 @@ pub async fn create_template(
|
||||
|
||||
/// 获取单个模板
|
||||
pub async fn get_template(db: &PgPool, id: &str) -> SaasResult<AgentTemplateInfo> {
|
||||
let row: Option<AgentTemplateRow> = sqlx::query_as(
|
||||
"SELECT id, name, description, category, source, model, system_prompt,
|
||||
tools, capabilities, temperature, max_tokens, visibility, status,
|
||||
current_version, created_at, updated_at
|
||||
FROM agent_templates WHERE id = $1"
|
||||
let row = sqlx::query(
|
||||
&format!("SELECT {} FROM agent_templates WHERE id = $1", SELECT_COLUMNS)
|
||||
).bind(id).fetch_optional(db).await?;
|
||||
|
||||
row.map(row_to_template)
|
||||
row.as_ref()
|
||||
.map(row_to_template)
|
||||
.ok_or_else(|| SaasError::NotFound(format!("Agent 模板 {} 不存在", id)))
|
||||
}
|
||||
|
||||
@@ -83,20 +132,21 @@ pub async fn list_templates(
|
||||
let page_size = query.page_size.unwrap_or(20).min(100);
|
||||
let offset = ((page - 1) * page_size) as i64;
|
||||
|
||||
let count_sql = "SELECT COUNT(*) FROM agent_templates WHERE ($1 IS NULL OR category = $1) AND ($2 IS NULL OR source = $2) AND ($3 IS NULL OR visibility = $3) AND ($4 IS NULL OR status = $4)";
|
||||
let data_sql = "SELECT id, name, description, category, source, model, system_prompt,
|
||||
tools, capabilities, temperature, max_tokens, visibility, status,
|
||||
current_version, created_at, updated_at
|
||||
FROM agent_templates WHERE ($1 IS NULL OR category = $1) AND ($2 IS NULL OR source = $2) AND ($3 IS NULL OR visibility = $3) AND ($4 IS NULL OR status = $4) ORDER BY created_at DESC LIMIT $5 OFFSET $6";
|
||||
let where_clause = "WHERE ($1 IS NULL OR category = $1) AND ($2 IS NULL OR source = $2) AND ($3 IS NULL OR visibility = $3) AND ($4 IS NULL OR status = $4)";
|
||||
let count_sql = format!("SELECT COUNT(*) FROM agent_templates {}", where_clause);
|
||||
let data_sql = format!(
|
||||
"SELECT {} FROM agent_templates {} ORDER BY created_at DESC LIMIT $5 OFFSET $6",
|
||||
SELECT_COLUMNS, where_clause
|
||||
);
|
||||
|
||||
let total: i64 = sqlx::query_scalar(count_sql)
|
||||
let total: i64 = sqlx::query_scalar(&count_sql)
|
||||
.bind(&query.category)
|
||||
.bind(&query.source)
|
||||
.bind(&query.visibility)
|
||||
.bind(&query.status)
|
||||
.fetch_one(db).await?;
|
||||
|
||||
let rows: Vec<AgentTemplateRow> = sqlx::query_as(data_sql)
|
||||
let rows = sqlx::query(&data_sql)
|
||||
.bind(&query.category)
|
||||
.bind(&query.source)
|
||||
.bind(&query.visibility)
|
||||
@@ -104,14 +154,37 @@ pub async fn list_templates(
|
||||
.bind(page_size as i64)
|
||||
.bind(offset)
|
||||
.fetch_all(db).await?;
|
||||
let items = rows.into_iter().map(row_to_template).collect();
|
||||
let items = rows.iter().map(|r| row_to_template(r)).collect();
|
||||
|
||||
Ok(crate::common::PaginatedResponse { items, total, page, page_size })
|
||||
}
|
||||
|
||||
/// 列出可用模板 (status='active' AND visibility='public', 轻量级)
|
||||
pub async fn list_available(db: &PgPool) -> SaasResult<Vec<AvailableAgentTemplateInfo>> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, name, category, emoji, description, source_id \
|
||||
FROM agent_templates \
|
||||
WHERE status = 'active' AND visibility = 'public' \
|
||||
ORDER BY category, name"
|
||||
).fetch_all(db).await?;
|
||||
|
||||
Ok(rows.iter().map(|r| {
|
||||
AvailableAgentTemplateInfo {
|
||||
id: r.get("id"),
|
||||
name: r.get("name"),
|
||||
category: r.get("category"),
|
||||
emoji: r.get("emoji"),
|
||||
description: r.get("description"),
|
||||
source_id: r.get("source_id"),
|
||||
}
|
||||
}).collect())
|
||||
}
|
||||
|
||||
/// 更新模板
|
||||
/// COALESCE pattern: all updatable fields in a single static SQL.
|
||||
/// NULL parameters leave the column unchanged.
|
||||
/// source_id and version are immutable.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn update_template(
|
||||
db: &PgPool,
|
||||
id: &str,
|
||||
@@ -124,6 +197,14 @@ pub async fn update_template(
|
||||
max_tokens: Option<i32>,
|
||||
visibility: Option<&str>,
|
||||
status: Option<&str>,
|
||||
// Extended fields
|
||||
soul_content: Option<&str>,
|
||||
scenarios: Option<&[String]>,
|
||||
welcome_message: Option<&str>,
|
||||
quick_commands: Option<&[serde_json::Value]>,
|
||||
personality: Option<&str>,
|
||||
communication_style: Option<&str>,
|
||||
emoji: Option<&str>,
|
||||
) -> SaasResult<AgentTemplateInfo> {
|
||||
// Confirm existence
|
||||
get_template(db, id).await?;
|
||||
@@ -133,6 +214,8 @@ pub async fn update_template(
|
||||
// Serialize JSON fields upfront so we can bind Option<&str> consistently
|
||||
let tools_json = tools.map(|t| serde_json::to_string(t).unwrap_or_else(|_| "[]".to_string()));
|
||||
let caps_json = capabilities.map(|c| serde_json::to_string(c).unwrap_or_else(|_| "[]".to_string()));
|
||||
let scenarios_json = scenarios.map(|s| serde_json::to_string(s).unwrap_or_else(|_| "[]".to_string()));
|
||||
let quick_commands_json = quick_commands.map(|qc| serde_json::to_string(qc).unwrap_or_else(|_| "[]".to_string()));
|
||||
|
||||
sqlx::query(
|
||||
"UPDATE agent_templates SET
|
||||
@@ -145,20 +228,34 @@ pub async fn update_template(
|
||||
max_tokens = COALESCE($7, max_tokens),
|
||||
visibility = COALESCE($8, visibility),
|
||||
status = COALESCE($9, status),
|
||||
updated_at = $10
|
||||
WHERE id = $11"
|
||||
updated_at = $10,
|
||||
soul_content = COALESCE($11, soul_content),
|
||||
scenarios = COALESCE($12, scenarios),
|
||||
welcome_message = COALESCE($13, welcome_message),
|
||||
quick_commands = COALESCE($14, quick_commands),
|
||||
personality = COALESCE($15, personality),
|
||||
communication_style = COALESCE($16, communication_style),
|
||||
emoji = COALESCE($17, emoji)
|
||||
WHERE id = $18"
|
||||
)
|
||||
.bind(description)
|
||||
.bind(model)
|
||||
.bind(system_prompt)
|
||||
.bind(tools_json.as_deref())
|
||||
.bind(caps_json.as_deref())
|
||||
.bind(temperature)
|
||||
.bind(max_tokens)
|
||||
.bind(visibility)
|
||||
.bind(status)
|
||||
.bind(&now)
|
||||
.bind(id)
|
||||
.bind(description) // $1
|
||||
.bind(model) // $2
|
||||
.bind(system_prompt) // $3
|
||||
.bind(tools_json.as_deref()) // $4
|
||||
.bind(caps_json.as_deref()) // $5
|
||||
.bind(temperature) // $6
|
||||
.bind(max_tokens) // $7
|
||||
.bind(visibility) // $8
|
||||
.bind(status) // $9
|
||||
.bind(&now) // $10
|
||||
.bind(soul_content) // $11
|
||||
.bind(scenarios_json.as_deref()) // $12
|
||||
.bind(welcome_message) // $13
|
||||
.bind(quick_commands_json.as_deref()) // $14
|
||||
.bind(personality) // $15
|
||||
.bind(communication_style) // $16
|
||||
.bind(emoji) // $17
|
||||
.bind(id) // $18
|
||||
.execute(db).await?;
|
||||
|
||||
get_template(db, id).await
|
||||
@@ -166,5 +263,6 @@ pub async fn update_template(
|
||||
|
||||
/// 归档模板
|
||||
pub async fn archive_template(db: &PgPool, id: &str) -> SaasResult<AgentTemplateInfo> {
|
||||
update_template(db, id, None, None, None, None, None, None, None, None, Some("archived")).await
|
||||
update_template(db, id, None, None, None, None, None, None, None, None, Some("archived"),
|
||||
None, None, None, None, None, None, None).await
|
||||
}
|
||||
|
||||
@@ -22,6 +22,16 @@ pub struct AgentTemplateInfo {
|
||||
pub current_version: i32,
|
||||
pub created_at: String,
|
||||
pub updated_at: String,
|
||||
// Extended fields (migration 20260331000002)
|
||||
pub soul_content: Option<String>,
|
||||
pub scenarios: Vec<String>,
|
||||
pub welcome_message: Option<String>,
|
||||
pub quick_commands: Vec<serde_json::Value>,
|
||||
pub personality: Option<String>,
|
||||
pub communication_style: Option<String>,
|
||||
pub emoji: Option<String>,
|
||||
pub version: i32,
|
||||
pub source_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -37,6 +47,15 @@ pub struct CreateAgentTemplateRequest {
|
||||
pub temperature: Option<f64>,
|
||||
pub max_tokens: Option<i32>,
|
||||
pub visibility: Option<String>,
|
||||
// Extended fields
|
||||
pub soul_content: Option<String>,
|
||||
pub scenarios: Option<Vec<String>>,
|
||||
pub welcome_message: Option<String>,
|
||||
pub quick_commands: Option<Vec<serde_json::Value>>,
|
||||
pub personality: Option<String>,
|
||||
pub communication_style: Option<String>,
|
||||
pub emoji: Option<String>,
|
||||
pub source_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -50,6 +69,14 @@ pub struct UpdateAgentTemplateRequest {
|
||||
pub max_tokens: Option<i32>,
|
||||
pub visibility: Option<String>,
|
||||
pub status: Option<String>,
|
||||
// Extended fields (source_id and version are immutable)
|
||||
pub soul_content: Option<String>,
|
||||
pub scenarios: Option<Vec<String>>,
|
||||
pub welcome_message: Option<String>,
|
||||
pub quick_commands: Option<Vec<serde_json::Value>>,
|
||||
pub personality: Option<String>,
|
||||
pub communication_style: Option<String>,
|
||||
pub emoji: Option<String>,
|
||||
}
|
||||
|
||||
// --- List ---
|
||||
@@ -63,3 +90,16 @@ pub struct AgentTemplateListQuery {
|
||||
pub page: Option<u32>,
|
||||
pub page_size: Option<u32>,
|
||||
}
|
||||
|
||||
// --- Available (lightweight) ---
|
||||
|
||||
/// Lightweight template summary for the /available endpoint
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AvailableAgentTemplateInfo {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub category: String,
|
||||
pub emoji: Option<String>,
|
||||
pub description: Option<String>,
|
||||
pub source_id: Option<String>,
|
||||
}
|
||||
|
||||
@@ -111,8 +111,8 @@ pub async fn register(
|
||||
let now = chrono::Utc::now().to_rfc3339();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO accounts (id, username, email, password_hash, display_name, role, status, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'active', $7, $7)"
|
||||
"INSERT INTO accounts (id, username, email, password_hash, display_name, role, status, created_at, updated_at, llm_routing)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, 'active', $7, $7, 'local')"
|
||||
)
|
||||
.bind(&account_id)
|
||||
.bind(&req.username)
|
||||
@@ -159,6 +159,7 @@ pub async fn register(
|
||||
status: "active".into(),
|
||||
totp_enabled: false,
|
||||
created_at: now,
|
||||
llm_routing: "local".into(),
|
||||
},
|
||||
};
|
||||
let jar = set_auth_cookies(jar, &resp.token, &refresh_token);
|
||||
@@ -176,7 +177,7 @@ pub async fn login(
|
||||
let row: Option<AccountLoginRow> =
|
||||
sqlx::query_as(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled,
|
||||
password_hash, totp_secret, created_at
|
||||
password_hash, totp_secret, created_at, llm_routing
|
||||
FROM accounts WHERE username = $1 OR email = $1"
|
||||
)
|
||||
.bind(&req.username)
|
||||
@@ -245,6 +246,7 @@ pub async fn login(
|
||||
account: AccountPublic {
|
||||
id: r.id, username: r.username, email: r.email, display_name: r.display_name,
|
||||
role: r.role, status: r.status, totp_enabled: r.totp_enabled, created_at: r.created_at,
|
||||
llm_routing: r.llm_routing,
|
||||
},
|
||||
};
|
||||
let jar = set_auth_cookies(jar, &resp.token, &refresh_token);
|
||||
@@ -349,7 +351,7 @@ pub async fn me(
|
||||
) -> SaasResult<Json<AccountPublic>> {
|
||||
let row: Option<AccountAuthRow> =
|
||||
sqlx::query_as(
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, created_at
|
||||
"SELECT id, username, email, display_name, role, status, totp_enabled, created_at, llm_routing
|
||||
FROM accounts WHERE id = $1"
|
||||
)
|
||||
.bind(&ctx.account_id)
|
||||
@@ -361,6 +363,7 @@ pub async fn me(
|
||||
Ok(Json(AccountPublic {
|
||||
id: r.id, username: r.username, email: r.email, display_name: r.display_name,
|
||||
role: r.role, status: r.status, totp_enabled: r.totp_enabled, created_at: r.created_at,
|
||||
llm_routing: r.llm_routing,
|
||||
}))
|
||||
}
|
||||
|
||||
|
||||
@@ -45,6 +45,7 @@ pub struct AccountPublic {
|
||||
pub status: String,
|
||||
pub totp_enabled: bool,
|
||||
pub created_at: String,
|
||||
pub llm_routing: String,
|
||||
}
|
||||
|
||||
/// 认证上下文 (注入到 request extensions)
|
||||
|
||||
@@ -4,7 +4,7 @@ use sqlx::postgres::PgPoolOptions;
|
||||
use sqlx::PgPool;
|
||||
use crate::error::SaasResult;
|
||||
|
||||
const SCHEMA_VERSION: i32 = 7;
|
||||
const SCHEMA_VERSION: i32 = 11;
|
||||
|
||||
/// 初始化数据库
|
||||
pub async fn init_db(database_url: &str) -> SaasResult<PgPool> {
|
||||
@@ -548,20 +548,84 @@ async fn seed_demo_data(pool: &PgPool) -> SaasResult<()> {
|
||||
}
|
||||
|
||||
// ===== 6. Agent Templates =====
|
||||
let agent_templates = [
|
||||
("demo-agent-coder", "Code Assistant", "A helpful coding assistant that can write, review, and debug code", "coding", "demo-openai", "gpt-4o", "You are an expert coding assistant. Help users write clean, efficient code.", "[\"code_search\",\"code_edit\",\"terminal\"]", "[\"code_generation\",\"code_review\",\"debugging\"]", 0.3, 8192),
|
||||
("demo-agent-writer", "Content Writer", "Creative writing and content generation agent", "creative", "demo-anthropic", "claude-sonnet-4-20250514", "You are a skilled content writer. Create engaging, well-structured content.", "[\"web_search\",\"document_edit\"]", "[\"writing\",\"editing\",\"summarization\"]", 0.7, 4096),
|
||||
("demo-agent-analyst", "Data Analyst", "Data analysis and visualization specialist", "analytics", "demo-openai", "gpt-4o", "You are a data analysis expert. Help users analyze data and create visualizations.", "[\"code_execution\",\"data_access\"]", "[\"data_analysis\",\"visualization\",\"statistics\"]", 0.2, 8192),
|
||||
("demo-agent-researcher", "Research Agent", "Deep research and information synthesis agent", "research", "demo-google", "gemini-2.5-pro", "You are a research specialist. Conduct thorough research and synthesize findings.", "[\"web_search\",\"document_access\"]", "[\"research\",\"synthesis\",\"citation\"]", 0.4, 16384),
|
||||
("demo-agent-translator", "Translator", "Multi-language translation agent", "utility", "demo-deepseek", "deepseek-chat", "You are a professional translator. Translate text accurately while preserving tone and context.", "[]", "[\"translation\",\"localization\"]", 0.3, 4096),
|
||||
// Each tuple: (id, name, description, category, model, system_prompt, tools, capabilities, temperature, max_tokens,
|
||||
// soul_content, scenarios, welcome_message, quick_commands, personality, communication_style, emoji, source_id)
|
||||
let agent_templates: [(&str, &str, &str, &str, &str, &str, &str, &str, f64, i32,
|
||||
&str, &str, &str, &str, &str, &str, &str, &str); 6] = [
|
||||
("demo-agent-coder", "Code Assistant", "A helpful coding assistant that can write, review, and debug code",
|
||||
"coding", "gpt-4o",
|
||||
"You are an expert coding assistant. Help users write clean, efficient code.",
|
||||
"[\"code_search\",\"code_edit\",\"terminal\"]", "[\"code_generation\",\"code_review\",\"debugging\"]",
|
||||
0.3, 8192,
|
||||
"你是一位资深全栈工程师,擅长代码编写、评审和调试。你追求简洁高效的代码风格,注重可读性和可维护性。",
|
||||
"[\"代码编写\",\"代码审查\",\"Bug调试\",\"架构设计\"]",
|
||||
"你好!我是你的编程助手,有什么代码问题可以随时问我。",
|
||||
"[{\"label\":\"写一个函数\",\"command\":\"帮我写一个\"},{\"label\":\"审查代码\",\"command\":\"请审查这段代码\"},{\"label\":\"解释代码\",\"command\":\"解释一下这段代码\"}]",
|
||||
"professional", "concise", "💻", "code-assistant-v1"),
|
||||
("demo-agent-writer", "Content Writer", "Creative writing and content generation agent",
|
||||
"creative", "claude-sonnet-4-20250514",
|
||||
"You are a skilled content writer. Create engaging, well-structured content.",
|
||||
"[\"web_search\",\"document_edit\"]", "[\"writing\",\"editing\",\"summarization\"]",
|
||||
0.7, 4096,
|
||||
"你是一位创意写作专家,擅长各类文案创作、内容编辑和摘要生成。你善于把握文字的节奏和情感表达。",
|
||||
"[\"文章写作\",\"文案创作\",\"内容编辑\",\"摘要生成\"]",
|
||||
"你好!我是你的内容创作助手,需要写点什么?",
|
||||
"[{\"label\":\"写一篇文章\",\"command\":\"帮我写一篇关于\"},{\"label\":\"润色文案\",\"command\":\"帮我优化这段文字\"},{\"label\":\"生成摘要\",\"command\":\"请为以下内容生成摘要\"}]",
|
||||
"creative", "warm", "✍️", "content-writer-v1"),
|
||||
("demo-agent-analyst", "Data Analyst", "Data analysis and visualization specialist",
|
||||
"analytics", "gpt-4o",
|
||||
"You are a data analysis expert. Help users analyze data and create visualizations.",
|
||||
"[\"code_execution\",\"data_access\"]", "[\"data_analysis\",\"visualization\",\"statistics\"]",
|
||||
0.2, 8192,
|
||||
"你是一位数据分析专家,擅长统计分析、数据可视化和洞察提取。你善于从数据中发现有价值的模式和趋势。",
|
||||
"[\"数据分析\",\"可视化报表\",\"统计建模\",\"趋势预测\"]",
|
||||
"你好!我是你的数据分析助手,请分享你的数据或问题。",
|
||||
"[{\"label\":\"分析数据\",\"command\":\"帮我分析这组数据\"},{\"label\":\"生成图表\",\"command\":\"为以下数据生成图表\"},{\"label\":\"统计摘要\",\"command\":\"请给出统计摘要\"}]",
|
||||
"analytical", "structured", "📊", "data-analyst-v1"),
|
||||
("demo-agent-researcher", "Research Agent", "Deep research and information synthesis agent",
|
||||
"research", "gemini-2.5-pro",
|
||||
"You are a research specialist. Conduct thorough research and synthesize findings.",
|
||||
"[\"web_search\",\"document_access\"]", "[\"research\",\"synthesis\",\"citation\"]",
|
||||
0.4, 16384,
|
||||
"你是一位深度研究专家,擅长信息检索、文献综述和知识综合。你注重信息来源的可靠性和引用的准确性。",
|
||||
"[\"深度研究\",\"文献综述\",\"信息检索\",\"知识综合\"]",
|
||||
"你好!我是你的研究助手,需要我帮你调查什么话题?",
|
||||
"[{\"label\":\"深度研究\",\"command\":\"请深入研究\"},{\"label\":\"文献综述\",\"command\":\"帮我写一份文献综述\"},{\"label\":\"对比分析\",\"command\":\"请对比分析\"}]",
|
||||
"scholarly", "detailed", "🔬", "research-agent-v1"),
|
||||
("demo-agent-translator", "Translator", "Multi-language translation agent",
|
||||
"utility", "deepseek-chat",
|
||||
"You are a professional translator. Translate text accurately while preserving tone and context.",
|
||||
"[]", "[\"translation\",\"localization\"]",
|
||||
0.3, 4096,
|
||||
"你是一位专业翻译,精通中英日韩等多种语言。你注重准确传达原文含义,同时保持目标语言的自然流畅。",
|
||||
"[\"文本翻译\",\"文档本地化\",\"术语管理\",\"双语校对\"]",
|
||||
"你好!我是你的翻译助手,请发送需要翻译的文本。",
|
||||
"[{\"label\":\"中译英\",\"command\":\"请将以下中文翻译为英文\"},{\"label\":\"英译中\",\"command\":\"请将以下英文翻译为中文\"},{\"label\":\"润色译文\",\"command\":\"请润色这段翻译\"}]",
|
||||
"professional", "precise", "🌐", "translator-v1"),
|
||||
("demo-agent-medical", "医疗助手", "Clinical decision support and medical literature assistant",
|
||||
"healthcare", "gpt-4o",
|
||||
"You are a medical AI assistant. Help with clinical decision support, literature retrieval, and medication reference. Always remind users that your suggestions do not replace professional medical advice.",
|
||||
"[\"web_search\",\"document_access\"]", "[\"clinical_support\",\"literature_search\",\"diagnosis_assist\",\"medication_ref\"]",
|
||||
0.2, 16384,
|
||||
"你是一位医疗AI助手,具备丰富的临床知识。你辅助临床决策、文献检索和用药参考。\n\n重要提示:\n- 你的建议仅供医疗专业人员参考\n- 不能替代正式的医疗诊断\n- 涉及患者安全的问题需格外谨慎\n- 始终建议用户咨询专业医生",
|
||||
"[\"临床辅助\",\"文献检索\",\"诊断建议\",\"用药参考\"]",
|
||||
"你好!我是你的医疗AI助手。我可以帮助你进行临床决策支持、医学文献检索和用药参考。请注意,我的建议仅供参考,不能替代专业医疗意见。",
|
||||
"[{\"label\":\"药物查询\",\"command\":\"查询药物\"},{\"label\":\"文献检索\",\"command\":\"检索相关文献\"},{\"label\":\"临床辅助\",\"command\":\"辅助临床分析\"},{\"label\":\"诊断建议\",\"command\":\"请给出诊断建议\"}]",
|
||||
"professional", "cautious", "🏥", "medical-assistant-v1"),
|
||||
];
|
||||
for (id, name, desc, cat, _pid, model, prompt, tools, caps, temp, max_tok) in &agent_templates {
|
||||
for (id, name, desc, cat, model, prompt, tools, caps, temp, max_tok,
|
||||
soul, scenarios, welcome, quick_cmds, personality, comm_style, emoji, source_id) in &agent_templates {
|
||||
let ts = now.to_rfc3339();
|
||||
sqlx::query(
|
||||
"INSERT INTO agent_templates (id, name, description, category, source, model, system_prompt, tools, capabilities, temperature, max_tokens, visibility, status, current_version, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, 'custom', $5, $6, $7, $8, $9, $10, 'public', 'active', 1, $11, $11) ON CONFLICT (id) DO NOTHING"
|
||||
"INSERT INTO agent_templates (id, name, description, category, source, model, system_prompt, tools, capabilities,
|
||||
temperature, max_tokens, visibility, status, current_version, created_at, updated_at,
|
||||
soul_content, scenarios, welcome_message, quick_commands, personality, communication_style, emoji, version, source_id)
|
||||
VALUES ($1,$2,$3,$4,'custom',$5,$6,$7,$8,$9,$10,'public','active',1,$11,$11,$12,$13,$14,$15,$16,$17,$18,1,$19)
|
||||
ON CONFLICT (id) DO NOTHING"
|
||||
).bind(id).bind(name).bind(desc).bind(cat).bind(model).bind(prompt).bind(tools).bind(caps)
|
||||
.bind(*temp).bind(*max_tok).bind(&ts)
|
||||
.bind(soul).bind(scenarios).bind(welcome).bind(quick_cmds)
|
||||
.bind(personality).bind(comm_style).bind(emoji).bind(source_id)
|
||||
.execute(pool).await?;
|
||||
}
|
||||
|
||||
@@ -687,7 +751,7 @@ async fn seed_demo_data(pool: &PgPool) -> SaasResult<()> {
|
||||
}
|
||||
}
|
||||
|
||||
tracing::info!("Demo data seeded: 5 providers, 12 models, 5 keys, ~1500 usage records, 20 relay tasks, 5 agent templates, 12 configs, 3 API tokens, 50 logs, 112 telemetry reports");
|
||||
tracing::info!("Demo data seeded: 5 providers, 12 models, 5 keys, ~1500 usage records, 20 relay tasks, 6 agent templates, 12 configs, 3 API tokens, 50 logs, 112 telemetry reports");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
@@ -129,18 +129,13 @@ pub async fn public_rate_limit_middleware(
|
||||
"public_rate_limit", "请求频率超限,请稍后再试")
|
||||
};
|
||||
|
||||
// 从连接信息或 header 提取客户端 IP
|
||||
// 从连接信息提取客户端 IP
|
||||
// 安全策略: 仅使用 TCP 连接层 IP,不信任 X-Forwarded-For / X-Real-IP 头
|
||||
// 反向代理场景下应使用 ConnectInfo<SocketAddr> 或在代理层做限流
|
||||
let client_ip = req.extensions()
|
||||
.get::<axum::extract::ConnectInfo<std::net::SocketAddr>>()
|
||||
.map(|ci| ci.0.ip().to_string())
|
||||
.unwrap_or_else(|| {
|
||||
req.headers()
|
||||
.get("x-real-ip")
|
||||
.or_else(|| req.headers().get("x-forwarded-for"))
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(|s| s.split(',').next().unwrap_or("unknown").trim().to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string())
|
||||
});
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
let key = format!("{}:{}", key_prefix, client_ip);
|
||||
let now = Instant::now();
|
||||
|
||||
@@ -14,6 +14,7 @@ pub struct AccountRow {
|
||||
pub totp_enabled: bool,
|
||||
pub last_login_at: Option<String>,
|
||||
pub created_at: String,
|
||||
pub llm_routing: String,
|
||||
}
|
||||
|
||||
/// accounts 表行 (不含 last_login_at,用于 auth/me 等场景)
|
||||
@@ -27,6 +28,7 @@ pub struct AccountAuthRow {
|
||||
pub status: String,
|
||||
pub totp_enabled: bool,
|
||||
pub created_at: String,
|
||||
pub llm_routing: String,
|
||||
}
|
||||
|
||||
/// Login 一次性查询行(合并用户信息 + password_hash + totp_secret)
|
||||
@@ -42,6 +44,7 @@ pub struct AccountLoginRow {
|
||||
pub password_hash: String,
|
||||
pub totp_secret: Option<String>,
|
||||
pub created_at: String,
|
||||
pub llm_routing: String,
|
||||
}
|
||||
|
||||
/// operation_logs 表行
|
||||
|
||||
@@ -10,7 +10,6 @@ pub struct ProviderKeySelectRow {
|
||||
pub priority: i32,
|
||||
pub max_rpm: Option<i64>,
|
||||
pub max_tpm: Option<i64>,
|
||||
pub quota_reset_interval: Option<String>,
|
||||
}
|
||||
|
||||
/// provider_keys 完整行 (用于列表查询)
|
||||
@@ -22,7 +21,6 @@ pub struct ProviderKeyRow {
|
||||
pub priority: i32,
|
||||
pub max_rpm: Option<i64>,
|
||||
pub max_tpm: Option<i64>,
|
||||
pub quota_reset_interval: Option<String>,
|
||||
pub is_active: bool,
|
||||
pub last_429_at: Option<String>,
|
||||
pub cooldown_until: Option<String>,
|
||||
|
||||
@@ -375,7 +375,6 @@ pub struct AddKeyRequest {
|
||||
pub priority: i32,
|
||||
pub max_rpm: Option<i64>,
|
||||
pub max_tpm: Option<i64>,
|
||||
pub quota_reset_interval: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn add_provider_key(
|
||||
@@ -406,7 +405,6 @@ pub async fn add_provider_key(
|
||||
let key_id = super::key_pool::add_provider_key(
|
||||
&state.db, &provider_id, &req.key_label, &encrypted_value,
|
||||
req.priority, req.max_rpm, req.max_tpm,
|
||||
req.quota_reset_interval.as_deref(),
|
||||
).await?;
|
||||
|
||||
log_operation(&state.db, &ctx.account_id, "provider_key.add", "provider_key", &key_id,
|
||||
|
||||
@@ -26,7 +26,6 @@ pub struct PoolKey {
|
||||
pub priority: i32,
|
||||
pub max_rpm: Option<i64>,
|
||||
pub max_tpm: Option<i64>,
|
||||
pub quota_reset_interval: Option<String>,
|
||||
}
|
||||
|
||||
/// Key 选择结果
|
||||
@@ -43,18 +42,18 @@ pub async fn select_best_key(db: &PgPool, provider_id: &str, enc_key: &[u8; 32])
|
||||
let current_minute = chrono::Utc::now().format("%Y-%m-%dT%H:%M").to_string();
|
||||
|
||||
// 单次查询: 活跃 Key + 当前分钟的 RPM/TPM 使用量 (LEFT JOIN)
|
||||
let rows: Vec<(String, String, i32, Option<i64>, Option<i64>, Option<String>, Option<i64>, Option<i64>)> =
|
||||
let rows: Vec<(String, String, i32, Option<i64>, Option<i64>, Option<i64>, Option<i64>)> =
|
||||
sqlx::query_as(
|
||||
"SELECT pk.id, pk.key_value, pk.priority, pk.max_rpm, pk.max_tpm, pk.quota_reset_interval,
|
||||
"SELECT pk.id, pk.key_value, pk.priority, pk.max_rpm, pk.max_tpm,
|
||||
uw.request_count, uw.token_count
|
||||
FROM provider_keys pk
|
||||
LEFT JOIN key_usage_window uw ON pk.id = uw.key_id AND uw.window_minute = $1
|
||||
WHERE pk.provider_id = $2 AND pk.is_active = TRUE
|
||||
AND (pk.cooldown_until IS NULL OR pk.cooldown_until <= $3)
|
||||
ORDER BY pk.priority ASC"
|
||||
ORDER BY pk.priority ASC, pk.last_used_at ASC NULLS FIRST"
|
||||
).bind(¤t_minute).bind(provider_id).bind(&now).fetch_all(db).await?;
|
||||
|
||||
for (id, key_value, priority, max_rpm, max_tpm, quota_reset_interval, req_count, token_count) in &rows {
|
||||
for (id, key_value, priority, max_rpm, max_tpm, req_count, token_count) in &rows {
|
||||
// RPM 检查
|
||||
if let Some(rpm_limit) = max_rpm {
|
||||
if *rpm_limit > 0 {
|
||||
@@ -86,7 +85,6 @@ pub async fn select_best_key(db: &PgPool, provider_id: &str, enc_key: &[u8; 32])
|
||||
priority: *priority,
|
||||
max_rpm: *max_rpm,
|
||||
max_tpm: *max_tpm,
|
||||
quota_reset_interval: quota_reset_interval.clone(),
|
||||
},
|
||||
key_id: id.clone(),
|
||||
});
|
||||
@@ -124,7 +122,6 @@ pub async fn select_best_key(db: &PgPool, provider_id: &str, enc_key: &[u8; 32])
|
||||
priority: 0,
|
||||
max_rpm: None,
|
||||
max_tpm: None,
|
||||
quota_reset_interval: None,
|
||||
},
|
||||
key_id: "provider-fallback".to_string(),
|
||||
});
|
||||
@@ -165,6 +162,12 @@ pub async fn record_key_usage(
|
||||
.bind(tokens).bind(&chrono::Utc::now().to_rfc3339()).bind(key_id)
|
||||
.execute(db).await?;
|
||||
|
||||
// 更新最后使用时间 (LRU 排序依据)
|
||||
sqlx::query("UPDATE provider_keys SET last_used_at = NOW() WHERE id = $1")
|
||||
.bind(key_id)
|
||||
.execute(db)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -206,7 +209,7 @@ pub async fn list_provider_keys(
|
||||
) -> SaasResult<Vec<serde_json::Value>> {
|
||||
let rows: Vec<ProviderKeyRow> =
|
||||
sqlx::query_as(
|
||||
"SELECT id, provider_id, key_label, priority, max_rpm, max_tpm, quota_reset_interval, is_active,
|
||||
"SELECT id, provider_id, key_label, priority, max_rpm, max_tpm, is_active,
|
||||
last_429_at, cooldown_until, total_requests, total_tokens, created_at, updated_at
|
||||
FROM provider_keys WHERE provider_id = $1 ORDER BY priority ASC"
|
||||
).bind(provider_id).fetch_all(db).await?;
|
||||
@@ -219,7 +222,6 @@ pub async fn list_provider_keys(
|
||||
"priority": r.priority,
|
||||
"max_rpm": r.max_rpm,
|
||||
"max_tpm": r.max_tpm,
|
||||
"quota_reset_interval": r.quota_reset_interval,
|
||||
"is_active": r.is_active,
|
||||
"last_429_at": r.last_429_at,
|
||||
"cooldown_until": r.cooldown_until,
|
||||
@@ -240,17 +242,16 @@ pub async fn add_provider_key(
|
||||
priority: i32,
|
||||
max_rpm: Option<i64>,
|
||||
max_tpm: Option<i64>,
|
||||
quota_reset_interval: Option<&str>,
|
||||
) -> SaasResult<String> {
|
||||
let id = uuid::Uuid::new_v4().to_string();
|
||||
let now = chrono::Utc::now().to_rfc3339();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO provider_keys (id, provider_id, key_label, key_value, priority, max_rpm, max_tpm, quota_reset_interval, is_active, total_requests, total_tokens, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, TRUE, 0, 0, $9, $9)"
|
||||
"INSERT INTO provider_keys (id, provider_id, key_label, key_value, priority, max_rpm, max_tpm, is_active, total_requests, total_tokens, created_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, TRUE, 0, 0, $8, $8)"
|
||||
)
|
||||
.bind(&id).bind(provider_id).bind(key_label).bind(key_value)
|
||||
.bind(priority).bind(max_rpm).bind(max_tpm).bind(quota_reset_interval).bind(&now)
|
||||
.bind(priority).bind(max_rpm).bind(max_tpm).bind(&now)
|
||||
.execute(db).await?;
|
||||
|
||||
tracing::info!("Added key '{}' to provider {}", key_label, provider_id);
|
||||
|
||||
@@ -243,6 +243,8 @@ pub async fn execute_relay(
|
||||
|
||||
let mut req_builder = client.post(&url)
|
||||
.header("Content-Type", "application/json")
|
||||
// Kimi Coding Plan 等 Coding Agent API 需要识别 User-Agent 为 coding agent
|
||||
.header("User-Agent", "claude-code/1.0")
|
||||
.body(request_body.to_string());
|
||||
|
||||
if let Some(ref key) = api_key {
|
||||
|
||||
@@ -74,6 +74,9 @@ pub fn start_scheduler(config: &SchedulerConfig, db: PgPool, dispatcher: WorkerD
|
||||
|
||||
/// 内置的 DB 清理任务(不通过 Worker,直接执行 SQL)
|
||||
pub fn start_db_cleanup_tasks(db: PgPool) {
|
||||
let db_devices = db.clone();
|
||||
let db_key_pool = db.clone();
|
||||
|
||||
// 每 24 小时清理不活跃设备
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(Duration::from_secs(86400));
|
||||
@@ -86,7 +89,7 @@ pub fn start_db_cleanup_tasks(db: PgPool) {
|
||||
let cutoff = (chrono::Utc::now() - chrono::Duration::days(90)).to_rfc3339();
|
||||
cutoff
|
||||
})
|
||||
.execute(&db)
|
||||
.execute(&db_devices)
|
||||
.await
|
||||
{
|
||||
Ok(result) => {
|
||||
@@ -98,6 +101,27 @@ pub fn start_db_cleanup_tasks(db: PgPool) {
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// 每 24 小时清理过期的 key_usage_window 记录
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(Duration::from_secs(86400));
|
||||
loop {
|
||||
interval.tick().await;
|
||||
match sqlx::query(
|
||||
"DELETE FROM key_usage_window WHERE window_minute < to_char(NOW() - INTERVAL '24 hours', 'YYYY-MM-DD\"T\"HH24:MI')"
|
||||
)
|
||||
.execute(&db_key_pool)
|
||||
.await
|
||||
{
|
||||
Ok(result) => {
|
||||
if result.rows_affected() > 0 {
|
||||
tracing::info!("Cleaned up {} expired key_usage_window records (24h)", result.rows_affected());
|
||||
}
|
||||
}
|
||||
Err(e) => tracing::error!("Key usage window cleanup failed: {}", e),
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// 启动用户定时任务调度循环
|
||||
|
||||
110
deploy/nginx.conf
Normal file
110
deploy/nginx.conf
Normal file
@@ -0,0 +1,110 @@
|
||||
# ============================================================
|
||||
# ZCLAW SaaS Backend - Nginx Reverse Proxy Configuration
|
||||
# ============================================================
|
||||
# Prerequisites:
|
||||
# - SSL certificate (e.g., Let's Encrypt certbot)
|
||||
# - Nginx installed (apt install nginx)
|
||||
#
|
||||
# Installation:
|
||||
# sudo cp deploy/nginx.conf /etc/nginx/sites-available/zclaw-saas
|
||||
# sudo ln -s /etc/nginx/sites-available/zclaw-saas /etc/nginx/sites-enabled/
|
||||
# sudo nginx -t && sudo systemctl reload nginx
|
||||
#
|
||||
# Replace placeholders:
|
||||
# <YOUR_DOMAIN> - e.g., api.zclaw.com
|
||||
# <CERT_PATH> - e.g., /etc/letsencrypt/live/api.zclaw.com/fullchain.pem
|
||||
# <KEY_PATH> - e.g., /etc/letsencrypt/live/api.zclaw.com/privkey.pem
|
||||
# ============================================================
|
||||
|
||||
# ---- HTTP -> HTTPS Redirect ----
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
server_name <YOUR_DOMAIN>;
|
||||
|
||||
# Let's Encrypt challenge (keep if using certbot)
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
location / {
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
}
|
||||
|
||||
# ---- HTTPS Server ----
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
server_name <YOUR_DOMAIN>;
|
||||
|
||||
# ---- SSL Configuration ----
|
||||
ssl_certificate <CERT_PATH>;
|
||||
ssl_certificate_key <KEY_PATH>;
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384;
|
||||
ssl_prefer_server_ciphers off;
|
||||
ssl_session_cache shared:SSL:10m;
|
||||
ssl_session_timeout 1d;
|
||||
|
||||
# ---- Security Headers ----
|
||||
add_header X-Frame-Options DENY always;
|
||||
add_header X-Content-Type-Options nosniff always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains; preload" always;
|
||||
add_header Referrer-Policy strict-origin-when-cross-origin always;
|
||||
|
||||
# ---- Gzip Compression ----
|
||||
gzip on;
|
||||
gzip_types
|
||||
application/json
|
||||
application/javascript
|
||||
text/plain
|
||||
text/css
|
||||
text/xml
|
||||
text/javascript;
|
||||
gzip_min_length 256;
|
||||
gzip_proxied any;
|
||||
gzip_vary on;
|
||||
|
||||
# ---- Request Size Limit ----
|
||||
client_max_body_size 10m;
|
||||
|
||||
# ---- Proxy to ZCLAW SaaS Backend ----
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
# ---- WebSocket / SSE Support ----
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
|
||||
# ---- Standard Proxy Headers ----
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# ---- SSE Streaming Support ----
|
||||
# Disable buffering for Server-Sent Events
|
||||
proxy_buffering off;
|
||||
proxy_cache off;
|
||||
|
||||
# Long timeout for streaming responses (5 minutes)
|
||||
proxy_read_timeout 300s;
|
||||
proxy_send_timeout 300s;
|
||||
|
||||
# Disable request buffering for large payloads
|
||||
proxy_request_buffering off;
|
||||
}
|
||||
|
||||
# ---- Health Check Endpoint (no logging) ----
|
||||
location = /health {
|
||||
proxy_pass http://127.0.0.1:8080/health;
|
||||
access_log off;
|
||||
}
|
||||
|
||||
# ---- Logging ----
|
||||
access_log /var/log/nginx/zclaw-saas-access.log;
|
||||
error_log /var/log/nginx/zclaw-saas-error.log warn;
|
||||
}
|
||||
245
desktop/src-tauri/src/gateway/commands.rs
Normal file
245
desktop/src-tauri/src/gateway/commands.rs
Normal file
@@ -0,0 +1,245 @@
|
||||
use serde::Serialize;
|
||||
use serde_json::{json, Value};
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
use tauri::AppHandle;
|
||||
|
||||
use super::config::{
|
||||
approve_local_device_pairing, ensure_local_gateway_ready_for_tauri, read_local_gateway_auth,
|
||||
LocalGatewayAuth, LocalGatewayPairingApprovalResult, LocalGatewayPrepareResult,
|
||||
};
|
||||
use super::io::{parse_json_output, read_gateway_status, run_zclaw, LocalGatewayStatus};
|
||||
|
||||
/// Version payload returned by `zclaw_version`, serialized in camelCase
/// for the frontend.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct VersionResponse {
    // Version string from the CLI JSON, or "unknown" when absent.
    version: String,
    // Optional build metadata passed through from the CLI output.
    commit: Option<String>,
    build_date: Option<String>,
    // How the CLI was resolved ("bundled", "development", "override", "system").
    runtime_source: Option<String>,
    // Full parsed CLI output, kept for debugging / forward compatibility.
    raw: Value,
}
|
||||
|
||||
/// Process information structure for a single ZCLAW-managed process.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct ProcessInfo {
    // OS process id.
    pid: u32,
    name: String,
    // Status string from the CLI; "unknown" when the CLI omits it.
    status: String,
    // Optional resource metrics; absent when the CLI does not report them.
    cpu_percent: Option<f64>,
    memory_mb: Option<f64>,
    uptime_seconds: Option<u64>,
}
|
||||
|
||||
/// Process list response returned by `zclaw_process_list`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct ProcessListResponse {
    processes: Vec<ProcessInfo>,
    // Equals processes.len(); duplicated for frontend convenience.
    total_count: usize,
    runtime_source: Option<String>,
}
|
||||
|
||||
/// Process logs response returned by `zclaw_process_logs`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct ProcessLogsResponse {
    // The pid the logs were requested for, when one was given.
    pid: Option<u32>,
    // Joined log text (newline separated).
    logs: String,
    // Number of lines actually contained in `logs` (not the requested count).
    lines: usize,
    runtime_source: Option<String>,
}
|
||||
|
||||
/// Get ZCLAW Kernel status.
///
/// Thin Tauri command wrapper around `read_gateway_status`; returns the
/// parsed gateway status or an error string for the frontend.
#[tauri::command]
pub fn zclaw_status(app: AppHandle) -> Result<LocalGatewayStatus, String> {
    read_gateway_status(&app)
}
|
||||
|
||||
/// Start ZCLAW Kernel.
///
/// Ensures the local config allows Tauri origins, issues
/// `zclaw gateway start --json`, then re-reads the status.
#[tauri::command]
pub fn zclaw_start(app: AppHandle) -> Result<LocalGatewayStatus, String> {
    ensure_local_gateway_ready_for_tauri(&app)?;
    run_zclaw(&app, &["gateway", "start", "--json"])?;
    // Fixed delay so the gateway has a chance to come up before we probe.
    // NOTE(review): 800 ms is a heuristic — polling would be more robust.
    thread::sleep(Duration::from_millis(800));
    read_gateway_status(&app)
}
|
||||
|
||||
/// Stop ZCLAW Kernel.
///
/// Issues `zclaw gateway stop --json`, waits briefly, then reports status.
#[tauri::command]
pub fn zclaw_stop(app: AppHandle) -> Result<LocalGatewayStatus, String> {
    run_zclaw(&app, &["gateway", "stop", "--json"])?;
    // Give the gateway time to shut down before re-reading status.
    thread::sleep(Duration::from_millis(800));
    read_gateway_status(&app)
}
|
||||
|
||||
/// Restart ZCLAW Kernel.
///
/// Ensures Tauri origins first, then `zclaw gateway restart --json`.
#[tauri::command]
pub fn zclaw_restart(app: AppHandle) -> Result<LocalGatewayStatus, String> {
    ensure_local_gateway_ready_for_tauri(&app)?;
    run_zclaw(&app, &["gateway", "restart", "--json"])?;
    // Longer delay than start/stop: a restart has to go down and come back up.
    thread::sleep(Duration::from_millis(1200));
    read_gateway_status(&app)
}
|
||||
|
||||
/// Get local auth token from ZCLAW config.
///
/// Reads the gateway token straight from the on-disk TOML config; it does
/// not contact the running gateway.
#[tauri::command]
pub fn zclaw_local_auth() -> Result<LocalGatewayAuth, String> {
    read_local_gateway_auth()
}
|
||||
|
||||
/// Prepare ZCLAW for Tauri (update allowed origins).
///
/// Delegates to `ensure_local_gateway_ready_for_tauri`, which may rewrite
/// the on-disk config and restart a running gateway.
#[tauri::command]
pub fn zclaw_prepare_for_tauri(app: AppHandle) -> Result<LocalGatewayPrepareResult, String> {
    ensure_local_gateway_ready_for_tauri(&app)
}
|
||||
|
||||
/// Approve device pairing request.
///
/// Tauri command wrapper delegating to `approve_local_device_pairing`;
/// `url` optionally overrides the default gateway websocket URL.
#[tauri::command]
pub fn zclaw_approve_device_pairing(
    app: AppHandle,
    device_id: String,
    public_key_base64: String,
    url: Option<String>,
) -> Result<LocalGatewayPairingApprovalResult, String> {
    approve_local_device_pairing(&app, &device_id, &public_key_base64, url.as_deref())
}
|
||||
|
||||
/// Run ZCLAW doctor to diagnose issues.
///
/// Returns the raw stdout of `zclaw doctor --json`; parsing is left to the
/// frontend.
#[tauri::command]
pub fn zclaw_doctor(app: AppHandle) -> Result<String, String> {
    let result = run_zclaw(&app, &["doctor", "--json"])?;
    Ok(result.stdout)
}
|
||||
|
||||
/// List ZCLAW processes
|
||||
#[tauri::command]
|
||||
pub fn zclaw_process_list(app: AppHandle) -> Result<ProcessListResponse, String> {
|
||||
let result = run_zclaw(&app, &["process", "list", "--json"])?;
|
||||
|
||||
let raw = parse_json_output(&result.stdout).unwrap_or_else(|_| json!({"processes": []}));
|
||||
|
||||
let processes: Vec<ProcessInfo> = raw
|
||||
.get("processes")
|
||||
.and_then(Value::as_array)
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.filter_map(|p| {
|
||||
Some(ProcessInfo {
|
||||
pid: p.get("pid").and_then(Value::as_u64)?.try_into().ok()?,
|
||||
name: p.get("name").and_then(Value::as_str)?.to_string(),
|
||||
status: p
|
||||
.get("status")
|
||||
.and_then(Value::as_str)
|
||||
.unwrap_or("unknown")
|
||||
.to_string(),
|
||||
cpu_percent: p.get("cpuPercent").and_then(Value::as_f64),
|
||||
memory_mb: p.get("memoryMb").and_then(Value::as_f64),
|
||||
uptime_seconds: p.get("uptimeSeconds").and_then(Value::as_u64),
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(ProcessListResponse {
|
||||
total_count: processes.len(),
|
||||
processes,
|
||||
runtime_source: Some(result.runtime.source),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get ZCLAW process logs
|
||||
#[tauri::command]
|
||||
pub fn zclaw_process_logs(
|
||||
app: AppHandle,
|
||||
pid: Option<u32>,
|
||||
lines: Option<usize>,
|
||||
) -> Result<ProcessLogsResponse, String> {
|
||||
let line_count = lines.unwrap_or(100);
|
||||
let lines_str = line_count.to_string();
|
||||
|
||||
// Build owned strings first to avoid lifetime issues
|
||||
let args: Vec<String> = if let Some(pid_value) = pid {
|
||||
vec![
|
||||
"process".to_string(),
|
||||
"logs".to_string(),
|
||||
"--pid".to_string(),
|
||||
pid_value.to_string(),
|
||||
"--lines".to_string(),
|
||||
lines_str,
|
||||
"--json".to_string(),
|
||||
]
|
||||
} else {
|
||||
vec![
|
||||
"process".to_string(),
|
||||
"logs".to_string(),
|
||||
"--lines".to_string(),
|
||||
lines_str,
|
||||
"--json".to_string(),
|
||||
]
|
||||
};
|
||||
|
||||
// Convert to &str for the command
|
||||
let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
|
||||
let result = run_zclaw(&app, &args_refs)?;
|
||||
|
||||
// Parse the logs - could be JSON array or plain text
|
||||
let logs = if let Ok(json) = parse_json_output(&result.stdout) {
|
||||
// If JSON format, extract logs array or convert to string
|
||||
if let Some(log_lines) = json.get("logs").and_then(Value::as_array) {
|
||||
log_lines
|
||||
.iter()
|
||||
.filter_map(|l| l.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
} else if let Some(log_text) = json.get("log").and_then(Value::as_str) {
|
||||
log_text.to_string()
|
||||
} else {
|
||||
result.stdout.clone()
|
||||
}
|
||||
} else {
|
||||
result.stdout.clone()
|
||||
};
|
||||
|
||||
let log_lines_count = logs.lines().count();
|
||||
|
||||
Ok(ProcessLogsResponse {
|
||||
pid,
|
||||
logs,
|
||||
lines: log_lines_count,
|
||||
runtime_source: Some(result.runtime.source),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get ZCLAW version information
|
||||
#[tauri::command]
|
||||
pub fn zclaw_version(app: AppHandle) -> Result<VersionResponse, String> {
|
||||
let result = run_zclaw(&app, &["--version", "--json"])?;
|
||||
|
||||
let raw = parse_json_output(&result.stdout).unwrap_or_else(|_| {
|
||||
// Fallback: try to parse plain text version output
|
||||
json!({
|
||||
"version": result.stdout.trim(),
|
||||
"raw": result.stdout.trim()
|
||||
})
|
||||
});
|
||||
|
||||
let version = raw
|
||||
.get("version")
|
||||
.and_then(Value::as_str)
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
let commit = raw.get("commit").and_then(Value::as_str).map(ToOwned::to_owned);
|
||||
let build_date = raw.get("buildDate").and_then(Value::as_str).map(ToOwned::to_owned);
|
||||
|
||||
Ok(VersionResponse {
|
||||
version,
|
||||
commit,
|
||||
build_date,
|
||||
runtime_source: Some(result.runtime.source),
|
||||
raw,
|
||||
})
|
||||
}
|
||||
237
desktop/src-tauri/src/gateway/config.rs
Normal file
237
desktop/src-tauri/src/gateway/config.rs
Normal file
@@ -0,0 +1,237 @@
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::fs;
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
use tauri::AppHandle;
|
||||
|
||||
use super::io::{read_gateway_status, run_zclaw, parse_json_output};
|
||||
use super::runtime::{resolve_zclaw_config_path, TAURI_ALLOWED_ORIGINS};
|
||||
|
||||
/// Local gateway auth info read from the on-disk TOML config.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LocalGatewayAuth {
    // Path of the config file that was read, when it could be resolved.
    pub config_path: Option<String>,
    // Value of the `token` key under the `[gateway]` table, when present.
    pub gateway_token: Option<String>,
}
|
||||
|
||||
/// Outcome of preparing the local gateway config for Tauri origins.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LocalGatewayPrepareResult {
    pub config_path: Option<String>,
    // True when allowedOrigins had to be added/extended and was written back.
    pub origins_updated: bool,
    // True when a running gateway was restarted to pick up the new origins.
    pub gateway_restarted: bool,
}
|
||||
|
||||
/// Result of attempting to approve a pending device pairing request.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LocalGatewayPairingApprovalResult {
    // False when no matching pending request was found (not an error).
    pub approved: bool,
    pub request_id: Option<String>,
    pub device_id: Option<String>,
}
|
||||
|
||||
/// Parse TOML config and extract the gateway token.
///
/// Errors are user-facing strings (Chinese) surfaced directly in the UI.
pub fn read_local_gateway_auth() -> Result<LocalGatewayAuth, String> {
    let config_path = resolve_zclaw_config_path()
        .ok_or_else(|| "未找到 ZCLAW 配置目录。".to_string())?;
    let config_text = fs::read_to_string(&config_path)
        .map_err(|error| format!("读取 ZCLAW 配置失败: {error}"))?;

    // Simple line-based extraction of gateway.token (no full TOML parser).
    let gateway_token = extract_toml_token(&config_text);

    Ok(LocalGatewayAuth {
        config_path: Some(config_path.display().to_string()),
        gateway_token,
    })
}
|
||||
|
||||
/// Extract `gateway.token` from TOML config text.
///
/// Line-based extraction (no full TOML parser): looks for a `token = "value"`
/// entry inside the exact `[gateway]` table. Fixes two matching bugs from the
/// previous revision: `starts_with("[gateway")` also entered sub-tables such
/// as `[gateway.controlUi]`, and `starts_with("token")` matched unrelated
/// keys like `tokenExpiry`.
///
/// Returns `None` when no non-empty token is found.
fn extract_toml_token(config_text: &str) -> Option<String> {
    let mut in_gateway_section = false;
    for line in config_text.lines() {
        let trimmed = line.trim();
        if trimmed.starts_with('[') {
            // Only the exact [gateway] table may supply the token; any other
            // header (including [gateway.controlUi]) leaves the section.
            in_gateway_section = trimmed == "[gateway]";
            continue;
        }
        if in_gateway_section {
            if let Some(eq_pos) = trimmed.find('=') {
                // Require the key to be exactly `token`.
                if trimmed[..eq_pos].trim() == "token" {
                    let value = trimmed[eq_pos + 1..].trim();
                    // Strip surrounding single or double quotes.
                    let value = value.trim_matches('"').trim_matches('\'');
                    if !value.is_empty() {
                        return Some(value.to_string());
                    }
                }
            }
        }
    }
    None
}
|
||||
|
||||
/// Ensure Tauri origins are allowed in ZCLAW config
|
||||
fn ensure_tauri_allowed_origins(config_text: &str) -> (String, bool) {
|
||||
let mut lines: Vec<String> = config_text.lines().map(|s| s.to_string()).collect();
|
||||
let mut changed = false;
|
||||
let mut in_control_ui = false;
|
||||
let mut has_allowed_origins = false;
|
||||
|
||||
// Find or create [gateway.controlUi] section with allowedOrigins
|
||||
for i in 0..lines.len() {
|
||||
let trimmed = lines[i].trim();
|
||||
|
||||
if trimmed.starts_with("[gateway.controlUi") || trimmed == "[gateway.controlUi]" {
|
||||
in_control_ui = true;
|
||||
} else if trimmed.starts_with('[') && in_control_ui {
|
||||
in_control_ui = false;
|
||||
}
|
||||
|
||||
if in_control_ui && trimmed.starts_with("allowedOrigins") {
|
||||
has_allowed_origins = true;
|
||||
// Check if all required origins are present
|
||||
for origin in TAURI_ALLOWED_ORIGINS {
|
||||
if !lines[i].contains(origin) {
|
||||
// Append origin to the array
|
||||
// This is a simple approach - for production, use proper TOML parsing
|
||||
if lines[i].ends_with(']') {
|
||||
let insert_pos = lines[i].len() - 1;
|
||||
lines[i].insert_str(insert_pos, &format!(", \"{}\"", origin));
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no allowedOrigins found, add the section
|
||||
if !has_allowed_origins {
|
||||
// Find [gateway] section and add controlUi after it
|
||||
for i in 0..lines.len() {
|
||||
if lines[i].trim().starts_with("[gateway]") || lines[i].trim() == "[gateway]" {
|
||||
// Insert controlUi section after gateway
|
||||
let origins: String = TAURI_ALLOWED_ORIGINS
|
||||
.iter()
|
||||
.map(|s| format!("\"{}\"", s))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
lines.insert(i + 1, "[gateway.controlUi]".to_string());
|
||||
lines.insert(i + 2, format!("allowedOrigins = [{}]", origins));
|
||||
changed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If no [gateway] section found, create it
|
||||
if !changed {
|
||||
let origins: String = TAURI_ALLOWED_ORIGINS
|
||||
.iter()
|
||||
.map(|s| format!("\"{}\"", s))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
lines.push("[gateway]".to_string());
|
||||
lines.push("[gateway.controlUi]".to_string());
|
||||
lines.push(format!("allowedOrigins = [{}]", origins));
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
|
||||
(lines.join("\n"), changed)
|
||||
}
|
||||
|
||||
pub fn ensure_local_gateway_ready_for_tauri(app: &AppHandle) -> Result<LocalGatewayPrepareResult, String> {
|
||||
let config_path = resolve_zclaw_config_path()
|
||||
.ok_or_else(|| "未找到 ZCLAW 配置目录。".to_string())?;
|
||||
let config_text = fs::read_to_string(&config_path)
|
||||
.map_err(|error| format!("读取 ZCLAW 配置失败: {error}"))?;
|
||||
|
||||
let (updated_config, origins_updated) = ensure_tauri_allowed_origins(&config_text);
|
||||
|
||||
if origins_updated {
|
||||
fs::write(&config_path, format!("{}\n", updated_config))
|
||||
.map_err(|error| format!("写入 ZCLAW 配置失败: {error}"))?;
|
||||
}
|
||||
|
||||
let mut gateway_restarted = false;
|
||||
if origins_updated {
|
||||
if let Ok(status) = read_gateway_status(app) {
|
||||
if status.port_status.as_deref() == Some("busy") || !status.listener_pids.is_empty() {
|
||||
run_zclaw(app, &["gateway", "restart", "--json"])?;
|
||||
thread::sleep(Duration::from_millis(1200));
|
||||
gateway_restarted = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(LocalGatewayPrepareResult {
|
||||
config_path: Some(config_path.display().to_string()),
|
||||
origins_updated,
|
||||
gateway_restarted,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn approve_local_device_pairing(
|
||||
app: &AppHandle,
|
||||
device_id: &str,
|
||||
public_key_base64: &str,
|
||||
url: Option<&str>,
|
||||
) -> Result<LocalGatewayPairingApprovalResult, String> {
|
||||
let local_auth = read_local_gateway_auth()?;
|
||||
let gateway_token = local_auth
|
||||
.gateway_token
|
||||
.ok_or_else(|| "本地 Gateway token 不可用,无法自动批准设备配对。".to_string())?;
|
||||
|
||||
let devices_output = run_zclaw(app, &["devices", "list", "--json"])?;
|
||||
let devices_json = parse_json_output(&devices_output.stdout)?;
|
||||
let pending = devices_json
|
||||
.get("pending")
|
||||
.and_then(Value::as_array)
|
||||
.ok_or_else(|| "设备列表输出缺少 pending 数组。".to_string())?;
|
||||
|
||||
let pending_request = pending.iter().find(|entry| {
|
||||
entry.get("deviceId").and_then(Value::as_str) == Some(device_id)
|
||||
&& entry.get("publicKey").and_then(Value::as_str) == Some(public_key_base64)
|
||||
});
|
||||
|
||||
let Some(request) = pending_request else {
|
||||
return Ok(LocalGatewayPairingApprovalResult {
|
||||
approved: false,
|
||||
request_id: None,
|
||||
device_id: Some(device_id.to_string()),
|
||||
});
|
||||
};
|
||||
|
||||
let request_id = request
|
||||
.get("requestId")
|
||||
.and_then(Value::as_str)
|
||||
.ok_or_else(|| "待批准设备缺少 requestId。".to_string())?
|
||||
.to_string();
|
||||
|
||||
// Use ZCLAW default port 4200
|
||||
let gateway_url = url.unwrap_or("ws://127.0.0.1:4200").to_string();
|
||||
let args = vec![
|
||||
"devices".to_string(),
|
||||
"approve".to_string(),
|
||||
request_id.clone(),
|
||||
"--json".to_string(),
|
||||
"--token".to_string(),
|
||||
gateway_token,
|
||||
"--url".to_string(),
|
||||
gateway_url,
|
||||
];
|
||||
let arg_refs = args.iter().map(|value| value.as_str()).collect::<Vec<_>>();
|
||||
run_zclaw(app, &arg_refs)?;
|
||||
thread::sleep(Duration::from_millis(300));
|
||||
|
||||
Ok(LocalGatewayPairingApprovalResult {
|
||||
approved: true,
|
||||
request_id: Some(request_id),
|
||||
device_id: Some(device_id.to_string()),
|
||||
})
|
||||
}
|
||||
167
desktop/src-tauri/src/gateway/io.rs
Normal file
167
desktop/src-tauri/src/gateway/io.rs
Normal file
@@ -0,0 +1,167 @@
|
||||
use serde::Serialize;
|
||||
use serde_json::{json, Value};
|
||||
use std::process::Command;
|
||||
use tauri::AppHandle;
|
||||
|
||||
use super::runtime::{
|
||||
command_error, resolve_zclaw_runtime, runtime_path_string, ZclawCommandOutput, ZclawRuntime,
|
||||
ZCLAW_DEFAULT_PORT,
|
||||
};
|
||||
|
||||
/// Aggregated local gateway status reported to the frontend.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LocalGatewayStatus {
    // Currently always set to true by both constructors in this module.
    pub supported: bool,
    // False when the `zclaw` CLI itself could not be executed.
    pub cli_available: bool,
    // How the CLI was resolved: "bundled", "development", "override", "system".
    pub runtime_source: Option<String>,
    pub runtime_path: Option<String>,
    // Service-manager metadata as reported by `zclaw gateway status`.
    pub service_label: Option<String>,
    pub service_loaded: bool,
    pub service_status: Option<String>,
    // CLI config audit result (`service.configAudit.ok`).
    pub config_ok: bool,
    // Gateway TCP port; defaults to 4200 when the CLI omits it.
    pub port: Option<u16>,
    pub port_status: Option<String>,
    pub probe_url: Option<String>,
    // PIDs listening on the gateway port.
    pub listener_pids: Vec<u32>,
    // Populated only by the "unavailable" fallback status.
    pub error: Option<String>,
    // Full CLI JSON payload for debugging.
    pub raw: Value,
}
|
||||
|
||||
pub fn run_zclaw(app: &AppHandle, args: &[&str]) -> Result<ZclawCommandOutput, String> {
|
||||
let runtime = resolve_zclaw_runtime(app);
|
||||
let mut command = Command::new(&runtime.executable);
|
||||
command.args(&runtime.pre_args).args(args);
|
||||
let output = command.output().map_err(|error| command_error(&runtime, error))?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(ZclawCommandOutput {
|
||||
stdout: String::from_utf8_lossy(&output.stdout).trim().to_string(),
|
||||
runtime,
|
||||
})
|
||||
} else {
|
||||
let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
|
||||
let message = if stderr.is_empty() {
|
||||
stdout
|
||||
} else if stdout.is_empty() {
|
||||
stderr
|
||||
} else {
|
||||
format!("{stderr}\n{stdout}")
|
||||
};
|
||||
|
||||
if message.is_empty() {
|
||||
Err(format!("ZCLAW {:?} 执行失败: {}", args, output.status))
|
||||
} else {
|
||||
Err(message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_json_output(stdout: &str) -> Result<Value, String> {
|
||||
if let Ok(raw) = serde_json::from_str::<Value>(stdout) {
|
||||
return Ok(raw);
|
||||
}
|
||||
|
||||
if let Some(index) = stdout.find('{') {
|
||||
let trimmed = &stdout[index..];
|
||||
return serde_json::from_str::<Value>(trimmed)
|
||||
.map_err(|error| format!("解析 Gateway 状态失败: {error}"));
|
||||
}
|
||||
|
||||
Err("Gateway 状态输出不包含可解析的 JSON。".to_string())
|
||||
}
|
||||
|
||||
/// Build a "CLI not reachable" status carrying the failure message, so the
/// frontend always receives a structured, renderable payload.
pub fn unavailable_status(error: String, runtime: Option<&ZclawRuntime>) -> LocalGatewayStatus {
    LocalGatewayStatus {
        supported: true,
        cli_available: false,
        // Preserve whatever runtime resolution was attempted, if known.
        runtime_source: runtime.map(|value| value.source.clone()),
        runtime_path: runtime.map(runtime_path_string),
        service_label: None,
        service_loaded: false,
        service_status: None,
        config_ok: false,
        port: None,
        port_status: None,
        probe_url: None,
        listener_pids: Vec::new(),
        error: Some(error),
        raw: json!({}),
    }
}
|
||||
|
||||
pub fn parse_gateway_status(raw: Value, runtime: &ZclawRuntime) -> LocalGatewayStatus {
|
||||
let listener_pids = raw
|
||||
.get("port")
|
||||
.and_then(|port| port.get("listeners"))
|
||||
.and_then(Value::as_array)
|
||||
.map(|listeners| {
|
||||
listeners
|
||||
.iter()
|
||||
.filter_map(|listener| listener.get("pid").and_then(Value::as_u64))
|
||||
.filter_map(|pid| u32::try_from(pid).ok())
|
||||
.collect::<Vec<u32>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
LocalGatewayStatus {
|
||||
supported: true,
|
||||
cli_available: true,
|
||||
runtime_source: Some(runtime.source.clone()),
|
||||
runtime_path: Some(runtime_path_string(runtime)),
|
||||
service_label: raw
|
||||
.get("service")
|
||||
.and_then(|service| service.get("label"))
|
||||
.and_then(Value::as_str)
|
||||
.map(ToOwned::to_owned),
|
||||
service_loaded: raw
|
||||
.get("service")
|
||||
.and_then(|service| service.get("loaded"))
|
||||
.and_then(Value::as_bool)
|
||||
.unwrap_or(false),
|
||||
service_status: raw
|
||||
.get("service")
|
||||
.and_then(|service| service.get("runtime"))
|
||||
.and_then(|runtime| runtime.get("status"))
|
||||
.and_then(Value::as_str)
|
||||
.map(ToOwned::to_owned),
|
||||
config_ok: raw
|
||||
.get("service")
|
||||
.and_then(|service| service.get("configAudit"))
|
||||
.and_then(|config_audit| config_audit.get("ok"))
|
||||
.and_then(Value::as_bool)
|
||||
.unwrap_or(false),
|
||||
port: raw
|
||||
.get("gateway")
|
||||
.and_then(|gateway| gateway.get("port"))
|
||||
.and_then(Value::as_u64)
|
||||
.and_then(|port| u16::try_from(port).ok())
|
||||
.or(Some(ZCLAW_DEFAULT_PORT)),
|
||||
port_status: raw
|
||||
.get("port")
|
||||
.and_then(|port| port.get("status"))
|
||||
.and_then(Value::as_str)
|
||||
.map(ToOwned::to_owned),
|
||||
probe_url: raw
|
||||
.get("gateway")
|
||||
.and_then(|gateway| gateway.get("probeUrl"))
|
||||
.and_then(Value::as_str)
|
||||
.map(ToOwned::to_owned),
|
||||
listener_pids,
|
||||
error: None,
|
||||
raw,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_gateway_status(app: &AppHandle) -> Result<LocalGatewayStatus, String> {
|
||||
match run_zclaw(app, &["gateway", "status", "--json", "--no-probe"]) {
|
||||
Ok(result) => {
|
||||
let raw = parse_json_output(&result.stdout)?;
|
||||
Ok(parse_gateway_status(raw, &result.runtime))
|
||||
}
|
||||
Err(error) => {
|
||||
let runtime = resolve_zclaw_runtime(app);
|
||||
Ok(unavailable_status(error, Some(&runtime)))
|
||||
}
|
||||
}
|
||||
}
|
||||
4
desktop/src-tauri/src/gateway/mod.rs
Normal file
4
desktop/src-tauri/src/gateway/mod.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
pub mod commands;
|
||||
pub mod config;
|
||||
pub mod io;
|
||||
pub mod runtime;
|
||||
290
desktop/src-tauri/src/gateway/runtime.rs
Normal file
290
desktop/src-tauri/src/gateway/runtime.rs
Normal file
@@ -0,0 +1,290 @@
|
||||
use std::path::PathBuf;
|
||||
use tauri::{AppHandle, Manager};
|
||||
|
||||
/// A resolved way of invoking the ZCLAW CLI.
pub(crate) struct ZclawRuntime {
    // Where the runtime came from: "bundled", "development", "override" or "system".
    pub source: String,
    // Program actually executed (the zclaw binary, or `node` for legacy layouts).
    pub executable: PathBuf,
    // Arguments inserted before the caller's (e.g. a Node entrypoint script).
    pub pre_args: Vec<String>,
    // Path shown to users and used to de-duplicate candidates.
    pub display_path: PathBuf,
}
|
||||
|
||||
/// Successful CLI invocation: trimmed stdout plus the runtime that ran it.
pub(crate) struct ZclawCommandOutput {
    pub stdout: String,
    pub runtime: ZclawRuntime,
}
|
||||
|
||||
/// Default ZCLAW Kernel port
|
||||
pub const ZCLAW_DEFAULT_PORT: u16 = 4200;
|
||||
|
||||
pub(super) const TAURI_ALLOWED_ORIGINS: [&str; 2] = ["http://tauri.localhost", "tauri://localhost"];
|
||||
|
||||
/// Translate a process-spawn error into a user-facing (Chinese) message,
/// tailored to how the runtime was resolved when the binary is missing.
pub(super) fn command_error(runtime: &ZclawRuntime, error: std::io::Error) -> String {
    // Anything other than "executable not found" is reported verbatim.
    if error.kind() != std::io::ErrorKind::NotFound {
        return format!("运行 ZCLAW 失败: {error}");
    }

    match runtime.source.as_str() {
        "bundled" => format!(
            "未找到 ZCLAW 内置运行时:{}",
            runtime.display_path.display()
        ),
        "development" => format!(
            "未找到开发态运行时:{}",
            runtime.display_path.display()
        ),
        "override" => format!(
            "未找到 ZCLAW_BIN 指定的运行时:{}",
            runtime.display_path.display()
        ),
        _ => "未找到运行时。请重新安装 ZCLAW,或在开发环境中安装 ZCLAW CLI。"
            .to_string(),
    }
}
|
||||
|
||||
/// Render the runtime's user-facing path as a `String` for status payloads.
pub(super) fn runtime_path_string(runtime: &ZclawRuntime) -> String {
    runtime.display_path.display().to_string()
}
|
||||
|
||||
/// Platform-specific executable suffix: ".exe" on Windows, empty elsewhere.
fn binary_extension() -> &'static str {
    if cfg!(target_os = "windows") {
        ".exe"
    } else {
        ""
    }
}
|
||||
|
||||
/// Tauri sidecar naming scheme: `zclaw-<target-triple>[.exe]`.
// NOTE(review): relies on the TARGET env var being set at compile time
// (normally provided by the Tauri build script) — confirm in build.rs.
fn zclaw_sidecar_filename() -> String {
    format!("zclaw-{}{}", env!("TARGET"), binary_extension())
}
|
||||
|
||||
/// Bare binary name: `zclaw` (with ".exe" on Windows).
fn zclaw_plain_filename() -> String {
    format!("zclaw{}", binary_extension())
}
|
||||
|
||||
fn push_runtime_candidate(candidates: &mut Vec<ZclawRuntime>, source: &str, executable: PathBuf) {
|
||||
if candidates.iter().any(|candidate| candidate.display_path == executable) {
|
||||
return;
|
||||
}
|
||||
|
||||
candidates.push(ZclawRuntime {
|
||||
source: source.to_string(),
|
||||
display_path: executable.clone(),
|
||||
executable,
|
||||
pre_args: Vec::new(),
|
||||
});
|
||||
}
|
||||
|
||||
/// Build binary runtime (ZCLAW is a single binary, not npm package)
|
||||
fn build_binary_runtime(source: &str, root_dir: &PathBuf) -> Option<ZclawRuntime> {
|
||||
// Try platform-specific binary names
|
||||
let binary_names = get_platform_binary_names();
|
||||
|
||||
for name in binary_names {
|
||||
let binary_path = root_dir.join(&name);
|
||||
if binary_path.is_file() {
|
||||
return Some(ZclawRuntime {
|
||||
source: source.to_string(),
|
||||
executable: binary_path.clone(),
|
||||
pre_args: Vec::new(),
|
||||
display_path: binary_path,
|
||||
});
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Get platform-specific binary names for ZCLAW
|
||||
fn get_platform_binary_names() -> Vec<String> {
|
||||
let mut names = Vec::new();
|
||||
|
||||
if cfg!(target_os = "windows") {
|
||||
names.push("zclaw.exe".to_string());
|
||||
names.push(format!("zclaw-{}.exe", env!("TARGET")));
|
||||
} else if cfg!(target_os = "macos") {
|
||||
if cfg!(target_arch = "aarch64") {
|
||||
names.push("zclaw-aarch64-apple-darwin".to_string());
|
||||
} else {
|
||||
names.push("zclaw-x86_64-apple-darwin".to_string());
|
||||
}
|
||||
names.push(format!("zclaw-{}", env!("TARGET")));
|
||||
names.push("zclaw".to_string());
|
||||
} else {
|
||||
// Linux
|
||||
if cfg!(target_arch = "aarch64") {
|
||||
names.push("zclaw-aarch64-unknown-linux-gnu".to_string());
|
||||
} else {
|
||||
names.push("zclaw-x86_64-unknown-linux-gnu".to_string());
|
||||
}
|
||||
names.push(format!("zclaw-{}", env!("TARGET")));
|
||||
names.push("zclaw".to_string());
|
||||
}
|
||||
|
||||
names
|
||||
}
|
||||
|
||||
/// Legacy: build a staged runtime driven by a bundled Node.js interpreter
/// (`node node_modules/zclaw/zclaw.mjs`), kept for backward compatibility.
fn build_staged_runtime_legacy(source: &str, root_dir: PathBuf) -> Option<ZclawRuntime> {
    let node_executable = root_dir.join(if cfg!(target_os = "windows") {
        "node.exe"
    } else {
        "node"
    });
    let entrypoint = root_dir
        .join("node_modules")
        .join("zclaw")
        .join("zclaw.mjs");

    // Both the interpreter and the entrypoint script must exist.
    if !node_executable.is_file() || !entrypoint.is_file() {
        return None;
    }

    Some(ZclawRuntime {
        source: source.to_string(),
        executable: node_executable,
        // The script path is injected before all CLI arguments.
        pre_args: vec![entrypoint.display().to_string()],
        display_path: root_dir,
    })
}
|
||||
|
||||
/// Build staged runtime - prefers binary, falls back to Node.js for legacy support
|
||||
fn build_staged_runtime(source: &str, root_dir: PathBuf) -> Option<ZclawRuntime> {
|
||||
// First, try to find the binary directly
|
||||
if let Some(runtime) = build_binary_runtime(source, &root_dir) {
|
||||
return Some(runtime);
|
||||
}
|
||||
|
||||
// Fallback to Node.js-based runtime for backward compatibility
|
||||
build_staged_runtime_legacy(source, root_dir)
|
||||
}
|
||||
|
||||
fn push_staged_runtime_candidate(candidates: &mut Vec<ZclawRuntime>, source: &str, root_dir: PathBuf) {
|
||||
if candidates.iter().any(|candidate| candidate.display_path == root_dir) {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(runtime) = build_staged_runtime(source, root_dir) {
|
||||
candidates.push(runtime);
|
||||
}
|
||||
}
|
||||
|
||||
/// Enumerate bundled/development runtime candidates in priority order:
/// staged `zclaw-runtime` dir → `binaries/` dir → bare binaries; first under
/// the Tauri resource dir, then next to the executable, then the dev tree.
/// Order matters: `resolve_zclaw_runtime` takes the first existing candidate.
fn bundled_runtime_candidates(app: &AppHandle) -> Vec<ZclawRuntime> {
    let mut candidates = Vec::new();
    let sidecar_name = zclaw_sidecar_filename();
    let plain_name = zclaw_plain_filename();
    let platform_names = get_platform_binary_names();

    if let Ok(resource_dir) = app.path().resource_dir() {
        // Primary: zclaw-runtime directory (contains binary + manifest)
        push_staged_runtime_candidate(
            &mut candidates,
            "bundled",
            resource_dir.join("zclaw-runtime"),
        );

        // Alternative: binaries directory
        for name in &platform_names {
            push_runtime_candidate(
                &mut candidates,
                "bundled",
                resource_dir.join("binaries").join(name),
            );
        }

        // Alternative: root level binaries
        push_runtime_candidate(&mut candidates, "bundled", resource_dir.join(&plain_name));
        push_runtime_candidate(&mut candidates, "bundled", resource_dir.join(&sidecar_name));
    }

    if let Ok(current_exe) = std::env::current_exe() {
        if let Some(exe_dir) = current_exe.parent() {
            // Windows NSIS installer location
            push_staged_runtime_candidate(
                &mut candidates,
                "bundled",
                exe_dir.join("resources").join("zclaw-runtime"),
            );

            // Alternative: binaries next to exe
            for name in &platform_names {
                push_runtime_candidate(
                    &mut candidates,
                    "bundled",
                    exe_dir.join("binaries").join(name),
                );
            }

            push_runtime_candidate(&mut candidates, "bundled", exe_dir.join(&plain_name));
            push_runtime_candidate(&mut candidates, "bundled", exe_dir.join(&sidecar_name));
        }
    }

    // Development mode: look under the crate's own resources/binaries dirs.
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    push_staged_runtime_candidate(
        &mut candidates,
        "development",
        manifest_dir.join("resources").join("zclaw-runtime"),
    );

    for name in &platform_names {
        push_runtime_candidate(
            &mut candidates,
            "development",
            manifest_dir.join("binaries").join(name),
        );
    }

    candidates
}
|
||||
|
||||
/// Resolve ZCLAW runtime location.
/// Priority: `ZCLAW_BIN` env > bundled/development candidates > system PATH.
pub fn resolve_zclaw_runtime(app: &AppHandle) -> ZclawRuntime {
    if let Ok(override_path) = std::env::var("ZCLAW_BIN") {
        let override_path = PathBuf::from(override_path);
        // A directory override is treated as a staged runtime root.
        if override_path.is_dir() {
            if let Some(runtime) = build_staged_runtime("override", override_path.clone()) {
                return runtime;
            }
        }

        // Otherwise the override is used verbatim as the executable, even if
        // it does not exist — the error surfaces at invocation time.
        return ZclawRuntime {
            source: "override".to_string(),
            display_path: override_path.clone(),
            executable: override_path,
            pre_args: Vec::new(),
        };
    }

    // First bundled/development candidate whose executable actually exists.
    if let Some(runtime) = bundled_runtime_candidates(app)
        .into_iter()
        .find(|candidate| candidate.executable.is_file())
    {
        return runtime;
    }

    // Last resort: whatever `zclaw` resolves to on the system PATH.
    ZclawRuntime {
        source: "system".to_string(),
        display_path: PathBuf::from("zclaw"),
        executable: PathBuf::from("zclaw"),
        pre_args: Vec::new(),
    }
}
|
||||
|
||||
/// Resolve the ZCLAW config path (TOML format).
/// Priority: `ZCLAW_HOME` env (config directly inside it) > `$HOME/.zclaw/`
/// > `%USERPROFILE%\.zclaw\`.
pub fn resolve_zclaw_config_path() -> Option<PathBuf> {
    if let Ok(home) = std::env::var("ZCLAW_HOME") {
        return Some(PathBuf::from(home).join("zclaw.toml"));
    }

    // HOME (Unix) is preferred over USERPROFILE (Windows) when both exist.
    ["HOME", "USERPROFILE"]
        .iter()
        .find_map(|name| std::env::var(name).ok())
        .map(|home| PathBuf::from(home).join(".zclaw").join("zclaw.toml"))
}
|
||||
296
desktop/src-tauri/src/health_check.rs
Normal file
296
desktop/src-tauri/src/health_check.rs
Normal file
@@ -0,0 +1,296 @@
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::net::{TcpStream, ToSocketAddrs};
|
||||
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
|
||||
use tauri::AppHandle;
|
||||
|
||||
use crate::gateway::io::{parse_json_output, read_gateway_status, run_zclaw, LocalGatewayStatus};
|
||||
use crate::gateway::runtime::{resolve_zclaw_runtime, ZCLAW_DEFAULT_PORT};
|
||||
|
||||
/// Health status enum
///
/// Overall verdict of a health check; serialized to lowercase strings
/// ("healthy" / "unhealthy") for the frontend.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum HealthStatus {
    Healthy,
    Unhealthy,
}
|
||||
|
||||
/// Port check result
///
/// Outcome of a single TCP connect probe (camelCase on the wire).
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct PortCheckResult {
    /// Port that was probed.
    port: u16,
    /// True when a TCP connection succeeded within the timeout.
    accessible: bool,
    /// Connection latency in milliseconds; set only on success.
    latency_ms: Option<u64>,
    /// Failure description; set only when `accessible` is false.
    error: Option<String>,
}
|
||||
|
||||
/// Process health details
///
/// Snapshot of the gateway listener process. All fields are optional —
/// absent when no listener exists or a given metric could not be read.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct ProcessHealthDetails {
    /// OS process id of the first listener, if any.
    pid: Option<u32>,
    /// Process name (set to "zclaw" when a listener is found).
    name: Option<String>,
    /// Service status string from the gateway (e.g. "running").
    status: Option<String>,
    /// Uptime in seconds, when reported.
    uptime_seconds: Option<u64>,
    /// CPU usage percent, when reported.
    cpu_percent: Option<f64>,
    /// Resident memory in MB, when reported.
    memory_mb: Option<f64>,
}
|
||||
|
||||
/// Health check response
///
/// Aggregate result returned by `zclaw_health_check` (camelCase on the wire).
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct HealthCheckResponse {
    /// Overall verdict.
    status: HealthStatus,
    /// Details of the listener process, if one was found.
    process: ProcessHealthDetails,
    /// Result of the TCP port probe.
    port_check: PortCheckResult,
    /// Unix timestamp (seconds) when the check ran.
    last_check_timestamp: u64,
    /// Names of the checks that were executed, in order.
    checks_performed: Vec<String>,
    /// Human-readable findings; may be non-empty even when healthy.
    issues: Vec<String>,
    /// Where the runtime was resolved from ("override" / bundled / "system").
    runtime_source: Option<String>,
}
|
||||
|
||||
/// Check if a TCP port is accessible
|
||||
fn check_port_accessibility(host: &str, port: u16, timeout_ms: u64) -> PortCheckResult {
|
||||
let addr = format!("{}:{}", host, port);
|
||||
|
||||
// Resolve the address
|
||||
let socket_addr = match addr.to_socket_addrs() {
|
||||
Ok(mut addrs) => addrs.next(),
|
||||
Err(e) => {
|
||||
return PortCheckResult {
|
||||
port,
|
||||
accessible: false,
|
||||
latency_ms: None,
|
||||
error: Some(format!("Failed to resolve address: {}", e)),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
let Some(socket_addr) = socket_addr else {
|
||||
return PortCheckResult {
|
||||
port,
|
||||
accessible: false,
|
||||
latency_ms: None,
|
||||
error: Some("Failed to resolve address".to_string()),
|
||||
};
|
||||
};
|
||||
|
||||
// Try to connect with timeout
|
||||
let start = Instant::now();
|
||||
|
||||
// Use a simple TCP connect with timeout simulation
|
||||
let result = TcpStream::connect_timeout(&socket_addr, Duration::from_millis(timeout_ms));
|
||||
|
||||
match result {
|
||||
Ok(_) => {
|
||||
let latency = start.elapsed().as_millis() as u64;
|
||||
PortCheckResult {
|
||||
port,
|
||||
accessible: true,
|
||||
latency_ms: Some(latency),
|
||||
error: None,
|
||||
}
|
||||
}
|
||||
Err(e) => PortCheckResult {
|
||||
port,
|
||||
accessible: false,
|
||||
latency_ms: None,
|
||||
error: Some(format!("Connection failed: {}", e)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Get process uptime from status command
///
/// Reads `process.uptimeSeconds` from the raw gateway status JSON;
/// returns `None` when the path is missing or not an unsigned integer.
fn get_process_uptime(status: &LocalGatewayStatus) -> Option<u64> {
    // Try to extract uptime from raw status data
    status
        .raw
        .get("process")
        .and_then(|p| p.get("uptimeSeconds"))
        .and_then(Value::as_u64)
}
|
||||
|
||||
/// Perform comprehensive health check on ZCLAW Kernel
|
||||
#[tauri::command]
|
||||
pub fn zclaw_health_check(
|
||||
app: AppHandle,
|
||||
port: Option<u16>,
|
||||
timeout_ms: Option<u64>,
|
||||
) -> Result<HealthCheckResponse, String> {
|
||||
let check_port = port.unwrap_or(ZCLAW_DEFAULT_PORT);
|
||||
let timeout = timeout_ms.unwrap_or(3000);
|
||||
let mut checks_performed = Vec::new();
|
||||
let mut issues = Vec::new();
|
||||
|
||||
// Get current timestamp
|
||||
let last_check_timestamp = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.map(|d| d.as_secs())
|
||||
.unwrap_or(0);
|
||||
|
||||
// 1. Check if ZCLAW CLI is available
|
||||
let runtime = resolve_zclaw_runtime(&app);
|
||||
let cli_available = runtime.executable.is_file();
|
||||
|
||||
if !cli_available {
|
||||
return Ok(HealthCheckResponse {
|
||||
status: HealthStatus::Unhealthy,
|
||||
process: ProcessHealthDetails {
|
||||
pid: None,
|
||||
name: None,
|
||||
status: None,
|
||||
uptime_seconds: None,
|
||||
cpu_percent: None,
|
||||
memory_mb: None,
|
||||
},
|
||||
port_check: PortCheckResult {
|
||||
port: check_port,
|
||||
accessible: false,
|
||||
latency_ms: None,
|
||||
error: Some("ZCLAW CLI not available".to_string()),
|
||||
},
|
||||
last_check_timestamp,
|
||||
checks_performed: vec!["cli_availability".to_string()],
|
||||
issues: vec![format!(
|
||||
"ZCLAW runtime not found at: {}",
|
||||
runtime.display_path.display()
|
||||
)],
|
||||
runtime_source: Some(runtime.source),
|
||||
});
|
||||
}
|
||||
checks_performed.push("cli_availability".to_string());
|
||||
|
||||
// 2. Get gateway status
|
||||
let gateway_status = read_gateway_status(&app)?;
|
||||
checks_performed.push("gateway_status".to_string());
|
||||
|
||||
// Check for configuration issues
|
||||
if !gateway_status.config_ok {
|
||||
issues.push("Gateway configuration has issues".to_string());
|
||||
}
|
||||
|
||||
// 3. Check port accessibility
|
||||
let port_check = check_port_accessibility("127.0.0.1", check_port, timeout);
|
||||
checks_performed.push("port_accessibility".to_string());
|
||||
|
||||
if !port_check.accessible {
|
||||
issues.push(format!(
|
||||
"Port {} is not accessible: {}",
|
||||
check_port,
|
||||
port_check.error.as_deref().unwrap_or("unknown error")
|
||||
));
|
||||
}
|
||||
|
||||
// 4. Extract process information
|
||||
let process_health = if !gateway_status.listener_pids.is_empty() {
|
||||
// Get the first listener PID
|
||||
let pid = gateway_status.listener_pids[0];
|
||||
|
||||
// Try to get detailed process info from process list
|
||||
let process_info = run_zclaw(&app, &["process", "list", "--json"])
|
||||
.ok()
|
||||
.and_then(|result| parse_json_output(&result.stdout).ok())
|
||||
.and_then(|json| json.get("processes").and_then(Value::as_array).cloned());
|
||||
|
||||
let (cpu, memory, uptime) = if let Some(ref processes) = process_info {
|
||||
let matching = processes
|
||||
.iter()
|
||||
.find(|p| p.get("pid").and_then(Value::as_u64) == Some(pid as u64));
|
||||
|
||||
matching.map_or((None, None, None), |p| {
|
||||
(
|
||||
p.get("cpuPercent").and_then(Value::as_f64),
|
||||
p.get("memoryMb").and_then(Value::as_f64),
|
||||
p.get("uptimeSeconds").and_then(Value::as_u64),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
(None, None, get_process_uptime(&gateway_status))
|
||||
};
|
||||
|
||||
ProcessHealthDetails {
|
||||
pid: Some(pid),
|
||||
name: Some("zclaw".to_string()),
|
||||
status: Some(
|
||||
gateway_status
|
||||
.service_status
|
||||
.clone()
|
||||
.unwrap_or_else(|| "running".to_string()),
|
||||
),
|
||||
uptime_seconds: uptime,
|
||||
cpu_percent: cpu,
|
||||
memory_mb: memory,
|
||||
}
|
||||
} else {
|
||||
ProcessHealthDetails {
|
||||
pid: None,
|
||||
name: None,
|
||||
status: gateway_status.service_status.clone(),
|
||||
uptime_seconds: None,
|
||||
cpu_percent: None,
|
||||
memory_mb: None,
|
||||
}
|
||||
};
|
||||
|
||||
// Check if process is running but no listeners
|
||||
if gateway_status.service_status.as_deref() == Some("running")
|
||||
&& gateway_status.listener_pids.is_empty()
|
||||
{
|
||||
issues.push("Service reports running but no listener processes found".to_string());
|
||||
}
|
||||
|
||||
// 5. Determine overall health status
|
||||
let status = if !cli_available {
|
||||
HealthStatus::Unhealthy
|
||||
} else if !port_check.accessible {
|
||||
HealthStatus::Unhealthy
|
||||
} else if gateway_status.listener_pids.is_empty() {
|
||||
HealthStatus::Unhealthy
|
||||
} else if !issues.is_empty() {
|
||||
// Has some issues but core functionality is working
|
||||
HealthStatus::Healthy
|
||||
} else {
|
||||
HealthStatus::Healthy
|
||||
};
|
||||
|
||||
Ok(HealthCheckResponse {
|
||||
status,
|
||||
process: process_health,
|
||||
port_check,
|
||||
last_check_timestamp,
|
||||
checks_performed,
|
||||
issues,
|
||||
runtime_source: Some(runtime.source),
|
||||
})
|
||||
}
|
||||
|
||||
/// Quick ping to check if ZCLAW is alive (lightweight check)
|
||||
#[tauri::command]
|
||||
pub fn zclaw_ping(app: AppHandle) -> Result<bool, String> {
|
||||
let port_check = check_port_accessibility("127.0.0.1", ZCLAW_DEFAULT_PORT, 1000);
|
||||
|
||||
if port_check.accessible {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
// Fallback: check via status command
|
||||
match run_zclaw(&app, &["gateway", "status", "--json", "--no-probe"]) {
|
||||
Ok(result) => {
|
||||
if let Ok(status) = parse_json_output(&result.stdout) {
|
||||
// Check if there are any listener PIDs
|
||||
let has_listeners = status
|
||||
.get("port")
|
||||
.and_then(|p| p.get("listeners"))
|
||||
.and_then(Value::as_array)
|
||||
.map(|arr| !arr.is_empty())
|
||||
.unwrap_or(false);
|
||||
|
||||
Ok(has_listeners)
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
Err(_) => Ok(false),
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
114
desktop/src-tauri/src/kernel_commands/a2a.rs
Normal file
114
desktop/src-tauri/src/kernel_commands/a2a.rs
Normal file
@@ -0,0 +1,114 @@
|
||||
//! A2A (Agent-to-Agent) commands — gated behind `multi-agent` feature
|
||||
|
||||
use serde_json;
|
||||
use tauri::State;
|
||||
use zclaw_types::AgentId;
|
||||
|
||||
use super::KernelState;
|
||||
|
||||
// ============================================================
|
||||
// A2A (Agent-to-Agent) Commands — gated behind multi-agent feature
|
||||
// ============================================================
|
||||
|
||||
/// Send a direct A2A message from one agent to another
///
/// * `from` / `to` — agent IDs as strings; parsed into `AgentId`.
/// * `message_type` — optional "request" / "notification" / "task";
///   unrecognized values silently fall back to Notification.
// Doc comment moved above the cfg attribute for consistency with the
// sibling A2A commands in this file.
#[cfg(feature = "multi-agent")]
#[tauri::command]
pub async fn agent_a2a_send(
    state: State<'_, KernelState>,
    from: String,
    to: String,
    payload: serde_json::Value,
    message_type: Option<String>,
) -> Result<(), String> {
    let kernel_lock = state.lock().await;
    let kernel = kernel_lock.as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

    let from_id: AgentId = from.parse()
        .map_err(|_| format!("Invalid from agent ID: {}", from))?;
    let to_id: AgentId = to.parse()
        .map_err(|_| format!("Invalid to agent ID: {}", to))?;

    // Map the optional message-type string onto the kernel enum; unknown
    // strings default to Notification rather than erroring.
    let msg_type = message_type.map(|mt| match mt.as_str() {
        "request" => zclaw_kernel::A2aMessageType::Request,
        "notification" => zclaw_kernel::A2aMessageType::Notification,
        "task" => zclaw_kernel::A2aMessageType::Task,
        _ => zclaw_kernel::A2aMessageType::Notification,
    });

    kernel.a2a_send(&from_id, &to_id, payload, msg_type).await
        .map_err(|e| format!("A2A send failed: {}", e))?;

    Ok(())
}
|
||||
|
||||
/// Broadcast a message from one agent to all other agents
#[cfg(feature = "multi-agent")]
#[tauri::command]
pub async fn agent_a2a_broadcast(
    state: State<'_, KernelState>,
    from: String,
    payload: serde_json::Value,
) -> Result<(), String> {
    let guard = state.lock().await;
    let kernel = guard
        .as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

    // Parse the sender's ID before handing off to the kernel.
    let sender: AgentId = from
        .parse()
        .map_err(|_| format!("Invalid from agent ID: {}", from))?;

    kernel
        .a2a_broadcast(&sender, payload)
        .await
        .map_err(|e| format!("A2A broadcast failed: {}", e))
}
|
||||
|
||||
/// Discover agents with a specific capability
#[cfg(feature = "multi-agent")]
#[tauri::command]
pub async fn agent_a2a_discover(
    state: State<'_, KernelState>,
    capability: String,
) -> Result<Vec<serde_json::Value>, String> {
    let guard = state.lock().await;
    let kernel = guard
        .as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

    let profiles = kernel
        .a2a_discover(&capability)
        .await
        .map_err(|e| format!("A2A discover failed: {}", e))?;

    // Serialize each profile to JSON; entries that fail to serialize
    // are silently dropped.
    Ok(profiles
        .iter()
        .filter_map(|profile| serde_json::to_value(profile).ok())
        .collect())
}
|
||||
|
||||
/// Delegate a task to another agent and wait for response
///
/// `timeout_ms` defaults to 30 seconds when omitted.
#[cfg(feature = "multi-agent")]
#[tauri::command]
pub async fn agent_a2a_delegate_task(
    state: State<'_, KernelState>,
    from: String,
    to: String,
    task: String,
    timeout_ms: Option<u64>,
) -> Result<serde_json::Value, String> {
    let guard = state.lock().await;
    let kernel = guard
        .as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

    let from_id: AgentId = from
        .parse()
        .map_err(|_| format!("Invalid from agent ID: {}", from))?;
    let to_id: AgentId = to
        .parse()
        .map_err(|_| format!("Invalid to agent ID: {}", to))?;

    // Default timeout: 30 seconds.
    let timeout = timeout_ms.unwrap_or(30_000);

    kernel
        .a2a_delegate_task(&from_id, &to_id, task, timeout)
        .await
        .map_err(|e| format!("A2A task delegation failed: {}", e))
}
|
||||
257
desktop/src-tauri/src/kernel_commands/agent.rs
Normal file
257
desktop/src-tauri/src/kernel_commands/agent.rs
Normal file
@@ -0,0 +1,257 @@
|
||||
//! Agent CRUD commands: create, list, get, delete, update, export, import
|
||||
|
||||
use std::path::PathBuf;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::State;
|
||||
use zclaw_types::{AgentConfig, AgentId, AgentInfo};
|
||||
|
||||
use super::{validate_agent_id, KernelState};
|
||||
use crate::intelligence::validation::validate_string_length;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Request / Response types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Serde default: LLM provider when the request omits one.
fn default_provider() -> String { "openai".to_string() }
/// Serde default: model identifier.
fn default_model() -> String { "gpt-4o-mini".to_string() }
/// Serde default: response token budget.
fn default_max_tokens() -> u32 { 4096 }
/// Serde default: sampling temperature.
fn default_temperature() -> f32 { 0.7 }
|
||||
|
||||
/// Agent creation request
///
/// Payload sent by the frontend (camelCase on the wire). Missing optional
/// fields fall back to the `default_*` providers above.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateAgentRequest {
    /// Human-readable agent name (required).
    pub name: String,
    /// Optional free-form description.
    #[serde(default)]
    pub description: Option<String>,
    /// Optional system prompt for the agent.
    #[serde(default)]
    pub system_prompt: Option<String>,
    /// LLM provider id; defaults to "openai".
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Model identifier; defaults to "gpt-4o-mini".
    #[serde(default = "default_model")]
    pub model: String,
    /// Response token budget; defaults to 4096.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
    /// Sampling temperature; defaults to 0.7.
    #[serde(default = "default_temperature")]
    pub temperature: f32,
    /// Optional workspace directory for the agent.
    #[serde(default)]
    pub workspace: Option<PathBuf>,
}
|
||||
|
||||
/// Agent creation response
///
/// Returned by `agent_create` (camelCase on the wire).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateAgentResponse {
    /// Newly assigned agent ID, stringified.
    pub id: String,
    /// Echo of the requested agent name.
    pub name: String,
    /// Initial lifecycle state (set to "running" on creation).
    pub state: String,
}
|
||||
|
||||
/// Agent update request
///
/// Partial update: every field is optional and only the fields that are
/// present are applied to the existing configuration (see `agent_update`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AgentUpdateRequest {
    /// New agent name.
    pub name: Option<String>,
    /// New description.
    pub description: Option<String>,
    /// New system prompt.
    pub system_prompt: Option<String>,
    /// New model identifier.
    pub model: Option<String>,
    /// New LLM provider id.
    pub provider: Option<String>,
    /// New response token budget.
    pub max_tokens: Option<u32>,
    /// New sampling temperature.
    pub temperature: Option<f32>,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Commands
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Create a new agent
|
||||
#[tauri::command]
|
||||
pub async fn agent_create(
|
||||
state: State<'_, KernelState>,
|
||||
request: CreateAgentRequest,
|
||||
) -> Result<CreateAgentResponse, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let mut config = AgentConfig::new(&request.name)
|
||||
.with_description(request.description.unwrap_or_default())
|
||||
.with_system_prompt(request.system_prompt.unwrap_or_default())
|
||||
.with_model(zclaw_types::ModelConfig {
|
||||
provider: request.provider,
|
||||
model: request.model,
|
||||
api_key_env: None,
|
||||
base_url: None,
|
||||
})
|
||||
.with_max_tokens(request.max_tokens)
|
||||
.with_temperature(request.temperature);
|
||||
|
||||
if let Some(workspace) = request.workspace {
|
||||
config.workspace = Some(workspace);
|
||||
}
|
||||
|
||||
let id = kernel.spawn_agent(config)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to create agent: {}", e))?;
|
||||
|
||||
Ok(CreateAgentResponse {
|
||||
id: id.to_string(),
|
||||
name: request.name,
|
||||
state: "running".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// List all agents
|
||||
#[tauri::command]
|
||||
pub async fn agent_list(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<Vec<AgentInfo>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
Ok(kernel.list_agents())
|
||||
}
|
||||
|
||||
/// Get agent info
|
||||
#[tauri::command]
|
||||
pub async fn agent_get(
|
||||
state: State<'_, KernelState>,
|
||||
agent_id: String,
|
||||
) -> Result<Option<AgentInfo>, String> {
|
||||
let agent_id = validate_agent_id(&agent_id)?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let id: AgentId = agent_id.parse()
|
||||
.map_err(|_| "Invalid agent ID format".to_string())?;
|
||||
|
||||
Ok(kernel.get_agent(&id))
|
||||
}
|
||||
|
||||
/// Delete an agent
|
||||
#[tauri::command]
|
||||
pub async fn agent_delete(
|
||||
state: State<'_, KernelState>,
|
||||
agent_id: String,
|
||||
) -> Result<(), String> {
|
||||
let agent_id = validate_agent_id(&agent_id)?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let id: AgentId = agent_id.parse()
|
||||
.map_err(|_| "Invalid agent ID format".to_string())?;
|
||||
|
||||
kernel.kill_agent(&id)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to delete agent: {}", e))
|
||||
}
|
||||
|
||||
/// Update an agent's configuration
|
||||
#[tauri::command]
|
||||
pub async fn agent_update(
|
||||
state: State<'_, KernelState>,
|
||||
agent_id: String,
|
||||
updates: AgentUpdateRequest,
|
||||
) -> Result<AgentInfo, String> {
|
||||
let agent_id = validate_agent_id(&agent_id)?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let id: AgentId = agent_id.parse()
|
||||
.map_err(|_| "Invalid agent ID format".to_string())?;
|
||||
|
||||
// Get existing config
|
||||
let mut config = kernel.get_agent_config(&id)
|
||||
.ok_or_else(|| format!("Agent not found: {}", agent_id))?;
|
||||
|
||||
// Apply updates
|
||||
if let Some(name) = updates.name {
|
||||
config.name = name;
|
||||
}
|
||||
if let Some(description) = updates.description {
|
||||
config.description = Some(description);
|
||||
}
|
||||
if let Some(system_prompt) = updates.system_prompt {
|
||||
config.system_prompt = Some(system_prompt);
|
||||
}
|
||||
if let Some(model) = updates.model {
|
||||
config.model.model = model;
|
||||
}
|
||||
if let Some(provider) = updates.provider {
|
||||
config.model.provider = provider;
|
||||
}
|
||||
if let Some(max_tokens) = updates.max_tokens {
|
||||
config.max_tokens = Some(max_tokens);
|
||||
}
|
||||
if let Some(temperature) = updates.temperature {
|
||||
config.temperature = Some(temperature);
|
||||
}
|
||||
|
||||
// Save updated config
|
||||
kernel.update_agent(config)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to update agent: {}", e))?;
|
||||
|
||||
// Return updated info
|
||||
kernel.get_agent(&id)
|
||||
.ok_or_else(|| format!("Agent not found after update: {}", agent_id))
|
||||
}
|
||||
|
||||
/// Export an agent configuration as JSON
|
||||
#[tauri::command]
|
||||
pub async fn agent_export(
|
||||
state: State<'_, KernelState>,
|
||||
agent_id: String,
|
||||
) -> Result<String, String> {
|
||||
let agent_id = validate_agent_id(&agent_id)?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let id: AgentId = agent_id.parse()
|
||||
.map_err(|_| "Invalid agent ID format".to_string())?;
|
||||
|
||||
let config = kernel.get_agent_config(&id)
|
||||
.ok_or_else(|| format!("Agent not found: {}", agent_id))?;
|
||||
|
||||
serde_json::to_string_pretty(&config)
|
||||
.map_err(|e| format!("Failed to serialize agent config: {}", e))
|
||||
}
|
||||
|
||||
/// Import an agent from JSON configuration
|
||||
#[tauri::command]
|
||||
pub async fn agent_import(
|
||||
state: State<'_, KernelState>,
|
||||
config_json: String,
|
||||
) -> Result<AgentInfo, String> {
|
||||
validate_string_length(&config_json, "config_json", 1_000_000)
|
||||
.map_err(|e| format!("{}", e))?;
|
||||
|
||||
let mut config: AgentConfig = serde_json::from_str(&config_json)
|
||||
.map_err(|e| format!("Invalid agent config JSON: {}", e))?;
|
||||
|
||||
// Regenerate ID to avoid collisions
|
||||
config.id = AgentId::new();
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let new_id = kernel.spawn_agent(config).await
|
||||
.map_err(|e| format!("Failed to import agent: {}", e))?;
|
||||
|
||||
kernel.get_agent(&new_id)
|
||||
.ok_or_else(|| "Agent was created but could not be retrieved".to_string())
|
||||
}
|
||||
140
desktop/src-tauri/src/kernel_commands/approval.rs
Normal file
140
desktop/src-tauri/src/kernel_commands/approval.rs
Normal file
@@ -0,0 +1,140 @@
|
||||
//! Approval commands: list and respond
|
||||
//!
|
||||
//! When approved, kernel's `respond_to_approval` internally spawns the Hand execution
|
||||
//! and emits `hand-execution-complete` events to the frontend.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json;
|
||||
use tauri::{AppHandle, Emitter, State};
|
||||
|
||||
use super::KernelState;
|
||||
|
||||
// ============================================================
|
||||
// Approval Commands
|
||||
// ============================================================
|
||||
|
||||
/// Approval response
///
/// Frontend-facing view of a kernel approval entry (camelCase on the wire).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ApprovalResponse {
    /// Approval entry id.
    pub id: String,
    /// Hand whose execution is gated by this approval.
    pub hand_id: String,
    /// Lifecycle status string (e.g. "pending"; also "approved",
    /// "completed", "failed" — see `approval_respond`).
    pub status: String,
    /// Creation time, RFC 3339 formatted.
    pub created_at: String,
    /// Input payload the Hand would execute with.
    pub input: serde_json::Value,
}
|
||||
|
||||
/// List pending approvals
|
||||
#[tauri::command]
|
||||
pub async fn approval_list(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<Vec<ApprovalResponse>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let approvals = kernel.list_approvals().await;
|
||||
Ok(approvals.into_iter().map(|a| ApprovalResponse {
|
||||
id: a.id,
|
||||
hand_id: a.hand_id,
|
||||
status: a.status,
|
||||
created_at: a.created_at.to_rfc3339(),
|
||||
input: a.input,
|
||||
}).collect())
|
||||
}
|
||||
|
||||
/// Respond to an approval
///
/// When approved, the kernel's `respond_to_approval` internally spawns the Hand
/// execution. We additionally emit Tauri events so the frontend can track when
/// the execution finishes, since the kernel layer has no access to the AppHandle.
///
/// * `id` — approval entry id; must currently be "pending".
/// * `approved` — true to approve (spawns execution), false to reject.
/// * `reason` — optional explanation forwarded to the kernel.
///
/// On approval this also spawns a background task that polls the approval's
/// status every 500 ms (up to 5 minutes) and emits a single
/// `hand-execution-complete` event when the Hand finishes, fails, or times out.
#[tauri::command]
pub async fn approval_respond(
    app: AppHandle,
    state: State<'_, KernelState>,
    id: String,
    approved: bool,
    reason: Option<String>,
) -> Result<(), String> {
    // Capture hand info before calling respond_to_approval (which mutates the approval).
    // Scoped block so the kernel lock is released before the next lock below.
    let hand_id = {
        let kernel_lock = state.lock().await;
        let kernel = kernel_lock.as_ref()
            .ok_or_else(|| "Kernel not initialized".to_string())?;

        let approvals = kernel.list_approvals().await;
        let entry = approvals.iter().find(|a| a.id == id && a.status == "pending")
            .ok_or_else(|| format!("Approval not found or already resolved: {}", id))?;
        entry.hand_id.clone()
    };

    // Call kernel respond_to_approval (this updates status and spawns Hand execution).
    // Again scoped so the lock is not held while the monitor task runs.
    {
        let kernel_lock = state.lock().await;
        let kernel = kernel_lock.as_ref()
            .ok_or_else(|| "Kernel not initialized".to_string())?;

        kernel.respond_to_approval(&id, approved, reason).await
            .map_err(|e| format!("Failed to respond to approval: {}", e))?;
    }

    // When approved, monitor the Hand execution and emit events to the frontend.
    // The kernel's respond_to_approval changes status to "approved" immediately,
    // then the spawned task sets it to "completed" or "failed" when done.
    if approved {
        let approval_id = id.clone();
        let kernel_state: KernelState = (*state).clone();

        tokio::spawn(async move {
            // Poll the approval status every 500 ms, giving up after 5 minutes.
            let timeout = tokio::time::Duration::from_secs(300);
            let poll_interval = tokio::time::Duration::from_millis(500);

            let result = tokio::time::timeout(timeout, async {
                loop {
                    tokio::time::sleep(poll_interval).await;

                    // Re-acquire the kernel lock on each iteration; it is
                    // dropped at the end of the loop body so other commands
                    // are not starved while we wait.
                    let kernel_lock = kernel_state.lock().await;
                    if let Some(kernel) = kernel_lock.as_ref() {
                        // Use get_approval to check any status (not just "pending")
                        if let Some(entry) = kernel.get_approval(&approval_id).await {
                            match entry.status.as_str() {
                                "completed" => {
                                    tracing::info!("[approval_respond] Hand '{}' completed for approval {}", hand_id, approval_id);
                                    return (true, None::<String>);
                                }
                                "failed" => {
                                    // The failure reason is stored under "error" in the entry input.
                                    let error_msg = entry.input.get("error")
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("Unknown error")
                                        .to_string();
                                    tracing::warn!("[approval_respond] Hand '{}' failed for approval {}: {}", hand_id, approval_id, error_msg);
                                    return (false, Some(error_msg));
                                }
                                _ => {} // "approved" = still running
                            }
                        } else {
                            // Entry disappeared entirely — kernel was likely restarted
                            return (false, Some("Approval entry disappeared".to_string()));
                        }
                    } else {
                        return (false, Some("Kernel not available".to_string()));
                    }
                }
            }).await;

            // Err(_) from timeout() means the 5-minute deadline elapsed.
            let (success, error) = match result {
                Ok((s, e)) => (s, e),
                Err(_) => (false, Some("Hand execution timed out (5 minutes)".to_string())),
            };

            // Best-effort emit; a closed frontend must not crash the task.
            let _ = app.emit("hand-execution-complete", serde_json::json!({
                "approvalId": approval_id,
                "handId": hand_id,
                "success": success,
                "error": error,
            }));
        });
    }

    Ok(())
}
|
||||
274
desktop/src-tauri/src/kernel_commands/chat.rs
Normal file
274
desktop/src-tauri/src/kernel_commands/chat.rs
Normal file
@@ -0,0 +1,274 @@
|
||||
//! Chat commands: send message, streaming chat
|
||||
|
||||
use std::sync::Arc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::{AppHandle, Emitter, State};
|
||||
use tokio::sync::Mutex;
|
||||
use zclaw_types::AgentId;
|
||||
|
||||
use super::{validate_agent_id, KernelState, SessionStreamGuard};
|
||||
use crate::intelligence::validation::validate_string_length;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Request / Response types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Chat request
///
/// Non-streaming chat payload from the frontend (camelCase on the wire).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ChatRequest {
    /// Target agent ID as a string; validated and parsed by `agent_chat`.
    pub agent_id: String,
    /// User message text (capped at 100 000 chars by `agent_chat`).
    pub message: String,
}
|
||||
|
||||
/// Chat response
///
/// Result of a non-streaming chat turn (camelCase on the wire).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ChatResponse {
    /// Assistant reply text.
    pub content: String,
    /// Tokens consumed by the prompt.
    pub input_tokens: u32,
    /// Tokens produced in the reply.
    pub output_tokens: u32,
}
|
||||
|
||||
/// Streaming chat event for Tauri emission
///
/// Serialized with a `type` tag and camelCase fields so the frontend can
/// switch on `event.type`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum StreamChatEvent {
    /// Incremental text chunk from the model.
    Delta { delta: String },
    /// A tool invocation started with the given input.
    ToolStart { name: String, input: serde_json::Value },
    /// A tool invocation finished with the given output.
    ToolEnd { name: String, output: serde_json::Value },
    /// A new agent-loop iteration began.
    IterationStart { iteration: usize, max_iterations: usize },
    /// A Hand execution started with the given parameters.
    HandStart { name: String, params: serde_json::Value },
    /// A Hand execution finished with the given result.
    HandEnd { name: String, result: serde_json::Value },
    /// Stream finished; reports final token usage.
    Complete { input_tokens: u32, output_tokens: u32 },
    /// Stream aborted with an error message.
    Error { message: String },
}
|
||||
|
||||
/// Streaming chat request
///
/// Payload for `agent_chat_stream` (camelCase on the wire).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StreamChatRequest {
    /// Target agent ID as a string.
    pub agent_id: String,
    /// Session identifier used to route stream events and to enforce
    /// one active stream per session.
    pub session_id: String,
    /// User message text.
    pub message: String,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Commands
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Send a message to an agent
|
||||
#[tauri::command]
|
||||
pub async fn agent_chat(
|
||||
state: State<'_, KernelState>,
|
||||
request: ChatRequest,
|
||||
) -> Result<ChatResponse, String> {
|
||||
validate_agent_id(&request.agent_id)?;
|
||||
validate_string_length(&request.message, "message", 100000)
|
||||
.map_err(|e| format!("Invalid message: {}", e))?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let id: AgentId = request.agent_id.parse()
|
||||
.map_err(|_| "Invalid agent ID format".to_string())?;
|
||||
|
||||
let response = kernel.send_message(&id, request.message)
|
||||
.await
|
||||
.map_err(|e| format!("Chat failed: {}", e))?;
|
||||
|
||||
Ok(ChatResponse {
|
||||
content: response.content,
|
||||
input_tokens: response.input_tokens,
|
||||
output_tokens: response.output_tokens,
|
||||
})
|
||||
}
|
||||
|
||||
/// Send a message to an agent with streaming response
///
/// This command initiates a streaming chat session. Events are emitted
/// via Tauri's event system with the name "stream:chunk" and include
/// the session_id for routing.
///
/// Flow: validate → per-session concurrency guard → auto-init heartbeat →
/// build enhanced prompt → obtain stream receiver from the kernel →
/// spawn a detached task that forwards kernel `LoopEvent`s to the frontend.
#[tauri::command]
pub async fn agent_chat_stream(
    app: AppHandle,
    state: State<'_, KernelState>,
    identity_state: State<'_, crate::intelligence::IdentityManagerState>,
    heartbeat_state: State<'_, crate::intelligence::HeartbeatEngineState>,
    reflection_state: State<'_, crate::intelligence::ReflectionEngineState>,
    stream_guard: State<'_, SessionStreamGuard>,
    request: StreamChatRequest,
) -> Result<(), String> {
    // Input validation before any state is touched.
    validate_agent_id(&request.agent_id)?;
    validate_string_length(&request.message, "message", 100000)
        .map_err(|e| format!("Invalid message: {}", e))?;

    let id: AgentId = request.agent_id.parse()
        .map_err(|_| "Invalid agent ID format".to_string())?;

    // Owned copies moved into the spawned task below.
    let session_id = request.session_id.clone();
    let agent_id_str = request.agent_id.clone();
    let message = request.message.clone();

    // Session-level concurrency guard
    // NOTE(review): `_session_guard` is dropped when this function returns —
    // i.e. BEFORE the spawned stream task finishes — so this only serializes
    // stream *setup*, not the whole stream lifetime. An owned guard
    // (`try_lock_owned`) moved into the task would guard end-to-end; confirm
    // which behavior is intended.
    // NOTE(review): `entry()` returns a DashMap guard held for the rest of
    // this function, including across the `.await`s below — other sessions
    // hashing to the same shard may block meanwhile; verify acceptable.
    let session_mutex = stream_guard
        .entry(session_id.clone())
        .or_insert_with(|| Arc::new(Mutex::new(())));
    let _session_guard = session_mutex.try_lock()
        .map_err(|_| {
            tracing::warn!(
                "[agent_chat_stream] Session {} already has an active stream — rejecting",
                session_id
            );
            format!("Session {} already has an active stream", session_id)
        })?;

    // AUTO-INIT HEARTBEAT
    // Lazily create the per-agent heartbeat engine on first chat.
    {
        let mut engines = heartbeat_state.lock().await;
        if !engines.contains_key(&request.agent_id) {
            let engine = crate::intelligence::heartbeat::HeartbeatEngine::new(
                request.agent_id.clone(),
                None,
            );
            engines.insert(request.agent_id.clone(), engine);
            tracing::info!("[agent_chat_stream] Auto-initialized heartbeat for agent: {}", request.agent_id);
        }
    }

    // PRE-CONVERSATION: Build intelligence-enhanced system prompt
    // A hook failure degrades to an empty prompt rather than aborting the chat.
    let enhanced_prompt = crate::intelligence_hooks::pre_conversation_hook(
        &request.agent_id,
        &request.message,
        &identity_state,
    ).await.unwrap_or_default();

    // Get the streaming receiver while holding the lock, then release it
    // (the kernel mutex is NOT held while the stream is consumed).
    let (mut rx, llm_driver) = {
        let kernel_lock = state.lock().await;
        let kernel = kernel_lock.as_ref()
            .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

        // Driver handle is captured now so the post-conversation hook can use
        // it after the kernel lock is released.
        let driver = Some(kernel.driver());

        // Empty enhanced prompt means "no prompt override".
        let prompt_arg = if enhanced_prompt.is_empty() { None } else { Some(enhanced_prompt) };

        // Empty session_id starts a fresh conversation; a non-empty one must
        // be a valid UUID or the request is rejected outright.
        let session_id_parsed = if session_id.is_empty() {
            None
        } else {
            match uuid::Uuid::parse_str(&session_id) {
                Ok(uuid) => Some(zclaw_types::SessionId::from_uuid(uuid)),
                Err(e) => {
                    return Err(format!(
                        "Invalid session_id '{}': {}. Cannot reuse conversation context.",
                        session_id, e
                    ));
                }
            }
        };
        let rx = kernel.send_message_stream_with_prompt(&id, message.clone(), prompt_arg, session_id_parsed)
            .await
            .map_err(|e| format!("Failed to start streaming: {}", e))?;
        (rx, driver)
    };

    // Clone shared engine states for the detached forwarding task.
    let hb_state = heartbeat_state.inner().clone();
    let rf_state = reflection_state.inner().clone();

    // Spawn a task to process stream events with timeout guard
    tokio::spawn(async move {
        use zclaw_runtime::LoopEvent;

        tracing::debug!("[agent_chat_stream] Starting stream processing for session: {}", session_id);

        // Idle timeout: if no event arrives within 5 minutes, the stream is
        // considered wedged and an error event is sent to the frontend.
        let stream_timeout = tokio::time::Duration::from_secs(300);

        loop {
            match tokio::time::timeout(stream_timeout, rx.recv()).await {
                Ok(Some(event)) => {
                    // Translate kernel LoopEvents into frontend StreamChatEvents.
                    // Tool events whose name starts with "hand_" are surfaced
                    // as Hand events so the UI can render them differently.
                    let stream_event = match &event {
                        LoopEvent::Delta(delta) => {
                            tracing::trace!("[agent_chat_stream] Delta: {} bytes", delta.len());
                            StreamChatEvent::Delta { delta: delta.clone() }
                        }
                        LoopEvent::ToolStart { name, input } => {
                            tracing::debug!("[agent_chat_stream] ToolStart: {}", name);
                            if name.starts_with("hand_") {
                                StreamChatEvent::HandStart { name: name.clone(), params: input.clone() }
                            } else {
                                StreamChatEvent::ToolStart { name: name.clone(), input: input.clone() }
                            }
                        }
                        LoopEvent::ToolEnd { name, output } => {
                            tracing::debug!("[agent_chat_stream] ToolEnd: {}", name);
                            if name.starts_with("hand_") {
                                StreamChatEvent::HandEnd { name: name.clone(), result: output.clone() }
                            } else {
                                StreamChatEvent::ToolEnd { name: name.clone(), output: output.clone() }
                            }
                        }
                        LoopEvent::IterationStart { iteration, max_iterations } => {
                            tracing::debug!("[agent_chat_stream] IterationStart: {}/{}", iteration, max_iterations);
                            StreamChatEvent::IterationStart { iteration: *iteration, max_iterations: *max_iterations }
                        }
                        LoopEvent::Complete(result) => {
                            tracing::info!("[agent_chat_stream] Complete: input_tokens={}, output_tokens={}",
                                result.input_tokens, result.output_tokens);

                            // Fire-and-forget post-conversation hook (memory /
                            // reflection processing) so it never delays the
                            // Complete event reaching the frontend.
                            let agent_id_hook = agent_id_str.clone();
                            let message_hook = message.clone();
                            let hb = hb_state.clone();
                            let rf = rf_state.clone();
                            let driver = llm_driver.clone();
                            tokio::spawn(async move {
                                crate::intelligence_hooks::post_conversation_hook(
                                    &agent_id_hook, &message_hook, &hb, &rf, driver,
                                ).await;
                            });

                            StreamChatEvent::Complete {
                                input_tokens: result.input_tokens,
                                output_tokens: result.output_tokens,
                            }
                        }
                        LoopEvent::Error(message) => {
                            tracing::warn!("[agent_chat_stream] Error: {}", message);
                            StreamChatEvent::Error { message: message.clone() }
                        }
                    };

                    // Frontend has gone away if emit fails — stop forwarding.
                    if let Err(e) = app.emit("stream:chunk", serde_json::json!({
                        "sessionId": session_id,
                        "event": stream_event
                    })) {
                        tracing::warn!("[agent_chat_stream] Failed to emit event: {}", e);
                        break;
                    }

                    // Complete / Error are terminal events.
                    if matches!(event, LoopEvent::Complete(_) | LoopEvent::Error(_)) {
                        break;
                    }
                }
                Ok(None) => {
                    // Sender dropped without a terminal event.
                    tracing::info!("[agent_chat_stream] Stream channel closed for session: {}", session_id);
                    break;
                }
                Err(_) => {
                    // Idle timeout elapsed — tell the frontend and give up.
                    tracing::warn!("[agent_chat_stream] Stream idle timeout for session: {}", session_id);
                    let _ = app.emit("stream:chunk", serde_json::json!({
                        "sessionId": session_id,
                        "event": StreamChatEvent::Error {
                            message: "流式响应超时,请重试".to_string()
                        }
                    }));
                    break;
                }
            }
        }

        tracing::debug!("[agent_chat_stream] Stream processing ended for session: {}", session_id);
    });

    Ok(())
}
|
||||
431
desktop/src-tauri/src/kernel_commands/hand.rs
Normal file
431
desktop/src-tauri/src/kernel_commands/hand.rs
Normal file
@@ -0,0 +1,431 @@
|
||||
//! Hand commands: list, execute, approve, cancel, get, run_status, run_list, run_cancel
|
||||
//!
|
||||
//! Hands are autonomous capabilities registered in the Kernel's HandRegistry.
|
||||
//! Hand execution can require approval depending on autonomy level and config.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json;
|
||||
use tauri::{AppHandle, Emitter, State};
|
||||
|
||||
use super::KernelState;
|
||||
|
||||
// ============================================================================
|
||||
// Hands Commands - Autonomous Capabilities
|
||||
// ============================================================================
|
||||
|
||||
/// Hand information response for frontend
///
/// Frontend-facing DTO built from `zclaw_hands::HandConfig` (see the `From`
/// impl in this module). Serialized in camelCase for the TypeScript side.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HandInfoResponse {
    /// Registry identifier of the hand.
    pub id: String,
    /// Human-readable display name.
    pub name: String,
    /// Short description shown in the UI.
    pub description: String,
    /// Coarse UI status derived from config flags:
    /// "unavailable" (disabled), "needs_approval", or "idle".
    pub status: String,
    /// True when the hand is enabled and declares no dependencies.
    pub requirements_met: bool,
    /// Whether execution must go through the approval flow.
    pub needs_approval: bool,
    /// Identifiers of dependencies this hand requires.
    pub dependencies: Vec<String>,
    /// Free-form tags; some double as category/icon hints.
    pub tags: Vec<String>,
    /// Whether the hand is enabled in its config.
    pub enabled: bool,
    /// Category derived from the first recognized tag, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    /// Icon name derived from tags (falls back to "zap").
    #[serde(skip_serializing_if = "Option::is_none")]
    pub icon: Option<String>,
    /// Number of tools exposed by the hand (always 0 in the visible conversion
    /// — not yet wired up).
    #[serde(default)]
    pub tool_count: u32,
    /// Number of metrics reported by the hand (always 0 in the visible
    /// conversion — not yet wired up).
    #[serde(default)]
    pub metric_count: u32,
}
|
||||
|
||||
impl From<zclaw_hands::HandConfig> for HandInfoResponse {
    /// Translate a registry `HandConfig` into the frontend DTO, deriving the
    /// UI status, a category from known tags, and an icon from tag hints.
    fn from(config: zclaw_hands::HandConfig) -> Self {
        // Coarse UI status from the enabled/approval flags.
        let status = if !config.enabled {
            "unavailable"
        } else if config.needs_approval {
            "needs_approval"
        } else {
            "idle"
        }
        .to_string();

        // The first tag that names a known category becomes the category.
        const CATEGORIES: [&str; 6] =
            ["research", "automation", "browser", "data", "media", "communication"];
        let category = config
            .tags
            .iter()
            .find(|tag| CATEGORIES.contains(&tag.as_str()))
            .cloned();

        // Tag → icon lookup table; entry order preserves the original
        // precedence (browser > research > media > data > communication).
        const ICONS: [(&str, &str); 5] = [
            ("browser", "globe"),
            ("research", "search"),
            ("media", "video"),
            ("data", "database"),
            ("communication", "message-circle"),
        ];
        let icon = ICONS
            .iter()
            .find(|(tag, _)| config.tags.iter().any(|t| t == tag))
            .map(|(_, icon_name)| (*icon_name).to_string())
            .or_else(|| Some("zap".to_string()));

        Self {
            id: config.id,
            name: config.name,
            description: config.description,
            status,
            requirements_met: config.enabled && config.dependencies.is_empty(),
            needs_approval: config.needs_approval,
            dependencies: config.dependencies,
            tags: config.tags,
            enabled: config.enabled,
            category,
            icon,
            // Not yet populated by the registry.
            tool_count: 0,
            metric_count: 0,
        }
    }
}
|
||||
|
||||
/// Hand execution result
///
/// Frontend-facing mirror of `zclaw_hands::HandResult`, serialized camelCase.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HandResult {
    /// Whether the hand completed successfully (also `false` for the
    /// "pending_approval" placeholder results built in `hand_execute`).
    pub success: bool,
    /// Arbitrary JSON output produced by the hand, or a pending-approval payload.
    pub output: serde_json::Value,
    /// Error message when execution failed.
    pub error: Option<String>,
    /// Wall-clock execution time in milliseconds, when measured.
    pub duration_ms: Option<u64>,
}
|
||||
|
||||
impl From<zclaw_hands::HandResult> for HandResult {
    // Straight field-for-field mapping from the kernel-side result to the
    // camelCase frontend DTO; no data is transformed.
    fn from(result: zclaw_hands::HandResult) -> Self {
        Self {
            success: result.success,
            output: result.output,
            error: result.error,
            duration_ms: result.duration_ms,
        }
    }
}
|
||||
|
||||
/// List all registered hands
|
||||
///
|
||||
/// Returns hands from the Kernel's HandRegistry.
|
||||
/// Hands are registered during kernel initialization.
|
||||
#[tauri::command]
|
||||
pub async fn hand_list(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<Vec<HandInfoResponse>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let hands = kernel.list_hands().await;
|
||||
Ok(hands.into_iter().map(HandInfoResponse::from).collect())
|
||||
}
|
||||
|
||||
/// Execute a hand
|
||||
///
|
||||
/// Executes a hand with the given ID and input.
|
||||
/// If the hand has `needs_approval = true`, creates a pending approval instead.
|
||||
/// Returns the hand result as JSON, or a pending status with approval ID.
|
||||
#[tauri::command]
|
||||
pub async fn hand_execute(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
input: serde_json::Value,
|
||||
autonomy_level: Option<String>,
|
||||
) -> Result<HandResult, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
// Autonomy guard: supervised mode requires approval for ALL hands
|
||||
if autonomy_level.as_deref() == Some("supervised") {
|
||||
let approval = kernel.create_approval(id.clone(), input).await;
|
||||
return Ok(HandResult {
|
||||
success: false,
|
||||
output: serde_json::json!({
|
||||
"status": "pending_approval",
|
||||
"approval_id": approval.id,
|
||||
"hand_id": approval.hand_id,
|
||||
"message": "监督模式下所有 Hand 执行需要用户审批"
|
||||
}),
|
||||
error: None,
|
||||
duration_ms: None,
|
||||
});
|
||||
}
|
||||
|
||||
// Check if hand requires approval (assisted mode or no autonomy level specified).
|
||||
// In autonomous mode, the user has opted in to bypass per-hand approval gates.
|
||||
if autonomy_level.as_deref() != Some("autonomous") {
|
||||
let hands = kernel.list_hands().await;
|
||||
if let Some(hand_config) = hands.iter().find(|h| h.id == id) {
|
||||
if hand_config.needs_approval {
|
||||
let approval = kernel.create_approval(id.clone(), input).await;
|
||||
return Ok(HandResult {
|
||||
success: false,
|
||||
output: serde_json::json!({
|
||||
"status": "pending_approval",
|
||||
"approval_id": approval.id,
|
||||
"hand_id": approval.hand_id,
|
||||
"message": "This hand requires approval before execution"
|
||||
}),
|
||||
error: None,
|
||||
duration_ms: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Execute hand directly (returns result + run_id for tracking)
|
||||
let (result, _run_id) = kernel.execute_hand(&id, input).await
|
||||
.map_err(|e| format!("Failed to execute hand: {}", e))?;
|
||||
|
||||
Ok(HandResult::from(result))
|
||||
}
|
||||
|
||||
/// Approve a hand execution
///
/// When approved, the kernel's `respond_to_approval` internally spawns the Hand
/// execution. We additionally emit Tauri events so the frontend can track when
/// the execution finishes.
///
/// Returns a JSON object with `status` ("approved"/"rejected") and `hand_name`;
/// the eventual execution outcome is delivered asynchronously via the
/// "hand-execution-complete" Tauri event.
#[tauri::command]
pub async fn hand_approve(
    app: AppHandle,
    state: State<'_, KernelState>,
    hand_name: String,
    run_id: String,
    approved: bool,
    reason: Option<String>,
) -> Result<serde_json::Value, String> {
    let kernel_lock = state.lock().await;
    let kernel = kernel_lock.as_ref()
        .ok_or_else(|| "Kernel not initialized".to_string())?;

    tracing::info!(
        "[hand_approve] hand={}, run_id={}, approved={}, reason={:?}",
        hand_name, run_id, approved, reason
    );

    // Verify the approval belongs to the specified hand before responding.
    // This prevents cross-hand approval attacks where a run_id from one hand
    // is used to approve a different hand's pending execution.
    // NOTE(review): list-then-respond is not atomic (TOCTOU window between the
    // check and `respond_to_approval`) — presumably benign since both run under
    // the same kernel; confirm against the kernel's internal locking.
    let approvals = kernel.list_approvals().await;
    let entry = approvals.iter().find(|a| a.id == run_id && a.status == "pending")
        .ok_or_else(|| format!("Approval not found or already resolved: {}", run_id))?;

    if entry.hand_id != hand_name {
        return Err(format!(
            "Approval run_id {} belongs to hand '{}', not '{}' as requested",
            run_id, entry.hand_id, hand_name
        ));
    }

    kernel.respond_to_approval(&run_id, approved, reason).await
        .map_err(|e| format!("Failed to approve hand: {}", e))?;

    // When approved, monitor the Hand execution and emit events to the frontend
    if approved {
        let approval_id = run_id.clone();
        let hand_id = hand_name.clone();
        // Clone the Arc so the spawned task can re-lock the kernel per poll.
        let kernel_state: KernelState = (*state).clone();

        tokio::spawn(async move {
            // Poll the approval status until it transitions from "approved" to
            // "completed" or "failed" (set by the kernel's spawned task).
            // Timeout after 5 minutes to avoid hanging forever.
            let timeout = tokio::time::Duration::from_secs(300);
            let poll_interval = tokio::time::Duration::from_millis(500);

            // The inner async block resolves to (success, optional_error);
            // the kernel lock is re-acquired and released on every iteration.
            let result = tokio::time::timeout(timeout, async {
                loop {
                    tokio::time::sleep(poll_interval).await;

                    let kernel_lock = kernel_state.lock().await;
                    if let Some(kernel) = kernel_lock.as_ref() {
                        // Use get_approval to check any status (not just "pending")
                        if let Some(entry) = kernel.get_approval(&approval_id).await {
                            match entry.status.as_str() {
                                "completed" => {
                                    tracing::info!("[hand_approve] Hand '{}' execution completed for approval {}", hand_id, approval_id);
                                    return (true, None::<String>);
                                }
                                "failed" => {
                                    // The kernel stores the failure message under
                                    // the "error" key of the approval's input.
                                    let error_msg = entry.input.get("error")
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("Unknown error")
                                        .to_string();
                                    tracing::warn!("[hand_approve] Hand '{}' execution failed for approval {}: {}", hand_id, approval_id, error_msg);
                                    return (false, Some(error_msg));
                                }
                                _ => {} // still running (status is "approved")
                            }
                        } else {
                            // Entry disappeared entirely — kernel was likely restarted
                            return (false, Some("Approval entry disappeared".to_string()));
                        }
                    } else {
                        return (false, Some("Kernel not available".to_string()));
                    }
                }
            }).await;

            // Outer Err means the 5-minute timeout elapsed.
            let (success, error) = match result {
                Ok((s, e)) => (s, e),
                Err(_) => (false, Some("Hand execution timed out (5 minutes)".to_string())),
            };

            // Best-effort notification; emit failure is ignored (frontend gone).
            let _ = app.emit("hand-execution-complete", serde_json::json!({
                "approvalId": approval_id,
                "handId": hand_id,
                "success": success,
                "error": error,
            }));
        });
    }

    Ok(serde_json::json!({
        "status": if approved { "approved" } else { "rejected" },
        "hand_name": hand_name,
    }))
}
|
||||
|
||||
/// Cancel a hand execution
|
||||
#[tauri::command]
|
||||
pub async fn hand_cancel(
|
||||
state: State<'_, KernelState>,
|
||||
hand_name: String,
|
||||
run_id: String,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
tracing::info!(
|
||||
"[hand_cancel] hand={}, run_id={}",
|
||||
hand_name, run_id
|
||||
);
|
||||
|
||||
// Verify the approval belongs to the specified hand before cancelling
|
||||
let approvals = kernel.list_approvals().await;
|
||||
let entry = approvals.iter().find(|a| a.id == run_id && a.status == "pending")
|
||||
.ok_or_else(|| format!("Approval not found or already resolved: {}", run_id))?;
|
||||
|
||||
if entry.hand_id != hand_name {
|
||||
return Err(format!(
|
||||
"Approval run_id {} belongs to hand '{}', not '{}' as requested",
|
||||
run_id, entry.hand_id, hand_name
|
||||
));
|
||||
}
|
||||
|
||||
kernel.cancel_approval(&run_id).await
|
||||
.map_err(|e| format!("Failed to cancel hand: {}", e))?;
|
||||
|
||||
Ok(serde_json::json!({ "status": "cancelled", "hand_name": hand_name }))
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Hand Stub Commands (not yet fully implemented)
|
||||
// ============================================================
|
||||
|
||||
/// Get detailed info for a single hand
|
||||
#[tauri::command]
|
||||
pub async fn hand_get(
|
||||
state: State<'_, KernelState>,
|
||||
name: String,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let hands = kernel.list_hands().await;
|
||||
let found = hands.iter().find(|h| h.id == name)
|
||||
.ok_or_else(|| format!("Hand '{}' not found", name))?;
|
||||
|
||||
Ok(serde_json::to_value(found)
|
||||
.map_err(|e| format!("Serialization error: {}", e))?)
|
||||
}
|
||||
|
||||
/// Get status of a specific hand run
|
||||
#[tauri::command]
|
||||
pub async fn hand_run_status(
|
||||
state: State<'_, KernelState>,
|
||||
run_id: String,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let parsed_id: zclaw_types::HandRunId = run_id.parse()
|
||||
.map_err(|e| format!("Invalid run ID: {}", e))?;
|
||||
|
||||
let run = kernel.get_hand_run(&parsed_id).await
|
||||
.map_err(|e| format!("Failed to get hand run: {}", e))?;
|
||||
|
||||
match run {
|
||||
Some(r) => Ok(serde_json::to_value(r)
|
||||
.map_err(|e| format!("Serialization error: {}", e))?),
|
||||
None => Ok(serde_json::json!({
|
||||
"status": "not_found",
|
||||
"run_id": run_id,
|
||||
"message": "Hand run not found"
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
/// List run history for a hand (or all hands)
|
||||
#[tauri::command]
|
||||
pub async fn hand_run_list(
|
||||
state: State<'_, KernelState>,
|
||||
hand_name: Option<String>,
|
||||
status: Option<String>,
|
||||
limit: Option<u32>,
|
||||
offset: Option<u32>,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let filter = zclaw_types::HandRunFilter {
|
||||
hand_name,
|
||||
status: status.map(|s| s.parse()).transpose()
|
||||
.map_err(|e| format!("Invalid status filter: {}", e))?,
|
||||
limit,
|
||||
offset,
|
||||
};
|
||||
|
||||
let runs = kernel.list_hand_runs(&filter).await
|
||||
.map_err(|e| format!("Failed to list hand runs: {}", e))?;
|
||||
let total = kernel.count_hand_runs(&filter).await
|
||||
.map_err(|e| format!("Failed to count hand runs: {}", e))?;
|
||||
|
||||
Ok(serde_json::json!({
|
||||
"runs": runs,
|
||||
"total": total,
|
||||
"limit": filter.limit.unwrap_or(20),
|
||||
"offset": filter.offset.unwrap_or(0),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Cancel a running hand execution
|
||||
#[tauri::command]
|
||||
pub async fn hand_run_cancel(
|
||||
state: State<'_, KernelState>,
|
||||
run_id: String,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let parsed_id: zclaw_types::HandRunId = run_id.parse()
|
||||
.map_err(|e| format!("Invalid run ID: {}", e))?;
|
||||
|
||||
kernel.cancel_hand_run(&parsed_id).await
|
||||
.map_err(|e| format!("Failed to cancel hand run: {}", e))?;
|
||||
|
||||
Ok(serde_json::json!({
|
||||
"status": "cancelled",
|
||||
"run_id": run_id
|
||||
}))
|
||||
}
|
||||
251
desktop/src-tauri/src/kernel_commands/lifecycle.rs
Normal file
251
desktop/src-tauri/src/kernel_commands/lifecycle.rs
Normal file
@@ -0,0 +1,251 @@
|
||||
//! Kernel lifecycle commands: init, status, shutdown
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::State;
|
||||
|
||||
use super::{KernelState, SchedulerState};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Request / Response types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Serde default providers for `KernelConfigRequest` fields.
fn default_api_protocol() -> String { String::from("openai") }
fn default_kernel_provider() -> String { String::from("openai") }
fn default_kernel_model() -> String { String::from("gpt-4o-mini") }
|
||||
|
||||
/// Kernel configuration request
///
/// Simple configuration: base_url + api_key + model.
/// Model ID is passed directly to the API without any transformation.
/// Deserialized camelCase from the frontend; missing fields fall back to the
/// `default_*` serde providers above.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct KernelConfigRequest {
    /// LLM provider (for preset URLs): anthropic, openai, zhipu, kimi, qwen, deepseek, local, custom
    #[serde(default = "default_kernel_provider")]
    pub provider: String,
    /// Model identifier - passed directly to the API
    #[serde(default = "default_kernel_model")]
    pub model: String,
    /// API key; `None` is treated as an empty key by `kernel_init`.
    pub api_key: Option<String>,
    /// Base URL (optional, uses provider default if not specified)
    pub base_url: Option<String>,
    /// API protocol: openai or anthropic
    #[serde(default = "default_api_protocol")]
    pub api_protocol: String,
}
|
||||
|
||||
/// Kernel status response
///
/// Returned by `kernel_init` and `kernel_status`; all optional fields are
/// `None` when the kernel is not initialized.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct KernelStatusResponse {
    /// Whether a kernel instance currently exists in shared state.
    pub initialized: bool,
    /// Number of agents registered with the kernel (0 when uninitialized).
    pub agent_count: usize,
    /// Database URL in use; `None` when uninitialized (and, in `kernel_init`'s
    /// "config unchanged" path, also `None` even though the kernel is up).
    pub database_url: Option<String>,
    /// LLM base URL in use, if initialized.
    pub base_url: Option<String>,
    /// LLM model identifier in use, if initialized.
    pub model: Option<String>,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Commands
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Initialize the internal ZCLAW Kernel
///
/// If kernel already exists with the same config, returns existing status.
/// If config changed, reboots kernel with new config.
///
/// Also wires up the extraction driver, persistent storage bridge, summary
/// driver, and (re)starts the SchedulerService.
#[tauri::command]
pub async fn kernel_init(
    state: State<'_, KernelState>,
    scheduler_state: State<'_, SchedulerState>,
    config_request: Option<KernelConfigRequest>,
) -> Result<KernelStatusResponse, String> {
    // Hold the kernel lock for the entire init so concurrent callers serialize.
    let mut kernel_lock = state.lock().await;

    // Check if we need to reboot kernel with new config
    if let Some(kernel) = kernel_lock.as_ref() {
        // Get current config from kernel
        let current_config = kernel.config();

        // Check if config changed
        // NOTE(review): only model and base_url are compared — a change to
        // api_key or api_protocol alone does NOT trigger a reboot; confirm
        // that is intended.
        let config_changed = if let Some(ref req) = config_request {
            // Resolve the provider's default base URL (empty api key is fine
            // here — only the URL is read from the constructed config).
            let default_base_url = zclaw_kernel::config::KernelConfig::from_provider(
                &req.provider, "", &req.model, None, &req.api_protocol
            ).llm.base_url;
            let request_base_url = req.base_url.clone().unwrap_or(default_base_url.clone());

            current_config.llm.model != req.model ||
            current_config.llm.base_url != request_base_url
        } else {
            false
        };

        if !config_changed {
            // Same config, return existing status
            // (database_url is intentionally omitted on this fast path).
            return Ok(KernelStatusResponse {
                initialized: true,
                agent_count: kernel.list_agents().len(),
                database_url: None,
                base_url: Some(current_config.llm.base_url.clone()),
                model: Some(current_config.llm.model.clone()),
            });
        }

        // Config changed, need to reboot kernel
        // Shutdown old kernel
        if let Err(e) = kernel.shutdown().await {
            eprintln!("[kernel_init] Warning: Failed to shutdown old kernel: {}", e);
        }
        *kernel_lock = None;
    }

    // Build configuration from request
    let config = if let Some(req) = &config_request {
        let api_key = req.api_key.as_deref().unwrap_or("");
        let base_url = req.base_url.as_deref();

        zclaw_kernel::config::KernelConfig::from_provider(
            &req.provider,
            api_key,
            &req.model,
            base_url,
            &req.api_protocol,
        )
    } else {
        zclaw_kernel::config::KernelConfig::default()
    };

    // Debug: print skills directory
    if let Some(ref skills_dir) = config.skills_dir {
        println!("[kernel_init] Skills directory: {} (exists: {})", skills_dir.display(), skills_dir.exists());
    } else {
        println!("[kernel_init] No skills directory configured");
    }

    // Captured before `config` is moved into boot().
    let base_url = config.llm.base_url.clone();
    let model = config.llm.model.clone();

    // Boot kernel
    let mut kernel = zclaw_kernel::Kernel::boot(config.clone())
        .await
        .map_err(|e| format!("Failed to initialize kernel: {}", e))?;

    let agent_count = kernel.list_agents().len();

    // Configure extraction driver so the Growth system can call LLM for memory extraction
    let driver = kernel.driver();
    crate::intelligence::extraction_adapter::configure_extraction_driver(
        driver.clone(),
        model.clone(),
    );

    // Bridge SqliteStorage to Kernel's GrowthIntegration
    {
        match crate::viking_commands::get_storage().await {
            Ok(sqlite_storage) => {
                let viking = std::sync::Arc::new(zclaw_runtime::VikingAdapter::new(sqlite_storage));
                kernel.set_viking(viking);
                tracing::info!("[kernel_init] Bridged persistent SqliteStorage to Kernel GrowthIntegration");
            }
            Err(e) => {
                // Non-fatal: Growth falls back to in-memory storage.
                tracing::warn!(
                    "[kernel_init] Failed to get SqliteStorage, GrowthIntegration will use in-memory storage: {}",
                    e
                );
            }
        }

        // Set the LLM extraction driver on the kernel for memory extraction via middleware
        let extraction_driver = crate::intelligence::extraction_adapter::TauriExtractionDriver::new(
            driver.clone(),
            model.clone(),
        );
        kernel.set_extraction_driver(std::sync::Arc::new(extraction_driver));
    }

    // Configure summary driver so the Growth system can generate L0/L1 summaries
    // NOTE(review): `format!("{}/chat/completions", base_url)` assumes base_url
    // has no trailing slash — a trailing slash would yield "//chat/completions";
    // confirm provider URLs are normalized upstream.
    if let Some(api_key) = config_request.as_ref().and_then(|r| r.api_key.clone()) {
        crate::summarizer_adapter::configure_summary_driver(
            crate::summarizer_adapter::TauriSummaryDriver::new(
                format!("{}/chat/completions", base_url),
                api_key,
                Some(model.clone()),
            ),
        );
    }

    // Publish the new kernel before starting the scheduler that reads it.
    *kernel_lock = Some(kernel);

    // Start SchedulerService — periodically checks and fires scheduled triggers
    {
        let mut sched_lock = scheduler_state.lock().await;
        // Stop old scheduler if any
        if let Some(ref old) = *sched_lock {
            old.stop();
        }
        let scheduler = zclaw_kernel::scheduler::SchedulerService::new(
            state.inner().clone(),
            60, // check every 60 seconds
        );
        scheduler.start();
        tracing::info!("[kernel_init] SchedulerService started (60s interval)");
        *sched_lock = Some(scheduler);
    }

    Ok(KernelStatusResponse {
        initialized: true,
        agent_count,
        database_url: Some(config.database_url),
        base_url: Some(base_url),
        model: Some(model),
    })
}
|
||||
|
||||
/// Get kernel status
|
||||
#[tauri::command]
|
||||
pub async fn kernel_status(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<KernelStatusResponse, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
match kernel_lock.as_ref() {
|
||||
Some(kernel) => Ok(KernelStatusResponse {
|
||||
initialized: true,
|
||||
agent_count: kernel.list_agents().len(),
|
||||
database_url: Some(kernel.config().database_url.clone()),
|
||||
base_url: Some(kernel.config().llm.base_url.clone()),
|
||||
model: Some(kernel.config().llm.model.clone()),
|
||||
}),
|
||||
None => Ok(KernelStatusResponse {
|
||||
initialized: false,
|
||||
agent_count: 0,
|
||||
database_url: None,
|
||||
base_url: None,
|
||||
model: None,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Shutdown the kernel
|
||||
#[tauri::command]
|
||||
pub async fn kernel_shutdown(
|
||||
state: State<'_, KernelState>,
|
||||
scheduler_state: State<'_, SchedulerState>,
|
||||
) -> Result<(), String> {
|
||||
// Stop scheduler first
|
||||
{
|
||||
let mut sched_lock = scheduler_state.lock().await;
|
||||
if let Some(scheduler) = sched_lock.take() {
|
||||
scheduler.stop();
|
||||
tracing::info!("[kernel_shutdown] SchedulerService stopped");
|
||||
}
|
||||
}
|
||||
|
||||
let mut kernel_lock = state.lock().await;
|
||||
|
||||
if let Some(kernel) = kernel_lock.take() {
|
||||
kernel.shutdown().await.map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
72
desktop/src-tauri/src/kernel_commands/mod.rs
Normal file
72
desktop/src-tauri/src/kernel_commands/mod.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
//! ZCLAW Kernel commands for Tauri
|
||||
//!
|
||||
//! These commands provide direct access to the internal ZCLAW Kernel,
|
||||
//! eliminating the need for external ZCLAW process.
|
||||
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use zclaw_kernel::Kernel;
|
||||
|
||||
pub mod agent;
|
||||
pub mod approval;
|
||||
pub mod chat;
|
||||
pub mod hand;
|
||||
pub mod lifecycle;
|
||||
pub mod scheduled_task;
|
||||
pub mod skill;
|
||||
pub mod trigger;
|
||||
|
||||
#[cfg(feature = "multi-agent")]
|
||||
pub mod a2a;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Shared state types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Kernel state wrapper for Tauri.
/// `None` means the kernel has not been initialized yet; commands that need
/// the kernel return an error string in that case.
pub type KernelState = Arc<Mutex<Option<Kernel>>>;

/// Scheduler state — holds a reference to the SchedulerService so it can be stopped on shutdown.
/// `None` when no scheduler is running.
pub type SchedulerState = Arc<Mutex<Option<zclaw_kernel::scheduler::SchedulerService>>>;

/// Session-level stream concurrency guard.
/// Prevents two concurrent `agent_chat_stream` calls from interleaving events
/// for the same session_id.
/// Maps session_id -> per-session mutex held for the duration of a stream.
/// NOTE(review): no entry-removal is visible in this module — confirm map
/// entries are cleaned up at the call site to avoid unbounded growth.
pub type SessionStreamGuard = Arc<dashmap::DashMap<String, Arc<Mutex<()>>>>;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Shared validation helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Validate an agent ID string with clear error messages
|
||||
pub(crate) fn validate_agent_id(agent_id: &str) -> Result<String, String> {
|
||||
crate::intelligence::validation::validate_identifier(agent_id, "agent_id")
|
||||
.map_err(|e| format!("Invalid agent_id: {}", e))?;
|
||||
// AgentId is a UUID wrapper — validate UUID format for better error messages
|
||||
if agent_id.contains('-') {
|
||||
crate::intelligence::validation::validate_uuid(agent_id, "agent_id")
|
||||
.map_err(|e| format!("Invalid agent_id: {}", e))?;
|
||||
}
|
||||
Ok(agent_id.to_string())
|
||||
}
|
||||
|
||||
/// Validate a generic ID string (for skills, hands, triggers, etc.)
|
||||
pub(crate) fn validate_id(id: &str, field_name: &str) -> Result<String, String> {
|
||||
crate::intelligence::validation::validate_identifier(id, field_name)
|
||||
.map_err(|e| format!("Invalid {}: {}", field_name, e))?;
|
||||
Ok(id.to_string())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// State constructors
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Create the kernel state for Tauri
|
||||
pub fn create_kernel_state() -> KernelState {
|
||||
Arc::new(Mutex::new(None))
|
||||
}
|
||||
|
||||
/// Create the scheduler state for Tauri
|
||||
pub fn create_scheduler_state() -> SchedulerState {
|
||||
Arc::new(Mutex::new(None))
|
||||
}
|
||||
124
desktop/src-tauri/src/kernel_commands/scheduled_task.rs
Normal file
124
desktop/src-tauri/src/kernel_commands/scheduled_task.rs
Normal file
@@ -0,0 +1,124 @@
|
||||
//! Scheduled task commands
|
||||
//!
|
||||
//! Tasks are backed by kernel triggers (Schedule type).
|
||||
//! The SchedulerService checks every 60 seconds for due triggers.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::State;
|
||||
|
||||
use super::KernelState;
|
||||
|
||||
// ============================================================
|
||||
// Scheduled Task Commands
|
||||
// ============================================================
|
||||
|
||||
/// Request to create a scheduled task (maps to kernel trigger)
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateScheduledTaskRequest {
    // Human-readable task name; copied onto the trigger.
    pub name: String,
    // Schedule expression; interpreted according to `schedule_type`.
    pub schedule: String,
    // One of "cron", "schedule", "interval", "once" — anything else is rejected
    // by scheduled_task_create.
    pub schedule_type: String,
    // Optional execution target; its `id` becomes the trigger's hand_id.
    pub target: Option<ScheduledTaskTarget>,
    // NOTE(review): accepted but not used by scheduled_task_create — confirm.
    pub description: Option<String>,
    // Whether the task starts enabled; defaults to true when omitted.
    pub enabled: Option<bool>,
}

/// Target for a scheduled task
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ScheduledTaskTarget {
    // Serialized as "type" in JSON.
    // NOTE(review): not consulted by scheduled_task_create — only `id` is used.
    #[serde(rename = "type")]
    pub target_type: String,
    // Identifier of the target (used as the trigger's hand_id).
    pub id: String,
}

/// Response for scheduled task creation
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ScheduledTaskResponse {
    // Backing trigger ID (format "sched_<unix-millis>").
    pub id: String,
    pub name: String,
    // Schedule expression as stored on the trigger.
    pub schedule: String,
    // "active" or "paused", derived from the trigger's enabled flag.
    pub status: String,
}
|
||||
|
||||
/// Create a scheduled task (backed by kernel TriggerManager)
|
||||
///
|
||||
/// Tasks are automatically executed by the SchedulerService which checks
|
||||
/// every 60 seconds for due triggers.
|
||||
#[tauri::command]
|
||||
pub async fn scheduled_task_create(
|
||||
state: State<'_, KernelState>,
|
||||
request: CreateScheduledTaskRequest,
|
||||
) -> Result<ScheduledTaskResponse, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
// Build TriggerConfig from request
|
||||
let trigger_type = match request.schedule_type.as_str() {
|
||||
"cron" | "schedule" => zclaw_hands::TriggerType::Schedule {
|
||||
cron: request.schedule.clone(),
|
||||
},
|
||||
"interval" => zclaw_hands::TriggerType::Schedule {
|
||||
cron: request.schedule.clone(), // interval as simplified cron
|
||||
},
|
||||
"once" => zclaw_hands::TriggerType::Schedule {
|
||||
cron: request.schedule.clone(),
|
||||
},
|
||||
_ => return Err(format!("Unsupported schedule type: {}", request.schedule_type)),
|
||||
};
|
||||
|
||||
let target_id = request.target.as_ref().map(|t| t.id.clone()).unwrap_or_default();
|
||||
let task_id = format!("sched_{}", chrono::Utc::now().timestamp_millis());
|
||||
|
||||
let config = zclaw_hands::TriggerConfig {
|
||||
id: task_id.clone(),
|
||||
name: request.name.clone(),
|
||||
hand_id: target_id,
|
||||
trigger_type,
|
||||
enabled: request.enabled.unwrap_or(true),
|
||||
max_executions_per_hour: 60,
|
||||
};
|
||||
|
||||
let entry = kernel.create_trigger(config).await
|
||||
.map_err(|e| format!("Failed to create scheduled task: {}", e))?;
|
||||
|
||||
Ok(ScheduledTaskResponse {
|
||||
id: entry.config.id,
|
||||
name: entry.config.name,
|
||||
schedule: request.schedule,
|
||||
status: "active".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// List all scheduled tasks (kernel triggers of Schedule type)
|
||||
#[tauri::command]
|
||||
pub async fn scheduled_task_list(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<Vec<ScheduledTaskResponse>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let triggers = kernel.list_triggers().await;
|
||||
let tasks: Vec<ScheduledTaskResponse> = triggers
|
||||
.into_iter()
|
||||
.filter(|t| matches!(t.config.trigger_type, zclaw_hands::TriggerType::Schedule { .. }))
|
||||
.map(|t| {
|
||||
let schedule = match t.config.trigger_type {
|
||||
zclaw_hands::TriggerType::Schedule { cron } => cron,
|
||||
_ => String::new(),
|
||||
};
|
||||
ScheduledTaskResponse {
|
||||
id: t.config.id,
|
||||
name: t.config.name,
|
||||
schedule,
|
||||
status: if t.config.enabled { "active".to_string() } else { "paused".to_string() },
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(tasks)
|
||||
}
|
||||
350
desktop/src-tauri/src/kernel_commands/skill.rs
Normal file
350
desktop/src-tauri/src/kernel_commands/skill.rs
Normal file
@@ -0,0 +1,350 @@
|
||||
//! Skill CRUD + execute commands
|
||||
//!
|
||||
//! Skills are loaded from the Kernel's SkillRegistry.
|
||||
//! Skills are registered during kernel initialization.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json;
|
||||
use tauri::State;
|
||||
use zclaw_types::SkillId;
|
||||
|
||||
use super::{validate_id, KernelState};
|
||||
use crate::intelligence::validation::validate_identifier;
|
||||
|
||||
// ============================================================================
|
||||
// Skills Commands - Dynamic Discovery
|
||||
// ============================================================================
|
||||
|
||||
/// Skill information response for frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SkillInfoResponse {
    // String form of the kernel SkillId.
    pub id: String,
    pub name: String,
    pub description: String,
    // Version string, e.g. "1.0.0" for skills created via skill_create.
    pub version: String,
    // Capability/action names the skill exposes.
    pub capabilities: Vec<String>,
    pub tags: Vec<String>,
    // Debug-formatted SkillMode (e.g. "PromptOnly", "Shell", "Python").
    pub mode: String,
    pub enabled: bool,
    // Trigger strings copied from the manifest.
    pub triggers: Vec<String>,
    pub category: Option<String>,
}
|
||||
|
||||
impl From<zclaw_skills::SkillManifest> for SkillInfoResponse {
    /// Flatten a kernel-side manifest into the serializable response shape.
    /// Only the mode and id change representation (Debug / string form);
    /// schema and author fields of the manifest are dropped.
    fn from(manifest: zclaw_skills::SkillManifest) -> Self {
        let zclaw_skills::SkillManifest {
            id,
            name,
            description,
            version,
            capabilities,
            tags,
            mode,
            enabled,
            triggers,
            category,
            ..
        } = manifest;

        Self {
            id: id.to_string(),
            name,
            description,
            version,
            capabilities,
            tags,
            mode: format!("{:?}", mode),
            enabled,
            triggers,
            category,
        }
    }
}
|
||||
|
||||
/// List all discovered skills
|
||||
///
|
||||
/// Returns skills from the Kernel's SkillRegistry.
|
||||
/// Skills are loaded from the skills/ directory during kernel initialization.
|
||||
#[tauri::command]
|
||||
pub async fn skill_list(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<Vec<SkillInfoResponse>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let skills = kernel.list_skills().await;
|
||||
println!("[skill_list] Found {} skills", skills.len());
|
||||
for skill in &skills {
|
||||
println!("[skill_list] - {} ({})", skill.name, skill.id);
|
||||
}
|
||||
Ok(skills.into_iter().map(SkillInfoResponse::from).collect())
|
||||
}
|
||||
|
||||
/// Refresh skills from a directory
|
||||
///
|
||||
/// Re-scans the skills directory for new or updated skills.
|
||||
/// Optionally accepts a custom directory path to scan.
|
||||
#[tauri::command]
|
||||
pub async fn skill_refresh(
|
||||
state: State<'_, KernelState>,
|
||||
skill_dir: Option<String>,
|
||||
) -> Result<Vec<SkillInfoResponse>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
// Convert optional string to PathBuf
|
||||
let dir_path = skill_dir.map(PathBuf::from);
|
||||
|
||||
// Refresh skills
|
||||
kernel.refresh_skills(dir_path)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to refresh skills: {}", e))?;
|
||||
|
||||
// Return updated list
|
||||
let skills = kernel.list_skills().await;
|
||||
Ok(skills.into_iter().map(SkillInfoResponse::from).collect())
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Skill CRUD Commands
|
||||
// ============================================================================
|
||||
|
||||
/// Request body for creating a new skill
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateSkillRequest {
    // Display name; also used by skill_create to derive the slug-style ID.
    pub name: String,
    pub description: Option<String>,
    // Trigger strings copied into the manifest verbatim.
    pub triggers: Vec<String>,
    // Stored as the manifest's `capabilities`.
    pub actions: Vec<String>,
    // Defaults to true when omitted.
    pub enabled: Option<bool>,
}

/// Request body for updating a skill
// All fields are optional; omitted fields keep their existing values
// (see skill_update).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateSkillRequest {
    pub name: Option<String>,
    pub description: Option<String>,
    pub triggers: Option<Vec<String>>,
    // Replaces the manifest's `capabilities` when present.
    pub actions: Option<Vec<String>>,
    pub enabled: Option<bool>,
}
|
||||
|
||||
/// Create a new skill in the skills directory
///
/// The skill ID is derived from the name (lowercased, spaces → hyphens,
/// other non-alphanumeric characters stripped) and validated before use.
/// The new skill is created as a PromptOnly manifest at version 1.0.0.
///
/// Errors: empty name, invalid derived ID, uninitialized kernel, or a
/// kernel-side creation failure.
#[tauri::command]
pub async fn skill_create(
    state: State<'_, KernelState>,
    request: CreateSkillRequest,
) -> Result<SkillInfoResponse, String> {
    let name = request.name.trim().to_string();
    if name.is_empty() {
        return Err("Skill name cannot be empty".to_string());
    }

    // Generate skill ID from name
    // NOTE(review): a name containing no alphanumerics would yield an empty
    // id here — relying on validate_identifier below to reject it; confirm.
    let id = name.to_lowercase()
        .replace(' ', "-")
        .replace(|c: char| !c.is_alphanumeric() && c != '-', "");

    validate_identifier(&id, "skill_id")
        .map_err(|e| e.to_string())?;

    let kernel_lock = state.lock().await;
    let kernel = kernel_lock.as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

    // Fixed defaults for user-created skills: PromptOnly mode, version 1.0.0,
    // no author/schemas/tags/category.
    let manifest = zclaw_skills::SkillManifest {
        id: SkillId::new(&id),
        name: name.clone(),
        description: request.description.unwrap_or_default(),
        version: "1.0.0".to_string(),
        author: None,
        mode: zclaw_skills::SkillMode::PromptOnly,
        capabilities: request.actions,
        input_schema: None,
        output_schema: None,
        tags: vec![],
        category: None,
        triggers: request.triggers,
        enabled: request.enabled.unwrap_or(true),
    };

    // Clone so we can both persist the manifest and echo it back to the UI.
    kernel.create_skill(manifest.clone())
        .await
        .map_err(|e| format!("Failed to create skill: {}", e))?;

    Ok(SkillInfoResponse::from(manifest))
}
|
||||
|
||||
/// Update an existing skill
///
/// Loads the current manifest, overlays any fields present in `request`
/// (absent fields keep their existing values), and writes the result back
/// through the kernel. Mode, version, author, schemas, tags, and category
/// are never changed by this command.
#[tauri::command]
pub async fn skill_update(
    state: State<'_, KernelState>,
    id: String,
    request: UpdateSkillRequest,
) -> Result<SkillInfoResponse, String> {
    validate_identifier(&id, "skill_id")
        .map_err(|e| e.to_string())?;

    let kernel_lock = state.lock().await;
    let kernel = kernel_lock.as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

    // Get existing manifest (error if the skill is unknown)
    let existing = kernel.skills()
        .get_manifest(&SkillId::new(&id))
        .await
        .ok_or_else(|| format!("Skill not found: {}", id))?;

    // Build updated manifest from existing + request fields.
    // `unwrap_or(existing.X)` moves the existing value when the request did
    // not provide a replacement; fields this command never edits are cloned.
    let updated = zclaw_skills::SkillManifest {
        id: existing.id.clone(),
        name: request.name.unwrap_or(existing.name),
        description: request.description.unwrap_or(existing.description),
        version: existing.version.clone(),
        author: existing.author.clone(),
        mode: existing.mode.clone(),
        capabilities: request.actions.unwrap_or(existing.capabilities),
        input_schema: existing.input_schema.clone(),
        output_schema: existing.output_schema.clone(),
        tags: existing.tags.clone(),
        category: existing.category.clone(),
        triggers: request.triggers.unwrap_or(existing.triggers),
        enabled: request.enabled.unwrap_or(existing.enabled),
    };

    let result = kernel.update_skill(&SkillId::new(&id), updated)
        .await
        .map_err(|e| format!("Failed to update skill: {}", e))?;

    Ok(SkillInfoResponse::from(result))
}
|
||||
|
||||
/// Delete a skill
|
||||
#[tauri::command]
|
||||
pub async fn skill_delete(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
) -> Result<(), String> {
|
||||
validate_identifier(&id, "skill_id")
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
kernel.delete_skill(&SkillId::new(&id))
|
||||
.await
|
||||
.map_err(|e| format!("Failed to delete skill: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Skill Execution Command
|
||||
// ============================================================================
|
||||
|
||||
/// Skill execution context
// Lightweight frontend-facing context; expanded into the kernel-side
// context (with fixed defaults) via the From impl below.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SkillContext {
    // Agent on whose behalf the skill runs.
    pub agent_id: String,
    // Chat/session the execution belongs to.
    pub session_id: String,
    // Optional working directory for the skill.
    pub working_dir: Option<String>,
}

impl From<SkillContext> for zclaw_skills::SkillContext {
    /// Expand the frontend context into the kernel-side context, filling in
    /// fixed defaults for everything the frontend does not supply.
    fn from(ctx: SkillContext) -> Self {
        Self {
            agent_id: ctx.agent_id,
            session_id: ctx.session_id,
            working_dir: ctx.working_dir.map(std::path::PathBuf::from),
            // Hard-coded policy for UI-initiated runs: empty env, 5-minute
            // timeout, unrestricted network and file access.
            // NOTE(review): permissive defaults — presumably gating happens
            // via the approval flow in skill_execute; confirm.
            env: std::collections::HashMap::new(),
            timeout_secs: 300,
            network_allowed: true,
            file_access_allowed: true,
            llm: None, // Injected by Kernel.execute_skill()
        }
    }
}
|
||||
|
||||
/// Skill execution result
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SkillResult {
    // False both on real failures and when execution is pending approval
    // (see skill_execute).
    pub success: bool,
    // Arbitrary JSON payload produced by the skill.
    pub output: serde_json::Value,
    // Error message when the skill reported a failure.
    pub error: Option<String>,
    // Wall-clock duration, when the kernel measured one.
    pub duration_ms: Option<u64>,
}

impl From<zclaw_skills::SkillResult> for SkillResult {
    /// Field-for-field copy from the kernel result into the serializable
    /// frontend response.
    fn from(result: zclaw_skills::SkillResult) -> Self {
        Self {
            success: result.success,
            output: result.output,
            error: result.error,
            duration_ms: result.duration_ms,
        }
    }
}
|
||||
|
||||
/// Execute a skill
///
/// Executes a skill with the given ID and input.
/// Returns the skill result as JSON.
///
/// Autonomy handling (evaluated before any execution):
/// - "supervised": every execution becomes a pending approval.
/// - anything other than "autonomous" (including None): Shell/Python skills
///   become a pending approval; other modes run directly.
/// - "autonomous": runs directly with no approval step.
///
/// A pending approval is reported as `success: false` with a
/// "pending_approval" status object in `output` — not as an Err.
#[tauri::command]
pub async fn skill_execute(
    state: State<'_, KernelState>,
    id: String,
    context: SkillContext,
    input: serde_json::Value,
    autonomy_level: Option<String>,
) -> Result<SkillResult, String> {
    // Validate skill ID
    let id = validate_id(&id, "skill_id")?;

    let kernel_lock = state.lock().await;

    let kernel = kernel_lock.as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;

    // Autonomy guard: supervised mode creates an approval request for ALL skills
    // (`input` is moved into the approval; fine because this branch returns).
    if autonomy_level.as_deref() == Some("supervised") {
        let approval = kernel.create_approval(id.clone(), input).await;
        return Ok(SkillResult {
            success: false,
            output: serde_json::json!({
                "status": "pending_approval",
                "approval_id": approval.id,
                "skill_id": approval.hand_id,
                "message": "监督模式下所有技能执行需要用户审批"
            }),
            error: None,
            duration_ms: None,
        });
    }

    // Assisted mode: require approval for non-prompt skills (shell/python) that have side effects
    // NOTE(review): if the manifest lookup returns None, execution falls
    // through to direct execution below — confirm that is intended.
    if autonomy_level.as_deref() != Some("autonomous") {
        let skill_id = SkillId::new(&id);
        if let Some(manifest) = kernel.skills().get_manifest(&skill_id).await {
            match manifest.mode {
                zclaw_skills::SkillMode::Shell | zclaw_skills::SkillMode::Python => {
                    let approval = kernel.create_approval(id.clone(), input).await;
                    return Ok(SkillResult {
                        success: false,
                        output: serde_json::json!({
                            "status": "pending_approval",
                            "approval_id": approval.id,
                            "skill_id": approval.hand_id,
                            "message": format!("技能 '{}' 使用 {:?} 模式,需要用户审批后执行", manifest.name, manifest.mode)
                        }),
                        error: None,
                        duration_ms: None,
                    });
                }
                _ => {} // PromptOnly and other modes are safe to execute directly
            }
        }
    }

    // Execute skill directly
    let result = kernel.execute_skill(&id, context.into(), input).await
        .map_err(|e| format!("Failed to execute skill: {}", e))?;

    Ok(SkillResult::from(result))
}
|
||||
242
desktop/src-tauri/src/kernel_commands/trigger.rs
Normal file
242
desktop/src-tauri/src/kernel_commands/trigger.rs
Normal file
@@ -0,0 +1,242 @@
|
||||
//! Trigger commands: CRUD + execute
|
||||
//!
|
||||
//! Triggers are registered in the Kernel's TriggerManager.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json;
|
||||
use tauri::State;
|
||||
|
||||
use super::{validate_id, KernelState};
|
||||
|
||||
// ============================================================
|
||||
// Trigger Commands
|
||||
// ============================================================
|
||||
|
||||
/// Trigger configuration for creation/update
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TriggerConfigRequest {
    // Caller-chosen trigger ID.
    pub id: String,
    pub name: String,
    // Hand associated with the trigger.
    pub hand_id: String,
    pub trigger_type: TriggerTypeRequest,
    // Defaults to true when omitted from the JSON payload.
    #[serde(default = "default_trigger_enabled")]
    pub enabled: bool,
    // NOTE(review): accepted but not forwarded into zclaw_hands::TriggerConfig
    // by trigger_create — confirm whether that is intentional.
    #[serde(default)]
    pub description: Option<String>,
    // NOTE(review): same as description — accepted but not forwarded.
    #[serde(default)]
    pub tags: Vec<String>,
}

// Serde default: triggers are enabled unless the payload says otherwise.
fn default_trigger_enabled() -> bool { true }

/// Trigger type for API
// Internally tagged on the wire: {"type": "schedule", "cron": "..."} etc.,
// with snake_case tag values.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum TriggerTypeRequest {
    // Cron expression string.
    Schedule { cron: String },
    Event { pattern: String },
    // Webhook path plus an optional shared secret.
    Webhook { path: String, secret: Option<String> },
    MessagePattern { pattern: String },
    // Watched path plus event names ("created", "modified", "deleted", "any").
    FileSystem { path: String, events: Vec<String> },
    // Only fired explicitly via trigger_execute.
    Manual,
}

/// Trigger response
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TriggerResponse {
    pub id: String,
    pub name: String,
    pub hand_id: String,
    pub trigger_type: TriggerTypeRequest,
    pub enabled: bool,
    // RFC 3339 timestamps (see the From<TriggerEntry> impl).
    pub created_at: String,
    pub modified_at: String,
    pub description: Option<String>,
    pub tags: Vec<String>,
}
|
||||
|
||||
impl From<zclaw_kernel::trigger_manager::TriggerEntry> for TriggerResponse {
    /// Convert a kernel trigger entry into the API response shape.
    ///
    /// Maps the kernel-side TriggerType into the API enum; FileSystem event
    /// variants are rendered as lowercase Debug strings ("created", ...),
    /// and timestamps are formatted as RFC 3339.
    fn from(entry: zclaw_kernel::trigger_manager::TriggerEntry) -> Self {
        // Translate the trigger type first, since the match consumes the
        // fields of entry.config.trigger_type.
        let trigger_type = match entry.config.trigger_type {
            zclaw_hands::TriggerType::Schedule { cron } => {
                TriggerTypeRequest::Schedule { cron }
            }
            zclaw_hands::TriggerType::Event { pattern } => {
                TriggerTypeRequest::Event { pattern }
            }
            zclaw_hands::TriggerType::Webhook { path, secret } => {
                TriggerTypeRequest::Webhook { path, secret }
            }
            zclaw_hands::TriggerType::MessagePattern { pattern } => {
                TriggerTypeRequest::MessagePattern { pattern }
            }
            zclaw_hands::TriggerType::FileSystem { path, events } => {
                TriggerTypeRequest::FileSystem {
                    path,
                    // e.g. FileEvent::Created -> "created"
                    events: events.iter().map(|e| format!("{:?}", e).to_lowercase()).collect(),
                }
            }
            zclaw_hands::TriggerType::Manual => TriggerTypeRequest::Manual,
        };

        Self {
            id: entry.config.id,
            name: entry.config.name,
            hand_id: entry.config.hand_id,
            trigger_type,
            enabled: entry.config.enabled,
            created_at: entry.created_at.to_rfc3339(),
            modified_at: entry.modified_at.to_rfc3339(),
            description: entry.description,
            tags: entry.tags,
        }
    }
}
|
||||
|
||||
/// List all triggers
|
||||
#[tauri::command]
|
||||
pub async fn trigger_list(
|
||||
state: State<'_, KernelState>,
|
||||
) -> Result<Vec<TriggerResponse>, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let triggers = kernel.list_triggers().await;
|
||||
Ok(triggers.into_iter().map(TriggerResponse::from).collect())
|
||||
}
|
||||
|
||||
/// Get a specific trigger
|
||||
#[tauri::command]
|
||||
pub async fn trigger_get(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
) -> Result<Option<TriggerResponse>, String> {
|
||||
// Validate trigger ID
|
||||
let id = validate_id(&id, "trigger_id")?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
Ok(kernel.get_trigger(&id).await.map(TriggerResponse::from))
|
||||
}
|
||||
|
||||
/// Create a new trigger
|
||||
#[tauri::command]
|
||||
pub async fn trigger_create(
|
||||
state: State<'_, KernelState>,
|
||||
request: TriggerConfigRequest,
|
||||
) -> Result<TriggerResponse, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
// Convert request to config
|
||||
let trigger_type = match request.trigger_type {
|
||||
TriggerTypeRequest::Schedule { cron } => {
|
||||
zclaw_hands::TriggerType::Schedule { cron }
|
||||
}
|
||||
TriggerTypeRequest::Event { pattern } => {
|
||||
zclaw_hands::TriggerType::Event { pattern }
|
||||
}
|
||||
TriggerTypeRequest::Webhook { path, secret } => {
|
||||
zclaw_hands::TriggerType::Webhook { path, secret }
|
||||
}
|
||||
TriggerTypeRequest::MessagePattern { pattern } => {
|
||||
zclaw_hands::TriggerType::MessagePattern { pattern }
|
||||
}
|
||||
TriggerTypeRequest::FileSystem { path, events } => {
|
||||
zclaw_hands::TriggerType::FileSystem {
|
||||
path,
|
||||
events: events.iter().filter_map(|e| match e.as_str() {
|
||||
"created" => Some(zclaw_hands::FileEvent::Created),
|
||||
"modified" => Some(zclaw_hands::FileEvent::Modified),
|
||||
"deleted" => Some(zclaw_hands::FileEvent::Deleted),
|
||||
"any" => Some(zclaw_hands::FileEvent::Any),
|
||||
_ => None,
|
||||
}).collect(),
|
||||
}
|
||||
}
|
||||
TriggerTypeRequest::Manual => zclaw_hands::TriggerType::Manual,
|
||||
};
|
||||
|
||||
let config = zclaw_hands::TriggerConfig {
|
||||
id: request.id,
|
||||
name: request.name,
|
||||
hand_id: request.hand_id,
|
||||
trigger_type,
|
||||
enabled: request.enabled,
|
||||
max_executions_per_hour: 10,
|
||||
};
|
||||
|
||||
let entry = kernel.create_trigger(config).await
|
||||
.map_err(|e| format!("Failed to create trigger: {}", e))?;
|
||||
|
||||
Ok(TriggerResponse::from(entry))
|
||||
}
|
||||
|
||||
/// Update a trigger
|
||||
#[tauri::command]
|
||||
pub async fn trigger_update(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
name: Option<String>,
|
||||
enabled: Option<bool>,
|
||||
hand_id: Option<String>,
|
||||
) -> Result<TriggerResponse, String> {
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let update = zclaw_kernel::trigger_manager::TriggerUpdateRequest {
|
||||
name,
|
||||
enabled,
|
||||
hand_id,
|
||||
trigger_type: None,
|
||||
};
|
||||
|
||||
let entry = kernel.update_trigger(&id, update).await
|
||||
.map_err(|e| format!("Failed to update trigger: {}", e))?;
|
||||
|
||||
Ok(TriggerResponse::from(entry))
|
||||
}
|
||||
|
||||
/// Delete a trigger
|
||||
#[tauri::command]
|
||||
pub async fn trigger_delete(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
) -> Result<(), String> {
|
||||
// Validate trigger ID
|
||||
let id = validate_id(&id, "trigger_id")?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
kernel.delete_trigger(&id).await
|
||||
.map_err(|e| format!("Failed to delete trigger: {}", e))
|
||||
}
|
||||
|
||||
/// Execute a trigger manually
|
||||
#[tauri::command]
|
||||
pub async fn trigger_execute(
|
||||
state: State<'_, KernelState>,
|
||||
id: String,
|
||||
input: serde_json::Value,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
// Validate trigger ID
|
||||
let id = validate_id(&id, "trigger_id")?;
|
||||
|
||||
let kernel_lock = state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel not initialized".to_string())?;
|
||||
|
||||
let result = kernel.execute_trigger(&id, input).await
|
||||
.map_err(|e| format!("Failed to execute trigger: {}", e))?;
|
||||
|
||||
Ok(serde_json::to_value(result).unwrap_or(serde_json::json!({})))
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
210
desktop/src-tauri/src/pipeline_commands/adapters.rs
Normal file
210
desktop/src-tauri/src/pipeline_commands/adapters.rs
Normal file
@@ -0,0 +1,210 @@
|
||||
//! Adapter structs to bridge zclaw-runtime/zclaw-kernel drivers into zclaw-pipeline action drivers.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use async_trait::async_trait;
|
||||
use serde_json::Value;
|
||||
use zclaw_runtime::{LlmDriver, CompletionRequest};
|
||||
use zclaw_skills::SkillContext;
|
||||
|
||||
use zclaw_pipeline::{
|
||||
LlmActionDriver,
|
||||
SkillActionDriver,
|
||||
HandActionDriver,
|
||||
};
|
||||
|
||||
use crate::kernel_commands::KernelState;
|
||||
|
||||
/// Adapter to connect zclaw-runtime LlmDriver to zclaw-pipeline LlmActionDriver
pub struct RuntimeLlmAdapter {
    // Underlying completion driver (shared trait object).
    driver: Arc<dyn LlmDriver>,
    // Model used when a pipeline step does not name one.
    default_model: String,
}

impl RuntimeLlmAdapter {
    /// Wrap `driver`; `default_model` falls back to "claude-3-sonnet-20240229"
    /// when not provided.
    pub fn new(driver: Arc<dyn LlmDriver>, default_model: Option<String>) -> Self {
        Self {
            driver,
            default_model: default_model.unwrap_or_else(|| "claude-3-sonnet-20240229".to_string()),
        }
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl LlmActionDriver for RuntimeLlmAdapter {
|
||||
async fn generate(
|
||||
&self,
|
||||
prompt: String,
|
||||
input: HashMap<String, Value>,
|
||||
model: Option<String>,
|
||||
temperature: Option<f32>,
|
||||
max_tokens: Option<u32>,
|
||||
json_mode: bool,
|
||||
) -> Result<Value, String> {
|
||||
tracing::debug!("[RuntimeLlmAdapter] generate called with prompt length: {}", prompt.len());
|
||||
tracing::debug!("[RuntimeLlmAdapter] input HashMap contents:");
|
||||
for (k, v) in &input {
|
||||
println!(" {} => {}", k, v);
|
||||
}
|
||||
|
||||
// Build user content from prompt and input
|
||||
let user_content = if input.is_empty() {
|
||||
tracing::debug!("[RuntimeLlmAdapter] WARNING: input is empty, using raw prompt");
|
||||
prompt.clone()
|
||||
} else {
|
||||
// Inject input values into prompt
|
||||
// Support multiple placeholder formats: {{key}}, {{ key }}, ${key}, ${inputs.key}
|
||||
let mut rendered = prompt.clone();
|
||||
tracing::debug!("[RuntimeLlmAdapter] Original prompt (first 500 chars): {}", &prompt[..prompt.len().min(500)]);
|
||||
for (key, value) in &input {
|
||||
let str_value = if let Some(s) = value.as_str() {
|
||||
s.to_string()
|
||||
} else {
|
||||
value.to_string()
|
||||
};
|
||||
|
||||
tracing::debug!("[RuntimeLlmAdapter] Replacing '{}' with '{}'", key, str_value);
|
||||
|
||||
// Replace all common placeholder formats
|
||||
rendered = rendered.replace(&format!("{{{{{key}}}}}"), &str_value); // {{key}}
|
||||
rendered = rendered.replace(&format!("{{{{ {key} }}}}"), &str_value); // {{ key }}
|
||||
rendered = rendered.replace(&format!("${{{key}}}"), &str_value); // ${key}
|
||||
rendered = rendered.replace(&format!("${{inputs.{key}}}"), &str_value); // ${inputs.key}
|
||||
}
|
||||
tracing::debug!("[RuntimeLlmAdapter] Rendered prompt (first 500 chars): {}", &rendered[..rendered.len().min(500)]);
|
||||
rendered
|
||||
};
|
||||
|
||||
// Create message using zclaw_types::Message enum
|
||||
let messages = vec![zclaw_types::Message::user(user_content)];
|
||||
|
||||
let request = CompletionRequest {
|
||||
model: model.unwrap_or_else(|| self.default_model.clone()),
|
||||
system: None,
|
||||
messages,
|
||||
tools: Vec::new(),
|
||||
max_tokens,
|
||||
temperature,
|
||||
stop: Vec::new(),
|
||||
stream: false,
|
||||
};
|
||||
|
||||
let response = self.driver.complete(request)
|
||||
.await
|
||||
.map_err(|e| format!("LLM completion failed: {}", e))?;
|
||||
|
||||
// Extract text from response
|
||||
let text = response.content.iter()
|
||||
.find_map(|block| match block {
|
||||
zclaw_runtime::ContentBlock::Text { text } => Some(text.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// Safe truncation for UTF-8 strings
|
||||
let truncated: String = text.chars().take(1000).collect();
|
||||
tracing::debug!("[RuntimeLlmAdapter] LLM response text (first 1000 chars): {}", truncated);
|
||||
|
||||
// Parse as JSON if json_mode, otherwise return as string
|
||||
if json_mode {
|
||||
// Try to extract JSON from the response (LLM might wrap it in markdown code blocks)
|
||||
let json_text = if text.contains("```json") {
|
||||
// Extract JSON from markdown code block
|
||||
let start = text.find("```json").map(|i| i + 7).unwrap_or(0);
|
||||
let end = text.rfind("```").unwrap_or(text.len());
|
||||
text[start..end].trim().to_string()
|
||||
} else if text.contains("```") {
|
||||
// Extract from generic code block
|
||||
let start = text.find("```").map(|i| i + 3).unwrap_or(0);
|
||||
let end = text.rfind("```").unwrap_or(text.len());
|
||||
text[start..end].trim().to_string()
|
||||
} else {
|
||||
text.clone()
|
||||
};
|
||||
|
||||
// Safe truncation for UTF-8 strings
|
||||
let truncated_json: String = json_text.chars().take(500).collect();
|
||||
tracing::debug!("[RuntimeLlmAdapter] JSON text to parse (first 500 chars): {}", truncated_json);
|
||||
|
||||
serde_json::from_str(&json_text)
|
||||
.map_err(|e| {
|
||||
tracing::debug!("[RuntimeLlmAdapter] JSON parse error: {}", e);
|
||||
format!("Failed to parse LLM response as JSON: {}\nResponse: {}", e, json_text)
|
||||
})
|
||||
} else {
|
||||
Ok(Value::String(text))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Adapter to bridge Kernel skill execution into Pipeline SkillActionDriver
pub struct PipelineSkillDriver {
    // Shared, lockable handle to the (possibly not-yet-initialized) kernel.
    kernel_state: KernelState,
}

impl PipelineSkillDriver {
    /// Wrap a kernel handle so pipeline steps can execute kernel skills.
    pub fn new(kernel_state: KernelState) -> Self {
        Self { kernel_state }
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl SkillActionDriver for PipelineSkillDriver {
|
||||
async fn execute(
|
||||
&self,
|
||||
skill_id: &str,
|
||||
input: HashMap<String, Value>,
|
||||
) -> Result<Value, String> {
|
||||
let kernel_lock = self.kernel_state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel 未初始化,无法执行技能".to_string())?;
|
||||
|
||||
let context = SkillContext::default();
|
||||
let input_value = Value::Object(input.into_iter().collect());
|
||||
|
||||
tracing::debug!("[PipelineSkillDriver] Executing skill: {}", skill_id);
|
||||
let result = kernel.execute_skill(skill_id, context, input_value).await
|
||||
.map_err(|e| format!("技能执行失败: {}", e))?;
|
||||
|
||||
Ok(result.output)
|
||||
}
|
||||
}
|
||||
|
||||
/// Adapter to bridge Kernel hand execution into Pipeline HandActionDriver
pub struct PipelineHandDriver {
    // Shared, lockable handle to the (possibly not-yet-initialized) kernel.
    kernel_state: KernelState,
}

impl PipelineHandDriver {
    /// Wrap a kernel handle so pipeline steps can execute kernel hands.
    pub fn new(kernel_state: KernelState) -> Self {
        Self { kernel_state }
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl HandActionDriver for PipelineHandDriver {
|
||||
async fn execute(
|
||||
&self,
|
||||
hand_id: &str,
|
||||
action: &str,
|
||||
params: HashMap<String, Value>,
|
||||
) -> Result<Value, String> {
|
||||
let kernel_lock = self.kernel_state.lock().await;
|
||||
let kernel = kernel_lock.as_ref()
|
||||
.ok_or_else(|| "Kernel 未初始化,无法执行 Hand".to_string())?;
|
||||
|
||||
// Build hand input combining action and params
|
||||
let mut input_map = serde_json::Map::new();
|
||||
input_map.insert("action".to_string(), Value::String(action.to_string()));
|
||||
for (k, v) in params {
|
||||
input_map.insert(k, v);
|
||||
}
|
||||
let input_value = Value::Object(input_map);
|
||||
|
||||
tracing::debug!("[PipelineHandDriver] Executing hand: {} / {}", hand_id, action);
|
||||
let (result, _run_id) = kernel.execute_hand(hand_id, input_value).await
|
||||
.map_err(|e| format!("Hand 执行失败: {}", e))?;
|
||||
|
||||
Ok(result.output)
|
||||
}
|
||||
}
|
||||
230
desktop/src-tauri/src/pipeline_commands/crud.rs
Normal file
230
desktop/src-tauri/src/pipeline_commands/crud.rs
Normal file
@@ -0,0 +1,230 @@
|
||||
//! Pipeline CRUD commands (Create / Update / Delete).
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tauri::State;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
use zclaw_pipeline::{
|
||||
Pipeline,
|
||||
PipelineMetadata,
|
||||
PipelineSpec,
|
||||
PipelineStep,
|
||||
Action,
|
||||
ErrorStrategy,
|
||||
};
|
||||
|
||||
use super::{PipelineState, PipelineInfo};
|
||||
use super::helpers::{get_pipelines_directory, pipeline_to_info};
|
||||
|
||||
/// Create pipeline request
///
/// Payload sent by the frontend when creating a pipeline; field names are
/// camelCase on the wire.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreatePipelineRequest {
    // Human-readable name; also used to derive the pipeline id / file name.
    pub name: String,
    // Optional free-text description stored in the pipeline metadata.
    pub description: Option<String>,
    // Ordered workflow steps to execute.
    pub steps: Vec<WorkflowStepInput>,
}
|
||||
|
||||
/// Update pipeline request
///
/// Partial update: every `None` field leaves the stored value unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdatePipelineRequest {
    // New display name, if changing.
    pub name: Option<String>,
    // New description, if changing.
    pub description: Option<String>,
    // Full replacement step list, if changing (steps are not merged).
    pub steps: Option<Vec<WorkflowStepInput>>,
}
|
||||
|
||||
/// Workflow step input from frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct WorkflowStepInput {
    // Id of the hand this step invokes.
    pub hand_name: String,
    // Optional step name; doubles as the step id and description when set.
    pub name: Option<String>,
    // Parameters forwarded to the hand action.
    pub params: Option<HashMap<String, Value>>,
    // Optional `when` condition expression gating the step.
    pub condition: Option<String>,
}
|
||||
|
||||
/// Create a new pipeline as a YAML file
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_create(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
request: CreatePipelineRequest,
|
||||
) -> Result<PipelineInfo, String> {
|
||||
let name = request.name.trim().to_string();
|
||||
if name.is_empty() {
|
||||
return Err("Pipeline name cannot be empty".to_string());
|
||||
}
|
||||
|
||||
let pipelines_dir = get_pipelines_directory()?;
|
||||
if !pipelines_dir.exists() {
|
||||
std::fs::create_dir_all(&pipelines_dir)
|
||||
.map_err(|e| format!("Failed to create pipelines directory: {}", e))?;
|
||||
}
|
||||
|
||||
// Generate pipeline ID from name
|
||||
let pipeline_id = name.to_lowercase()
|
||||
.replace(' ', "-")
|
||||
.replace(|c: char| !c.is_alphanumeric() && c != '-', "");
|
||||
|
||||
let file_path = pipelines_dir.join(format!("{}.yaml", pipeline_id));
|
||||
if file_path.exists() {
|
||||
return Err(format!("Pipeline file already exists: {}", file_path.display()));
|
||||
}
|
||||
|
||||
// Build Pipeline struct
|
||||
let steps: Vec<PipelineStep> = request.steps.into_iter().enumerate().map(|(i, s)| {
|
||||
let step_id = s.name.clone().unwrap_or_else(|| format!("step-{}", i + 1));
|
||||
PipelineStep {
|
||||
id: step_id,
|
||||
action: Action::Hand {
|
||||
hand_id: s.hand_name.clone(),
|
||||
hand_action: "execute".to_string(),
|
||||
params: s.params.unwrap_or_default().into_iter().map(|(k, v)| (k, v.to_string())).collect(),
|
||||
},
|
||||
description: s.name,
|
||||
when: s.condition,
|
||||
retry: None,
|
||||
timeout_secs: None,
|
||||
}
|
||||
}).collect();
|
||||
|
||||
let pipeline = Pipeline {
|
||||
api_version: "zclaw/v1".to_string(),
|
||||
kind: "Pipeline".to_string(),
|
||||
metadata: PipelineMetadata {
|
||||
name: pipeline_id.clone(),
|
||||
display_name: Some(name),
|
||||
description: request.description,
|
||||
category: None,
|
||||
industry: None,
|
||||
tags: vec![],
|
||||
icon: None,
|
||||
author: None,
|
||||
version: "1.0.0".to_string(),
|
||||
annotations: None,
|
||||
},
|
||||
spec: PipelineSpec {
|
||||
inputs: vec![],
|
||||
steps,
|
||||
outputs: HashMap::new(),
|
||||
on_error: ErrorStrategy::Stop,
|
||||
timeout_secs: 0,
|
||||
max_workers: 4,
|
||||
},
|
||||
};
|
||||
|
||||
// Serialize to YAML
|
||||
let yaml_content = serde_yaml::to_string(&pipeline)
|
||||
.map_err(|e| format!("Failed to serialize pipeline: {}", e))?;
|
||||
|
||||
std::fs::write(&file_path, yaml_content)
|
||||
.map_err(|e| format!("Failed to write pipeline file: {}", e))?;
|
||||
|
||||
// Register in state
|
||||
let mut state_pipelines = state.pipelines.write().await;
|
||||
let mut state_paths = state.pipeline_paths.write().await;
|
||||
state_pipelines.insert(pipeline_id.clone(), pipeline.clone());
|
||||
state_paths.insert(pipeline_id, file_path);
|
||||
|
||||
Ok(pipeline_to_info(&pipeline))
|
||||
}
|
||||
|
||||
/// Update an existing pipeline
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_update(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
pipeline_id: String,
|
||||
request: UpdatePipelineRequest,
|
||||
) -> Result<PipelineInfo, String> {
|
||||
let pipelines = state.pipelines.read().await;
|
||||
let paths = state.pipeline_paths.read().await;
|
||||
|
||||
let existing = pipelines.get(&pipeline_id)
|
||||
.ok_or_else(|| format!("Pipeline not found: {}", pipeline_id))?;
|
||||
let file_path = paths.get(&pipeline_id)
|
||||
.ok_or_else(|| format!("Pipeline file path not found: {}", pipeline_id))?
|
||||
.clone();
|
||||
|
||||
// Build updated pipeline
|
||||
let updated_metadata = PipelineMetadata {
|
||||
display_name: request.name.or(existing.metadata.display_name.clone()),
|
||||
description: request.description.or(existing.metadata.description.clone()),
|
||||
..existing.metadata.clone()
|
||||
};
|
||||
|
||||
let updated_steps = match request.steps {
|
||||
Some(steps) => steps.into_iter().enumerate().map(|(i, s)| {
|
||||
let step_id = s.name.clone().unwrap_or_else(|| format!("step-{}", i + 1));
|
||||
PipelineStep {
|
||||
id: step_id,
|
||||
action: Action::Hand {
|
||||
hand_id: s.hand_name.clone(),
|
||||
hand_action: "execute".to_string(),
|
||||
params: s.params.unwrap_or_default().into_iter().map(|(k, v)| (k, v.to_string())).collect(),
|
||||
},
|
||||
description: s.name,
|
||||
when: s.condition,
|
||||
retry: None,
|
||||
timeout_secs: None,
|
||||
}
|
||||
}).collect(),
|
||||
None => existing.spec.steps.clone(),
|
||||
};
|
||||
|
||||
let updated_pipeline = Pipeline {
|
||||
metadata: updated_metadata,
|
||||
spec: PipelineSpec {
|
||||
steps: updated_steps,
|
||||
..existing.spec.clone()
|
||||
},
|
||||
..existing.clone()
|
||||
};
|
||||
|
||||
// Write to file
|
||||
let yaml_content = serde_yaml::to_string(&updated_pipeline)
|
||||
.map_err(|e| format!("Failed to serialize pipeline: {}", e))?;
|
||||
|
||||
// Drop read locks before write
|
||||
drop(pipelines);
|
||||
drop(paths);
|
||||
|
||||
std::fs::write(file_path, yaml_content)
|
||||
.map_err(|e| format!("Failed to write pipeline file: {}", e))?;
|
||||
|
||||
// Update state
|
||||
let mut state_pipelines = state.pipelines.write().await;
|
||||
state_pipelines.insert(pipeline_id.clone(), updated_pipeline.clone());
|
||||
|
||||
Ok(pipeline_to_info(&updated_pipeline))
|
||||
}
|
||||
|
||||
/// Delete a pipeline
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_delete(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
pipeline_id: String,
|
||||
) -> Result<(), String> {
|
||||
let paths = state.pipeline_paths.read().await;
|
||||
|
||||
let file_path = paths.get(&pipeline_id)
|
||||
.ok_or_else(|| format!("Pipeline not found: {}", pipeline_id))?;
|
||||
|
||||
let path = file_path.clone();
|
||||
drop(paths);
|
||||
|
||||
// Remove file
|
||||
if path.exists() {
|
||||
std::fs::remove_file(&path)
|
||||
.map_err(|e| format!("Failed to delete pipeline file: {}", e))?;
|
||||
}
|
||||
|
||||
// Remove from state
|
||||
let mut state_pipelines = state.pipelines.write().await;
|
||||
let mut state_paths = state.pipeline_paths.write().await;
|
||||
state_pipelines.remove(&pipeline_id);
|
||||
state_paths.remove(&pipeline_id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
310
desktop/src-tauri/src/pipeline_commands/discovery.rs
Normal file
310
desktop/src-tauri/src/pipeline_commands/discovery.rs
Normal file
@@ -0,0 +1,310 @@
|
||||
//! Pipeline discovery, listing, running, and monitoring commands.
|
||||
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Emitter, State};
|
||||
|
||||
use zclaw_pipeline::{
|
||||
RunStatus,
|
||||
parse_pipeline_yaml,
|
||||
PipelineExecutor,
|
||||
ActionRegistry,
|
||||
LlmActionDriver,
|
||||
SkillActionDriver,
|
||||
HandActionDriver,
|
||||
};
|
||||
|
||||
use super::{PipelineState, PipelineInfo, PipelineRunResponse, RunPipelineResponse, RunPipelineRequest};
|
||||
use super::adapters::{RuntimeLlmAdapter, PipelineSkillDriver, PipelineHandDriver};
|
||||
use super::helpers::{get_pipelines_directory, scan_pipelines_with_paths, scan_pipelines_full_sync, pipeline_to_info};
|
||||
|
||||
use crate::kernel_commands::KernelState;
|
||||
|
||||
/// Discover and list all available pipelines
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_list(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
category: Option<String>,
|
||||
industry: Option<String>,
|
||||
) -> Result<Vec<PipelineInfo>, String> {
|
||||
// Get pipelines directory
|
||||
let pipelines_dir = get_pipelines_directory()?;
|
||||
|
||||
tracing::debug!("[pipeline_list] Scanning directory: {:?}", pipelines_dir);
|
||||
tracing::debug!("[pipeline_list] Filters - category: {:?}, industry: {:?}", category, industry);
|
||||
|
||||
// Scan for pipeline files (returns both info and paths)
|
||||
let mut pipelines_with_paths: Vec<(PipelineInfo, std::path::PathBuf)> = Vec::new();
|
||||
if pipelines_dir.exists() {
|
||||
scan_pipelines_with_paths(&pipelines_dir, category.as_deref(), industry.as_deref(), &mut pipelines_with_paths)?;
|
||||
} else {
|
||||
tracing::warn!("[WARN pipeline_list] Pipelines directory does not exist: {:?}", pipelines_dir);
|
||||
}
|
||||
|
||||
tracing::debug!("[pipeline_list] Found {} pipelines", pipelines_with_paths.len());
|
||||
|
||||
// Debug: log all pipelines with their industry values
|
||||
for (info, _) in &pipelines_with_paths {
|
||||
tracing::debug!("[pipeline_list] Pipeline: {} -> category: {}, industry: '{}'", info.id, info.category, info.industry);
|
||||
}
|
||||
|
||||
// Update state
|
||||
let mut state_pipelines = state.pipelines.write().await;
|
||||
let mut state_paths = state.pipeline_paths.write().await;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for (info, path) in &pipelines_with_paths {
|
||||
// Load full pipeline into state
|
||||
if let Ok(content) = std::fs::read_to_string(path) {
|
||||
if let Ok(pipeline) = parse_pipeline_yaml(&content) {
|
||||
state_pipelines.insert(info.id.clone(), pipeline);
|
||||
state_paths.insert(info.id.clone(), path.clone());
|
||||
}
|
||||
}
|
||||
result.push(info.clone());
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Get pipeline details
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_get(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
pipeline_id: String,
|
||||
) -> Result<PipelineInfo, String> {
|
||||
let pipelines = state.pipelines.read().await;
|
||||
|
||||
let pipeline = pipelines.get(&pipeline_id)
|
||||
.ok_or_else(|| format!("Pipeline not found: {}", pipeline_id))?;
|
||||
|
||||
Ok(pipeline_to_info(pipeline))
|
||||
}
|
||||
|
||||
/// Run a pipeline
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_run(
|
||||
app: AppHandle,
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
kernel_state: State<'_, KernelState>,
|
||||
request: RunPipelineRequest,
|
||||
) -> Result<RunPipelineResponse, String> {
|
||||
tracing::debug!("[pipeline_run] Received request for pipeline_id: {}", request.pipeline_id);
|
||||
|
||||
// Get pipeline
|
||||
let pipelines = state.pipelines.read().await;
|
||||
tracing::debug!("[pipeline_run] State has {} pipelines loaded", pipelines.len());
|
||||
|
||||
// Debug: list all loaded pipeline IDs
|
||||
for (id, _) in pipelines.iter() {
|
||||
tracing::debug!("[pipeline_run] Loaded pipeline: {}", id);
|
||||
}
|
||||
|
||||
let pipeline = pipelines.get(&request.pipeline_id)
|
||||
.ok_or_else(|| {
|
||||
println!("[ERROR pipeline_run] Pipeline '{}' not found in state. Available: {:?}",
|
||||
request.pipeline_id,
|
||||
pipelines.keys().collect::<Vec<_>>());
|
||||
format!("Pipeline not found: {}", request.pipeline_id)
|
||||
})?
|
||||
.clone();
|
||||
drop(pipelines);
|
||||
|
||||
// Try to get LLM driver from Kernel
|
||||
let (llm_driver, skill_driver, hand_driver) = {
|
||||
let kernel_lock = kernel_state.lock().await;
|
||||
if let Some(kernel) = kernel_lock.as_ref() {
|
||||
tracing::debug!("[pipeline_run] Got LLM driver from Kernel");
|
||||
let llm = Some(Arc::new(RuntimeLlmAdapter::new(
|
||||
kernel.driver(),
|
||||
Some(kernel.config().llm.model.clone()),
|
||||
)) as Arc<dyn LlmActionDriver>);
|
||||
let kernel_arc = (*kernel_state).clone();
|
||||
let skill = Some(Arc::new(PipelineSkillDriver::new(kernel_arc.clone()))
|
||||
as Arc<dyn SkillActionDriver>);
|
||||
let hand = Some(Arc::new(PipelineHandDriver::new(kernel_arc))
|
||||
as Arc<dyn HandActionDriver>);
|
||||
(llm, skill, hand)
|
||||
} else {
|
||||
tracing::debug!("[pipeline_run] Kernel not initialized, no drivers available");
|
||||
(None, None, None)
|
||||
}
|
||||
};
|
||||
|
||||
// Create executor with all available drivers
|
||||
let executor = if let Some(driver) = llm_driver {
|
||||
let mut registry = ActionRegistry::new().with_llm_driver(driver);
|
||||
if let Some(skill) = skill_driver {
|
||||
registry = registry.with_skill_registry(skill);
|
||||
}
|
||||
if let Some(hand) = hand_driver {
|
||||
registry = registry.with_hand_registry(hand);
|
||||
}
|
||||
Arc::new(PipelineExecutor::new(Arc::new(registry)))
|
||||
} else {
|
||||
state.executor.clone()
|
||||
};
|
||||
|
||||
// Generate run ID upfront so we can return it to the caller
|
||||
let run_id = uuid::Uuid::new_v4().to_string();
|
||||
let pipeline_id = request.pipeline_id.clone();
|
||||
let inputs = request.inputs.clone();
|
||||
|
||||
// Clone for async task
|
||||
let run_id_for_spawn = run_id.clone();
|
||||
|
||||
// Run pipeline in background with the known run_id
|
||||
tokio::spawn(async move {
|
||||
tracing::debug!("[pipeline_run] Starting execution with run_id: {}", run_id_for_spawn);
|
||||
let result = executor.execute_with_id(&pipeline, inputs, &run_id_for_spawn).await;
|
||||
|
||||
tracing::debug!("[pipeline_run] Execution completed for run_id: {}, status: {:?}",
|
||||
run_id_for_spawn,
|
||||
result.as_ref().map(|r| r.status.clone()).unwrap_or(RunStatus::Failed));
|
||||
|
||||
// Emit completion event
|
||||
let _ = app.emit("pipeline-complete", &PipelineRunResponse {
|
||||
run_id: run_id_for_spawn.clone(),
|
||||
pipeline_id: pipeline_id.clone(),
|
||||
status: match &result {
|
||||
Ok(r) => r.status.to_string(),
|
||||
Err(_) => "failed".to_string(),
|
||||
},
|
||||
current_step: None,
|
||||
percentage: 100,
|
||||
message: match &result {
|
||||
Ok(_) => "Pipeline completed".to_string(),
|
||||
Err(e) => e.to_string(),
|
||||
},
|
||||
outputs: result.as_ref().ok().and_then(|r| r.outputs.clone()),
|
||||
error: result.as_ref().err().map(|e| e.to_string()),
|
||||
started_at: result.as_ref().map(|r| r.started_at.to_rfc3339()).unwrap_or_else(|_| chrono::Utc::now().to_rfc3339()),
|
||||
ended_at: result.as_ref().map(|r| r.ended_at.map(|t| t.to_rfc3339())).unwrap_or_else(|_| Some(chrono::Utc::now().to_rfc3339())),
|
||||
});
|
||||
});
|
||||
|
||||
// Return immediately with the known run ID
|
||||
tracing::debug!("[pipeline_run] Returning run_id: {} to caller", run_id);
|
||||
Ok(RunPipelineResponse {
|
||||
run_id,
|
||||
pipeline_id: request.pipeline_id,
|
||||
status: "running".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get pipeline run progress
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_progress(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
run_id: String,
|
||||
) -> Result<PipelineRunResponse, String> {
|
||||
let progress = state.executor.get_progress(&run_id).await
|
||||
.ok_or_else(|| format!("Run not found: {}", run_id))?;
|
||||
|
||||
let run = state.executor.get_run(&run_id).await;
|
||||
|
||||
Ok(PipelineRunResponse {
|
||||
run_id: progress.run_id,
|
||||
pipeline_id: run.as_ref().map(|r| r.pipeline_id.clone()).unwrap_or_default(),
|
||||
status: progress.status.to_string(),
|
||||
current_step: Some(progress.current_step),
|
||||
percentage: progress.percentage,
|
||||
message: progress.message,
|
||||
outputs: run.as_ref().and_then(|r| r.outputs.clone()),
|
||||
error: run.as_ref().and_then(|r| r.error.clone()),
|
||||
started_at: run.as_ref().map(|r| r.started_at.to_rfc3339()).unwrap_or_default(),
|
||||
ended_at: run.as_ref().and_then(|r| r.ended_at.map(|t| t.to_rfc3339())),
|
||||
})
|
||||
}
|
||||
|
||||
/// Cancel a pipeline run
///
/// Requests cancellation of the run identified by `run_id`. Always returns
/// `Ok(())` — an unknown run id is not reported as an error here.
#[tauri::command]
pub async fn pipeline_cancel(
    state: State<'_, Arc<PipelineState>>,
    run_id: String,
) -> Result<(), String> {
    state.executor.cancel(&run_id).await;
    Ok(())
}
|
||||
|
||||
/// Get pipeline run result
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_result(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
run_id: String,
|
||||
) -> Result<PipelineRunResponse, String> {
|
||||
let run = state.executor.get_run(&run_id).await
|
||||
.ok_or_else(|| format!("Run not found: {}", run_id))?;
|
||||
|
||||
let current_step = run.current_step.clone();
|
||||
let status = run.status.clone();
|
||||
|
||||
Ok(PipelineRunResponse {
|
||||
run_id: run.id,
|
||||
pipeline_id: run.pipeline_id,
|
||||
status: status.to_string(),
|
||||
current_step: current_step.clone(),
|
||||
percentage: if status == RunStatus::Completed { 100 } else { 0 },
|
||||
message: current_step.unwrap_or_default(),
|
||||
outputs: run.outputs,
|
||||
error: run.error,
|
||||
started_at: run.started_at.to_rfc3339(),
|
||||
ended_at: run.ended_at.map(|t| t.to_rfc3339()),
|
||||
})
|
||||
}
|
||||
|
||||
/// List all runs
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_runs(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
) -> Result<Vec<PipelineRunResponse>, String> {
|
||||
let runs = state.executor.list_runs().await;
|
||||
|
||||
Ok(runs.into_iter().map(|run| {
|
||||
let current_step = run.current_step.clone();
|
||||
let status = run.status.clone();
|
||||
PipelineRunResponse {
|
||||
run_id: run.id,
|
||||
pipeline_id: run.pipeline_id,
|
||||
status: status.to_string(),
|
||||
current_step: current_step.clone(),
|
||||
percentage: if status == RunStatus::Completed { 100 } else if status == RunStatus::Running { 50 } else { 0 },
|
||||
message: current_step.unwrap_or_default(),
|
||||
outputs: run.outputs,
|
||||
error: run.error,
|
||||
started_at: run.started_at.to_rfc3339(),
|
||||
ended_at: run.ended_at.map(|t| t.to_rfc3339()),
|
||||
}
|
||||
}).collect())
|
||||
}
|
||||
|
||||
/// Refresh pipeline discovery
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_refresh(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
) -> Result<Vec<PipelineInfo>, String> {
|
||||
let pipelines_dir = get_pipelines_directory()?;
|
||||
|
||||
if !pipelines_dir.exists() {
|
||||
std::fs::create_dir_all(&pipelines_dir)
|
||||
.map_err(|e| format!("Failed to create pipelines directory: {}", e))?;
|
||||
}
|
||||
|
||||
let mut state_pipelines = state.pipelines.write().await;
|
||||
let mut state_paths = state.pipeline_paths.write().await;
|
||||
|
||||
// Clear existing
|
||||
state_pipelines.clear();
|
||||
state_paths.clear();
|
||||
|
||||
// Scan and load all pipelines (synchronous)
|
||||
let mut pipelines = Vec::new();
|
||||
scan_pipelines_full_sync(&pipelines_dir, &mut pipelines)?;
|
||||
|
||||
for (path, pipeline) in &pipelines {
|
||||
let id = pipeline.metadata.name.clone();
|
||||
state_pipelines.insert(id.clone(), pipeline.clone());
|
||||
state_paths.insert(id, path.clone());
|
||||
}
|
||||
|
||||
Ok(pipelines.into_iter().map(|(_, p)| pipeline_to_info(&p)).collect())
|
||||
}
|
||||
167
desktop/src-tauri/src/pipeline_commands/helpers.rs
Normal file
167
desktop/src-tauri/src/pipeline_commands/helpers.rs
Normal file
@@ -0,0 +1,167 @@
|
||||
//! Helper functions for Pipeline commands.
|
||||
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use zclaw_pipeline::{
|
||||
Pipeline,
|
||||
parse_pipeline_yaml,
|
||||
};
|
||||
|
||||
use super::types::{PipelineInfo, PipelineInputInfo};
|
||||
|
||||
/// Resolve the directory that holds pipeline YAML files.
///
/// Priority: `ZCLAW_PIPELINES_DIR` env var > workspace `pipelines/` (two
/// levels above the crate manifest) > `~/.zclaw/pipelines/`. The returned
/// directory is not guaranteed to exist; callers create it as needed.
pub(crate) fn get_pipelines_directory() -> Result<PathBuf, String> {
    // Try to find pipelines directory
    // Priority: ZCLAW_PIPELINES_DIR env > workspace pipelines/ > ~/.zclaw/pipelines/

    if let Ok(dir) = std::env::var("ZCLAW_PIPELINES_DIR") {
        return Ok(PathBuf::from(dir));
    }

    // Try workspace directory
    // NOTE(review): CARGO_MANIFEST_DIR is baked in at compile time, so this
    // path only exists on the build machine (useful in dev; effectively a
    // no-op for installed builds) — confirm that is intended.
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    let workspace_pipelines = manifest_dir
        .parent()
        .and_then(|p| p.parent())
        .map(|p| p.join("pipelines"));

    if let Some(ref dir) = workspace_pipelines {
        if dir.exists() {
            return Ok(dir.clone());
        }
    }

    // Fallback to user home directory
    if let Some(home) = dirs::home_dir() {
        let dir = home.join(".zclaw").join("pipelines");
        return Ok(dir);
    }

    Err("Could not determine pipelines directory".to_string())
}
|
||||
|
||||
/// Scan pipelines with paths (returns both info and file paths)
|
||||
pub(crate) fn scan_pipelines_with_paths(
|
||||
dir: &PathBuf,
|
||||
category_filter: Option<&str>,
|
||||
industry_filter: Option<&str>,
|
||||
pipelines: &mut Vec<(PipelineInfo, PathBuf)>,
|
||||
) -> Result<(), String> {
|
||||
tracing::debug!("[scan] Entering directory: {:?}", dir);
|
||||
let entries = std::fs::read_dir(dir)
|
||||
.map_err(|e| format!("Failed to read pipelines directory: {}", e))?;
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry.map_err(|e| format!("Failed to read entry: {}", e))?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_dir() {
|
||||
// Recursively scan subdirectory
|
||||
scan_pipelines_with_paths(&path, category_filter, industry_filter, pipelines)?;
|
||||
} else if path.extension().map(|e| e == "yaml" || e == "yml").unwrap_or(false) {
|
||||
// Try to parse pipeline file
|
||||
tracing::debug!("[scan] Found YAML file: {:?}", path);
|
||||
if let Ok(content) = std::fs::read_to_string(&path) {
|
||||
tracing::debug!("[scan] File content length: {} bytes", content.len());
|
||||
match parse_pipeline_yaml(&content) {
|
||||
Ok(pipeline) => {
|
||||
tracing::debug!(
|
||||
"[scan] Parsed YAML: {} -> category: {:?}, industry: {:?}",
|
||||
pipeline.metadata.name,
|
||||
pipeline.metadata.category,
|
||||
pipeline.metadata.industry
|
||||
);
|
||||
|
||||
// Apply category filter
|
||||
if let Some(filter) = category_filter {
|
||||
if pipeline.metadata.category.as_deref() != Some(filter) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply industry filter
|
||||
if let Some(filter) = industry_filter {
|
||||
if pipeline.metadata.industry.as_deref() != Some(filter) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("[scan] Found pipeline: {} at {:?}", pipeline.metadata.name, path);
|
||||
pipelines.push((pipeline_to_info(&pipeline), path));
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("[scan] Failed to parse pipeline at {:?}: {}", path, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn scan_pipelines_full_sync(
|
||||
dir: &PathBuf,
|
||||
pipelines: &mut Vec<(PathBuf, Pipeline)>,
|
||||
) -> Result<(), String> {
|
||||
let entries = std::fs::read_dir(dir)
|
||||
.map_err(|e| format!("Failed to read pipelines directory: {}", e))?;
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry.map_err(|e| format!("Failed to read entry: {}", e))?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_dir() {
|
||||
scan_pipelines_full_sync(&path, pipelines)?;
|
||||
} else if path.extension().map(|e| e == "yaml" || e == "yml").unwrap_or(false) {
|
||||
if let Ok(content) = std::fs::read_to_string(&path) {
|
||||
if let Ok(pipeline) = parse_pipeline_yaml(&content) {
|
||||
pipelines.push((path, pipeline));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn pipeline_to_info(pipeline: &Pipeline) -> PipelineInfo {
|
||||
let industry = pipeline.metadata.industry.clone().unwrap_or_default();
|
||||
tracing::debug!(
|
||||
"[pipeline_to_info] Pipeline: {}, category: {:?}, industry: {:?}",
|
||||
pipeline.metadata.name,
|
||||
pipeline.metadata.category,
|
||||
pipeline.metadata.industry
|
||||
);
|
||||
|
||||
PipelineInfo {
|
||||
id: pipeline.metadata.name.clone(),
|
||||
display_name: pipeline.metadata.display_name.clone()
|
||||
.unwrap_or_else(|| pipeline.metadata.name.clone()),
|
||||
description: pipeline.metadata.description.clone().unwrap_or_default(),
|
||||
category: pipeline.metadata.category.clone().unwrap_or_default(),
|
||||
industry,
|
||||
tags: pipeline.metadata.tags.clone(),
|
||||
icon: pipeline.metadata.icon.clone().unwrap_or_else(|| "📦".to_string()),
|
||||
version: pipeline.metadata.version.clone(),
|
||||
author: pipeline.metadata.author.clone().unwrap_or_default(),
|
||||
inputs: pipeline.spec.inputs.iter().map(|input| {
|
||||
PipelineInputInfo {
|
||||
name: input.name.clone(),
|
||||
input_type: match input.input_type {
|
||||
zclaw_pipeline::InputType::String => "string".to_string(),
|
||||
zclaw_pipeline::InputType::Number => "number".to_string(),
|
||||
zclaw_pipeline::InputType::Boolean => "boolean".to_string(),
|
||||
zclaw_pipeline::InputType::Select => "select".to_string(),
|
||||
zclaw_pipeline::InputType::MultiSelect => "multi-select".to_string(),
|
||||
zclaw_pipeline::InputType::File => "file".to_string(),
|
||||
zclaw_pipeline::InputType::Text => "text".to_string(),
|
||||
},
|
||||
required: input.required,
|
||||
label: input.label.clone().unwrap_or_else(|| input.name.clone()),
|
||||
placeholder: input.placeholder.clone(),
|
||||
default: input.default.clone(),
|
||||
options: input.options.clone(),
|
||||
}
|
||||
}).collect(),
|
||||
}
|
||||
}
|
||||
293
desktop/src-tauri/src/pipeline_commands/intent_router.rs
Normal file
293
desktop/src-tauri/src/pipeline_commands/intent_router.rs
Normal file
@@ -0,0 +1,293 @@
|
||||
//! Intent routing commands and LLM driver creation from config.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tauri::State;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use secrecy::SecretString;
|
||||
|
||||
use zclaw_pipeline::LlmActionDriver;
|
||||
|
||||
use super::adapters::RuntimeLlmAdapter;
|
||||
use super::PipelineState;
|
||||
|
||||
use crate::kernel_commands::KernelState;
|
||||
|
||||
/// Route result for frontend
///
/// Internally tagged on the wire with a snake_case `"type"` field,
/// e.g. `{"type": "matched", ...}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum RouteResultResponse {
    // Exactly one pipeline matched the user's intent.
    Matched {
        pipeline_id: String,
        display_name: Option<String>,
        mode: String,
        params: HashMap<String, Value>,
        confidence: f32,
        missing_params: Vec<MissingParamInfo>,
    },
    // Several pipelines are plausible; the user must choose.
    Ambiguous {
        candidates: Vec<PipelineCandidateInfo>,
    },
    // Nothing matched; offer suggestions instead.
    NoMatch {
        suggestions: Vec<PipelineCandidateInfo>,
    },
    // Input was too vague; `prompt` asks the user for clarification.
    NeedMoreInfo {
        prompt: String,
        related_pipeline: Option<String>,
    },
}
|
||||
|
||||
/// Missing parameter info
///
/// Describes a pipeline input the router could not extract from the user's
/// message; the frontend prompts for it.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MissingParamInfo {
    // Input name as declared in the pipeline spec.
    pub name: String,
    // Optional display label for the prompt.
    pub label: Option<String>,
    // String tag of the input type (e.g. "string", "number").
    pub param_type: String,
    // Whether the pipeline requires this input.
    pub required: bool,
    // Default value, if the pipeline declares one.
    pub default: Option<Value>,
}
|
||||
|
||||
/// Pipeline candidate info
///
/// A pipeline offered to the user when routing is ambiguous or unmatched.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PipelineCandidateInfo {
    // Pipeline id (metadata name).
    pub id: String,
    // Optional human-readable name.
    pub display_name: Option<String>,
    // Optional description shown alongside the candidate.
    pub description: Option<String>,
    // Optional icon string.
    pub icon: Option<String>,
    // Optional category tag.
    pub category: Option<String>,
    // Optional explanation of why this candidate was suggested.
    pub match_reason: Option<String>,
}
|
||||
|
||||
/// Route user input to matching pipeline
|
||||
#[tauri::command]
|
||||
pub async fn route_intent(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
kernel_state: State<'_, KernelState>,
|
||||
user_input: String,
|
||||
) -> Result<RouteResultResponse, String> {
|
||||
use zclaw_pipeline::{TriggerParser, Trigger, TriggerParam, compile_trigger};
|
||||
|
||||
tracing::debug!("[route_intent] Routing user input: {}", user_input);
|
||||
|
||||
// Build trigger parser from loaded pipelines
|
||||
let pipelines = state.pipelines.read().await;
|
||||
let mut parser = TriggerParser::new();
|
||||
|
||||
for (id, pipeline) in pipelines.iter() {
|
||||
// Derive trigger info from pipeline metadata (tags as keywords, description)
|
||||
let trigger = Trigger {
|
||||
keywords: pipeline.metadata.tags.clone(),
|
||||
patterns: vec![], // Patterns not defined in Pipeline struct
|
||||
description: pipeline.metadata.description.clone(),
|
||||
examples: vec![], // Examples not defined in Pipeline struct
|
||||
};
|
||||
|
||||
// Convert pipeline inputs to trigger params
|
||||
let param_defs: Vec<TriggerParam> = pipeline.spec.inputs.iter().map(|input| {
|
||||
TriggerParam {
|
||||
name: input.name.clone(),
|
||||
param_type: match input.input_type {
|
||||
zclaw_pipeline::InputType::String => "string".to_string(),
|
||||
zclaw_pipeline::InputType::Number => "number".to_string(),
|
||||
zclaw_pipeline::InputType::Boolean => "boolean".to_string(),
|
||||
zclaw_pipeline::InputType::Select => "select".to_string(),
|
||||
zclaw_pipeline::InputType::MultiSelect => "multi-select".to_string(),
|
||||
zclaw_pipeline::InputType::File => "file".to_string(),
|
||||
zclaw_pipeline::InputType::Text => "text".to_string(),
|
||||
},
|
||||
required: input.required,
|
||||
label: input.label.clone(),
|
||||
default: input.default.clone(),
|
||||
}
|
||||
}).collect();
|
||||
|
||||
match compile_trigger(
|
||||
id.clone(),
|
||||
pipeline.metadata.display_name.clone(),
|
||||
&trigger,
|
||||
param_defs,
|
||||
) {
|
||||
Ok(compiled) => parser.register(compiled),
|
||||
Err(e) => {
|
||||
tracing::warn!("[WARN route_intent] Failed to compile trigger for {}: {}", id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Quick match
|
||||
if let Some(match_result) = parser.quick_match(&user_input) {
|
||||
let trigger = parser.get_trigger(&match_result.pipeline_id);
|
||||
|
||||
// Determine input mode
|
||||
let mode = if let Some(t) = &trigger {
|
||||
let required_count = t.param_defs.iter().filter(|p| p.required).count();
|
||||
if required_count > 3 || t.param_defs.len() > 5 {
|
||||
"form"
|
||||
} else if t.param_defs.is_empty() {
|
||||
"conversation"
|
||||
} else {
|
||||
"conversation"
|
||||
}
|
||||
} else {
|
||||
"auto"
|
||||
};
|
||||
|
||||
// Find missing params
|
||||
let missing_params: Vec<MissingParamInfo> = trigger
|
||||
.map(|t| {
|
||||
t.param_defs.iter()
|
||||
.filter(|p| p.required && !match_result.params.contains_key(&p.name) && p.default.is_none())
|
||||
.map(|p| MissingParamInfo {
|
||||
name: p.name.clone(),
|
||||
label: p.label.clone(),
|
||||
param_type: p.param_type.clone(),
|
||||
required: p.required,
|
||||
default: p.default.clone(),
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
return Ok(RouteResultResponse::Matched {
|
||||
pipeline_id: match_result.pipeline_id,
|
||||
display_name: trigger.and_then(|t| t.display_name.clone()),
|
||||
mode: mode.to_string(),
|
||||
params: match_result.params,
|
||||
confidence: match_result.confidence,
|
||||
missing_params,
|
||||
});
|
||||
}
|
||||
|
||||
// Semantic match via LLM (if kernel is initialized)
|
||||
let triggers = parser.triggers();
|
||||
if !triggers.is_empty() {
|
||||
let llm_driver = {
|
||||
let kernel_lock = kernel_state.lock().await;
|
||||
kernel_lock.as_ref().map(|k| k.driver())
|
||||
};
|
||||
|
||||
if let Some(driver) = llm_driver {
|
||||
use zclaw_pipeline::{RuntimeLlmIntentDriver, LlmIntentDriver};
|
||||
let intent_driver = RuntimeLlmIntentDriver::new(driver);
|
||||
|
||||
if let Some(result) = intent_driver.semantic_match(&user_input, &triggers).await {
|
||||
tracing::debug!(
|
||||
"[route_intent] Semantic match: pipeline={}, confidence={}",
|
||||
result.pipeline_id, result.confidence
|
||||
);
|
||||
|
||||
let trigger = parser.get_trigger(&result.pipeline_id);
|
||||
let mode = "auto".to_string();
|
||||
|
||||
let missing_params: Vec<MissingParamInfo> = trigger
|
||||
.map(|t| {
|
||||
t.param_defs.iter()
|
||||
.filter(|p| p.required && !result.params.contains_key(&p.name) && p.default.is_none())
|
||||
.map(|p| MissingParamInfo {
|
||||
name: p.name.clone(),
|
||||
label: p.label.clone(),
|
||||
param_type: p.param_type.clone(),
|
||||
required: p.required,
|
||||
default: p.default.clone(),
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
return Ok(RouteResultResponse::Matched {
|
||||
pipeline_id: result.pipeline_id,
|
||||
display_name: trigger.and_then(|t| t.display_name.clone()),
|
||||
mode,
|
||||
params: result.params,
|
||||
confidence: result.confidence,
|
||||
missing_params,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No match - return suggestions
|
||||
let suggestions: Vec<PipelineCandidateInfo> = parser.triggers()
|
||||
.iter()
|
||||
.take(3)
|
||||
.map(|t| PipelineCandidateInfo {
|
||||
id: t.pipeline_id.clone(),
|
||||
display_name: t.display_name.clone(),
|
||||
description: t.description.clone(),
|
||||
icon: None,
|
||||
category: None,
|
||||
match_reason: Some("推荐".to_string()),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(RouteResultResponse::NoMatch { suggestions })
|
||||
}
|
||||
|
||||
/// Create an LLM driver from configuration file or environment variables
|
||||
pub(crate) fn create_llm_driver_from_config() -> Option<Arc<dyn LlmActionDriver>> {
|
||||
// Try to read config file
|
||||
let config_path = dirs::config_dir()
|
||||
.map(|p| p.join("zclaw").join("config.toml"))?;
|
||||
|
||||
if !config_path.exists() {
|
||||
tracing::debug!("[create_llm_driver] Config file not found at {:?}", config_path);
|
||||
return None;
|
||||
}
|
||||
|
||||
// Read and parse config
|
||||
let config_content = std::fs::read_to_string(&config_path).ok()?;
|
||||
let config: toml::Value = toml::from_str(&config_content).ok()?;
|
||||
|
||||
// Extract LLM config
|
||||
let llm_config = config.get("llm")?;
|
||||
|
||||
let provider = llm_config.get("provider")?.as_str()?.to_string();
|
||||
let api_key = llm_config.get("api_key")?.as_str()?.to_string();
|
||||
let base_url = llm_config.get("base_url").and_then(|v| v.as_str()).map(|s| s.to_string());
|
||||
let model = llm_config.get("model").and_then(|v| v.as_str()).map(|s| s.to_string());
|
||||
|
||||
tracing::debug!("[create_llm_driver] Found LLM config: provider={}, model={:?}", provider, model);
|
||||
|
||||
// Convert api_key to SecretString
|
||||
let secret_key = SecretString::new(api_key);
|
||||
|
||||
// Create the runtime driver — use with_base_url when a custom endpoint is configured
|
||||
// (essential for Chinese providers like doubao, qwen, deepseek, kimi)
|
||||
let runtime_driver: Arc<dyn zclaw_runtime::LlmDriver> = match provider.as_str() {
|
||||
"anthropic" => {
|
||||
if let Some(url) = base_url {
|
||||
Arc::new(zclaw_runtime::AnthropicDriver::with_base_url(secret_key, url))
|
||||
} else {
|
||||
Arc::new(zclaw_runtime::AnthropicDriver::new(secret_key))
|
||||
}
|
||||
}
|
||||
"openai" | "doubao" | "qwen" | "deepseek" | "kimi" | "zhipu" => {
|
||||
// Chinese providers typically need a custom base_url
|
||||
if let Some(url) = base_url {
|
||||
Arc::new(zclaw_runtime::OpenAiDriver::with_base_url(secret_key, url))
|
||||
} else {
|
||||
Arc::new(zclaw_runtime::OpenAiDriver::new(secret_key))
|
||||
}
|
||||
}
|
||||
"gemini" => {
|
||||
if let Some(url) = base_url {
|
||||
Arc::new(zclaw_runtime::GeminiDriver::with_base_url(secret_key, url))
|
||||
} else {
|
||||
Arc::new(zclaw_runtime::GeminiDriver::new(secret_key))
|
||||
}
|
||||
}
|
||||
"local" | "ollama" => {
|
||||
let url = base_url.unwrap_or_else(|| "http://localhost:11434".to_string());
|
||||
Arc::new(zclaw_runtime::LocalDriver::new(&url))
|
||||
}
|
||||
_ => {
|
||||
tracing::warn!("[WARN create_llm_driver] Unknown provider: {}", provider);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
Some(Arc::new(RuntimeLlmAdapter::new(runtime_driver, model)))
|
||||
}
|
||||
63
desktop/src-tauri/src/pipeline_commands/mod.rs
Normal file
63
desktop/src-tauri/src/pipeline_commands/mod.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
//! Pipeline commands for Tauri
|
||||
//!
|
||||
//! Commands for discovering, running, and monitoring Pipelines.
|
||||
|
||||
pub mod adapters;
|
||||
pub mod types;
|
||||
pub mod discovery;
|
||||
pub mod crud;
|
||||
pub mod helpers;
|
||||
pub mod intent_router;
|
||||
pub mod presentation;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use zclaw_pipeline::{Pipeline, PipelineExecutor, ActionRegistry};
|
||||
|
||||
// Re-export key types from sub-modules for external consumers
|
||||
#[allow(unused_imports)]
|
||||
pub use adapters::{RuntimeLlmAdapter, PipelineSkillDriver, PipelineHandDriver};
|
||||
#[allow(unused_imports)]
|
||||
pub use types::{PipelineInfo, PipelineInputInfo, RunPipelineRequest, RunPipelineResponse, PipelineRunResponse};
|
||||
#[allow(unused_imports)]
|
||||
pub use crud::{CreatePipelineRequest, UpdatePipelineRequest, WorkflowStepInput};
|
||||
#[allow(unused_imports)]
|
||||
pub use intent_router::{RouteResultResponse, MissingParamInfo, PipelineCandidateInfo};
|
||||
#[allow(unused_imports)]
|
||||
pub use presentation::PipelineTemplateInfo;
|
||||
|
||||
/// Pipeline state wrapper for Tauri
|
||||
pub struct PipelineState {
|
||||
/// Pipeline executor
|
||||
pub executor: Arc<PipelineExecutor>,
|
||||
/// Discovered pipelines (id -> Pipeline)
|
||||
pub pipelines: RwLock<HashMap<String, Pipeline>>,
|
||||
/// Pipeline file paths (id -> path)
|
||||
pub pipeline_paths: RwLock<HashMap<String, PathBuf>>,
|
||||
}
|
||||
|
||||
impl PipelineState {
|
||||
pub fn new(action_registry: Arc<ActionRegistry>) -> Self {
|
||||
Self {
|
||||
executor: Arc::new(PipelineExecutor::new(action_registry)),
|
||||
pipelines: RwLock::new(HashMap::new()),
|
||||
pipeline_paths: RwLock::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Create pipeline state with default action registry
|
||||
pub fn create_pipeline_state() -> Arc<PipelineState> {
|
||||
// Try to create an LLM driver from environment/config
|
||||
let action_registry = if let Some(driver) = intent_router::create_llm_driver_from_config() {
|
||||
tracing::debug!("[create_pipeline_state] LLM driver configured successfully");
|
||||
Arc::new(ActionRegistry::new().with_llm_driver(driver))
|
||||
} else {
|
||||
tracing::debug!("[create_pipeline_state] No LLM driver configured - pipelines requiring LLM will fail");
|
||||
Arc::new(ActionRegistry::new())
|
||||
};
|
||||
Arc::new(PipelineState::new(action_registry))
|
||||
}
|
||||
103
desktop/src-tauri/src/pipeline_commands/presentation.rs
Normal file
103
desktop/src-tauri/src/pipeline_commands/presentation.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
//! Presentation analysis and template listing commands.
|
||||
|
||||
use std::sync::Arc;
|
||||
use tauri::State;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
use super::types::PipelineInputInfo;
|
||||
use super::PipelineState;
|
||||
|
||||
/// Analyze presentation data
|
||||
#[tauri::command]
|
||||
pub async fn analyze_presentation(
|
||||
data: Value,
|
||||
) -> Result<serde_json::Value, String> {
|
||||
use zclaw_pipeline::presentation::PresentationAnalyzer;
|
||||
|
||||
let analyzer = PresentationAnalyzer::new();
|
||||
let analysis = analyzer.analyze(&data);
|
||||
|
||||
// Convert analysis to JSON
|
||||
serde_json::to_value(&analysis).map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
/// Pipeline template metadata
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PipelineTemplateInfo {
|
||||
pub id: String,
|
||||
pub display_name: String,
|
||||
pub description: String,
|
||||
pub category: String,
|
||||
pub industry: String,
|
||||
pub tags: Vec<String>,
|
||||
pub icon: String,
|
||||
pub version: String,
|
||||
pub author: String,
|
||||
pub inputs: Vec<PipelineInputInfo>,
|
||||
}
|
||||
|
||||
/// List available pipeline templates from the `_templates/` directory.
|
||||
///
|
||||
/// Templates are pipeline YAML files that users can browse and instantiate.
|
||||
/// They live in `pipelines/_templates/` and are not directly runnable
|
||||
/// (they serve as blueprints).
|
||||
#[tauri::command]
|
||||
pub async fn pipeline_templates(
|
||||
state: State<'_, Arc<PipelineState>>,
|
||||
) -> Result<Vec<PipelineTemplateInfo>, String> {
|
||||
let pipelines = state.pipelines.read().await;
|
||||
|
||||
// Filter pipelines that have `is_template: true` in metadata
|
||||
// or are in the _templates directory
|
||||
let templates: Vec<PipelineTemplateInfo> = pipelines.iter()
|
||||
.filter_map(|(_id, pipeline)| {
|
||||
// Check if this pipeline has template metadata
|
||||
let is_template = pipeline.metadata.annotations
|
||||
.as_ref()
|
||||
.and_then(|a| a.get("is_template"))
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !is_template {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(PipelineTemplateInfo {
|
||||
id: pipeline.metadata.name.clone(),
|
||||
display_name: pipeline.metadata.display_name.clone()
|
||||
.unwrap_or_else(|| pipeline.metadata.name.clone()),
|
||||
description: pipeline.metadata.description.clone().unwrap_or_default(),
|
||||
category: pipeline.metadata.category.clone().unwrap_or_default(),
|
||||
industry: pipeline.metadata.industry.clone().unwrap_or_default(),
|
||||
tags: pipeline.metadata.tags.clone(),
|
||||
icon: pipeline.metadata.icon.clone().unwrap_or_else(|| "📦".to_string()),
|
||||
version: pipeline.metadata.version.clone(),
|
||||
author: pipeline.metadata.author.clone().unwrap_or_default(),
|
||||
inputs: pipeline.spec.inputs.iter().map(|input| {
|
||||
PipelineInputInfo {
|
||||
name: input.name.clone(),
|
||||
input_type: match input.input_type {
|
||||
zclaw_pipeline::InputType::String => "string".to_string(),
|
||||
zclaw_pipeline::InputType::Number => "number".to_string(),
|
||||
zclaw_pipeline::InputType::Boolean => "boolean".to_string(),
|
||||
zclaw_pipeline::InputType::Select => "select".to_string(),
|
||||
zclaw_pipeline::InputType::MultiSelect => "multi-select".to_string(),
|
||||
zclaw_pipeline::InputType::File => "file".to_string(),
|
||||
zclaw_pipeline::InputType::Text => "text".to_string(),
|
||||
},
|
||||
required: input.required,
|
||||
label: input.label.clone().unwrap_or_else(|| input.name.clone()),
|
||||
placeholder: input.placeholder.clone(),
|
||||
default: input.default.clone(),
|
||||
options: input.options.clone(),
|
||||
}
|
||||
}).collect(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
tracing::debug!("[pipeline_templates] Found {} templates", templates.len());
|
||||
Ok(templates)
|
||||
}
|
||||
99
desktop/src-tauri/src/pipeline_commands/types.rs
Normal file
99
desktop/src-tauri/src/pipeline_commands/types.rs
Normal file
@@ -0,0 +1,99 @@
|
||||
//! Public types for Pipeline commands.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Pipeline info for list display
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PipelineInfo {
|
||||
/// Pipeline ID (name)
|
||||
pub id: String,
|
||||
/// Display name
|
||||
pub display_name: String,
|
||||
/// Description
|
||||
pub description: String,
|
||||
/// Category (functional classification)
|
||||
pub category: String,
|
||||
/// Industry classification (e.g., "internet", "finance", "healthcare")
|
||||
pub industry: String,
|
||||
/// Tags
|
||||
pub tags: Vec<String>,
|
||||
/// Icon (emoji)
|
||||
pub icon: String,
|
||||
/// Version
|
||||
pub version: String,
|
||||
/// Author
|
||||
pub author: String,
|
||||
/// Input parameters
|
||||
pub inputs: Vec<PipelineInputInfo>,
|
||||
}
|
||||
|
||||
/// Pipeline input parameter info
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PipelineInputInfo {
|
||||
/// Parameter name
|
||||
pub name: String,
|
||||
/// Input type
|
||||
pub input_type: String,
|
||||
/// Is required
|
||||
pub required: bool,
|
||||
/// Label
|
||||
pub label: String,
|
||||
/// Placeholder
|
||||
pub placeholder: Option<String>,
|
||||
/// Default value
|
||||
pub default: Option<Value>,
|
||||
/// Options (for select/multi-select)
|
||||
pub options: Vec<String>,
|
||||
}
|
||||
|
||||
/// Run pipeline request
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RunPipelineRequest {
|
||||
/// Pipeline ID
|
||||
pub pipeline_id: String,
|
||||
/// Input values
|
||||
pub inputs: HashMap<String, Value>,
|
||||
}
|
||||
|
||||
/// Run pipeline response
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RunPipelineResponse {
|
||||
/// Run ID
|
||||
pub run_id: String,
|
||||
/// Pipeline ID
|
||||
pub pipeline_id: String,
|
||||
/// Status
|
||||
pub status: String,
|
||||
}
|
||||
|
||||
/// Pipeline run status response
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PipelineRunResponse {
|
||||
/// Run ID
|
||||
pub run_id: String,
|
||||
/// Pipeline ID
|
||||
pub pipeline_id: String,
|
||||
/// Status
|
||||
pub status: String,
|
||||
/// Current step
|
||||
pub current_step: Option<String>,
|
||||
/// Progress percentage
|
||||
pub percentage: u8,
|
||||
/// Message
|
||||
pub message: String,
|
||||
/// Outputs (if completed)
|
||||
pub outputs: Option<Value>,
|
||||
/// Error (if failed)
|
||||
pub error: Option<String>,
|
||||
/// Started at
|
||||
pub started_at: String,
|
||||
/// Ended at
|
||||
pub ended_at: Option<String>,
|
||||
}
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
Check,
|
||||
Loader2,
|
||||
AlertCircle,
|
||||
LayoutGrid,
|
||||
} from 'lucide-react';
|
||||
import { cn } from '../lib/utils';
|
||||
import { useAgentStore, type CloneCreateOptions } from '../store/agentStore';
|
||||
@@ -28,6 +29,12 @@ import type { Clone } from '../store/agentStore';
|
||||
import { intelligenceClient } from '../lib/intelligence-client';
|
||||
import { generateSoulContent, generateUserContent } from '../lib/personality-presets';
|
||||
import { createLogger } from '../lib/logger';
|
||||
import {
|
||||
type AgentTemplateAvailable,
|
||||
type AgentTemplateFull,
|
||||
saasClient,
|
||||
} from '../lib/saas-client';
|
||||
import { useSaaSStore } from '../store/saasStore';
|
||||
|
||||
const log = createLogger('AgentOnboardingWizard');
|
||||
|
||||
@@ -72,6 +79,7 @@ const initialFormData: WizardFormData = {
|
||||
// === Step Configuration ===
|
||||
|
||||
const steps = [
|
||||
{ id: 0, title: '行业模板', description: '选择预设或自定义', icon: LayoutGrid },
|
||||
{ id: 1, title: '认识用户', description: '让我们了解一下您', icon: User },
|
||||
{ id: 2, title: 'Agent 身份', description: '给助手起个名字', icon: Bot },
|
||||
{ id: 3, title: '人格风格', description: '选择沟通风格', icon: Sparkles },
|
||||
@@ -82,19 +90,22 @@ const steps = [
|
||||
// === Component ===
|
||||
|
||||
export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboardingWizardProps) {
|
||||
const { createClone, updateClone, clones, isLoading, error, clearError } = useAgentStore();
|
||||
const [currentStep, setCurrentStep] = useState(1);
|
||||
const { createClone, createFromTemplate, updateClone, clones, isLoading, error, clearError } = useAgentStore();
|
||||
const availableTemplates = useSaaSStore((s) => s.availableTemplates);
|
||||
const [currentStep, setCurrentStep] = useState(0);
|
||||
const [formData, setFormData] = useState<WizardFormData>(initialFormData);
|
||||
const [errors, setErrors] = useState<Record<string, string>>({});
|
||||
const [submitStatus, setSubmitStatus] = useState<'idle' | 'success' | 'error'>('idle');
|
||||
const [selectedTemplate, setSelectedTemplate] = useState<AgentTemplateFull | null>(null);
|
||||
|
||||
// Reset form when modal opens
|
||||
useEffect(() => {
|
||||
if (isOpen) {
|
||||
setFormData(initialFormData);
|
||||
setCurrentStep(1);
|
||||
setCurrentStep(0);
|
||||
setErrors({});
|
||||
setSubmitStatus('idle');
|
||||
setSelectedTemplate(null);
|
||||
clearError();
|
||||
}
|
||||
}, [isOpen, clearError]);
|
||||
@@ -111,11 +122,33 @@ export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboa
|
||||
}
|
||||
};
|
||||
|
||||
// Handle template selection
|
||||
const handleSelectTemplate = async (t: AgentTemplateAvailable) => {
|
||||
try {
|
||||
const full = await saasClient.fetchTemplateFull(t.id);
|
||||
setSelectedTemplate(full);
|
||||
setFormData(prev => ({
|
||||
...prev,
|
||||
agentName: full.name,
|
||||
emoji: full.emoji || prev.emoji,
|
||||
personality: full.personality || prev.personality,
|
||||
scenarios: full.scenarios.length > 0 ? full.scenarios : prev.scenarios,
|
||||
}));
|
||||
setCurrentStep(1);
|
||||
} catch {
|
||||
// If fetch fails, still allow manual creation
|
||||
setCurrentStep(1);
|
||||
}
|
||||
};
|
||||
|
||||
// Validate current step
|
||||
const validateStep = useCallback((step: number): boolean => {
|
||||
const newErrors: Record<string, string> = {};
|
||||
|
||||
switch (step) {
|
||||
case 0:
|
||||
// Template selection is always valid (blank agent is an option)
|
||||
break;
|
||||
case 1:
|
||||
if (!formData.userName.trim()) {
|
||||
newErrors.userName = '请输入您的名字';
|
||||
@@ -157,7 +190,7 @@ export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboa
|
||||
|
||||
// Navigate to previous step
|
||||
const prevStep = () => {
|
||||
setCurrentStep((prev) => Math.max(prev - 1, 1));
|
||||
setCurrentStep((prev) => Math.max(prev - 1, 0));
|
||||
};
|
||||
|
||||
// Handle form submission
|
||||
@@ -169,59 +202,76 @@ export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboa
|
||||
setSubmitStatus('idle');
|
||||
|
||||
try {
|
||||
const personalityUpdates = {
|
||||
name: formData.agentName,
|
||||
role: formData.agentRole || undefined,
|
||||
nickname: formData.agentNickname || undefined,
|
||||
userName: formData.userName,
|
||||
userRole: formData.userRole || undefined,
|
||||
scenarios: formData.scenarios,
|
||||
workspaceDir: formData.workspaceDir || undefined,
|
||||
restrictFiles: formData.restrictFiles,
|
||||
emoji: formData.emoji,
|
||||
personality: formData.personality,
|
||||
notes: formData.notes || undefined,
|
||||
};
|
||||
|
||||
let clone: Clone | undefined;
|
||||
|
||||
// If there's an existing clone, update it instead of creating a new one
|
||||
if (clones && clones.length > 0) {
|
||||
clone = await updateClone(clones[0].id, personalityUpdates);
|
||||
// Template-based creation path
|
||||
if (selectedTemplate && clones.length === 0) {
|
||||
clone = await createFromTemplate(selectedTemplate);
|
||||
|
||||
// Persist USER.md for template-created agents
|
||||
if (clone) {
|
||||
try {
|
||||
const userContent = generateUserContent({
|
||||
userName: formData.userName,
|
||||
userRole: formData.userRole,
|
||||
scenarios: formData.scenarios,
|
||||
});
|
||||
await intelligenceClient.identity.updateFile(clone.id, 'user_profile', userContent);
|
||||
} catch (err) {
|
||||
log.warn('Failed to persist USER.md for template agent:', err);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const createOptions: CloneCreateOptions = {
|
||||
...personalityUpdates,
|
||||
privacyOptIn: formData.privacyOptIn,
|
||||
// Manual creation / update path
|
||||
const personalityUpdates = {
|
||||
name: formData.agentName,
|
||||
role: formData.agentRole || undefined,
|
||||
nickname: formData.agentNickname || undefined,
|
||||
userName: formData.userName,
|
||||
userRole: formData.userRole || undefined,
|
||||
scenarios: formData.scenarios,
|
||||
workspaceDir: formData.workspaceDir || undefined,
|
||||
restrictFiles: formData.restrictFiles,
|
||||
emoji: formData.emoji,
|
||||
personality: formData.personality,
|
||||
notes: formData.notes || undefined,
|
||||
};
|
||||
clone = await createClone(createOptions);
|
||||
|
||||
if (clones && clones.length > 0) {
|
||||
clone = await updateClone(clones[0].id, personalityUpdates);
|
||||
} else {
|
||||
const createOptions: CloneCreateOptions = {
|
||||
...personalityUpdates,
|
||||
privacyOptIn: formData.privacyOptIn,
|
||||
};
|
||||
clone = await createClone(createOptions);
|
||||
}
|
||||
}
|
||||
|
||||
if (clone) {
|
||||
// Persist SOUL.md and USER.md to the identity system
|
||||
try {
|
||||
const soulContent = generateSoulContent({
|
||||
agentName: formData.agentName,
|
||||
emoji: formData.emoji,
|
||||
personality: formData.personality,
|
||||
scenarios: formData.scenarios,
|
||||
});
|
||||
// Persist SOUL.md and USER.md to the identity system (manual path only)
|
||||
if (!selectedTemplate) {
|
||||
try {
|
||||
const soulContent = generateSoulContent({
|
||||
agentName: formData.agentName,
|
||||
emoji: formData.emoji,
|
||||
personality: formData.personality,
|
||||
scenarios: formData.scenarios,
|
||||
});
|
||||
|
||||
const userContent = generateUserContent({
|
||||
userName: formData.userName,
|
||||
userRole: formData.userRole,
|
||||
scenarios: formData.scenarios,
|
||||
});
|
||||
const userContent = generateUserContent({
|
||||
userName: formData.userName,
|
||||
userRole: formData.userRole,
|
||||
scenarios: formData.scenarios,
|
||||
});
|
||||
|
||||
// Write SOUL.md (agent personality)
|
||||
await intelligenceClient.identity.updateFile(clone.id, 'soul', soulContent);
|
||||
await intelligenceClient.identity.updateFile(clone.id, 'soul', soulContent);
|
||||
await intelligenceClient.identity.updateFile(clone.id, 'user_profile', userContent);
|
||||
|
||||
// Write USER.md (user profile)
|
||||
await intelligenceClient.identity.updateFile(clone.id, 'user_profile', userContent);
|
||||
|
||||
log.debug('SOUL.md and USER.md persisted for agent:', clone.id);
|
||||
} catch (err) {
|
||||
log.warn('Failed to persist identity files:', err);
|
||||
// Don't fail the whole onboarding if identity persistence fails
|
||||
log.debug('SOUL.md and USER.md persisted for agent:', clone.id);
|
||||
} catch (err) {
|
||||
log.warn('Failed to persist identity files:', err);
|
||||
}
|
||||
}
|
||||
|
||||
setSubmitStatus('success');
|
||||
@@ -239,7 +289,7 @@ export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboa
|
||||
|
||||
if (!isOpen) return null;
|
||||
|
||||
const CurrentStepIcon = steps[currentStep - 1]?.icon || Bot;
|
||||
const CurrentStepIcon = steps[currentStep]?.icon || Bot;
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 z-50 flex items-center justify-center">
|
||||
@@ -262,7 +312,7 @@ export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboa
|
||||
创建新 Agent
|
||||
</h2>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400">
|
||||
步骤 {currentStep}/{steps.length}: {steps[currentStep - 1]?.title}
|
||||
步骤 {currentStep + 1}/{steps.length}: {steps[currentStep]?.title}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -321,6 +371,36 @@ export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboa
|
||||
transition={{ duration: 0.2 }}
|
||||
className="space-y-4"
|
||||
>
|
||||
{/* Step 0: 行业模板 */}
|
||||
{currentStep === 0 && (
|
||||
<div className="space-y-4">
|
||||
<p className="text-sm text-muted-foreground">选择一个行业预设快速开始,或创建空白 Agent</p>
|
||||
<div className="grid grid-cols-2 sm:grid-cols-3 gap-3">
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => { setSelectedTemplate(null); setCurrentStep(1); }}
|
||||
className="p-4 rounded-lg border-2 border-dashed border-muted-foreground/25 hover:border-primary/50 transition-colors text-center"
|
||||
>
|
||||
<div className="text-2xl mb-2">✨</div>
|
||||
<div className="font-medium text-sm">空白 Agent</div>
|
||||
<div className="text-xs text-muted-foreground">从零配置</div>
|
||||
</button>
|
||||
{(availableTemplates ?? []).map(t => (
|
||||
<button
|
||||
key={t.id}
|
||||
type="button"
|
||||
onClick={() => handleSelectTemplate(t)}
|
||||
className="p-4 rounded-lg border-2 hover:border-primary/50 transition-colors text-center"
|
||||
>
|
||||
<div className="text-2xl mb-2">{t.emoji || '🤖'}</div>
|
||||
<div className="font-medium text-sm">{t.name}</div>
|
||||
<div className="text-xs text-muted-foreground line-clamp-2">{t.description}</div>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Step 1: 认识用户 */}
|
||||
{currentStep === 1 && (
|
||||
<>
|
||||
@@ -627,7 +707,7 @@ export function AgentOnboardingWizard({ isOpen, onClose, onSuccess }: AgentOnboa
|
||||
<button
|
||||
type="button"
|
||||
onClick={prevStep}
|
||||
disabled={currentStep === 1}
|
||||
disabled={currentStep === 0}
|
||||
className="px-4 py-2 text-sm text-gray-700 dark:text-gray-300 bg-gray-100 dark:bg-gray-700 rounded-lg hover:bg-gray-200 dark:hover:bg-gray-600 transition-colors disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-1"
|
||||
>
|
||||
<ChevronLeft className="w-4 h-4" />
|
||||
|
||||
@@ -14,6 +14,9 @@
|
||||
|
||||
import { secureStorage, isSecureStorageAvailable } from './secure-storage';
|
||||
import { hashSha256, generateRandomString } from './crypto-utils';
|
||||
import { createLogger } from './logger';
|
||||
|
||||
const logger = createLogger('api-key-storage');
|
||||
|
||||
// Storage key prefixes
|
||||
const API_KEY_PREFIX = 'zclaw_api_key_';
|
||||
@@ -248,8 +251,8 @@ export async function getApiKey(type: ApiKeyType): Promise<string | null> {
|
||||
// Update last used timestamp
|
||||
metadata.lastUsedAt = Date.now();
|
||||
localStorage.setItem(API_KEY_META_PREFIX + type, JSON.stringify(metadata));
|
||||
} catch {
|
||||
// Ignore metadata parsing errors
|
||||
} catch (e) {
|
||||
logger.debug('Failed to update API key metadata', { type, error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -271,7 +274,8 @@ export function getApiKeyMetadata(type: ApiKeyType): ApiKeyMetadata | null {
|
||||
|
||||
try {
|
||||
return JSON.parse(metaJson) as ApiKeyMetadata;
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('Failed to parse API key metadata', { type, error: e });
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -290,8 +294,8 @@ export function listApiKeyMetadata(): ApiKeyMetadata[] {
|
||||
try {
|
||||
const meta = JSON.parse(localStorage.getItem(key) || '');
|
||||
metadata.push(meta);
|
||||
} catch {
|
||||
// Ignore parsing errors
|
||||
} catch (e) {
|
||||
logger.debug('Failed to parse API key metadata entry', { key, error: e });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -431,8 +435,8 @@ function logSecurityEvent(
|
||||
}
|
||||
|
||||
localStorage.setItem(SECURITY_LOG_KEY, JSON.stringify(events));
|
||||
} catch {
|
||||
// Ignore logging failures
|
||||
} catch (e) {
|
||||
logger.debug('Failed to persist security event log', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -442,7 +446,8 @@ function logSecurityEvent(
|
||||
export function getSecurityLog(): SecurityEvent[] {
|
||||
try {
|
||||
return JSON.parse(localStorage.getItem(SECURITY_LOG_KEY) || '[]');
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('Failed to read security event log', { error: e });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,7 +61,8 @@ function loadLocalLogs(): FrontendAuditEntry[] {
|
||||
if (!stored) return [];
|
||||
const logs = JSON.parse(stored) as FrontendAuditEntry[];
|
||||
return Array.isArray(logs) ? logs : [];
|
||||
} catch {
|
||||
} catch (e) {
|
||||
log.debug('Failed to parse audit logs from localStorage', { error: e });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -446,8 +446,8 @@ export class AutonomyManager {
|
||||
const parsed = JSON.parse(raw);
|
||||
return { ...DEFAULT_AUTONOMY_CONFIGS.assisted, ...parsed };
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
} catch (e) {
|
||||
log.debug('Failed to load autonomy config from localStorage', { error: e });
|
||||
}
|
||||
return DEFAULT_AUTONOMY_CONFIGS.assisted;
|
||||
}
|
||||
@@ -455,8 +455,8 @@ export class AutonomyManager {
|
||||
private saveConfig(): void {
|
||||
try {
|
||||
localStorage.setItem(AUTONOMY_CONFIG_KEY, JSON.stringify(this.config));
|
||||
} catch {
|
||||
// Ignore
|
||||
} catch (e) {
|
||||
log.debug('Failed to save autonomy config to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -466,7 +466,8 @@ export class AutonomyManager {
|
||||
if (raw) {
|
||||
this.auditLog = JSON.parse(raw);
|
||||
}
|
||||
} catch {
|
||||
} catch (e) {
|
||||
log.debug('Failed to load audit log from localStorage', { error: e });
|
||||
this.auditLog = [];
|
||||
}
|
||||
}
|
||||
@@ -474,8 +475,8 @@ export class AutonomyManager {
|
||||
private saveAuditLog(): void {
|
||||
try {
|
||||
localStorage.setItem(AUDIT_LOG_KEY, JSON.stringify(this.auditLog.slice(-100)));
|
||||
} catch {
|
||||
// Ignore
|
||||
} catch (e) {
|
||||
log.debug('Failed to save audit log to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -483,8 +484,8 @@ export class AutonomyManager {
|
||||
try {
|
||||
const pending = Array.from(this.pendingApprovals.entries());
|
||||
localStorage.setItem('zclaw-pending-approvals', JSON.stringify(pending));
|
||||
} catch {
|
||||
// Ignore
|
||||
} catch (e) {
|
||||
log.debug('Failed to persist pending approvals to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,9 @@
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import { secureStorage } from './secure-storage';
|
||||
import { createLogger } from './logger';
|
||||
|
||||
const logger = createLogger('embedding-client');
|
||||
|
||||
export interface EmbeddingConfig {
|
||||
provider: string;
|
||||
@@ -46,8 +49,8 @@ export function loadEmbeddingConfig(): EmbeddingConfig {
|
||||
const parsed = JSON.parse(stored);
|
||||
return { ...parsed, apiKey: '' };
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
} catch (e) {
|
||||
logger.debug('Failed to load embedding config', { error: e });
|
||||
}
|
||||
return {
|
||||
provider: 'local',
|
||||
@@ -66,8 +69,8 @@ export function saveEmbeddingConfig(config: EmbeddingConfig): void {
|
||||
try {
|
||||
const { apiKey: _, ...rest } = config;
|
||||
localStorage.setItem(EMBEDDING_STORAGE_KEY, JSON.stringify(rest));
|
||||
} catch {
|
||||
// ignore
|
||||
} catch (e) {
|
||||
logger.debug('Failed to save embedding config', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -203,7 +206,7 @@ export class EmbeddingClient {
|
||||
saveEmbeddingConfig(this.config);
|
||||
// Save apiKey to secure storage (fire-and-forget)
|
||||
if (config.apiKey !== undefined) {
|
||||
saveEmbeddingApiKey(config.apiKey).catch(() => {});
|
||||
saveEmbeddingApiKey(config.apiKey).catch(e => logger.debug('Failed to save embedding API key', { error: e }));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -343,7 +343,8 @@ export async function isEncryptedStorageActive(): Promise<boolean> {
|
||||
try {
|
||||
const container: EncryptedContainer = JSON.parse(stored);
|
||||
return container.metadata?.version === STORAGE_VERSION;
|
||||
} catch {
|
||||
} catch (e) {
|
||||
log.debug('Failed to check encrypted storage version', { error: e });
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -369,8 +370,8 @@ export async function getStorageStats(): Promise<{
|
||||
// Count conversations without full decryption
|
||||
const conversations = await loadConversations();
|
||||
conversationCount = conversations.length;
|
||||
} catch {
|
||||
// Ignore parsing errors
|
||||
} catch (e) {
|
||||
log.debug('Failed to parse storage stats', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -32,6 +32,9 @@ import {
|
||||
getSecurityStatusFallback,
|
||||
isNotFoundError,
|
||||
} from './api-fallbacks';
|
||||
import { createLogger } from './logger';
|
||||
|
||||
const logger = createLogger('GatewayApi');
|
||||
|
||||
// === Install all API methods onto GatewayClient prototype ===
|
||||
|
||||
@@ -131,7 +134,8 @@ export function installApiMethods(ClientClass: { prototype: GatewayClient }): vo
|
||||
proto.getSessionStats = async function (this: GatewayClient): Promise<any> {
|
||||
try {
|
||||
return await this.restGet('/api/stats/sessions');
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('getSessionStats failed', { error: e });
|
||||
return { sessions: [] };
|
||||
}
|
||||
};
|
||||
@@ -619,7 +623,8 @@ export function installApiMethods(ClientClass: { prototype: GatewayClient }): vo
|
||||
proto.getCapabilities = async function (this: GatewayClient): Promise<{ capabilities: string[] }> {
|
||||
try {
|
||||
return await this.restGet('/api/capabilities');
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('getCapabilities failed, using defaults', { error: e });
|
||||
return { capabilities: ['chat', 'agents', 'hands', 'workflows'] };
|
||||
}
|
||||
};
|
||||
|
||||
@@ -74,6 +74,7 @@ import {
|
||||
import type { GatewayConfigSnapshot, GatewayModelChoice } from './gateway-config';
|
||||
import { installApiMethods } from './gateway-api';
|
||||
import { createLogger } from './logger';
|
||||
import { GatewayHttpError } from './gateway-errors';
|
||||
|
||||
const log = createLogger('GatewayClient');
|
||||
|
||||
@@ -712,10 +713,8 @@ export class GatewayClient {
|
||||
const baseUrl = this.getRestBaseUrl();
|
||||
const response = await fetch(`${baseUrl}${path}`);
|
||||
if (!response.ok) {
|
||||
// For 404 errors, throw with status code so callers can handle gracefully
|
||||
const error = new Error(`REST API error: ${response.status} ${response.statusText}`);
|
||||
(error as any).status = response.status;
|
||||
throw error;
|
||||
const errorBody = await response.text().catch(() => '');
|
||||
throw new GatewayHttpError(`HTTP ${response.status}: ${errorBody || response.statusText}`, response.status, errorBody);
|
||||
}
|
||||
return response.json();
|
||||
}
|
||||
@@ -734,10 +733,7 @@ export class GatewayClient {
|
||||
if (!response.ok) {
|
||||
const errorBody = await response.text().catch(() => '');
|
||||
log.error(`POST ${url} failed: ${response.status} ${response.statusText}`, errorBody);
|
||||
const error = new Error(`REST API error: ${response.status} ${response.statusText}`);
|
||||
(error as any).status = response.status;
|
||||
(error as any).body = errorBody;
|
||||
throw error;
|
||||
throw new GatewayHttpError(`HTTP ${response.status}: ${errorBody || response.statusText}`, response.status, errorBody);
|
||||
}
|
||||
|
||||
const result = await response.json();
|
||||
@@ -932,8 +928,8 @@ export class GatewayClient {
|
||||
} else {
|
||||
originalHandler?.call(this.ws!, evt);
|
||||
}
|
||||
} catch {
|
||||
// Ignore parse errors
|
||||
} catch (e) {
|
||||
log.debug('Parse error in handshake response handler', { error: e });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -969,14 +965,14 @@ export class GatewayClient {
|
||||
const listeners = this.eventListeners.get(event);
|
||||
if (listeners) {
|
||||
for (const cb of listeners) {
|
||||
try { cb(payload); } catch { /* ignore listener errors */ }
|
||||
try { cb(payload); } catch (e) { log.debug('Event listener error', { error: e }); }
|
||||
}
|
||||
}
|
||||
// Also emit wildcard
|
||||
const wildcardListeners = this.eventListeners.get('*');
|
||||
if (wildcardListeners) {
|
||||
for (const cb of wildcardListeners) {
|
||||
try { cb({ event, payload }); } catch { /* ignore */ }
|
||||
try { cb({ event, payload }); } catch (e) { log.debug('Wildcard event listener error', { error: e }); }
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1003,7 +999,7 @@ export class GatewayClient {
|
||||
this.ws.onclose = null;
|
||||
this.ws.onerror = null;
|
||||
if (this.ws.readyState === WebSocket.OPEN || this.ws.readyState === WebSocket.CONNECTING) {
|
||||
try { this.ws.close(); } catch { /* ignore */ }
|
||||
try { this.ws.close(); } catch (e) { log.debug('WebSocket close failed during cleanup', { error: e }); }
|
||||
}
|
||||
this.ws = null;
|
||||
}
|
||||
@@ -1117,9 +1113,9 @@ export class GatewayClient {
|
||||
this.reconnectTimer = window.setTimeout(async () => {
|
||||
try {
|
||||
await this.connect();
|
||||
} catch {
|
||||
} catch (e) {
|
||||
/* close handler will trigger another reconnect */
|
||||
this.log('warn', `Reconnect attempt ${this.reconnectAttempts} failed`);
|
||||
this.log('warn', `Reconnect attempt ${this.reconnectAttempts} failed: ${e instanceof Error ? e.message : String(e)}`);
|
||||
}
|
||||
}, delay);
|
||||
}
|
||||
|
||||
108
desktop/src/lib/gateway-errors.ts
Normal file
108
desktop/src/lib/gateway-errors.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* gateway-errors.ts - Gateway Error Classes & Security Utilities
|
||||
*
|
||||
* Extracted from gateway-client.ts for modularity.
|
||||
* Contains error classes and WebSocket security validation.
|
||||
*/
|
||||
|
||||
import { isLocalhost } from './gateway-storage';
|
||||
|
||||
// === Error Classes ===
|
||||
|
||||
/**
|
||||
* Security error for invalid WebSocket connections.
|
||||
* Thrown when non-localhost URLs use ws:// instead of wss://.
|
||||
*/
|
||||
export class SecurityError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'SecurityError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Connection error for WebSocket/HTTP connection failures.
|
||||
*/
|
||||
export class ConnectionError extends Error {
|
||||
public readonly code?: string;
|
||||
public readonly recoverable: boolean;
|
||||
|
||||
constructor(message: string, code?: string, recoverable: boolean = true) {
|
||||
super(message);
|
||||
this.name = 'ConnectionError';
|
||||
this.code = code;
|
||||
this.recoverable = recoverable;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Timeout error for request/response timeouts.
|
||||
*/
|
||||
export class TimeoutError extends Error {
|
||||
public readonly timeout: number;
|
||||
|
||||
constructor(message: string, timeout: number) {
|
||||
super(message);
|
||||
this.name = 'TimeoutError';
|
||||
this.timeout = timeout;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Authentication error for handshake/token failures.
|
||||
*/
|
||||
export class AuthenticationError extends Error {
|
||||
public readonly code?: string;
|
||||
|
||||
constructor(message: string, code?: string) {
|
||||
super(message);
|
||||
this.name = 'AuthenticationError';
|
||||
this.code = code;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* HTTP error for REST API responses with non-2xx status codes.
|
||||
*/
|
||||
export class GatewayHttpError extends Error {
|
||||
public readonly status: number;
|
||||
public readonly body?: unknown;
|
||||
|
||||
constructor(message: string, status: number, body?: unknown) {
|
||||
super(message);
|
||||
this.name = 'GatewayHttpError';
|
||||
this.status = status;
|
||||
this.body = body;
|
||||
}
|
||||
}
|
||||
|
||||
// === Utility Functions ===
|
||||
|
||||
/**
|
||||
* Validate WebSocket URL security.
|
||||
* Ensures non-localhost connections use WSS protocol.
|
||||
*
|
||||
* @param url - The WebSocket URL to validate
|
||||
* @throws SecurityError if non-localhost URL uses ws:// instead of wss://
|
||||
*/
|
||||
export function validateWebSocketSecurity(url: string): void {
|
||||
if (!url.startsWith('wss://') && !isLocalhost(url)) {
|
||||
throw new SecurityError(
|
||||
'Non-localhost connections must use WSS protocol for security. ' +
|
||||
`URL: ${url.replace(/:[^:@]+@/, ':****@')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a unique idempotency key for requests.
|
||||
* Uses crypto.randomUUID when available, otherwise falls back to manual generation.
|
||||
*/
|
||||
export function createIdempotencyKey(): string {
|
||||
if (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function') {
|
||||
return crypto.randomUUID();
|
||||
}
|
||||
const bytes = crypto.getRandomValues(new Uint8Array(6));
|
||||
const suffix = Array.from(bytes).map(b => b.toString(36).padStart(2, '0')).join('');
|
||||
return `idem_${Date.now()}_${suffix}`;
|
||||
}
|
||||
117
desktop/src/lib/gateway-heartbeat.ts
Normal file
117
desktop/src/lib/gateway-heartbeat.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
/**
|
||||
* gateway-heartbeat.ts - Gateway Heartbeat Methods
|
||||
*
|
||||
* Extracted from gateway-client.ts for modularity.
|
||||
* Installs heartbeat methods onto GatewayClient.prototype via mixin pattern.
|
||||
*
|
||||
* Heartbeat constants are defined here as module-level values
|
||||
* to avoid static field coupling with the main class.
|
||||
*/
|
||||
|
||||
import type { GatewayClient } from './gateway-client';
|
||||
|
||||
// === Heartbeat Constants ===
|
||||
|
||||
/** Interval between heartbeat pings (30 seconds) */
|
||||
export const HEARTBEAT_INTERVAL = 30000;
|
||||
|
||||
/** Timeout for waiting for pong response (10 seconds) */
|
||||
export const HEARTBEAT_TIMEOUT = 10000;
|
||||
|
||||
/** Maximum missed heartbeats before reconnecting */
|
||||
export const MAX_MISSED_HEARTBEATS = 3;
|
||||
|
||||
// === Mixin Installer ===
|
||||
|
||||
/**
|
||||
* Install heartbeat methods onto GatewayClient.prototype.
|
||||
*
|
||||
* These methods access instance properties:
|
||||
* - this.ws: WebSocket | null
|
||||
* - this.heartbeatInterval: number | null
|
||||
* - this.heartbeatTimeout: number | null
|
||||
* - this.missedHeartbeats: number
|
||||
* - this.log(level, message): void
|
||||
* - this.stopHeartbeat(): void
|
||||
*/
|
||||
export function installHeartbeatMethods(ClientClass: { prototype: GatewayClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
/**
|
||||
* Start heartbeat to keep connection alive.
|
||||
* Called after successful connection.
|
||||
*/
|
||||
proto.startHeartbeat = function (this: GatewayClient): void {
|
||||
(this as any).stopHeartbeat();
|
||||
(this as any).missedHeartbeats = 0;
|
||||
|
||||
(this as any).heartbeatInterval = window.setInterval(() => {
|
||||
(this as any).sendHeartbeat();
|
||||
}, HEARTBEAT_INTERVAL);
|
||||
|
||||
(this as any).log('debug', 'Heartbeat started');
|
||||
};
|
||||
|
||||
/**
|
||||
* Stop heartbeat.
|
||||
* Called on cleanup or disconnect.
|
||||
*/
|
||||
proto.stopHeartbeat = function (this: GatewayClient): void {
|
||||
const self = this as any;
|
||||
if (self.heartbeatInterval) {
|
||||
clearInterval(self.heartbeatInterval);
|
||||
self.heartbeatInterval = null;
|
||||
}
|
||||
if (self.heartbeatTimeout) {
|
||||
clearTimeout(self.heartbeatTimeout);
|
||||
self.heartbeatTimeout = null;
|
||||
}
|
||||
self.log('debug', 'Heartbeat stopped');
|
||||
};
|
||||
|
||||
/**
|
||||
* Send a ping heartbeat to the server.
|
||||
*/
|
||||
proto.sendHeartbeat = function (this: GatewayClient): void {
|
||||
const self = this as any;
|
||||
if (self.ws?.readyState !== WebSocket.OPEN) {
|
||||
self.log('debug', 'Skipping heartbeat - WebSocket not open');
|
||||
return;
|
||||
}
|
||||
|
||||
self.missedHeartbeats++;
|
||||
if (self.missedHeartbeats > MAX_MISSED_HEARTBEATS) {
|
||||
self.log('warn', `Max missed heartbeats (${MAX_MISSED_HEARTBEATS}), reconnecting`);
|
||||
self.stopHeartbeat();
|
||||
self.ws.close(4000, 'Heartbeat timeout');
|
||||
return;
|
||||
}
|
||||
|
||||
// Send ping frame
|
||||
try {
|
||||
self.ws.send(JSON.stringify({ type: 'ping' }));
|
||||
self.log('debug', `Ping sent (missed: ${self.missedHeartbeats})`);
|
||||
|
||||
// Set timeout for pong
|
||||
self.heartbeatTimeout = window.setTimeout(() => {
|
||||
self.log('warn', 'Heartbeat pong timeout');
|
||||
// Don't reconnect immediately, let the next heartbeat check
|
||||
}, HEARTBEAT_TIMEOUT);
|
||||
} catch (error) {
|
||||
self.log('error', `Failed to send heartbeat: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Handle pong response from server.
|
||||
*/
|
||||
proto.handlePong = function (this: GatewayClient): void {
|
||||
const self = this as any;
|
||||
self.missedHeartbeats = 0;
|
||||
if (self.heartbeatTimeout) {
|
||||
clearTimeout(self.heartbeatTimeout);
|
||||
self.heartbeatTimeout = null;
|
||||
}
|
||||
self.log('debug', 'Pong received, heartbeat reset');
|
||||
};
|
||||
}
|
||||
80
desktop/src/lib/gateway-reconnect.ts
Normal file
80
desktop/src/lib/gateway-reconnect.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* gateway-reconnect.ts - Gateway Reconnect Methods
|
||||
*
|
||||
* Extracted from gateway-client.ts for modularity.
|
||||
* Installs reconnect methods onto GatewayClient.prototype via mixin pattern.
|
||||
*/
|
||||
|
||||
import type { GatewayClient } from './gateway-client';
|
||||
|
||||
// === Reconnect Constants ===
|
||||
|
||||
/** Maximum number of reconnect attempts before giving up */
|
||||
export const MAX_RECONNECT_ATTEMPTS = 10;
|
||||
|
||||
// === Mixin Installer ===
|
||||
|
||||
/**
|
||||
* Install reconnect methods onto GatewayClient.prototype.
|
||||
*
|
||||
* These methods access instance properties:
|
||||
* - this.reconnectAttempts: number
|
||||
* - this.reconnectInterval: number
|
||||
* - this.reconnectTimer: number | null
|
||||
* - this.log(level, message): void
|
||||
* - this.connect(): Promise<void>
|
||||
* - this.setState(state): void
|
||||
* - this.emitEvent(event, payload): void
|
||||
*/
|
||||
export function installReconnectMethods(ClientClass: { prototype: GatewayClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
/**
|
||||
* Schedule a reconnect attempt with exponential backoff.
|
||||
*/
|
||||
proto.scheduleReconnect = function (this: GatewayClient): void {
|
||||
const self = this as any;
|
||||
if (self.reconnectAttempts >= MAX_RECONNECT_ATTEMPTS) {
|
||||
self.log('error', `Max reconnect attempts (${MAX_RECONNECT_ATTEMPTS}) reached. Please reconnect manually.`);
|
||||
self.setState('disconnected');
|
||||
self.emitEvent('reconnect_failed', {
|
||||
attempts: self.reconnectAttempts,
|
||||
maxAttempts: MAX_RECONNECT_ATTEMPTS,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
self.reconnectAttempts++;
|
||||
self.setState('reconnecting');
|
||||
const delay = Math.min(self.reconnectInterval * Math.pow(1.5, self.reconnectAttempts - 1), 30000);
|
||||
|
||||
self.log('info', `Scheduling reconnect attempt ${self.reconnectAttempts} in ${delay}ms`);
|
||||
|
||||
// Emit reconnecting event for UI
|
||||
self.emitEvent('reconnecting', {
|
||||
attempt: self.reconnectAttempts,
|
||||
delay,
|
||||
maxAttempts: MAX_RECONNECT_ATTEMPTS,
|
||||
});
|
||||
|
||||
self.reconnectTimer = window.setTimeout(async () => {
|
||||
try {
|
||||
await self.connect();
|
||||
} catch (e) {
|
||||
/* close handler will trigger another reconnect */
|
||||
self.log('warn', `Reconnect attempt ${self.reconnectAttempts} failed: ${e instanceof Error ? e.message : String(e)}`);
|
||||
}
|
||||
}, delay);
|
||||
};
|
||||
|
||||
/**
|
||||
* Cancel a pending reconnect attempt.
|
||||
*/
|
||||
proto.cancelReconnect = function (this: GatewayClient): void {
|
||||
const self = this as any;
|
||||
if (self.reconnectTimer !== null) {
|
||||
clearTimeout(self.reconnectTimer);
|
||||
self.reconnectTimer = null;
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -10,6 +10,9 @@
|
||||
|
||||
import { secureStorage } from './secure-storage';
|
||||
import { logKeyEvent, logSecurityEvent } from './security-audit';
|
||||
import { createLogger } from './logger';
|
||||
|
||||
const logger = createLogger('GatewayStorage');
|
||||
|
||||
// === WSS Configuration ===
|
||||
|
||||
@@ -35,7 +38,8 @@ export function isLocalhost(url: string): boolean {
|
||||
return parsed.hostname === 'localhost' ||
|
||||
parsed.hostname === '127.0.0.1' ||
|
||||
parsed.hostname === '[::1]';
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('URL parsing failed in isLocalhost', { error: e });
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -87,7 +91,8 @@ export function getStoredGatewayUrl(): string {
|
||||
try {
|
||||
const stored = localStorage.getItem(GATEWAY_URL_STORAGE_KEY);
|
||||
return normalizeGatewayUrl(stored || DEFAULT_GATEWAY_URL);
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('localStorage unavailable for gateway URL read', { error: e });
|
||||
return DEFAULT_GATEWAY_URL;
|
||||
}
|
||||
}
|
||||
@@ -96,7 +101,7 @@ export function setStoredGatewayUrl(url: string): string {
|
||||
const normalized = normalizeGatewayUrl(url || DEFAULT_GATEWAY_URL);
|
||||
try {
|
||||
localStorage.setItem(GATEWAY_URL_STORAGE_KEY, normalized);
|
||||
} catch { /* ignore localStorage failures */ }
|
||||
} catch (e) { logger.debug('localStorage unavailable for gateway URL write', { error: e }); }
|
||||
return normalized;
|
||||
}
|
||||
|
||||
@@ -142,13 +147,15 @@ export function getStoredGatewayToken(): string {
|
||||
console.warn('[GatewayStorage] Token is encrypted - use async version');
|
||||
return '';
|
||||
}
|
||||
} catch {
|
||||
} catch (e) {
|
||||
// Not JSON, so it's plaintext (legacy format)
|
||||
logger.debug('Legacy plaintext token format detected', { error: e });
|
||||
return stored;
|
||||
}
|
||||
}
|
||||
return '';
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.warn('Failed to read gateway token from localStorage', { error: e });
|
||||
return '';
|
||||
}
|
||||
}
|
||||
@@ -202,8 +209,8 @@ export function setStoredGatewayToken(token: string): string {
|
||||
} else {
|
||||
localStorage.removeItem(GATEWAY_TOKEN_STORAGE_KEY);
|
||||
}
|
||||
} catch {
|
||||
/* ignore localStorage failures */
|
||||
} catch (e) {
|
||||
logger.warn('Failed to write gateway token to localStorage', { error: e });
|
||||
}
|
||||
|
||||
return normalized;
|
||||
|
||||
288
desktop/src/lib/gateway-stream.ts
Normal file
288
desktop/src/lib/gateway-stream.ts
Normal file
@@ -0,0 +1,288 @@
|
||||
/**
|
||||
* gateway-stream.ts - Gateway Stream Methods
|
||||
*
|
||||
* Extracted from gateway-client.ts for modularity.
|
||||
* Installs streaming methods onto GatewayClient.prototype via mixin pattern.
|
||||
*
|
||||
* Contains:
|
||||
* - chatStream (public): Send message with streaming response
|
||||
* - connectZclawStream (private): Connect to ZCLAW WebSocket for streaming
|
||||
* - handleZclawStreamEvent (private): Parse and dispatch stream events
|
||||
* - cancelStream (public): Cancel an ongoing stream
|
||||
*/
|
||||
|
||||
import type { ZclawStreamEvent } from './gateway-types';
|
||||
import type { GatewayClient } from './gateway-client';
|
||||
import { createIdempotencyKey } from './gateway-errors';
|
||||
|
||||
// === Mixin Installer ===
|
||||
|
||||
/**
|
||||
* Install streaming methods onto GatewayClient.prototype.
|
||||
*
|
||||
* These methods access instance properties:
|
||||
* - this.defaultAgentId: string
|
||||
* - this.zclawWs: WebSocket | null
|
||||
* - this.streamCallbacks: Map<string, StreamCallbacks>
|
||||
* - this.log(level, message): void
|
||||
* - this.getRestBaseUrl(): string
|
||||
* - this.fetchDefaultAgentId(): Promise<string | null>
|
||||
* - this.emitEvent(event, payload): void
|
||||
*/
|
||||
export function installStreamMethods(ClientClass: { prototype: GatewayClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
/**
|
||||
* Send message with streaming response (ZCLAW WebSocket).
|
||||
*/
|
||||
proto.chatStream = async function (
|
||||
this: GatewayClient,
|
||||
message: string,
|
||||
callbacks: {
|
||||
onDelta: (delta: string) => void;
|
||||
onTool?: (tool: string, input: string, output: string) => void;
|
||||
onHand?: (name: string, status: string, result?: unknown) => void;
|
||||
onComplete: () => void;
|
||||
onError: (error: string) => void;
|
||||
},
|
||||
opts?: {
|
||||
sessionKey?: string;
|
||||
agentId?: string;
|
||||
}
|
||||
): Promise<{ runId: string }> {
|
||||
const self = this as any;
|
||||
const agentId = opts?.agentId || self.defaultAgentId;
|
||||
const runId = createIdempotencyKey();
|
||||
const sessionId = opts?.sessionKey || crypto.randomUUID();
|
||||
|
||||
// If no agent ID, try to fetch from ZCLAW status (async, but we'll handle it in connectZclawStream)
|
||||
if (!agentId) {
|
||||
// Try to get default agent asynchronously
|
||||
self.fetchDefaultAgentId().then(() => {
|
||||
const resolvedAgentId = self.defaultAgentId;
|
||||
if (resolvedAgentId) {
|
||||
self.streamCallbacks.set(runId, callbacks);
|
||||
self.connectZclawStream(resolvedAgentId, runId, sessionId, message);
|
||||
} else {
|
||||
callbacks.onError('No agent available. Please ensure ZCLAW has at least one agent.');
|
||||
callbacks.onComplete();
|
||||
}
|
||||
}).catch((err: unknown) => {
|
||||
callbacks.onError(`Failed to get agent: ${err}`);
|
||||
callbacks.onComplete();
|
||||
});
|
||||
return { runId };
|
||||
}
|
||||
|
||||
// Store callbacks for this run
|
||||
self.streamCallbacks.set(runId, callbacks);
|
||||
|
||||
// Connect to ZCLAW WebSocket if not connected
|
||||
self.connectZclawStream(agentId, runId, sessionId, message);
|
||||
|
||||
return { runId };
|
||||
};
|
||||
|
||||
/**
|
||||
* Connect to ZCLAW streaming WebSocket.
|
||||
*/
|
||||
proto.connectZclawStream = function (
|
||||
this: GatewayClient,
|
||||
agentId: string,
|
||||
runId: string,
|
||||
sessionId: string,
|
||||
message: string
|
||||
): void {
|
||||
const self = this as any;
|
||||
// Close existing connection if any
|
||||
if (self.zclawWs && self.zclawWs.readyState !== WebSocket.CLOSED) {
|
||||
self.zclawWs.close();
|
||||
}
|
||||
|
||||
// Build WebSocket URL
|
||||
// In dev mode, use Vite proxy; in production, use direct connection
|
||||
let wsUrl: string;
|
||||
if (typeof window !== 'undefined' && window.location.port === '1420') {
|
||||
// Dev mode: use Vite proxy with relative path
|
||||
wsUrl = `ws://${window.location.host}/api/agents/${agentId}/ws`;
|
||||
} else {
|
||||
// Production: extract from stored URL
|
||||
const httpUrl = self.getRestBaseUrl();
|
||||
wsUrl = httpUrl.replace(/^http/, 'ws') + `/api/agents/${agentId}/ws`;
|
||||
}
|
||||
|
||||
self.log('info', `Connecting to ZCLAW stream: ${wsUrl}`);
|
||||
|
||||
try {
|
||||
self.zclawWs = new WebSocket(wsUrl);
|
||||
|
||||
self.zclawWs.onopen = () => {
|
||||
self.log('info', 'ZCLAW WebSocket connected');
|
||||
// Send chat message using ZCLAW actual protocol
|
||||
const chatRequest = {
|
||||
type: 'message',
|
||||
content: message,
|
||||
session_id: sessionId,
|
||||
};
|
||||
self.zclawWs?.send(JSON.stringify(chatRequest));
|
||||
};
|
||||
|
||||
self.zclawWs.onmessage = (event: MessageEvent) => {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
self.handleZclawStreamEvent(runId, data, sessionId);
|
||||
} catch (err: unknown) {
|
||||
const errorMessage = err instanceof Error ? err.message : String(err);
|
||||
self.log('error', `Failed to parse stream event: ${errorMessage}`);
|
||||
}
|
||||
};
|
||||
|
||||
self.zclawWs.onerror = (_event: Event) => {
|
||||
self.log('error', 'ZCLAW WebSocket error');
|
||||
const callbacks = self.streamCallbacks.get(runId);
|
||||
if (callbacks) {
|
||||
callbacks.onError('WebSocket connection failed');
|
||||
self.streamCallbacks.delete(runId);
|
||||
}
|
||||
};
|
||||
|
||||
self.zclawWs.onclose = (event: CloseEvent) => {
|
||||
self.log('info', `ZCLAW WebSocket closed: ${event.code} ${event.reason}`);
|
||||
const callbacks = self.streamCallbacks.get(runId);
|
||||
if (callbacks && event.code !== 1000) {
|
||||
callbacks.onError(`Connection closed: ${event.reason || 'unknown'}`);
|
||||
}
|
||||
self.streamCallbacks.delete(runId);
|
||||
self.zclawWs = null;
|
||||
};
|
||||
} catch (err: unknown) {
|
||||
const errorMessage = err instanceof Error ? err.message : String(err);
|
||||
self.log('error', `Failed to create WebSocket: ${errorMessage}`);
|
||||
const callbacks = self.streamCallbacks.get(runId);
|
||||
if (callbacks) {
|
||||
callbacks.onError(errorMessage);
|
||||
self.streamCallbacks.delete(runId);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Handle ZCLAW stream events.
|
||||
*/
|
||||
proto.handleZclawStreamEvent = function (
|
||||
this: GatewayClient,
|
||||
runId: string,
|
||||
data: ZclawStreamEvent,
|
||||
sessionId: string
|
||||
): void {
|
||||
const self = this as any;
|
||||
const callbacks = self.streamCallbacks.get(runId);
|
||||
if (!callbacks) return;
|
||||
|
||||
switch (data.type) {
|
||||
// ZCLAW actual event types
|
||||
case 'text_delta':
|
||||
// Stream delta content
|
||||
if (data.content) {
|
||||
callbacks.onDelta(data.content);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'phase':
|
||||
// Phase change: streaming | done
|
||||
if (data.phase === 'done') {
|
||||
callbacks.onComplete();
|
||||
self.streamCallbacks.delete(runId);
|
||||
if (self.zclawWs) {
|
||||
self.zclawWs.close(1000, 'Stream complete');
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case 'response':
|
||||
// Final response with tokens info
|
||||
if (data.content) {
|
||||
// If we haven't received any deltas yet, send the full response
|
||||
// This handles non-streaming responses
|
||||
}
|
||||
// Mark complete if phase done wasn't sent
|
||||
callbacks.onComplete();
|
||||
self.streamCallbacks.delete(runId);
|
||||
if (self.zclawWs) {
|
||||
self.zclawWs.close(1000, 'Stream complete');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'typing':
|
||||
// Typing indicator: { state: 'start' | 'stop' }
|
||||
// Can be used for UI feedback
|
||||
break;
|
||||
|
||||
case 'tool_call':
|
||||
// Tool call event
|
||||
if (callbacks.onTool && data.tool) {
|
||||
callbacks.onTool(data.tool, JSON.stringify(data.input || {}), data.output || '');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'tool_result':
|
||||
if (callbacks.onTool && data.tool) {
|
||||
callbacks.onTool(data.tool, '', String(data.result || data.output || ''));
|
||||
}
|
||||
break;
|
||||
|
||||
case 'hand':
|
||||
if (callbacks.onHand && data.hand_name) {
|
||||
callbacks.onHand(data.hand_name, data.hand_status || 'triggered', data.hand_result);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'error':
|
||||
callbacks.onError(data.message || data.code || data.content || 'Unknown error');
|
||||
self.streamCallbacks.delete(runId);
|
||||
if (self.zclawWs) {
|
||||
self.zclawWs.close(1011, 'Error');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'connected':
|
||||
// Connection established
|
||||
self.log('info', `ZCLAW agent connected: ${data.agent_id}`);
|
||||
break;
|
||||
|
||||
case 'agents_updated':
|
||||
// Agents list updated
|
||||
self.log('debug', 'Agents list updated');
|
||||
break;
|
||||
|
||||
default:
|
||||
// Emit unknown events for debugging
|
||||
self.log('debug', `Stream event: ${data.type}`);
|
||||
}
|
||||
|
||||
// Also emit to general 'agent' event listeners
|
||||
self.emitEvent('agent', {
|
||||
stream: data.type === 'text_delta' ? 'assistant' : data.type,
|
||||
delta: data.content,
|
||||
content: data.content,
|
||||
runId,
|
||||
sessionId,
|
||||
...data,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Cancel an ongoing stream.
|
||||
*/
|
||||
proto.cancelStream = function (this: GatewayClient, runId: string): void {
|
||||
const self = this as any;
|
||||
const callbacks = self.streamCallbacks.get(runId);
|
||||
if (callbacks) {
|
||||
callbacks.onError('Stream cancelled');
|
||||
self.streamCallbacks.delete(runId);
|
||||
}
|
||||
if (self.zclawWs && self.zclawWs.readyState === WebSocket.OPEN) {
|
||||
self.zclawWs.close(1000, 'User cancelled');
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -49,6 +49,9 @@ import { invoke } from '@tauri-apps/api/core';
|
||||
|
||||
import { isTauriRuntime } from './tauri-gateway';
|
||||
import { generateRandomString } from './crypto-utils';
|
||||
import { createLogger } from './logger';
|
||||
|
||||
const logger = createLogger('intelligence-client');
|
||||
|
||||
import {
|
||||
intelligence,
|
||||
@@ -339,7 +342,8 @@ function parseTags(tags: string | string[]): string[] {
|
||||
if (!tags) return [];
|
||||
try {
|
||||
return JSON.parse(tags);
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('JSON parse failed for tags, using fallback', { error: e });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
@@ -358,8 +362,8 @@ function getFallbackStore(): FallbackMemoryStore {
|
||||
if (stored) {
|
||||
return JSON.parse(stored);
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
} catch (e) {
|
||||
logger.debug('Failed to read fallback store from localStorage', { error: e });
|
||||
}
|
||||
return { memories: [] };
|
||||
}
|
||||
@@ -367,8 +371,8 @@ function getFallbackStore(): FallbackMemoryStore {
|
||||
function saveFallbackStore(store: FallbackMemoryStore): void {
|
||||
try {
|
||||
localStorage.setItem(FALLBACK_STORAGE_KEY, JSON.stringify(store));
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to save to localStorage');
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save fallback store to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -467,8 +471,8 @@ const fallbackMemory = {
|
||||
try {
|
||||
const serialized = JSON.stringify(store.memories);
|
||||
storageSizeBytes = new Blob([serialized]).size;
|
||||
} catch {
|
||||
// Ignore serialization errors
|
||||
} catch (e) {
|
||||
logger.debug('Failed to estimate storage size', { error: e });
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -718,8 +722,8 @@ function loadIdentitiesFromStorage(): Map<string, IdentityFiles> {
|
||||
const parsed = JSON.parse(stored) as Record<string, IdentityFiles>;
|
||||
return new Map(Object.entries(parsed));
|
||||
}
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to load identities from localStorage');
|
||||
} catch (e) {
|
||||
logger.warn('Failed to load identities from localStorage', { error: e });
|
||||
}
|
||||
return new Map();
|
||||
}
|
||||
@@ -728,8 +732,8 @@ function saveIdentitiesToStorage(identities: Map<string, IdentityFiles>): void {
|
||||
try {
|
||||
const obj = Object.fromEntries(identities);
|
||||
localStorage.setItem(IDENTITY_STORAGE_KEY, JSON.stringify(obj));
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to save identities to localStorage');
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save identities to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -739,8 +743,8 @@ function loadProposalsFromStorage(): IdentityChangeProposal[] {
|
||||
if (stored) {
|
||||
return JSON.parse(stored) as IdentityChangeProposal[];
|
||||
}
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to load proposals from localStorage');
|
||||
} catch (e) {
|
||||
logger.warn('Failed to load proposals from localStorage', { error: e });
|
||||
}
|
||||
return [];
|
||||
}
|
||||
@@ -748,8 +752,8 @@ function loadProposalsFromStorage(): IdentityChangeProposal[] {
|
||||
function saveProposalsToStorage(proposals: IdentityChangeProposal[]): void {
|
||||
try {
|
||||
localStorage.setItem(PROPOSALS_STORAGE_KEY, JSON.stringify(proposals));
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to save proposals to localStorage');
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save proposals to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -759,8 +763,8 @@ function loadSnapshotsFromStorage(): IdentitySnapshot[] {
|
||||
if (stored) {
|
||||
return JSON.parse(stored) as IdentitySnapshot[];
|
||||
}
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to load snapshots from localStorage');
|
||||
} catch (e) {
|
||||
logger.warn('Failed to load snapshots from localStorage', { error: e });
|
||||
}
|
||||
return [];
|
||||
}
|
||||
@@ -768,8 +772,8 @@ function loadSnapshotsFromStorage(): IdentitySnapshot[] {
|
||||
function saveSnapshotsToStorage(snapshots: IdentitySnapshot[]): void {
|
||||
try {
|
||||
localStorage.setItem(SNAPSHOTS_STORAGE_KEY, JSON.stringify(snapshots));
|
||||
} catch {
|
||||
console.warn('[IntelligenceClient] Failed to save snapshots to localStorage');
|
||||
} catch (e) {
|
||||
logger.warn('Failed to save snapshots to localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -102,7 +102,7 @@ export function isJsonSerializable(value: unknown): boolean {
|
||||
try {
|
||||
JSON.stringify(value);
|
||||
return true;
|
||||
} catch {
|
||||
} catch (_e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
59
desktop/src/lib/kernel-a2a.ts
Normal file
59
desktop/src/lib/kernel-a2a.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* kernel-a2a.ts - Agent-to-Agent (A2A) methods for KernelClient
|
||||
*
|
||||
* Installed onto KernelClient.prototype via installA2aMethods().
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import type { KernelClient } from './kernel-client';
|
||||
|
||||
export function installA2aMethods(ClientClass: { prototype: KernelClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
// ─── A2A (Agent-to-Agent) API ───
|
||||
|
||||
/**
|
||||
* Send a direct A2A message from one agent to another
|
||||
*/
|
||||
proto.a2aSend = async function (this: KernelClient, from: string, to: string, payload: unknown, messageType?: string): Promise<void> {
|
||||
await invoke('agent_a2a_send', {
|
||||
from,
|
||||
to,
|
||||
payload,
|
||||
messageType: messageType || 'notification',
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Broadcast a message from an agent to all other agents
|
||||
*/
|
||||
proto.a2aBroadcast = async function (this: KernelClient, from: string, payload: unknown): Promise<void> {
|
||||
await invoke('agent_a2a_broadcast', { from, payload });
|
||||
};
|
||||
|
||||
/**
|
||||
* Discover agents that have a specific capability
|
||||
*/
|
||||
proto.a2aDiscover = async function (this: KernelClient, capability: string): Promise<Array<{
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
capabilities: Array<{ name: string; description: string }>;
|
||||
role: string;
|
||||
priority: number;
|
||||
}>> {
|
||||
return await invoke('agent_a2a_discover', { capability });
|
||||
};
|
||||
|
||||
/**
|
||||
* Delegate a task to another agent and wait for response
|
||||
*/
|
||||
proto.a2aDelegateTask = async function (this: KernelClient, from: string, to: string, task: string, timeoutMs?: number): Promise<unknown> {
|
||||
return await invoke('agent_a2a_delegate_task', {
|
||||
from,
|
||||
to,
|
||||
task,
|
||||
timeoutMs: timeoutMs || 30000,
|
||||
});
|
||||
};
|
||||
}
|
||||
135
desktop/src/lib/kernel-agent.ts
Normal file
135
desktop/src/lib/kernel-agent.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
/**
|
||||
* kernel-agent.ts - Agent & Clone management methods for KernelClient
|
||||
*
|
||||
* Installed onto KernelClient.prototype via installAgentMethods().
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import type { KernelClient } from './kernel-client';
|
||||
import type { AgentInfo, CreateAgentRequest, CreateAgentResponse } from './kernel-types';
|
||||
|
||||
export function installAgentMethods(ClientClass: { prototype: KernelClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
// ─── Agent Management ───
|
||||
|
||||
/**
|
||||
* List all agents
|
||||
*/
|
||||
proto.listAgents = async function (this: KernelClient): Promise<AgentInfo[]> {
|
||||
return invoke<AgentInfo[]>('agent_list');
|
||||
};
|
||||
|
||||
/**
|
||||
* Get agent by ID
|
||||
*/
|
||||
proto.getAgent = async function (this: KernelClient, agentId: string): Promise<AgentInfo | null> {
|
||||
return invoke<AgentInfo | null>('agent_get', { agentId });
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new agent
|
||||
*/
|
||||
proto.createAgent = async function (this: KernelClient, request: CreateAgentRequest): Promise<CreateAgentResponse> {
|
||||
return invoke<CreateAgentResponse>('agent_create', {
|
||||
request: {
|
||||
name: request.name,
|
||||
description: request.description,
|
||||
systemPrompt: request.systemPrompt,
|
||||
provider: request.provider || 'anthropic',
|
||||
model: request.model || 'claude-sonnet-4-20250514',
|
||||
maxTokens: request.maxTokens || 4096,
|
||||
temperature: request.temperature || 0.7,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete an agent
|
||||
*/
|
||||
proto.deleteAgent = async function (this: KernelClient, agentId: string): Promise<void> {
|
||||
return invoke('agent_delete', { agentId });
|
||||
};
|
||||
|
||||
// ─── Clone/Agent Adaptation (GatewayClient interface compatibility) ───
|
||||
|
||||
/**
|
||||
* List clones — maps to listAgents() with field adaptation
|
||||
*/
|
||||
proto.listClones = async function (this: KernelClient): Promise<{ clones: any[] }> {
|
||||
const agents = await this.listAgents();
|
||||
const clones = agents.map((agent) => ({
|
||||
id: agent.id,
|
||||
name: agent.name,
|
||||
role: agent.description,
|
||||
model: agent.model,
|
||||
createdAt: new Date().toISOString(),
|
||||
}));
|
||||
return { clones };
|
||||
};
|
||||
|
||||
/**
|
||||
* Create clone — maps to createAgent()
|
||||
*/
|
||||
proto.createClone = async function (this: KernelClient, opts: {
|
||||
name: string;
|
||||
role?: string;
|
||||
model?: string;
|
||||
personality?: string;
|
||||
communicationStyle?: string;
|
||||
[key: string]: unknown;
|
||||
}): Promise<{ clone: any }> {
|
||||
const response = await this.createAgent({
|
||||
name: opts.name,
|
||||
description: opts.role,
|
||||
model: opts.model,
|
||||
});
|
||||
const clone = {
|
||||
id: response.id,
|
||||
name: response.name,
|
||||
role: opts.role,
|
||||
model: opts.model,
|
||||
personality: opts.personality,
|
||||
communicationStyle: opts.communicationStyle,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
return { clone };
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete clone — maps to deleteAgent()
|
||||
*/
|
||||
proto.deleteClone = async function (this: KernelClient, id: string): Promise<void> {
|
||||
return this.deleteAgent(id);
|
||||
};
|
||||
|
||||
/**
|
||||
* Update clone — maps to kernel agent_update
|
||||
*/
|
||||
proto.updateClone = async function (this: KernelClient, id: string, updates: Record<string, unknown>): Promise<{ clone: unknown }> {
|
||||
await invoke('agent_update', {
|
||||
agentId: id,
|
||||
updates: {
|
||||
name: updates.name as string | undefined,
|
||||
description: updates.description as string | undefined,
|
||||
systemPrompt: updates.systemPrompt as string | undefined,
|
||||
model: updates.model as string | undefined,
|
||||
provider: updates.provider as string | undefined,
|
||||
maxTokens: updates.maxTokens as number | undefined,
|
||||
temperature: updates.temperature as number | undefined,
|
||||
},
|
||||
});
|
||||
|
||||
// Return updated clone representation
|
||||
const clone = {
|
||||
id,
|
||||
name: updates.name,
|
||||
role: updates.description || updates.role,
|
||||
model: updates.model,
|
||||
personality: updates.personality,
|
||||
communicationStyle: updates.communicationStyle,
|
||||
systemPrompt: updates.systemPrompt,
|
||||
};
|
||||
return { clone };
|
||||
};
|
||||
}
|
||||
202
desktop/src/lib/kernel-chat.ts
Normal file
202
desktop/src/lib/kernel-chat.ts
Normal file
@@ -0,0 +1,202 @@
|
||||
/**
|
||||
* kernel-chat.ts - Chat & streaming methods for KernelClient
|
||||
*
|
||||
* Installed onto KernelClient.prototype via installChatMethods().
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import { listen, type UnlistenFn } from '@tauri-apps/api/event';
|
||||
import { createLogger } from './logger';
|
||||
import type { KernelClient } from './kernel-client';
|
||||
import type { ChatResponse, StreamCallbacks, StreamChunkPayload } from './kernel-types';
|
||||
|
||||
const log = createLogger('KernelClient');
|
||||
|
||||
export function installChatMethods(ClientClass: { prototype: KernelClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
/**
|
||||
* Send a message and get a response
|
||||
*/
|
||||
proto.chat = async function (
|
||||
this: KernelClient,
|
||||
message: string,
|
||||
opts?: {
|
||||
sessionKey?: string;
|
||||
agentId?: string;
|
||||
}
|
||||
): Promise<{ runId: string; sessionId?: string; response?: string }> {
|
||||
const agentId = opts?.agentId || this.getDefaultAgentId();
|
||||
|
||||
if (!agentId) {
|
||||
throw new Error('No agent available');
|
||||
}
|
||||
|
||||
const response = await invoke<ChatResponse>('agent_chat', {
|
||||
request: {
|
||||
agentId,
|
||||
message,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
runId: `run_${Date.now()}`,
|
||||
sessionId: opts?.sessionKey,
|
||||
response: response.content,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Send a message with streaming response via Tauri events
|
||||
*/
|
||||
proto.chatStream = async function (
|
||||
this: KernelClient,
|
||||
message: string,
|
||||
callbacks: StreamCallbacks,
|
||||
opts?: {
|
||||
sessionKey?: string;
|
||||
agentId?: string;
|
||||
}
|
||||
): Promise<{ runId: string }> {
|
||||
const runId = crypto.randomUUID();
|
||||
const sessionId = opts?.sessionKey || runId;
|
||||
const agentId = opts?.agentId || this.getDefaultAgentId();
|
||||
|
||||
if (!agentId) {
|
||||
callbacks.onError('No agent available');
|
||||
return { runId };
|
||||
}
|
||||
|
||||
let unlisten: UnlistenFn | null = null;
|
||||
|
||||
try {
|
||||
// Set up event listener for stream chunks
|
||||
unlisten = await listen<StreamChunkPayload>('stream:chunk', (event) => {
|
||||
const payload = event.payload;
|
||||
|
||||
// Only process events for this session
|
||||
if (payload.sessionId !== sessionId) {
|
||||
return;
|
||||
}
|
||||
|
||||
const streamEvent = payload.event;
|
||||
|
||||
switch (streamEvent.type) {
|
||||
case 'delta':
|
||||
callbacks.onDelta(streamEvent.delta);
|
||||
break;
|
||||
|
||||
case 'tool_start':
|
||||
log.debug('Tool started:', streamEvent.name, streamEvent.input);
|
||||
if (callbacks.onTool) {
|
||||
callbacks.onTool(
|
||||
streamEvent.name,
|
||||
JSON.stringify(streamEvent.input),
|
||||
''
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'tool_end':
|
||||
log.debug('Tool ended:', streamEvent.name, streamEvent.output);
|
||||
if (callbacks.onTool) {
|
||||
callbacks.onTool(
|
||||
streamEvent.name,
|
||||
'',
|
||||
JSON.stringify(streamEvent.output)
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'handStart':
|
||||
log.debug('Hand started:', streamEvent.name, streamEvent.params);
|
||||
if (callbacks.onHand) {
|
||||
callbacks.onHand(streamEvent.name, 'running', undefined);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'handEnd':
|
||||
log.debug('Hand ended:', streamEvent.name, streamEvent.result);
|
||||
if (callbacks.onHand) {
|
||||
callbacks.onHand(streamEvent.name, 'completed', streamEvent.result);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'iteration_start':
|
||||
log.debug('Iteration started:', streamEvent.iteration, '/', streamEvent.maxIterations);
|
||||
// Don't need to notify user about iterations
|
||||
break;
|
||||
|
||||
case 'complete':
|
||||
log.debug('Stream complete:', streamEvent.inputTokens, streamEvent.outputTokens);
|
||||
callbacks.onComplete(streamEvent.inputTokens, streamEvent.outputTokens);
|
||||
// Clean up listener
|
||||
if (unlisten) {
|
||||
unlisten();
|
||||
unlisten = null;
|
||||
}
|
||||
break;
|
||||
|
||||
case 'error':
|
||||
log.error('Stream error:', streamEvent.message);
|
||||
callbacks.onError(streamEvent.message);
|
||||
// Clean up listener
|
||||
if (unlisten) {
|
||||
unlisten();
|
||||
unlisten = null;
|
||||
}
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
// Invoke the streaming command
|
||||
await invoke('agent_chat_stream', {
|
||||
request: {
|
||||
agentId,
|
||||
sessionId,
|
||||
message,
|
||||
},
|
||||
});
|
||||
} catch (err: unknown) {
|
||||
const errorMessage = err instanceof Error ? err.message : String(err);
|
||||
callbacks.onError(errorMessage);
|
||||
|
||||
// Clean up listener on error
|
||||
if (unlisten) {
|
||||
unlisten();
|
||||
}
|
||||
}
|
||||
|
||||
return { runId };
|
||||
};
|
||||
|
||||
/**
|
||||
* Cancel a stream (no-op for internal kernel)
|
||||
*/
|
||||
proto.cancelStream = function (this: KernelClient, _runId: string): void {
|
||||
// No-op: internal kernel doesn't support stream cancellation
|
||||
};
|
||||
|
||||
// ─── Default Agent ───
|
||||
|
||||
/**
|
||||
* Fetch default agent ID (returns current default)
|
||||
*/
|
||||
proto.fetchDefaultAgentId = async function (this: KernelClient): Promise<string | null> {
|
||||
return this.getDefaultAgentId();
|
||||
};
|
||||
|
||||
/**
|
||||
* Set default agent ID
|
||||
*/
|
||||
proto.setDefaultAgentId = function (this: KernelClient, agentId: string): void {
|
||||
(this as any).defaultAgentId = agentId;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get default agent ID
|
||||
*/
|
||||
proto.getDefaultAgentId = function (this: KernelClient): string {
|
||||
return (this as any).defaultAgentId || '';
|
||||
};
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
174
desktop/src/lib/kernel-hands.ts
Normal file
174
desktop/src/lib/kernel-hands.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
/**
|
||||
* kernel-hands.ts - Hands API methods for KernelClient
|
||||
*
|
||||
* Installed onto KernelClient.prototype via installHandMethods().
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import type { KernelClient } from './kernel-client';
|
||||
|
||||
export function installHandMethods(ClientClass: { prototype: KernelClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
// ─── Hands API ───
|
||||
|
||||
/**
|
||||
* List all available hands
|
||||
*/
|
||||
proto.listHands = async function (this: KernelClient): Promise<{
|
||||
hands: {
|
||||
id?: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
requirements_met?: boolean;
|
||||
category?: string;
|
||||
icon?: string;
|
||||
tool_count?: number;
|
||||
tools?: string[];
|
||||
metric_count?: number;
|
||||
metrics?: string[];
|
||||
}[]
|
||||
}> {
|
||||
const hands = await invoke<Array<{
|
||||
id?: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
requirements_met?: boolean;
|
||||
category?: string;
|
||||
icon?: string;
|
||||
tool_count?: number;
|
||||
tools?: string[];
|
||||
metric_count?: number;
|
||||
metrics?: string[];
|
||||
}>>('hand_list');
|
||||
return { hands: hands || [] };
|
||||
};
|
||||
|
||||
/**
|
||||
* Get hand details
|
||||
*/
|
||||
proto.getHand = async function (this: KernelClient, name: string): Promise<{
|
||||
id?: string;
|
||||
name?: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
requirements_met?: boolean;
|
||||
category?: string;
|
||||
icon?: string;
|
||||
provider?: string;
|
||||
model?: string;
|
||||
requirements?: { description?: string; name?: string; met?: boolean; satisfied?: boolean; details?: string; hint?: string }[];
|
||||
tools?: string[];
|
||||
metrics?: string[];
|
||||
config?: Record<string, unknown>;
|
||||
tool_count?: number;
|
||||
metric_count?: number;
|
||||
}> {
|
||||
try {
|
||||
return await invoke('hand_get', { name });
|
||||
} catch (e) {
|
||||
const { createLogger } = await import('./logger');
|
||||
createLogger('KernelHands').debug('hand_get failed', { name, error: e });
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Trigger/execute a hand
|
||||
*/
|
||||
proto.triggerHand = async function (this: KernelClient, name: string, params?: Record<string, unknown>, autonomyLevel?: string): Promise<{ runId: string; status: string }> {
|
||||
const result = await invoke<{ instance_id: string; status: string }>('hand_execute', {
|
||||
id: name,
|
||||
input: params || {},
|
||||
...(autonomyLevel ? { autonomyLevel } : {}),
|
||||
});
|
||||
return { runId: result.instance_id, status: result.status };
|
||||
};
|
||||
|
||||
/**
|
||||
* Get hand run status
|
||||
*/
|
||||
proto.getHandStatus = async function (this: KernelClient, name: string, runId: string): Promise<{ status: string; result?: unknown }> {
|
||||
try {
|
||||
return await invoke('hand_run_status', { handName: name, runId });
|
||||
} catch (e) {
|
||||
const { createLogger } = await import('./logger');
|
||||
createLogger('KernelHands').debug('hand_run_status failed', { name, runId, error: e });
|
||||
return { status: 'unknown' };
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Approve a hand execution
|
||||
*/
|
||||
proto.approveHand = async function (this: KernelClient, name: string, runId: string, approved: boolean, reason?: string): Promise<{ status: string }> {
|
||||
return await invoke('hand_approve', { handName: name, runId, approved, reason });
|
||||
};
|
||||
|
||||
/**
|
||||
* Cancel a hand execution
|
||||
*/
|
||||
proto.cancelHand = async function (this: KernelClient, name: string, runId: string): Promise<{ status: string }> {
|
||||
return await invoke('hand_cancel', { handName: name, runId });
|
||||
};
|
||||
|
||||
/**
|
||||
* List hand runs (execution history)
|
||||
*/
|
||||
proto.listHandRuns = async function (this: KernelClient, name: string, opts?: { limit?: number; offset?: number }): Promise<{
|
||||
runs: {
|
||||
runId?: string;
|
||||
run_id?: string;
|
||||
id?: string;
|
||||
status?: string;
|
||||
startedAt?: string;
|
||||
started_at?: string;
|
||||
completedAt?: string;
|
||||
completed_at?: string;
|
||||
result?: unknown;
|
||||
error?: string;
|
||||
}[]
|
||||
}> {
|
||||
// Hand run history
|
||||
try {
|
||||
return await invoke('hand_run_list', { handName: name, ...opts });
|
||||
} catch (e) {
|
||||
const { createLogger } = await import('./logger');
|
||||
createLogger('KernelHands').debug('hand_run_list failed', { name, error: e });
|
||||
return { runs: [] };
|
||||
}
|
||||
};
|
||||
|
||||
// ─── Approvals API ───
|
||||
|
||||
proto.listApprovals = async function (this: KernelClient, _status?: string): Promise<{
|
||||
approvals: Array<{
|
||||
id: string;
|
||||
handId: string;
|
||||
status: string;
|
||||
createdAt: string;
|
||||
input: Record<string, unknown>;
|
||||
}>
|
||||
}> {
|
||||
try {
|
||||
const approvals = await invoke<Array<{
|
||||
id: string;
|
||||
handId: string;
|
||||
status: string;
|
||||
createdAt: string;
|
||||
input: Record<string, unknown>;
|
||||
}>>('approval_list');
|
||||
return { approvals };
|
||||
} catch (error) {
|
||||
const { createLogger } = await import('./logger');
|
||||
createLogger('KernelClient').error('listApprovals error:', error);
|
||||
return { approvals: [] };
|
||||
}
|
||||
};
|
||||
|
||||
proto.respondToApproval = async function (this: KernelClient, approvalId: string, approved: boolean, reason?: string): Promise<void> {
|
||||
return invoke('approval_respond', { id: approvalId, approved, reason });
|
||||
};
|
||||
}
|
||||
116
desktop/src/lib/kernel-skills.ts
Normal file
116
desktop/src/lib/kernel-skills.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
/**
|
||||
* kernel-skills.ts - Skills API methods for KernelClient
|
||||
*
|
||||
* Installed onto KernelClient.prototype via installSkillMethods().
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import type { KernelClient } from './kernel-client';
|
||||
|
||||
/** Skill shape returned by list/refresh/create/update operations. */
|
||||
type SkillItem = {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
version: string;
|
||||
capabilities: string[];
|
||||
tags: string[];
|
||||
mode: string;
|
||||
enabled: boolean;
|
||||
triggers: string[];
|
||||
category?: string;
|
||||
};
|
||||
|
||||
/** Skill list container shared by list/refresh responses. */
|
||||
type SkillListResult = { skills: SkillItem[] };
|
||||
|
||||
export function installSkillMethods(ClientClass: { prototype: KernelClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
// ─── Skills API ───
|
||||
|
||||
/**
|
||||
* List all discovered skills
|
||||
*/
|
||||
proto.listSkills = async function (this: KernelClient): Promise<SkillListResult> {
|
||||
const skills = await invoke<SkillItem[]>('skill_list');
|
||||
return { skills: skills || [] };
|
||||
};
|
||||
|
||||
/**
|
||||
* Refresh skills from directory
|
||||
*/
|
||||
proto.refreshSkills = async function (this: KernelClient, skillDir?: string): Promise<SkillListResult> {
|
||||
const skills = await invoke<SkillItem[]>('skill_refresh', { skillDir: skillDir || null });
|
||||
return { skills: skills || [] };
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new skill
|
||||
*/
|
||||
proto.createSkill = async function (this: KernelClient, skill: {
|
||||
name: string;
|
||||
description?: string;
|
||||
triggers: Array<{ type: string; pattern?: string }>;
|
||||
actions: Array<{ type: string; params?: Record<string, unknown> }>;
|
||||
enabled?: boolean;
|
||||
}): Promise<{ skill?: SkillItem }> {
|
||||
const result = await invoke<SkillItem>('skill_create', {
|
||||
request: {
|
||||
name: skill.name,
|
||||
description: skill.description,
|
||||
triggers: skill.triggers.map(t => t.pattern || t.type),
|
||||
actions: skill.actions.map(a => a.type),
|
||||
enabled: skill.enabled,
|
||||
},
|
||||
});
|
||||
return { skill: result };
|
||||
};
|
||||
|
||||
/**
|
||||
* Update an existing skill
|
||||
*/
|
||||
proto.updateSkill = async function (this: KernelClient, id: string, updates: {
|
||||
name?: string;
|
||||
description?: string;
|
||||
triggers?: Array<{ type: string; pattern?: string }>;
|
||||
actions?: Array<{ type: string; params?: Record<string, unknown> }>;
|
||||
enabled?: boolean;
|
||||
}): Promise<{ skill?: SkillItem }> {
|
||||
const result = await invoke<SkillItem>('skill_update', {
|
||||
id,
|
||||
request: {
|
||||
name: updates.name,
|
||||
description: updates.description,
|
||||
triggers: updates.triggers?.map(t => t.pattern || t.type),
|
||||
actions: updates.actions?.map(a => a.type),
|
||||
enabled: updates.enabled,
|
||||
},
|
||||
});
|
||||
return { skill: result };
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete a skill
|
||||
*/
|
||||
proto.deleteSkill = async function (this: KernelClient, id: string): Promise<void> {
|
||||
await invoke('skill_delete', { id });
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute a skill by ID with optional input parameters.
|
||||
* Checks autonomy level before execution.
|
||||
*/
|
||||
proto.executeSkill = async function (this: KernelClient, id: string, input?: Record<string, unknown>): Promise<{
|
||||
success: boolean;
|
||||
output?: unknown;
|
||||
error?: string;
|
||||
durationMs?: number;
|
||||
}> {
|
||||
return invoke('skill_execute', {
|
||||
id,
|
||||
context: {},
|
||||
input: input || {},
|
||||
});
|
||||
};
|
||||
}
|
||||
131
desktop/src/lib/kernel-triggers.ts
Normal file
131
desktop/src/lib/kernel-triggers.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
/**
|
||||
* kernel-triggers.ts - Triggers API methods for KernelClient
|
||||
*
|
||||
* Installed onto KernelClient.prototype via installTriggerMethods().
|
||||
*/
|
||||
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
import type { KernelClient } from './kernel-client';
|
||||
|
||||
/** Trigger shape shared across trigger operations. */
|
||||
type TriggerItem = {
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: string;
|
||||
enabled: boolean;
|
||||
createdAt: string;
|
||||
modifiedAt: string;
|
||||
description?: string;
|
||||
tags: string[];
|
||||
};
|
||||
|
||||
/** Trigger type definition for create/update operations. */
|
||||
type TriggerTypeSpec = {
|
||||
type: string;
|
||||
cron?: string;
|
||||
pattern?: string;
|
||||
path?: string;
|
||||
secret?: string;
|
||||
events?: string[];
|
||||
};
|
||||
|
||||
export function installTriggerMethods(ClientClass: { prototype: KernelClient }): void {
|
||||
const proto = ClientClass.prototype as any;
|
||||
|
||||
// ─── Triggers API ───
|
||||
|
||||
/**
|
||||
* List all triggers
|
||||
* Returns empty array on error for graceful degradation
|
||||
*/
|
||||
proto.listTriggers = async function (this: KernelClient): Promise<{
|
||||
triggers?: TriggerItem[]
|
||||
}> {
|
||||
try {
|
||||
const triggers = await invoke<TriggerItem[]>('trigger_list');
|
||||
return { triggers };
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] listTriggers failed: ${this.formatError(error)}`);
|
||||
return { triggers: [] };
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get a single trigger by ID
|
||||
* Returns null on error for graceful degradation
|
||||
*/
|
||||
proto.getTrigger = async function (this: KernelClient, id: string): Promise<TriggerItem | null> {
|
||||
try {
|
||||
return await invoke<TriggerItem | null>('trigger_get', { id });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] getTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new trigger
|
||||
* Returns null on error for graceful degradation
|
||||
*/
|
||||
proto.createTrigger = async function (this: KernelClient, trigger: {
|
||||
id: string;
|
||||
name: string;
|
||||
handId: string;
|
||||
triggerType: TriggerTypeSpec;
|
||||
enabled?: boolean;
|
||||
description?: string;
|
||||
tags?: string[];
|
||||
}): Promise<TriggerItem | null> {
|
||||
try {
|
||||
return await invoke<TriggerItem>('trigger_create', { request: trigger });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] createTrigger(${trigger.id}) failed: ${this.formatError(error)}`);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Update an existing trigger
|
||||
* Throws on error as this is a mutation operation that callers need to handle
|
||||
*/
|
||||
proto.updateTrigger = async function (this: KernelClient, id: string, updates: {
|
||||
name?: string;
|
||||
enabled?: boolean;
|
||||
handId?: string;
|
||||
triggerType?: TriggerTypeSpec;
|
||||
}): Promise<TriggerItem> {
|
||||
try {
|
||||
return await invoke<TriggerItem>('trigger_update', { id, updates });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] updateTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete a trigger
|
||||
* Throws on error as this is a destructive operation that callers need to handle
|
||||
*/
|
||||
proto.deleteTrigger = async function (this: KernelClient, id: string): Promise<void> {
|
||||
try {
|
||||
await invoke('trigger_delete', { id });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] deleteTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute a trigger
|
||||
* Throws on error as callers need to know if execution failed
|
||||
*/
|
||||
proto.executeTrigger = async function (this: KernelClient, id: string, input?: Record<string, unknown>): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
return await invoke<Record<string, unknown>>('trigger_execute', { id, input: input || {} });
|
||||
} catch (error) {
|
||||
this.log('error', `[TriggersAPI] executeTrigger(${id}) failed: ${this.formatError(error)}`);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
}
|
||||
138
desktop/src/lib/kernel-types.ts
Normal file
138
desktop/src/lib/kernel-types.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
/**
 * kernel-types.ts - Shared types for the Kernel Client subsystem
 *
 * Extracted from kernel-client.ts for modularity.
 * All type/interface definitions used across kernel-client and its mixin modules.
 */

// === Connection & Status Types ===

/** Lifecycle states of the kernel connection. */
export type ConnectionState = 'disconnected' | 'connecting' | 'connected' | 'reconnecting';

/** Snapshot of the kernel's runtime status as reported by the backend. */
export interface KernelStatus {
  initialized: boolean;
  // Number of agents currently registered with the kernel
  agentCount: number;
  // null when the kernel has no database configured
  databaseUrl: string | null;
  defaultProvider: string | null;
  defaultModel: string | null;
}

// === Agent Types ===

/** Summary of a registered agent. */
export interface AgentInfo {
  id: string;
  name: string;
  description?: string;
  // Free-form state string (exact values are backend-defined)
  state: string;
  model?: string;
  provider?: string;
}

/** Request payload for creating a new agent. All fields except `name` are optional. */
export interface CreateAgentRequest {
  name: string;
  description?: string;
  systemPrompt?: string;
  provider?: string;
  model?: string;
  maxTokens?: number;
  temperature?: number;
}

/** Minimal response returned after agent creation. */
export interface CreateAgentResponse {
  id: string;
  name: string;
  state: string;
}

// === Chat Types ===

/** Non-streaming chat result with token accounting. */
export interface ChatResponse {
  content: string;
  inputTokens: number;
  outputTokens: number;
}

/** Generic event listener signature; payload shape depends on the event. */
export interface EventCallback {
  (payload: unknown): void;
}

/** Callbacks consumed by streaming chat; only delta/complete/error are required. */
export interface StreamCallbacks {
  onDelta: (delta: string) => void;
  onTool?: (tool: string, input: string, output: string) => void;
  onHand?: (name: string, status: string, result?: unknown) => void;
  // Token counts may be absent if the backend did not report them
  onComplete: (inputTokens?: number, outputTokens?: number) => void;
  onError: (error: string) => void;
}

// === Streaming Types (match Rust StreamChatEvent) ===

/** Incremental text chunk. */
export interface StreamEventDelta {
  type: 'delta';
  delta: string;
}

/** A tool invocation has started. */
export interface StreamEventToolStart {
  type: 'tool_start';
  name: string;
  input: unknown;
}

/** A tool invocation has finished. */
export interface StreamEventToolEnd {
  type: 'tool_end';
  name: string;
  output: unknown;
}

/** Start of an agent-loop iteration. */
export interface StreamEventIterationStart {
  type: 'iteration_start';
  iteration: number;
  maxIterations: number;
}

/** Terminal success event with final token counts. */
export interface StreamEventComplete {
  type: 'complete';
  inputTokens: number;
  outputTokens: number;
}

/** Terminal failure event. */
export interface StreamEventError {
  type: 'error';
  message: string;
}

// NOTE(review): hand events use camelCase tags ('handStart'/'handEnd') while
// tool events use snake_case — presumably mirrors the Rust enum's serde
// renaming; confirm against the Rust StreamChatEvent definition.

/** A "hand" (skill) invocation has started. */
export interface StreamEventHandStart {
  type: 'handStart';
  name: string;
  params: unknown;
}

/** A "hand" (skill) invocation has finished. */
export interface StreamEventHandEnd {
  type: 'handEnd';
  name: string;
  result: unknown;
}

/** Discriminated union of all streaming chat events (tag: `type`). */
export type StreamChatEvent =
  | StreamEventDelta
  | StreamEventToolStart
  | StreamEventToolEnd
  | StreamEventIterationStart
  | StreamEventHandStart
  | StreamEventHandEnd
  | StreamEventComplete
  | StreamEventError;

/** Envelope for a stream chunk delivered over the event channel. */
export interface StreamChunkPayload {
  sessionId: string;
  event: StreamChatEvent;
}

// === Config Types ===

/** LLM connection configuration passed to the kernel. All fields optional. */
export interface KernelConfig {
  provider?: string;
  model?: string;
  apiKey?: string;
  baseUrl?: string;
  apiProtocol?: string; // openai, anthropic, custom
}
|
||||
@@ -488,7 +488,9 @@ class SaasLLMAdapter implements LLMServiceAdapter {
|
||||
result.tokensUsed.output,
|
||||
{ latencyMs, success: true, connectionMode: 'saas' },
|
||||
);
|
||||
} catch { /* non-blocking */ }
|
||||
} catch (e) {
|
||||
log.debug('Failed to record LLM telemetry', { error: e });
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -500,7 +502,8 @@ class SaasLLMAdapter implements LLMServiceAdapter {
|
||||
const mode = localStorage.getItem('zclaw-connection-mode');
|
||||
const saasUrl = localStorage.getItem('zclaw-saas-url');
|
||||
return mode === 'saas' && !!saasUrl;
|
||||
} catch {
|
||||
} catch (e) {
|
||||
log.debug('Failed to check SaaS adapter availability', { error: e });
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -556,8 +559,8 @@ export function loadConfig(): LLMConfig {
|
||||
if (saved) {
|
||||
return JSON.parse(saved);
|
||||
}
|
||||
} catch {
|
||||
// Ignore parse errors
|
||||
} catch (e) {
|
||||
log.debug('Failed to parse LLM config', { error: e });
|
||||
}
|
||||
|
||||
// Default to gateway (ZCLAW passthrough) for L4 self-evolution
|
||||
@@ -661,7 +664,8 @@ function loadPromptCache(): Record<string, CachedPrompt> {
|
||||
try {
|
||||
const raw = localStorage.getItem(PROMPT_CACHE_KEY);
|
||||
return raw ? JSON.parse(raw) : {};
|
||||
} catch {
|
||||
} catch (e) {
|
||||
log.debug('Failed to parse prompt cache', { error: e });
|
||||
return {};
|
||||
}
|
||||
}
|
||||
@@ -827,8 +831,8 @@ function trackLLMCall(
|
||||
connectionMode: adapter.getProvider() === 'saas' ? 'saas' : 'tauri',
|
||||
},
|
||||
);
|
||||
} catch {
|
||||
// telemetry-collector may not be available (e.g., SSR)
|
||||
} catch (e) {
|
||||
log.debug('Telemetry recording failed (SSR or unavailable)', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -201,7 +201,8 @@ export class MemoryExtractor {
|
||||
conversation_id: conversationId,
|
||||
});
|
||||
saved++;
|
||||
} catch {
|
||||
} catch (e) {
|
||||
log.debug('Failed to save memory item', { error: e });
|
||||
skipped++;
|
||||
}
|
||||
}
|
||||
@@ -406,8 +407,8 @@ export class MemoryExtractor {
|
||||
importance: Math.max(1, Math.min(10, Number(item.importance))),
|
||||
tags: Array.isArray(item.tags) ? item.tags.map(String) : [],
|
||||
}));
|
||||
} catch {
|
||||
log.warn('Failed to parse LLM extraction response');
|
||||
} catch (e) {
|
||||
log.warn('Failed to parse LLM extraction response', { error: e });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -151,7 +151,8 @@ export async function requestWithRetry(
|
||||
// Try to read response body for error details
|
||||
try {
|
||||
responseBody = await response.text();
|
||||
} catch {
|
||||
} catch (e) {
|
||||
log.debug('Failed to read response body', { error: e });
|
||||
responseBody = '';
|
||||
}
|
||||
|
||||
|
||||
233
desktop/src/lib/saas-admin.ts
Normal file
233
desktop/src/lib/saas-admin.ts
Normal file
@@ -0,0 +1,233 @@
|
||||
/**
|
||||
* SaaS Admin Methods — Mixin
|
||||
*
|
||||
* Installs admin panel API methods onto SaaSClient.prototype.
|
||||
* Uses the same mixin pattern as gateway-api.ts.
|
||||
*
|
||||
* Reserved for future admin UI (Next.js admin dashboard).
|
||||
* These methods are not called by the desktop app but are kept as thin API
|
||||
* wrappers for when the admin panel is built.
|
||||
*/
|
||||
|
||||
import type {
|
||||
ProviderInfo,
|
||||
CreateProviderRequest,
|
||||
UpdateProviderRequest,
|
||||
ModelInfo,
|
||||
CreateModelRequest,
|
||||
UpdateModelRequest,
|
||||
AccountApiKeyInfo,
|
||||
CreateApiKeyRequest,
|
||||
AccountPublic,
|
||||
UpdateAccountRequest,
|
||||
PaginatedResponse,
|
||||
TokenInfo,
|
||||
CreateTokenRequest,
|
||||
OperationLogInfo,
|
||||
DashboardStats,
|
||||
RoleInfo,
|
||||
CreateRoleRequest,
|
||||
UpdateRoleRequest,
|
||||
PermissionTemplate,
|
||||
CreateTemplateRequest,
|
||||
} from './saas-types';
|
||||
|
||||
/**
 * Install admin-panel API wrappers onto the given client class's prototype.
 *
 * Each method is a thin wrapper over `this.request(method, path, body)`;
 * no response transformation happens here. Endpoint paths and payload key
 * casing (snake_case) follow the SaaS backend's REST API.
 *
 * NOTE(review): path parameters (ids) are interpolated without
 * encodeURIComponent — fine if ids are server-generated UUIDs; confirm ids
 * can never contain URL-reserved characters.
 *
 * @param ClientClass - Class (typically SaaSClient) whose prototype receives the methods
 */
export function installAdminMethods(ClientClass: { prototype: any }): void {
  const proto = ClientClass.prototype;

  // --- Provider Management (Admin) ---

  /** List all providers */
  proto.listProviders = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<ProviderInfo[]> {
    return this.request<ProviderInfo[]>('GET', '/api/v1/providers');
  };

  /** Get provider by ID */
  proto.getProvider = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<ProviderInfo> {
    return this.request<ProviderInfo>('GET', `/api/v1/providers/${id}`);
  };

  /** Create a new provider (admin only) */
  proto.createProvider = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: CreateProviderRequest): Promise<ProviderInfo> {
    return this.request<ProviderInfo>('POST', '/api/v1/providers', data);
  };

  /** Update a provider (admin only) */
  proto.updateProvider = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string, data: UpdateProviderRequest): Promise<ProviderInfo> {
    return this.request<ProviderInfo>('PATCH', `/api/v1/providers/${id}`, data);
  };

  /** Delete a provider (admin only) */
  proto.deleteProvider = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<void> {
    await this.request<void>('DELETE', `/api/v1/providers/${id}`);
  };

  // --- Model Management (Admin) ---

  /** List models, optionally filtered by provider */
  proto.listModelsAdmin = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, providerId?: string): Promise<ModelInfo[]> {
    const qs = providerId ? `?provider_id=${encodeURIComponent(providerId)}` : '';
    return this.request<ModelInfo[]>('GET', `/api/v1/models${qs}`);
  };

  /** Get model by ID */
  proto.getModel = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<ModelInfo> {
    return this.request<ModelInfo>('GET', `/api/v1/models/${id}`);
  };

  /** Create a new model (admin only) */
  proto.createModel = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: CreateModelRequest): Promise<ModelInfo> {
    return this.request<ModelInfo>('POST', '/api/v1/models', data);
  };

  /** Update a model (admin only) */
  proto.updateModel = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string, data: UpdateModelRequest): Promise<ModelInfo> {
    return this.request<ModelInfo>('PATCH', `/api/v1/models/${id}`, data);
  };

  /** Delete a model (admin only) */
  proto.deleteModel = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<void> {
    await this.request<void>('DELETE', `/api/v1/models/${id}`);
  };

  // --- Account API Keys ---

  /** List account's API keys */
  proto.listApiKeys = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, providerId?: string): Promise<AccountApiKeyInfo[]> {
    const qs = providerId ? `?provider_id=${encodeURIComponent(providerId)}` : '';
    return this.request<AccountApiKeyInfo[]>('GET', `/api/v1/keys${qs}`);
  };

  /** Create a new API key */
  proto.createApiKey = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: CreateApiKeyRequest): Promise<AccountApiKeyInfo> {
    return this.request<AccountApiKeyInfo>('POST', '/api/v1/keys', data);
  };

  /** Rotate an API key */
  proto.rotateApiKey = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string, newKeyValue: string): Promise<void> {
    await this.request<void>('POST', `/api/v1/keys/${id}/rotate`, { new_key_value: newKeyValue });
  };

  /** Revoke an API key */
  proto.revokeApiKey = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<void> {
    await this.request<void>('DELETE', `/api/v1/keys/${id}`);
  };

  // --- Account Management (Admin) ---

  /** List all accounts (admin only). Filters are appended only when provided. */
  proto.listAccounts = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, params?: { page?: number; page_size?: number; role?: string; status?: string; search?: string }): Promise<PaginatedResponse<AccountPublic>> {
    const qs = new URLSearchParams();
    if (params?.page) qs.set('page', String(params.page));
    if (params?.page_size) qs.set('page_size', String(params.page_size));
    if (params?.role) qs.set('role', params.role);
    if (params?.status) qs.set('status', params.status);
    if (params?.search) qs.set('search', params.search);
    const query = qs.toString();
    return this.request<PaginatedResponse<AccountPublic>>('GET', `/api/v1/accounts${query ? '?' + query : ''}`);
  };

  /** Get account by ID (admin or self) */
  proto.getAccount = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<AccountPublic> {
    return this.request<AccountPublic>('GET', `/api/v1/accounts/${id}`);
  };

  /** Update account (admin or self) */
  proto.updateAccount = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string, data: UpdateAccountRequest): Promise<AccountPublic> {
    return this.request<AccountPublic>('PATCH', `/api/v1/accounts/${id}`, data);
  };

  /** Update account status (admin only) */
  proto.updateAccountStatus = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string, status: 'active' | 'disabled' | 'suspended'): Promise<void> {
    await this.request<void>('PATCH', `/api/v1/accounts/${id}/status`, { status });
  };

  // --- API Token Management ---

  /** List API tokens for current account */
  proto.listTokens = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<TokenInfo[]> {
    return this.request<TokenInfo[]>('GET', '/api/v1/tokens');
  };

  /** Create a new API token */
  proto.createToken = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: CreateTokenRequest): Promise<TokenInfo> {
    return this.request<TokenInfo>('POST', '/api/v1/tokens', data);
  };

  /** Revoke an API token */
  proto.revokeToken = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<void> {
    await this.request<void>('DELETE', `/api/v1/tokens/${id}`);
  };

  // --- Operation Logs (Admin) ---

  /** List operation logs (admin only) */
  proto.listOperationLogs = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, params?: { page?: number; page_size?: number }): Promise<OperationLogInfo[]> {
    const qs = new URLSearchParams();
    if (params?.page) qs.set('page', String(params.page));
    if (params?.page_size) qs.set('page_size', String(params.page_size));
    const query = qs.toString();
    return this.request<OperationLogInfo[]>('GET', `/api/v1/logs/operations${query ? '?' + query : ''}`);
  };

  // --- Dashboard Statistics (Admin) ---

  /** Get dashboard statistics (admin only) */
  proto.getDashboardStats = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<DashboardStats> {
    return this.request<DashboardStats>('GET', '/api/v1/stats/dashboard');
  };

  // --- Role Management (Admin) ---

  /** List all roles */
  proto.listRoles = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<RoleInfo[]> {
    return this.request<RoleInfo[]>('GET', '/api/v1/roles');
  };

  /** Get role by ID */
  proto.getRole = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<RoleInfo> {
    return this.request<RoleInfo>('GET', `/api/v1/roles/${id}`);
  };

  /** Create a new role (admin only) */
  proto.createRole = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: CreateRoleRequest): Promise<RoleInfo> {
    return this.request<RoleInfo>('POST', '/api/v1/roles', data);
  };

  /** Update a role (admin only). Uses PUT (full replace) unlike other updaters' PATCH. */
  proto.updateRole = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string, data: UpdateRoleRequest): Promise<RoleInfo> {
    return this.request<RoleInfo>('PUT', `/api/v1/roles/${id}`, data);
  };

  /** Delete a role (admin only) */
  proto.deleteRole = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<void> {
    await this.request<void>('DELETE', `/api/v1/roles/${id}`);
  };

  // --- Permission Templates ---

  /** List permission templates */
  proto.listPermissionTemplates = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<PermissionTemplate[]> {
    return this.request<PermissionTemplate[]>('GET', '/api/v1/permission-templates');
  };

  /** Get permission template by ID */
  proto.getPermissionTemplate = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<PermissionTemplate> {
    return this.request<PermissionTemplate>('GET', `/api/v1/permission-templates/${id}`);
  };

  /** Create a permission template (admin only) */
  proto.createPermissionTemplate = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: CreateTemplateRequest): Promise<PermissionTemplate> {
    return this.request<PermissionTemplate>('POST', '/api/v1/permission-templates', data);
  };

  /** Delete a permission template (admin only) */
  proto.deletePermissionTemplate = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, id: string): Promise<void> {
    await this.request<void>('DELETE', `/api/v1/permission-templates/${id}`);
  };

  /** Apply permission template to accounts (admin only) */
  proto.applyPermissionTemplate = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, templateId: string, accountIds: string[]): Promise<{ ok: boolean; applied_count: number }> {
    return this.request<{ ok: boolean; applied_count: number }>('POST', `/api/v1/permission-templates/${templateId}/apply`, { account_ids: accountIds });
  };
}
|
||||
97
desktop/src/lib/saas-auth.ts
Normal file
97
desktop/src/lib/saas-auth.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
/**
|
||||
* SaaS Auth Methods — Mixin
|
||||
*
|
||||
* Installs authentication-related methods onto SaaSClient.prototype.
|
||||
* Uses the same mixin pattern as gateway-api.ts.
|
||||
*/
|
||||
|
||||
import type {
|
||||
SaaSAccountInfo,
|
||||
SaaSLoginResponse,
|
||||
SaaSRefreshResponse,
|
||||
TotpSetupResponse,
|
||||
TotpResultResponse,
|
||||
} from './saas-types';
|
||||
|
||||
/**
 * Install authentication methods onto the given client class's prototype.
 *
 * Methods that log in or refresh mutate `this.token` so subsequent requests
 * carry the new bearer token. Payload keys use snake_case per the backend API.
 *
 * @param ClientClass - Class (typically SaaSClient) whose prototype receives the methods
 */
export function installAuthMethods(ClientClass: { prototype: any }): void {
  const proto = ClientClass.prototype;

  /**
   * Login with username and password.
   * Auto-sets the client token on success.
   *
   * @param username - Account username
   * @param password - Account password
   * @param totpCode - Optional 2FA code; only included in the body when truthy
   */
  proto.login = async function (this: { token: string | null; request<T>(method: string, path: string, body?: unknown): Promise<T> }, username: string, password: string, totpCode?: string): Promise<SaaSLoginResponse> {
    const body: Record<string, string> = { username, password };
    if (totpCode) body.totp_code = totpCode;
    // Clear stale token before login — avoid sending expired token on auth endpoint
    this.token = null;
    const data = await this.request<SaaSLoginResponse>(
      'POST', '/api/v1/auth/login', body,
    );
    this.token = data.token;
    return data;
  };

  /**
   * Register a new account.
   * Auto-sets the client token on success.
   *
   * @param data - Registration payload (snake_case keys per backend API)
   */
  proto.register = async function (this: { token: string | null; request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: {
    username: string;
    email: string;
    password: string;
    display_name?: string;
  }): Promise<SaaSLoginResponse> {
    // Clear stale token before register
    this.token = null;
    const result = await this.request<SaaSLoginResponse>(
      'POST', '/api/v1/auth/register', data,
    );
    this.token = result.token;
    return result;
  };

  /**
   * Get the current authenticated user's account info.
   */
  proto.me = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<SaaSAccountInfo> {
    return this.request<SaaSAccountInfo>('GET', '/api/v1/auth/me');
  };

  /**
   * Refresh the current token.
   * Auto-updates the client token on success.
   *
   * @returns The newly issued token
   */
  proto.refreshToken = async function (this: { token: string | null; request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<string> {
    const data = await this.request<SaaSRefreshResponse>('POST', '/api/v1/auth/refresh');
    this.token = data.token;
    return data.token;
  };

  /**
   * Change the current user's password.
   *
   * @param oldPassword - Current password (server re-verifies it)
   * @param newPassword - Replacement password
   */
  proto.changePassword = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, oldPassword: string, newPassword: string): Promise<void> {
    await this.request<unknown>('PUT', '/api/v1/auth/password', {
      old_password: oldPassword,
      new_password: newPassword,
    });
  };

  // --- TOTP Endpoints ---

  /** Generate a TOTP secret and otpauth URI */
  proto.setupTotp = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }): Promise<TotpSetupResponse> {
    return this.request<TotpSetupResponse>('POST', '/api/v1/auth/totp/setup');
  };

  /** Verify a TOTP code and enable 2FA */
  proto.verifyTotp = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, code: string): Promise<TotpResultResponse> {
    return this.request<TotpResultResponse>('POST', '/api/v1/auth/totp/verify', { code });
  };

  /** Disable 2FA (requires password confirmation) */
  proto.disableTotp = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, password: string): Promise<TotpResultResponse> {
    return this.request<TotpResultResponse>('POST', '/api/v1/auth/totp/disable', { password });
  };
}
|
||||
File diff suppressed because it is too large
Load Diff
16
desktop/src/lib/saas-errors.ts
Normal file
16
desktop/src/lib/saas-errors.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
/**
|
||||
* SaaS Error Class
|
||||
*
|
||||
* Custom error for SaaS API responses.
|
||||
*/
|
||||
|
||||
export class SaaSApiError extends Error {
|
||||
constructor(
|
||||
public readonly status: number,
|
||||
public readonly code: string,
|
||||
message: string,
|
||||
) {
|
||||
super(message);
|
||||
this.name = 'SaaSApiError';
|
||||
}
|
||||
}
|
||||
46
desktop/src/lib/saas-prompt.ts
Normal file
46
desktop/src/lib/saas-prompt.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* SaaS Prompt OTA Methods — Mixin
|
||||
*
|
||||
* Installs prompt OTA methods onto SaaSClient.prototype.
|
||||
* Uses the same mixin pattern as gateway-api.ts.
|
||||
*/
|
||||
|
||||
import type {
|
||||
PromptCheckResult,
|
||||
PromptTemplateInfo,
|
||||
PromptVersionInfo,
|
||||
PaginatedResponse,
|
||||
} from './saas-types';
|
||||
|
||||
export function installPromptMethods(ClientClass: { prototype: any }): void {
|
||||
const proto = ClientClass.prototype;
|
||||
|
||||
/** Check for prompt updates (OTA) */
|
||||
proto.checkPromptUpdates = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, deviceId: string, currentVersions: Record<string, number>): Promise<PromptCheckResult> {
|
||||
return this.request<PromptCheckResult>('POST', '/api/v1/prompts/check', {
|
||||
device_id: deviceId,
|
||||
versions: currentVersions,
|
||||
});
|
||||
};
|
||||
|
||||
/** List all prompt templates */
|
||||
proto.listPrompts = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, params?: { category?: string; source?: string; status?: string; page?: number; page_size?: number }): Promise<PaginatedResponse<PromptTemplateInfo>> {
|
||||
const qs = params ? '?' + new URLSearchParams(params as Record<string, string>).toString() : '';
|
||||
return this.request<PaginatedResponse<PromptTemplateInfo>>('GET', `/api/v1/prompts${qs}`);
|
||||
};
|
||||
|
||||
/** Get prompt template by name */
|
||||
proto.getPrompt = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, name: string): Promise<PromptTemplateInfo> {
|
||||
return this.request<PromptTemplateInfo>('GET', `/api/v1/prompts/${encodeURIComponent(name)}`);
|
||||
};
|
||||
|
||||
/** List prompt versions */
|
||||
proto.listPromptVersions = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, name: string): Promise<PromptVersionInfo[]> {
|
||||
return this.request<PromptVersionInfo[]>('GET', `/api/v1/prompts/${encodeURIComponent(name)}/versions`);
|
||||
};
|
||||
|
||||
/** Get specific prompt version */
|
||||
proto.getPromptVersion = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, name: string, version: number): Promise<PromptVersionInfo> {
|
||||
return this.request<PromptVersionInfo>('GET', `/api/v1/prompts/${encodeURIComponent(name)}/versions/${version}`);
|
||||
};
|
||||
}
|
||||
131
desktop/src/lib/saas-relay.ts
Normal file
131
desktop/src/lib/saas-relay.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
/**
|
||||
* SaaS Relay Methods — Mixin
|
||||
*
|
||||
* Installs relay-related methods (tasks, chat completion, usage) onto
|
||||
* SaaSClient.prototype. Uses the same mixin pattern as gateway-api.ts.
|
||||
*/
|
||||
|
||||
import type {
|
||||
RelayTaskInfo,
|
||||
UsageStats,
|
||||
} from './saas-types';
|
||||
import { createLogger } from './logger';
|
||||
const logger = createLogger('SaaSRelay');
|
||||
|
||||
/**
 * Install relay methods (task management, chat completion, usage stats)
 * onto the given client class's prototype.
 *
 * @param ClientClass - Class (typically SaaSClient) whose prototype receives the methods
 */
export function installRelayMethods(ClientClass: { prototype: any }): void {
  const proto = ClientClass.prototype;

  // --- Relay Task Management ---

  /** List relay tasks for the current user. Filters are appended only when provided. */
  proto.listRelayTasks = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, query?: { status?: string; page?: number; page_size?: number }): Promise<RelayTaskInfo[]> {
    const params = new URLSearchParams();
    if (query?.status) params.set('status', query.status);
    if (query?.page) params.set('page', String(query.page));
    if (query?.page_size) params.set('page_size', String(query.page_size));
    const qs = params.toString();
    return this.request<RelayTaskInfo[]>('GET', `/api/v1/relay/tasks${qs ? '?' + qs : ''}`);
  };

  /** Get a single relay task */
  proto.getRelayTask = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, taskId: string): Promise<RelayTaskInfo> {
    return this.request<RelayTaskInfo>('GET', `/api/v1/relay/tasks/${taskId}`);
  };

  /** Retry a failed relay task (admin only) */
  proto.retryRelayTask = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, taskId: string): Promise<{ ok: boolean; task_id: string }> {
    return this.request<{ ok: boolean; task_id: string }>('POST', `/api/v1/relay/tasks/${taskId}/retry`);
  };

  // --- Chat Relay ---

  /**
   * Send a chat completion request via the SaaS relay.
   * Returns the raw Response object to support both streaming and non-streaming.
   *
   * Includes one retry on 401 (auto token refresh) and on network errors.
   * The caller is responsible for:
   * - Reading the response body (JSON or SSE stream)
   * - Handling errors from the response
   *
   * @param body - Request payload, JSON-serialized as-is
   * @param signal - Optional AbortSignal; when omitted a fresh 5-minute
   *   timeout signal is created per attempt
   */
  proto.chatCompletion = async function (
    this: {
      baseUrl: string;
      token: string | null;
      _serverReachable: boolean;
      _isAuthEndpoint(path: string): boolean;
      refreshToken(): Promise<string>;
    },
    body: unknown,
    signal?: AbortSignal,
  ): Promise<Response> {
    const maxAttempts = 2; // 1 initial + 1 retry

    for (let attempt = 0; attempt < maxAttempts; attempt++) {
      // Headers are rebuilt each attempt so a refreshed token is picked up.
      const headers: Record<string, string> = {
        'Content-Type': 'application/json',
      };
      if (this.token) {
        headers['Authorization'] = `Bearer ${this.token}`;
      }

      // Use caller's AbortSignal if provided, otherwise default 5min timeout
      const effectiveSignal = signal ?? AbortSignal.timeout(300_000);

      try {
        const response = await fetch(
          `${this.baseUrl}/api/v1/relay/chat/completions`,
          {
            method: 'POST',
            headers,
            credentials: 'include', // Send HttpOnly cookies
            body: JSON.stringify(body),
            signal: effectiveSignal,
          },
        );

        // On 401, attempt token refresh once
        if (response.status === 401 && attempt === 0 && !this._isAuthEndpoint('/api/v1/relay/chat/completions')) {
          try {
            const newToken = await this.refreshToken();
            if (newToken) continue; // Retry with refreshed token
          } catch (e) {
            logger.debug('Token refresh failed', { error: e });
            // Refresh failed, return the 401 response
          }
        }

        this._serverReachable = true;
        return response;
      } catch (err: unknown) {
        // fetch threw: network failure, abort, or timeout.
        this._serverReachable = false;
        const isNetworkError = err instanceof TypeError
          && (err.message.includes('Failed to fetch') || err.message.includes('NetworkError'));

        if (isNetworkError && attempt < maxAttempts - 1) {
          // Brief backoff before retry
          await new Promise((r) => setTimeout(r, 1000 * (attempt + 1)));
          continue;
        }

        // Non-network errors (including aborts/timeouts) propagate immediately.
        throw err;
      }
    }

    // Unreachable but TypeScript needs it
    throw new Error('chatCompletion: all attempts exhausted');
  };

  // --- Usage Statistics ---

  /** Get usage statistics for current account. Filters are appended only when provided. */
  proto.getUsage = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, params?: { from?: string; to?: string; provider_id?: string; model_id?: string }): Promise<UsageStats> {
    const qs = new URLSearchParams();
    if (params?.from) qs.set('from', params.from);
    if (params?.to) qs.set('to', params.to);
    if (params?.provider_id) qs.set('provider_id', params.provider_id);
    if (params?.model_id) qs.set('model_id', params.model_id);
    const query = qs.toString();
    return this.request<UsageStats>('GET', `/api/v1/usage${query ? '?' + query : ''}`);
  };
}
|
||||
153
desktop/src/lib/saas-session.ts
Normal file
153
desktop/src/lib/saas-session.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
/**
|
||||
* SaaS Session Persistence
|
||||
*
|
||||
* Handles loading/saving SaaS auth session data.
|
||||
* Token is stored in secure storage (OS keyring), not plain localStorage.
|
||||
* Auth state is carried by HttpOnly cookies when possible (same-origin).
|
||||
*/
|
||||
|
||||
import type { SaaSAccountInfo } from './saas-types';
|
||||
import { createLogger } from './logger';
|
||||
|
||||
const logger = createLogger('saas-session');
|
||||
|
||||
// === Storage Keys ===
// NOTE(review): the secure-storage key and the legacy localStorage key are the
// same string on purpose — the legacy constant exists only so old plaintext
// tokens can be found and removed; confirm before consolidating them.
const SAAS_TOKEN_SECURE_KEY = 'zclaw-saas-token'; // OS keyring key
const SAASTOKEN_KEY = 'zclaw-saas-token'; // legacy localStorage — only used for cleanup
const SAASURL_KEY = 'zclaw-saas-url'; // SaaS backend base URL
const SAASACCOUNT_KEY = 'zclaw-saas-account'; // JSON-serialized SaaSAccountInfo
const SAASMODE_KEY = 'zclaw-connection-mode'; // last chosen connection mode
|
||||
|
||||
// === Session Interface ===

/** Persisted SaaS auth session state, as loaded/saved by this module. */
export interface SaaSSession {
  token: string | null; // null when using cookie-based auth (page reload)
  account: SaaSAccountInfo | null; // cached account metadata, if previously saved
  saasUrl: string; // SaaS backend base URL; presence marks "has logged in before"
}
|
||||
|
||||
// === Session Functions ===
|
||||
|
||||
/**
|
||||
* Load a persisted SaaS session.
|
||||
* Token is stored in secure storage (OS keyring), not plain localStorage.
|
||||
* Returns null if no URL is stored (never logged in).
|
||||
*
|
||||
* NOTE: Token loading is async due to secure storage access.
|
||||
* For synchronous checks, use loadSaaSSessionSync() (URL + account only).
|
||||
*/
|
||||
export async function loadSaaSSession(): Promise<SaaSSession | null> {
|
||||
try {
|
||||
const saasUrl = localStorage.getItem(SAASURL_KEY);
|
||||
if (!saasUrl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Clean up any legacy plaintext token from localStorage
|
||||
const legacyToken = localStorage.getItem(SAASTOKEN_KEY);
|
||||
if (legacyToken) {
|
||||
localStorage.removeItem(SAASTOKEN_KEY);
|
||||
}
|
||||
|
||||
// Load token from secure storage
|
||||
let token: string | null = null;
|
||||
try {
|
||||
const { secureStorage } = await import('./secure-storage');
|
||||
token = await secureStorage.get(SAAS_TOKEN_SECURE_KEY);
|
||||
} catch (e) {
|
||||
logger.debug('Secure storage unavailable for token load', { error: e });
|
||||
// Secure storage unavailable — token stays null (cookie auth will be attempted)
|
||||
}
|
||||
|
||||
const accountRaw = localStorage.getItem(SAASACCOUNT_KEY);
|
||||
const account: SaaSAccountInfo | null = accountRaw
|
||||
? (JSON.parse(accountRaw) as SaaSAccountInfo)
|
||||
: null;
|
||||
|
||||
return { token, account, saasUrl };
|
||||
} catch (e) {
|
||||
logger.debug('Corrupted session data, clearing', { error: e });
|
||||
// Corrupted data - clear all
|
||||
clearSaaSSession();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous version — returns URL + account only (no token).
|
||||
* Used during store initialization where async is not available.
|
||||
*/
|
||||
export function loadSaaSSessionSync(): { saasUrl: string; account: SaaSAccountInfo | null } | null {
|
||||
try {
|
||||
const saasUrl = localStorage.getItem(SAASURL_KEY);
|
||||
if (!saasUrl) return null;
|
||||
|
||||
// Clean up legacy plaintext token
|
||||
const legacyToken = localStorage.getItem(SAASTOKEN_KEY);
|
||||
if (legacyToken) {
|
||||
localStorage.removeItem(SAASTOKEN_KEY);
|
||||
}
|
||||
|
||||
const accountRaw = localStorage.getItem(SAASACCOUNT_KEY);
|
||||
const account: SaaSAccountInfo | null = accountRaw
|
||||
? (JSON.parse(accountRaw) as SaaSAccountInfo)
|
||||
: null;
|
||||
|
||||
return { saasUrl, account };
|
||||
} catch (e) {
|
||||
logger.debug('Failed to load sync session', { error: e });
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Persist SaaS session.
|
||||
* Token goes to secure storage (OS keyring), metadata to localStorage.
|
||||
*/
|
||||
export async function saveSaaSSession(session: SaaSSession): Promise<void> {
|
||||
// Store token in secure storage (OS keyring), not plain localStorage
|
||||
if (session.token) {
|
||||
try {
|
||||
const { secureStorage } = await import('./secure-storage');
|
||||
await secureStorage.set(SAAS_TOKEN_SECURE_KEY, session.token);
|
||||
} catch (e) {
|
||||
logger.debug('Secure storage unavailable for token save', { error: e });
|
||||
// Secure storage unavailable — token only in memory
|
||||
}
|
||||
}
|
||||
|
||||
localStorage.setItem(SAASURL_KEY, session.saasUrl);
|
||||
if (session.account) {
|
||||
localStorage.setItem(SAASACCOUNT_KEY, JSON.stringify(session.account));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the persisted SaaS session from all storage.
|
||||
*/
|
||||
export async function clearSaaSSession(): Promise<void> {
|
||||
// Remove from secure storage
|
||||
try {
|
||||
const { secureStorage } = await import('./secure-storage');
|
||||
await secureStorage.set(SAAS_TOKEN_SECURE_KEY, '');
|
||||
} catch (e) { logger.debug('Failed to clear secure storage token', { error: e }); }
|
||||
|
||||
localStorage.removeItem(SAASTOKEN_KEY);
|
||||
localStorage.removeItem(SAASURL_KEY);
|
||||
localStorage.removeItem(SAASACCOUNT_KEY);
|
||||
}
|
||||
|
||||
/**
 * Persist the connection mode to localStorage.
 * @param mode opaque mode identifier chosen by the caller (stored verbatim)
 */
export function saveConnectionMode(mode: string): void {
  localStorage.setItem(SAASMODE_KEY, mode);
}

/**
 * Load the connection mode from localStorage.
 * @returns the previously saved mode string, or null if never set
 */
export function loadConnectionMode(): string | null {
  return localStorage.getItem(SAASMODE_KEY);
}
|
||||
45
desktop/src/lib/saas-telemetry.ts
Normal file
45
desktop/src/lib/saas-telemetry.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
/**
|
||||
* SaaS Telemetry Methods — Mixin
|
||||
*
|
||||
* Installs telemetry reporting methods onto SaaSClient.prototype.
|
||||
* Uses the same mixin pattern as gateway-api.ts.
|
||||
*/
|
||||
|
||||
export function installTelemetryMethods(ClientClass: { prototype: any }): void {
|
||||
const proto = ClientClass.prototype;
|
||||
|
||||
/** Report anonymous usage telemetry (token counts only, no content) */
|
||||
proto.reportTelemetry = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: {
|
||||
device_id: string;
|
||||
app_version: string;
|
||||
entries: Array<{
|
||||
model_id: string;
|
||||
input_tokens: number;
|
||||
output_tokens: number;
|
||||
latency_ms?: number;
|
||||
success: boolean;
|
||||
error_type?: string;
|
||||
timestamp: string;
|
||||
connection_mode: string;
|
||||
}>;
|
||||
}): Promise<{ accepted: number; rejected: number }> {
|
||||
return this.request<{ accepted: number; rejected: number }>(
|
||||
'POST', '/api/v1/telemetry/report', data,
|
||||
);
|
||||
};
|
||||
|
||||
/** Report audit log summary (action types and counts only, no content) */
|
||||
proto.reportAuditSummary = async function (this: { request<T>(method: string, path: string, body?: unknown): Promise<T> }, data: {
|
||||
device_id: string;
|
||||
entries: Array<{
|
||||
action: string;
|
||||
target: string;
|
||||
result: string;
|
||||
timestamp: string;
|
||||
}>;
|
||||
}): Promise<{ accepted: number; total: number }> {
|
||||
return this.request<{ accepted: number; total: number }>(
|
||||
'POST', '/api/v1/telemetry/audit', data,
|
||||
);
|
||||
};
|
||||
}
|
||||
462
desktop/src/lib/saas-types.ts
Normal file
462
desktop/src/lib/saas-types.ts
Normal file
@@ -0,0 +1,462 @@
|
||||
/**
|
||||
* SaaS Type Definitions
|
||||
*
|
||||
* All type/interface definitions for the ZCLAW SaaS client.
|
||||
* Extracted from saas-client.ts for modularity.
|
||||
*/
|
||||
|
||||
// === Account & Auth Types ===

/** Public account info returned by the SaaS backend */
export interface SaaSAccountInfo {
  id: string;
  username: string;
  email: string;
  display_name: string;
  role: 'super_admin' | 'admin' | 'user';
  status: 'active' | 'disabled' | 'suspended';
  totp_enabled: boolean; // whether TOTP 2FA is active for this account
  created_at: string; // timestamp string as serialized by the backend
  llm_routing?: 'relay' | 'local'; // how LLM traffic is routed for this account
}

/** Lightweight template info for listing available templates */
export interface AgentTemplateAvailable {
  id: string;
  name: string;
  category: string;
  emoji?: string;
  description?: string;
  source_id?: string; // upstream template this entry derives from, if any
}

/** Full template details for creating an agent from template */
export interface AgentTemplateFull {
  id: string;
  name: string;
  description?: string;
  category: string;
  emoji?: string;
  personality?: string;
  system_prompt?: string;
  soul_content?: string;
  scenarios: string[];
  welcome_message?: string;
  quick_commands: Array<{ label: string; command: string }>;
  communication_style?: string;
  model?: string; // preferred model alias, when the template pins one
  tools: string[];
  temperature?: number;
  max_tokens?: number;
  source_id?: string;
}

/** A model available for relay through the SaaS backend */
export interface SaaSModelInfo {
  id: string;
  provider_id: string;
  alias: string;
  context_window: number;
  max_output_tokens: number;
  supports_streaming: boolean;
  supports_vision: boolean;
}

/** Config item from the SaaS backend */
export interface SaaSConfigItem {
  id: string;
  category: string;
  key_path: string; // dotted path identifying the config key
  value_type: string;
  current_value: string | null;
  default_value: string | null;
  source: string;
  description: string | null;
  requires_restart: boolean; // true when applying this key needs an app restart
  created_at: string;
  updated_at: string;
}

/** SaaS API error shape */
export interface SaaSErrorResponse {
  error: string; // machine-readable error code
  message: string; // human-readable description
}

/** Login response from POST /api/v1/auth/login */
export interface SaaSLoginResponse {
  token: string;
  refresh_token: string;
  account: SaaSAccountInfo;
}

/** Refresh response from POST /api/v1/auth/refresh */
export interface SaaSRefreshResponse {
  token: string;
}

/** TOTP setup response from POST /api/v1/auth/totp/setup */
export interface TotpSetupResponse {
  otpauth_uri: string; // URI to encode as a QR code for authenticator apps
  secret: string;
  issuer: string;
}

/** TOTP verify/disable response */
export interface TotpResultResponse {
  ok: boolean;
  totp_enabled: boolean;
  message: string;
}

/** Device info stored on the SaaS backend */
export interface DeviceInfo {
  id: string; // backend row id
  device_id: string; // client-generated device identifier
  device_name: string | null;
  platform: string | null;
  app_version: string | null;
  last_seen_at: string;
  created_at: string;
}
|
||||
|
||||
// === Relay & Config Types ===

/** Relay task info from GET /api/v1/relay/tasks */
export interface RelayTaskInfo {
  id: string;
  account_id: string;
  provider_id: string;
  model_id: string;
  status: string;
  priority: number;
  attempt_count: number;
  max_attempts: number;
  input_tokens: number;
  output_tokens: number;
  error_message: string | null; // set when the task failed
  queued_at: string;
  started_at: string | null; // null until the task is picked up
  completed_at: string | null; // null until the task finishes
  created_at: string;
}

/** Config diff request for POST /api/v1/config/diff and /sync */
export interface SyncConfigRequest {
  client_fingerprint: string;
  action: 'push' | 'merge'; // push = client wins; merge = reconcile both sides
  config_keys: string[];
  client_values: Record<string, unknown>;
}

/** A single config diff entry */
export interface ConfigDiffItem {
  key_path: string;
  client_value: string | null;
  saas_value: string | null;
  conflict: boolean; // true when both sides changed the same key
}

/** Config diff response */
export interface ConfigDiffResponse {
  items: ConfigDiffItem[];
  total_keys: number;
  conflicts: number;
}

/** Config sync result */
export interface ConfigSyncResult {
  updated: number;
  created: number;
  skipped: number;
}

/** Paginated response wrapper */
export interface PaginatedResponse<T> {
  items: T[];
  total: number; // total items across all pages
  page: number;
  page_size: number;
}
|
||||
|
||||
// === Prompt OTA Types ===

/** Prompt template info */
export interface PromptTemplateInfo {
  id: string;
  name: string;
  category: string;
  description: string | null;
  source: string;
  current_version: number; // latest published version number
  status: string;
  created_at: string;
  updated_at: string;
}

/** Prompt version info */
export interface PromptVersionInfo {
  id: string;
  template_id: string;
  version: number;
  system_prompt: string;
  user_prompt_template: string | null;
  variables: PromptVariable[];
  changelog: string | null;
  min_app_version: string | null; // oldest app version this prompt supports
  created_at: string;
}

/** Prompt variable definition */
export interface PromptVariable {
  name: string;
  type: string;
  default_value?: string;
  description?: string;
  required?: boolean;
}

/** OTA update check result */
export interface PromptCheckResult {
  updates: PromptUpdatePayload[]; // empty when the client is up to date
  server_time: string;
}

/** Single OTA update payload */
export interface PromptUpdatePayload {
  name: string;
  version: number;
  system_prompt: string;
  user_prompt_template: string | null;
  variables: PromptVariable[];
  source: string;
  min_app_version: string | null;
  changelog: string | null;
}
|
||||
|
||||
// === Admin Types: Providers ===

/** Provider info from GET /api/v1/providers */
export interface ProviderInfo {
  id: string;
  name: string;
  display_name: string;
  base_url: string;
  api_protocol: string;
  enabled: boolean;
  rate_limit_rpm: number | null; // requests/minute cap; null = unlimited
  rate_limit_tpm: number | null; // tokens/minute cap; null = unlimited
  created_at: string;
  updated_at: string;
}

/** Create provider request */
export interface CreateProviderRequest {
  name: string;
  display_name: string;
  base_url: string;
  api_protocol?: string;
  api_key?: string;
  rate_limit_rpm?: number;
  rate_limit_tpm?: number;
}

/** Update provider request — all fields optional; omitted fields are unchanged */
export interface UpdateProviderRequest {
  display_name?: string;
  base_url?: string;
  api_key?: string;
  rate_limit_rpm?: number;
  rate_limit_tpm?: number;
  enabled?: boolean;
}

// === Admin Types: Models ===

/** Model info from GET /api/v1/models */
export interface ModelInfo {
  id: string;
  provider_id: string;
  model_id: string; // provider-side model identifier
  alias: string; // user-facing name for the model
  context_window: number;
  max_output_tokens: number;
  supports_streaming: boolean;
  supports_vision: boolean;
  enabled: boolean;
  pricing_input: number; // input-token price — units not defined here; confirm with backend
  pricing_output: number; // output-token price — units not defined here; confirm with backend
  created_at: string;
  updated_at: string;
}

/** Create model request */
export interface CreateModelRequest {
  provider_id: string;
  model_id: string;
  alias: string;
  context_window?: number;
  max_output_tokens?: number;
  supports_streaming?: boolean;
  supports_vision?: boolean;
  pricing_input?: number;
  pricing_output?: number;
}

/** Update model request — all fields optional; omitted fields are unchanged */
export interface UpdateModelRequest {
  alias?: string;
  context_window?: number;
  max_output_tokens?: number;
  supports_streaming?: boolean;
  supports_vision?: boolean;
  enabled?: boolean;
  pricing_input?: number;
  pricing_output?: number;
}
|
||||
|
||||
// === Admin Types: API Keys ===

/** Account API key info (key value itself is never returned) */
export interface AccountApiKeyInfo {
  id: string;
  provider_id: string;
  key_label: string | null;
  permissions: string[];
  enabled: boolean;
  last_used_at: string | null; // null if the key has never been used
  created_at: string;
  updated_at: string;
}

/** Create API key request */
export interface CreateApiKeyRequest {
  provider_id: string;
  key_value: string; // the raw key — sent once at creation, not stored client-side
  key_label?: string;
  permissions?: string[];
}

// === Admin Types: Usage & Accounts ===

/** Usage statistics */
export interface UsageStats {
  total_input_tokens: number;
  total_output_tokens: number;
  total_requests: number;
  by_provider: Record<string, { input_tokens: number; output_tokens: number; requests: number }>;
  by_model: Record<string, { input_tokens: number; output_tokens: number; requests: number }>;
  daily: Array<{ date: string; input_tokens: number; output_tokens: number; requests: number }>;
}

/** Account public info (extended) */
export interface AccountPublic {
  id: string;
  username: string;
  email: string;
  display_name: string;
  role: 'super_admin' | 'admin' | 'user';
  status: 'active' | 'disabled' | 'suspended';
  totp_enabled: boolean;
  last_login_at: string | null; // null if the account never logged in
  created_at: string;
}

/** Update account request — omitted fields are unchanged */
export interface UpdateAccountRequest {
  display_name?: string;
  email?: string;
  role?: string;
  avatar_url?: string;
}

// === Admin Types: Tokens ===

/** Token info */
export interface TokenInfo {
  id: string;
  name: string;
  token_prefix: string; // short visible prefix for identification
  permissions: string[];
  last_used_at: string | null;
  expires_at: string | null; // null = never expires
  created_at: string;
  token?: string; // full token value — only present in the creation response
}

/** Create token request */
export interface CreateTokenRequest {
  name: string;
  permissions: string[];
  expires_days?: number; // omit for a non-expiring token
}
|
||||
|
||||
// === Admin Types: Logs & Dashboard ===

/** Operation log info */
export interface OperationLogInfo {
  id: number;
  account_id: string | null; // null for system-initiated actions
  action: string;
  target_type: string | null;
  target_id: string | null;
  details: Record<string, unknown> | null;
  ip_address: string | null;
  created_at: string;
}

/** Dashboard statistics */
export interface DashboardStats {
  total_accounts: number;
  active_accounts: number;
  tasks_today: number;
  active_providers: number;
  active_models: number;
  tokens_today_input: number;
  tokens_today_output: number;
}

// === Admin Types: Roles & Permissions ===

/** Role info */
export interface RoleInfo {
  id: string;
  name: string;
  description: string | null;
  permissions: string[];
  is_system: boolean; // system roles are built-in and presumably not deletable — confirm with backend
  created_at: string;
  updated_at: string;
}

/** Create role request */
export interface CreateRoleRequest {
  id: string; // caller-chosen role identifier
  name: string;
  description?: string;
  permissions: string[];
}

/** Update role request — omitted fields are unchanged */
export interface UpdateRoleRequest {
  name?: string;
  description?: string;
  permissions?: string[];
}

/** Permission template */
export interface PermissionTemplate {
  id: string;
  name: string;
  description: string | null;
  permissions: string[];
  created_at: string;
  updated_at: string;
}

/** Create template request */
export interface CreateTemplateRequest {
  name: string;
  description?: string;
  permissions: string[];
}
|
||||
@@ -22,6 +22,9 @@ import {
|
||||
arrayToBase64,
|
||||
base64ToArray,
|
||||
} from './crypto-utils';
|
||||
import { createLogger } from './logger';
|
||||
|
||||
const logger = createLogger('secure-storage');
|
||||
|
||||
// Cache for keyring availability check
|
||||
let keyringAvailable: boolean | null = null;
|
||||
@@ -145,7 +148,8 @@ function isEncrypted(value: string): boolean {
|
||||
try {
|
||||
const parsed = JSON.parse(value);
|
||||
return parsed && typeof parsed.iv === 'string' && typeof parsed.data === 'string';
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('isEncrypted check failed', { error: e });
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -157,7 +161,8 @@ function isV2Encrypted(value: string): boolean {
|
||||
try {
|
||||
const parsed = JSON.parse(value);
|
||||
return parsed && parsed.version === 2 && typeof parsed.salt === 'string' && typeof parsed.iv === 'string' && typeof parsed.data === 'string';
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('isV2Encrypted check failed', { error: e });
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -254,7 +259,8 @@ async function readEncryptedLocalStorage(key: string): Promise<string | null> {
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('readEncryptedLocalStorage failed', { error: e });
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -266,8 +272,8 @@ function clearLocalStorageBackup(key: string): void {
|
||||
try {
|
||||
localStorage.removeItem(ENCRYPTED_PREFIX + key);
|
||||
localStorage.removeItem(key);
|
||||
} catch {
|
||||
// Ignore localStorage failures
|
||||
} catch (e) {
|
||||
logger.debug('clearLocalStorageBackup failed', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -279,15 +285,16 @@ function writeLocalStorageBackup(key: string, value: string): void {
|
||||
} else {
|
||||
localStorage.removeItem(key);
|
||||
}
|
||||
} catch {
|
||||
// Ignore localStorage failures
|
||||
} catch (e) {
|
||||
logger.debug('writeLocalStorageBackup failed', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
function readLocalStorageBackup(key: string): string | null {
|
||||
try {
|
||||
return localStorage.getItem(key);
|
||||
} catch {
|
||||
} catch (e) {
|
||||
logger.debug('readLocalStorageBackup failed', { error: e });
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -400,8 +407,8 @@ export async function storeDeviceKeys(
|
||||
// Clear legacy format if present
|
||||
try {
|
||||
localStorage.removeItem(DEVICE_KEYS_LEGACY);
|
||||
} catch {
|
||||
// Ignore
|
||||
} catch (e) {
|
||||
logger.debug('Failed to clear legacy device keys from localStorage', { error: e });
|
||||
}
|
||||
} else {
|
||||
// Fallback: store in localStorage (less secure, but better than nothing)
|
||||
@@ -477,8 +484,8 @@ export async function deleteDeviceKeys(): Promise<void> {
|
||||
localStorage.removeItem(DEVICE_KEYS_PUBLIC_KEY);
|
||||
localStorage.removeItem(DEVICE_KEYS_CREATED);
|
||||
localStorage.removeItem(DEVICE_KEYS_LEGACY);
|
||||
} catch {
|
||||
// Ignore localStorage errors
|
||||
} catch (e) {
|
||||
logger.debug('Failed to delete device keys from localStorage', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -512,8 +519,8 @@ export async function getDeviceKeysCreatedAt(): Promise<number | null> {
|
||||
if (typeof parsed.createdAt === 'number' || typeof parsed.createdAt === 'string') {
|
||||
return parseInt(String(parsed.createdAt), 10);
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
} catch (e) {
|
||||
logger.debug('Failed to parse legacy device keys createdAt', { error: e });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user