feat: 新增技能编排引擎和工作流构建器组件
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
refactor: 统一Hands系统常量到单个源文件 refactor: 更新Hands中文名称和描述 fix: 修复技能市场在连接状态变化时重新加载 fix: 修复身份变更提案的错误处理逻辑 docs: 更新多个功能文档的验证状态和实现位置 docs: 更新Hands系统文档 test: 添加测试文件验证工作区路径
This commit is contained in:
@@ -31,10 +31,13 @@
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@dagrejs/dagre": "^3.0.0",
|
||||
"@tauri-apps/api": "^2",
|
||||
"@tauri-apps/plugin-opener": "^2",
|
||||
"@xstate/react": "^6.1.0",
|
||||
"@xyflow/react": "^12.10.1",
|
||||
"clsx": "^2.1.1",
|
||||
"dagre": "^0.8.5",
|
||||
"date-fns": "^4.1.0",
|
||||
"framer-motion": "^12.36.0",
|
||||
"lucide-react": "^0.577.0",
|
||||
@@ -55,6 +58,7 @@
|
||||
"@tauri-apps/cli": "^2",
|
||||
"@testing-library/jest-dom": "6.6.3",
|
||||
"@testing-library/react": "16.1.0",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/react": "^19.1.8",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
"@types/react-window": "^2.0.0",
|
||||
|
||||
216
desktop/pnpm-lock.yaml
generated
216
desktop/pnpm-lock.yaml
generated
@@ -8,6 +8,9 @@ importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@dagrejs/dagre':
|
||||
specifier: ^3.0.0
|
||||
version: 3.0.0
|
||||
'@tauri-apps/api':
|
||||
specifier: ^2
|
||||
version: 2.10.1
|
||||
@@ -17,9 +20,15 @@ importers:
|
||||
'@xstate/react':
|
||||
specifier: ^6.1.0
|
||||
version: 6.1.0(@types/react@19.2.14)(react@19.2.4)(xstate@5.28.0)
|
||||
'@xyflow/react':
|
||||
specifier: ^12.10.1
|
||||
version: 12.10.1(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
clsx:
|
||||
specifier: ^2.1.1
|
||||
version: 2.1.1
|
||||
dagre:
|
||||
specifier: ^0.8.5
|
||||
version: 0.8.5
|
||||
date-fns:
|
||||
specifier: ^4.1.0
|
||||
version: 4.1.0
|
||||
@@ -75,6 +84,9 @@ importers:
|
||||
'@testing-library/react':
|
||||
specifier: 16.1.0
|
||||
version: 16.1.0(@testing-library/dom@10.4.1)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
'@types/js-yaml':
|
||||
specifier: ^4.0.9
|
||||
version: 4.0.9
|
||||
'@types/react':
|
||||
specifier: ^19.1.8
|
||||
version: 19.2.14
|
||||
@@ -248,6 +260,12 @@ packages:
|
||||
resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@dagrejs/dagre@3.0.0':
|
||||
resolution: {integrity: sha512-ZzhnTy1rfuoew9Ez3EIw4L2znPGnYYhfn8vc9c4oB8iw6QAsszbiU0vRhlxWPFnmmNSFAkrYeF1PhM5m4lAN0Q==}
|
||||
|
||||
'@dagrejs/graphlib@4.0.1':
|
||||
resolution: {integrity: sha512-IvcV6FduIIAmLwnH+yun+QtV36SC7mERqa86aClNqmMN09WhmPPYU8ckHrZBozErf+UvHPWOTJYaGYiIcs0DgA==}
|
||||
|
||||
'@esbuild/aix-ppc64@0.21.5':
|
||||
resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
|
||||
engines: {node: '>=12'}
|
||||
@@ -930,9 +948,30 @@ packages:
|
||||
'@types/babel__traverse@7.28.0':
|
||||
resolution: {integrity: sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==}
|
||||
|
||||
'@types/d3-color@3.1.3':
|
||||
resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==}
|
||||
|
||||
'@types/d3-drag@3.0.7':
|
||||
resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==}
|
||||
|
||||
'@types/d3-interpolate@3.0.4':
|
||||
resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==}
|
||||
|
||||
'@types/d3-selection@3.0.11':
|
||||
resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==}
|
||||
|
||||
'@types/d3-transition@3.0.9':
|
||||
resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==}
|
||||
|
||||
'@types/d3-zoom@3.0.8':
|
||||
resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==}
|
||||
|
||||
'@types/estree@1.0.8':
|
||||
resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
|
||||
|
||||
'@types/js-yaml@4.0.9':
|
||||
resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==}
|
||||
|
||||
'@types/react-dom@19.2.3':
|
||||
resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==}
|
||||
peerDependencies:
|
||||
@@ -1004,6 +1043,15 @@ packages:
|
||||
xstate:
|
||||
optional: true
|
||||
|
||||
'@xyflow/react@12.10.1':
|
||||
resolution: {integrity: sha512-5eSWtIK/+rkldOuFbOOz44CRgQRjtS9v5nufk77DV+XBnfCGL9HAQ8PG00o2ZYKqkEU/Ak6wrKC95Tu+2zuK3Q==}
|
||||
peerDependencies:
|
||||
react: '>=17'
|
||||
react-dom: '>=17'
|
||||
|
||||
'@xyflow/system@0.0.75':
|
||||
resolution: {integrity: sha512-iXs+AGFLi8w/VlAoc/iSxk+CxfT6o64Uw/k0CKASOPqjqz6E0rb5jFZgJtXGZCpfQI6OQpu5EnumP5fGxQheaQ==}
|
||||
|
||||
agent-base@7.1.4:
|
||||
resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==}
|
||||
engines: {node: '>= 14'}
|
||||
@@ -1096,6 +1144,9 @@ packages:
|
||||
resolution: {integrity: sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==}
|
||||
engines: {node: '>= 16'}
|
||||
|
||||
classcat@5.0.5:
|
||||
resolution: {integrity: sha512-JhZUT7JFcQy/EzW605k/ktHtncoo9vnyW/2GspNYwFlN1C/WmjuV/xtS04e9SOkL2sTdw0VAZ2UGCcQ9lR6p6w==}
|
||||
|
||||
clsx@2.1.1:
|
||||
resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==}
|
||||
engines: {node: '>=6'}
|
||||
@@ -1128,6 +1179,47 @@ packages:
|
||||
csstype@3.2.3:
|
||||
resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==}
|
||||
|
||||
d3-color@3.1.0:
|
||||
resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
d3-dispatch@3.0.1:
|
||||
resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
d3-drag@3.0.0:
|
||||
resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
d3-ease@3.0.1:
|
||||
resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
d3-interpolate@3.0.1:
|
||||
resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
d3-selection@3.0.0:
|
||||
resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
d3-timer@3.0.1:
|
||||
resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
d3-transition@3.0.1:
|
||||
resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==}
|
||||
engines: {node: '>=12'}
|
||||
peerDependencies:
|
||||
d3-selection: 2 - 3
|
||||
|
||||
d3-zoom@3.0.0:
|
||||
resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
dagre@0.8.5:
|
||||
resolution: {integrity: sha512-/aTqmnRta7x7MCCpExk7HQL2O4owCT2h8NT//9I1OQ9vt29Pa0BzSAkR5lwFUcQ7491yVi/3CXU9jQ5o0Mn2Sw==}
|
||||
|
||||
data-urls@5.0.0:
|
||||
resolution: {integrity: sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -1304,6 +1396,9 @@ packages:
|
||||
graceful-fs@4.2.11:
|
||||
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
|
||||
|
||||
graphlib@2.1.8:
|
||||
resolution: {integrity: sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==}
|
||||
|
||||
has-flag@4.0.0:
|
||||
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
|
||||
engines: {node: '>=8'}
|
||||
@@ -1982,6 +2077,21 @@ packages:
|
||||
yallist@3.1.1:
|
||||
resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==}
|
||||
|
||||
zustand@4.5.7:
|
||||
resolution: {integrity: sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw==}
|
||||
engines: {node: '>=12.7.0'}
|
||||
peerDependencies:
|
||||
'@types/react': '>=16.8'
|
||||
immer: '>=9.0.6'
|
||||
react: '>=16.8'
|
||||
peerDependenciesMeta:
|
||||
'@types/react':
|
||||
optional: true
|
||||
immer:
|
||||
optional: true
|
||||
react:
|
||||
optional: true
|
||||
|
||||
zustand@5.0.11:
|
||||
resolution: {integrity: sha512-fdZY+dk7zn/vbWNCYmzZULHRrss0jx5pPFiOuMZ/5HJN6Yv3u+1Wswy/4MpZEkEGhtNH+pwxZB8OKgUBPzYAGg==}
|
||||
engines: {node: '>=12.20.0'}
|
||||
@@ -2153,6 +2263,12 @@ snapshots:
|
||||
|
||||
'@csstools/css-tokenizer@3.0.4': {}
|
||||
|
||||
'@dagrejs/dagre@3.0.0':
|
||||
dependencies:
|
||||
'@dagrejs/graphlib': 4.0.1
|
||||
|
||||
'@dagrejs/graphlib@4.0.1': {}
|
||||
|
||||
'@esbuild/aix-ppc64@0.21.5':
|
||||
optional: true
|
||||
|
||||
@@ -2589,8 +2705,31 @@ snapshots:
|
||||
dependencies:
|
||||
'@babel/types': 7.29.0
|
||||
|
||||
'@types/d3-color@3.1.3': {}
|
||||
|
||||
'@types/d3-drag@3.0.7':
|
||||
dependencies:
|
||||
'@types/d3-selection': 3.0.11
|
||||
|
||||
'@types/d3-interpolate@3.0.4':
|
||||
dependencies:
|
||||
'@types/d3-color': 3.1.3
|
||||
|
||||
'@types/d3-selection@3.0.11': {}
|
||||
|
||||
'@types/d3-transition@3.0.9':
|
||||
dependencies:
|
||||
'@types/d3-selection': 3.0.11
|
||||
|
||||
'@types/d3-zoom@3.0.8':
|
||||
dependencies:
|
||||
'@types/d3-interpolate': 3.0.4
|
||||
'@types/d3-selection': 3.0.11
|
||||
|
||||
'@types/estree@1.0.8': {}
|
||||
|
||||
'@types/js-yaml@4.0.9': {}
|
||||
|
||||
'@types/react-dom@19.2.3(@types/react@19.2.14)':
|
||||
dependencies:
|
||||
'@types/react': 19.2.14
|
||||
@@ -2692,6 +2831,29 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- '@types/react'
|
||||
|
||||
'@xyflow/react@12.10.1(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)':
|
||||
dependencies:
|
||||
'@xyflow/system': 0.0.75
|
||||
classcat: 5.0.5
|
||||
react: 19.2.4
|
||||
react-dom: 19.2.4(react@19.2.4)
|
||||
zustand: 4.5.7(@types/react@19.2.14)(react@19.2.4)
|
||||
transitivePeerDependencies:
|
||||
- '@types/react'
|
||||
- immer
|
||||
|
||||
'@xyflow/system@0.0.75':
|
||||
dependencies:
|
||||
'@types/d3-drag': 3.0.7
|
||||
'@types/d3-interpolate': 3.0.4
|
||||
'@types/d3-selection': 3.0.11
|
||||
'@types/d3-transition': 3.0.9
|
||||
'@types/d3-zoom': 3.0.8
|
||||
d3-drag: 3.0.0
|
||||
d3-interpolate: 3.0.1
|
||||
d3-selection: 3.0.0
|
||||
d3-zoom: 3.0.0
|
||||
|
||||
agent-base@7.1.4: {}
|
||||
|
||||
ansi-regex@5.0.1: {}
|
||||
@@ -2771,6 +2933,8 @@ snapshots:
|
||||
|
||||
check-error@2.1.3: {}
|
||||
|
||||
classcat@5.0.5: {}
|
||||
|
||||
clsx@2.1.1: {}
|
||||
|
||||
color-convert@2.0.1:
|
||||
@@ -2800,6 +2964,47 @@ snapshots:
|
||||
|
||||
csstype@3.2.3: {}
|
||||
|
||||
d3-color@3.1.0: {}
|
||||
|
||||
d3-dispatch@3.0.1: {}
|
||||
|
||||
d3-drag@3.0.0:
|
||||
dependencies:
|
||||
d3-dispatch: 3.0.1
|
||||
d3-selection: 3.0.0
|
||||
|
||||
d3-ease@3.0.1: {}
|
||||
|
||||
d3-interpolate@3.0.1:
|
||||
dependencies:
|
||||
d3-color: 3.1.0
|
||||
|
||||
d3-selection@3.0.0: {}
|
||||
|
||||
d3-timer@3.0.1: {}
|
||||
|
||||
d3-transition@3.0.1(d3-selection@3.0.0):
|
||||
dependencies:
|
||||
d3-color: 3.1.0
|
||||
d3-dispatch: 3.0.1
|
||||
d3-ease: 3.0.1
|
||||
d3-interpolate: 3.0.1
|
||||
d3-selection: 3.0.0
|
||||
d3-timer: 3.0.1
|
||||
|
||||
d3-zoom@3.0.0:
|
||||
dependencies:
|
||||
d3-dispatch: 3.0.1
|
||||
d3-drag: 3.0.0
|
||||
d3-interpolate: 3.0.1
|
||||
d3-selection: 3.0.0
|
||||
d3-transition: 3.0.1(d3-selection@3.0.0)
|
||||
|
||||
dagre@0.8.5:
|
||||
dependencies:
|
||||
graphlib: 2.1.8
|
||||
lodash: 4.17.23
|
||||
|
||||
data-urls@5.0.0:
|
||||
dependencies:
|
||||
whatwg-mimetype: 4.0.0
|
||||
@@ -2995,6 +3200,10 @@ snapshots:
|
||||
|
||||
graceful-fs@4.2.11: {}
|
||||
|
||||
graphlib@2.1.8:
|
||||
dependencies:
|
||||
lodash: 4.17.23
|
||||
|
||||
has-flag@4.0.0: {}
|
||||
|
||||
has-symbols@1.1.0: {}
|
||||
@@ -3573,6 +3782,13 @@ snapshots:
|
||||
|
||||
yallist@3.1.1: {}
|
||||
|
||||
zustand@4.5.7(@types/react@19.2.14)(react@19.2.4):
|
||||
dependencies:
|
||||
use-sync-external-store: 1.6.0(react@19.2.4)
|
||||
optionalDependencies:
|
||||
'@types/react': 19.2.14
|
||||
react: 19.2.4
|
||||
|
||||
zustand@5.0.11(@types/react@19.2.14)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)):
|
||||
optionalDependencies:
|
||||
'@types/react': 19.2.14
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
//! Phase 2 of Intelligence Layer Migration.
|
||||
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.4.1
|
||||
|
||||
use chrono::{DateTime, Local, Timelike};
|
||||
use chrono::{Local, Timelike};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
@@ -342,6 +342,10 @@ static CORRECTION_COUNTERS: OnceLock<RwLock<StdHashMap<String, usize>>> = OnceLo
|
||||
/// Key: agent_id, Value: (task_count, total_memories, storage_bytes)
|
||||
static MEMORY_STATS_CACHE: OnceLock<RwLock<StdHashMap<String, MemoryStatsCache>>> = OnceLock::new();
|
||||
|
||||
/// Global last interaction timestamps
|
||||
/// Key: agent_id, Value: last interaction timestamp (RFC3339)
|
||||
static LAST_INTERACTION: OnceLock<RwLock<StdHashMap<String, String>>> = OnceLock::new();
|
||||
|
||||
/// Cached memory stats for an agent
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct MemoryStatsCache {
|
||||
@@ -359,6 +363,18 @@ fn get_memory_stats_cache() -> &'static RwLock<StdHashMap<String, MemoryStatsCac
|
||||
MEMORY_STATS_CACHE.get_or_init(|| RwLock::new(StdHashMap::new()))
|
||||
}
|
||||
|
||||
fn get_last_interaction_map() -> &'static RwLock<StdHashMap<String, String>> {
|
||||
LAST_INTERACTION.get_or_init(|| RwLock::new(StdHashMap::new()))
|
||||
}
|
||||
|
||||
/// Record an interaction for an agent (call from frontend when user sends message)
|
||||
pub fn record_interaction(agent_id: &str) {
|
||||
let map = get_last_interaction_map();
|
||||
if let Ok(mut map) = map.write() {
|
||||
map.insert(agent_id.to_string(), chrono::Utc::now().to_rfc3339());
|
||||
}
|
||||
}
|
||||
|
||||
/// Update memory stats cache for an agent
|
||||
/// Call this from frontend via Tauri command after fetching memory stats
|
||||
pub fn update_memory_stats_cache(agent_id: &str, task_count: usize, total_entries: usize, storage_size_bytes: usize) {
|
||||
@@ -433,10 +449,10 @@ fn check_correction_patterns(agent_id: &str) -> Vec<HeartbeatAlert> {
|
||||
/// Check for pending task memories
|
||||
/// Uses cached memory stats to detect task backlog
|
||||
fn check_pending_tasks(agent_id: &str) -> Option<HeartbeatAlert> {
|
||||
if let Some(stats) = get_cached_memory_stats(agent_id) {
|
||||
// Alert if there are 5+ pending tasks
|
||||
if stats.task_count >= 5 {
|
||||
return Some(HeartbeatAlert {
|
||||
match get_cached_memory_stats(agent_id) {
|
||||
Some(stats) if stats.task_count >= 5 => {
|
||||
// Alert if there are 5+ pending tasks
|
||||
Some(HeartbeatAlert {
|
||||
title: "待办任务积压".to_string(),
|
||||
content: format!("当前有 {} 个待办任务未完成,建议处理或重新评估优先级", stats.task_count),
|
||||
urgency: if stats.task_count >= 10 {
|
||||
@@ -446,51 +462,102 @@ fn check_pending_tasks(agent_id: &str) -> Option<HeartbeatAlert> {
|
||||
},
|
||||
source: "pending-tasks".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
});
|
||||
})
|
||||
},
|
||||
Some(_) => None, // Stats available but no alert needed
|
||||
None => {
|
||||
// Cache is empty - warn about missing sync
|
||||
tracing::warn!("[Heartbeat] Memory stats cache is empty for agent {}, waiting for frontend sync", agent_id);
|
||||
Some(HeartbeatAlert {
|
||||
title: "记忆统计未同步".to_string(),
|
||||
content: "心跳引擎未能获取记忆统计信息,部分检查被跳过。请确保记忆系统正常运行。".to_string(),
|
||||
urgency: Urgency::Low,
|
||||
source: "pending-tasks".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
})
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Check memory storage health
|
||||
/// Uses cached memory stats to detect storage issues
|
||||
fn check_memory_health(agent_id: &str) -> Option<HeartbeatAlert> {
|
||||
if let Some(stats) = get_cached_memory_stats(agent_id) {
|
||||
// Alert if storage is very large (> 50MB)
|
||||
if stats.storage_size_bytes > 50 * 1024 * 1024 {
|
||||
return Some(HeartbeatAlert {
|
||||
title: "记忆存储过大".to_string(),
|
||||
content: format!(
|
||||
"记忆存储已达 {:.1}MB,建议清理低重要性记忆或归档旧记忆",
|
||||
stats.storage_size_bytes as f64 / (1024.0 * 1024.0)
|
||||
),
|
||||
urgency: Urgency::Medium,
|
||||
source: "memory-health".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
});
|
||||
}
|
||||
match get_cached_memory_stats(agent_id) {
|
||||
Some(stats) => {
|
||||
// Alert if storage is very large (> 50MB)
|
||||
if stats.storage_size_bytes > 50 * 1024 * 1024 {
|
||||
return Some(HeartbeatAlert {
|
||||
title: "记忆存储过大".to_string(),
|
||||
content: format!(
|
||||
"记忆存储已达 {:.1}MB,建议清理低重要性记忆或归档旧记忆",
|
||||
stats.storage_size_bytes as f64 / (1024.0 * 1024.0)
|
||||
),
|
||||
urgency: Urgency::Medium,
|
||||
source: "memory-health".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
});
|
||||
}
|
||||
|
||||
// Alert if too many memories (> 1000)
|
||||
if stats.total_entries > 1000 {
|
||||
return Some(HeartbeatAlert {
|
||||
title: "记忆条目过多".to_string(),
|
||||
content: format!(
|
||||
"当前有 {} 条记忆,可能影响检索效率,建议清理或归档",
|
||||
stats.total_entries
|
||||
),
|
||||
urgency: Urgency::Low,
|
||||
source: "memory-health".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
});
|
||||
// Alert if too many memories (> 1000)
|
||||
if stats.total_entries > 1000 {
|
||||
return Some(HeartbeatAlert {
|
||||
title: "记忆条目过多".to_string(),
|
||||
content: format!(
|
||||
"当前有 {} 条记忆,可能影响检索效率,建议清理或归档",
|
||||
stats.total_entries
|
||||
),
|
||||
urgency: Urgency::Low,
|
||||
source: "memory-health".to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
});
|
||||
}
|
||||
None
|
||||
},
|
||||
None => {
|
||||
// Cache is empty - skip check (already reported in check_pending_tasks)
|
||||
None
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Check if user has been idle (placeholder)
|
||||
fn check_idle_greeting(_agent_id: &str) -> Option<HeartbeatAlert> {
|
||||
// In full implementation, this would check last interaction time
|
||||
None
|
||||
/// Check if user has been idle and might benefit from a greeting
|
||||
fn check_idle_greeting(agent_id: &str) -> Option<HeartbeatAlert> {
|
||||
let map = get_last_interaction_map();
|
||||
|
||||
// Try to get the last interaction time
|
||||
let last_interaction = {
|
||||
let read_result = map.read();
|
||||
match read_result {
|
||||
Ok(map) => map.get(agent_id).cloned(),
|
||||
Err(_) => return None, // Skip if lock fails
|
||||
}
|
||||
};
|
||||
|
||||
// If no interaction recorded yet, skip
|
||||
let last_interaction = last_interaction?;
|
||||
|
||||
// Parse the timestamp and convert to UTC for comparison
|
||||
let last_time = chrono::DateTime::parse_from_rfc3339(&last_interaction)
|
||||
.ok()?
|
||||
.with_timezone(&chrono::Utc);
|
||||
let now = chrono::Utc::now();
|
||||
let idle_hours = (now - last_time).num_hours();
|
||||
|
||||
// Alert if idle for more than 24 hours
|
||||
if idle_hours >= 24 {
|
||||
Some(HeartbeatAlert {
|
||||
title: "用户长时间未互动".to_string(),
|
||||
content: format!(
|
||||
"距离上次互动已过去 {} 小时,可以考虑主动问候或检查用户是否需要帮助",
|
||||
idle_hours
|
||||
),
|
||||
urgency: Urgency::Low,
|
||||
source: "idle-greeting".to_string(),
|
||||
timestamp: now.to_rfc3339(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Check for personality improvement opportunities
|
||||
@@ -665,6 +732,16 @@ pub async fn heartbeat_record_correction(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Record a user interaction for idle greeting detection
|
||||
/// Call this from frontend whenever user sends a message
|
||||
#[tauri::command]
|
||||
pub async fn heartbeat_record_interaction(
|
||||
agent_id: String,
|
||||
) -> Result<(), String> {
|
||||
record_interaction(&agent_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -10,12 +10,12 @@
|
||||
//! Phase 3 of Intelligence Layer Migration.
|
||||
//! Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.2.3
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use tracing::{error, info, warn};
|
||||
use tracing::{error, warn};
|
||||
|
||||
// === Types ===
|
||||
|
||||
|
||||
@@ -29,24 +29,10 @@ pub mod reflection;
|
||||
pub mod identity;
|
||||
|
||||
// Re-export main types for convenience
|
||||
pub use heartbeat::{
|
||||
HeartbeatConfig, HeartbeatEngine, HeartbeatEngineState,
|
||||
HeartbeatAlert, HeartbeatResult, HeartbeatStatus,
|
||||
Urgency, NotifyChannel, ProactivityLevel,
|
||||
};
|
||||
pub use compactor::{
|
||||
CompactionConfig, ContextCompactor, CompactableMessage,
|
||||
CompactionResult, CompactionCheck, CompactionUrgency,
|
||||
estimate_tokens, estimate_messages_tokens,
|
||||
};
|
||||
pub use heartbeat::HeartbeatEngineState;
|
||||
pub use reflection::{
|
||||
ReflectionConfig, ReflectionEngine, ReflectionEngineState,
|
||||
ReflectionResult, ReflectionState, ReflectionResult as ReflectionOutput,
|
||||
PatternObservation, ImprovementSuggestion, IdentityChangeProposal as ReflectionIdentityChangeProposal,
|
||||
Sentiment, Priority, MemoryEntryForAnalysis,
|
||||
ReflectionEngine, ReflectionEngineState,
|
||||
};
|
||||
pub use identity::{
|
||||
AgentIdentityManager, IdentityManagerState,
|
||||
IdentityFiles, IdentityChangeProposal, IdentitySnapshot,
|
||||
IdentityFile, ProposalStatus,
|
||||
};
|
||||
|
||||
@@ -174,6 +174,13 @@ pub async fn kernel_init(
|
||||
zclaw_kernel::config::KernelConfig::default()
|
||||
};
|
||||
|
||||
// Debug: print skills directory
|
||||
if let Some(ref skills_dir) = config.skills_dir {
|
||||
println!("[kernel_init] Skills directory: {} (exists: {})", skills_dir.display(), skills_dir.exists());
|
||||
} else {
|
||||
println!("[kernel_init] No skills directory configured");
|
||||
}
|
||||
|
||||
let base_url = config.llm.base_url.clone();
|
||||
let model = config.llm.model.clone();
|
||||
|
||||
@@ -353,6 +360,8 @@ pub enum StreamChatEvent {
|
||||
ToolStart { name: String, input: serde_json::Value },
|
||||
/// Tool use completed
|
||||
ToolEnd { name: String, output: serde_json::Value },
|
||||
/// New iteration started (multi-turn tool calling)
|
||||
IterationStart { iteration: usize, max_iterations: usize },
|
||||
/// Stream completed
|
||||
Complete { input_tokens: u32, output_tokens: u32 },
|
||||
/// Error occurred
|
||||
@@ -406,24 +415,38 @@ pub async fn agent_chat_stream(
|
||||
tokio::spawn(async move {
|
||||
use zclaw_runtime::LoopEvent;
|
||||
|
||||
println!("[agent_chat_stream] Starting to process stream events for session: {}", session_id);
|
||||
|
||||
while let Some(event) = rx.recv().await {
|
||||
println!("[agent_chat_stream] Received event: {:?}", event);
|
||||
|
||||
let stream_event = match event {
|
||||
LoopEvent::Delta(delta) => {
|
||||
println!("[agent_chat_stream] Delta: {} bytes", delta.len());
|
||||
StreamChatEvent::Delta { delta }
|
||||
}
|
||||
LoopEvent::ToolStart { name, input } => {
|
||||
println!("[agent_chat_stream] ToolStart: {} input={:?}", name, input);
|
||||
StreamChatEvent::ToolStart { name, input }
|
||||
}
|
||||
LoopEvent::ToolEnd { name, output } => {
|
||||
println!("[agent_chat_stream] ToolEnd: {} output={:?}", name, output);
|
||||
StreamChatEvent::ToolEnd { name, output }
|
||||
}
|
||||
LoopEvent::IterationStart { iteration, max_iterations } => {
|
||||
println!("[agent_chat_stream] IterationStart: {}/{}", iteration, max_iterations);
|
||||
StreamChatEvent::IterationStart { iteration, max_iterations }
|
||||
}
|
||||
LoopEvent::Complete(result) => {
|
||||
println!("[agent_chat_stream] Complete: input_tokens={}, output_tokens={}",
|
||||
result.input_tokens, result.output_tokens);
|
||||
StreamChatEvent::Complete {
|
||||
input_tokens: result.input_tokens,
|
||||
output_tokens: result.output_tokens,
|
||||
}
|
||||
}
|
||||
LoopEvent::Error(message) => {
|
||||
println!("[agent_chat_stream] Error: {}", message);
|
||||
StreamChatEvent::Error { message }
|
||||
}
|
||||
};
|
||||
@@ -434,6 +457,8 @@ pub async fn agent_chat_stream(
|
||||
"event": stream_event
|
||||
}));
|
||||
}
|
||||
|
||||
println!("[agent_chat_stream] Stream ended for session: {}", session_id);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@@ -460,6 +485,8 @@ pub struct SkillInfoResponse {
|
||||
pub tags: Vec<String>,
|
||||
pub mode: String,
|
||||
pub enabled: bool,
|
||||
pub triggers: Vec<String>,
|
||||
pub category: Option<String>,
|
||||
}
|
||||
|
||||
impl From<zclaw_skills::SkillManifest> for SkillInfoResponse {
|
||||
@@ -473,6 +500,8 @@ impl From<zclaw_skills::SkillManifest> for SkillInfoResponse {
|
||||
tags: manifest.tags,
|
||||
mode: format!("{:?}", manifest.mode),
|
||||
enabled: manifest.enabled,
|
||||
triggers: manifest.triggers,
|
||||
category: manifest.category,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -491,6 +520,10 @@ pub async fn skill_list(
|
||||
.ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
|
||||
|
||||
let skills = kernel.list_skills().await;
|
||||
println!("[skill_list] Found {} skills", skills.len());
|
||||
for skill in &skills {
|
||||
println!("[skill_list] - {} ({})", skill.name, skill.id);
|
||||
}
|
||||
Ok(skills.into_iter().map(SkillInfoResponse::from).collect())
|
||||
}
|
||||
|
||||
@@ -603,22 +636,67 @@ pub struct HandInfoResponse {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub status: String,
|
||||
pub requirements_met: bool,
|
||||
pub needs_approval: bool,
|
||||
pub dependencies: Vec<String>,
|
||||
pub tags: Vec<String>,
|
||||
pub enabled: bool,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub category: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub icon: Option<String>,
|
||||
#[serde(default)]
|
||||
pub tool_count: u32,
|
||||
#[serde(default)]
|
||||
pub metric_count: u32,
|
||||
}
|
||||
|
||||
impl From<zclaw_hands::HandConfig> for HandInfoResponse {
|
||||
fn from(config: zclaw_hands::HandConfig) -> Self {
|
||||
// Determine status based on enabled and dependencies
|
||||
let status = if !config.enabled {
|
||||
"unavailable".to_string()
|
||||
} else if config.needs_approval {
|
||||
"needs_approval".to_string()
|
||||
} else {
|
||||
"idle".to_string()
|
||||
};
|
||||
|
||||
// Extract category from tags if present
|
||||
let category = config.tags.iter().find(|t| {
|
||||
["research", "automation", "browser", "data", "media", "communication"].contains(&t.as_str())
|
||||
}).cloned();
|
||||
|
||||
// Map tags to icon
|
||||
let icon = if config.tags.contains(&"browser".to_string()) {
|
||||
Some("globe".to_string())
|
||||
} else if config.tags.contains(&"research".to_string()) {
|
||||
Some("search".to_string())
|
||||
} else if config.tags.contains(&"media".to_string()) {
|
||||
Some("video".to_string())
|
||||
} else if config.tags.contains(&"data".to_string()) {
|
||||
Some("database".to_string())
|
||||
} else if config.tags.contains(&"communication".to_string()) {
|
||||
Some("message-circle".to_string())
|
||||
} else {
|
||||
Some("zap".to_string())
|
||||
};
|
||||
|
||||
Self {
|
||||
id: config.id,
|
||||
name: config.name,
|
||||
description: config.description,
|
||||
status,
|
||||
requirements_met: config.enabled && config.dependencies.is_empty(),
|
||||
needs_approval: config.needs_approval,
|
||||
dependencies: config.dependencies,
|
||||
tags: config.tags,
|
||||
enabled: config.enabled,
|
||||
category,
|
||||
icon,
|
||||
tool_count: 0,
|
||||
metric_count: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,13 +13,7 @@ pub mod persistent;
|
||||
pub mod crypto;
|
||||
|
||||
// Re-export main types for convenience
|
||||
pub use extractor::{SessionExtractor, ExtractedMemory, ExtractionConfig};
|
||||
pub use context_builder::{ContextBuilder, EnhancedContext, ContextLevel};
|
||||
pub use persistent::{
|
||||
PersistentMemory, PersistentMemoryStore, MemorySearchQuery, MemoryStats,
|
||||
generate_memory_id,
|
||||
};
|
||||
pub use crypto::{
|
||||
CryptoError, KEY_SIZE, MEMORY_ENCRYPTION_KEY_NAME,
|
||||
derive_key, generate_key, encrypt, decrypt,
|
||||
};
|
||||
|
||||
@@ -15,7 +15,7 @@ use tokio::sync::Mutex;
|
||||
use uuid::Uuid;
|
||||
use tauri::Manager;
|
||||
use sqlx::{SqliteConnection, Connection, Row, sqlite::SqliteRow};
|
||||
use chrono::{DateTime, Utc};
|
||||
use chrono::Utc;
|
||||
|
||||
/// Memory entry stored in SQLite
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
use crate::memory::{PersistentMemory, PersistentMemoryStore, MemorySearchQuery, MemoryStats, generate_memory_id};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Manager, State};
|
||||
use tauri::{AppHandle, State};
|
||||
use tokio::sync::Mutex;
|
||||
use chrono::Utc;
|
||||
|
||||
|
||||
@@ -211,6 +211,28 @@ function App() {
|
||||
|
||||
await intelligenceClient.heartbeat.start(defaultAgentId);
|
||||
console.log('[App] Heartbeat engine started for self-evolution');
|
||||
|
||||
// Set up periodic memory stats sync (every 5 minutes)
|
||||
const MEMORY_STATS_SYNC_INTERVAL = 5 * 60 * 1000;
|
||||
const statsSyncInterval = setInterval(async () => {
|
||||
try {
|
||||
const stats = await intelligenceClient.memory.stats();
|
||||
const taskCount = stats.byType?.['task'] || 0;
|
||||
await intelligenceClient.heartbeat.updateMemoryStats(
|
||||
defaultAgentId,
|
||||
taskCount,
|
||||
stats.totalEntries,
|
||||
stats.storageSizeBytes
|
||||
);
|
||||
console.log('[App] Memory stats synced (periodic)');
|
||||
} catch (err) {
|
||||
console.warn('[App] Periodic memory stats sync failed:', err);
|
||||
}
|
||||
}, MEMORY_STATS_SYNC_INTERVAL);
|
||||
|
||||
// Store interval for cleanup
|
||||
// @ts-expect-error - Global cleanup reference
|
||||
window.__ZCLAW_STATS_SYNC_INTERVAL__ = statsSyncInterval;
|
||||
} catch (err) {
|
||||
console.warn('[App] Failed to start heartbeat engine:', err);
|
||||
// Non-critical, continue without heartbeat
|
||||
@@ -229,6 +251,12 @@ function App() {
|
||||
|
||||
return () => {
|
||||
mounted = false;
|
||||
// Clean up periodic stats sync interval
|
||||
// @ts-expect-error - Global cleanup reference
|
||||
if (window.__ZCLAW_STATS_SYNC_INTERVAL__) {
|
||||
// @ts-expect-error - Global cleanup reference
|
||||
clearInterval(window.__ZCLAW_STATS_SYNC_INTERVAL__);
|
||||
}
|
||||
};
|
||||
}, [connect, onboardingNeeded, onboardingLoading]);
|
||||
|
||||
@@ -282,8 +310,41 @@ function App() {
|
||||
return (
|
||||
<AgentOnboardingWizard
|
||||
isOpen={true}
|
||||
onClose={() => {
|
||||
// Skip onboarding and mark as completed with default values
|
||||
onClose={async () => {
|
||||
// Skip onboarding but still create a default agent with default personality
|
||||
try {
|
||||
const { getGatewayClient } = await import('./lib/gateway-client');
|
||||
const client = getGatewayClient();
|
||||
if (client) {
|
||||
// Create default agent with versatile assistant personality
|
||||
const defaultAgent = await client.createClone({
|
||||
name: '全能助手',
|
||||
role: '全能型 AI 助手',
|
||||
nickname: '小龙',
|
||||
emoji: '🦞',
|
||||
personality: 'friendly',
|
||||
scenarios: ['coding', 'writing', 'research', 'product', 'data'],
|
||||
userName: 'User',
|
||||
userRole: 'user',
|
||||
communicationStyle: '亲切、耐心、善解人意,用易懂的语言解释复杂概念',
|
||||
});
|
||||
|
||||
if (defaultAgent?.clone) {
|
||||
setCurrentAgent({
|
||||
id: defaultAgent.clone.id,
|
||||
name: defaultAgent.clone.name,
|
||||
icon: defaultAgent.clone.emoji || '🦞',
|
||||
color: 'bg-gradient-to-br from-orange-500 to-red-500',
|
||||
lastMessage: defaultAgent.clone.role || '全能型 AI 助手',
|
||||
time: '',
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn('[App] Failed to create default agent on skip:', err);
|
||||
}
|
||||
|
||||
// Mark onboarding as completed
|
||||
markCompleted({
|
||||
userName: 'User',
|
||||
userRole: 'user',
|
||||
|
||||
@@ -30,6 +30,30 @@ import {
|
||||
import { useChatStore } from '../store/chatStore';
|
||||
import { Button, Badge } from './ui';
|
||||
|
||||
// === Error Parsing Utility ===
|
||||
|
||||
type ProposalOperation = 'approval' | 'rejection' | 'restore';
|
||||
|
||||
function parseProposalError(err: unknown, operation: ProposalOperation): string {
|
||||
const errorMessage = err instanceof Error ? err.message : String(err);
|
||||
|
||||
if (errorMessage.includes('not found') || errorMessage.includes('不存在')) {
|
||||
return '提案不存在或已被处理,请刷新页面';
|
||||
}
|
||||
if (errorMessage.includes('not pending') || errorMessage.includes('已处理')) {
|
||||
return '该提案已被处理,请刷新页面';
|
||||
}
|
||||
if (errorMessage.includes('network') || errorMessage.includes('fetch') || errorMessage.includes('网络')) {
|
||||
return '网络连接失败,请检查网络后重试';
|
||||
}
|
||||
if (errorMessage.includes('timeout') || errorMessage.includes('超时')) {
|
||||
return '操作超时,请重试';
|
||||
}
|
||||
|
||||
const operationName = operation === 'approval' ? '审批' : operation === 'rejection' ? '拒绝' : '恢复';
|
||||
return `${operationName}失败: ${errorMessage}`;
|
||||
}
|
||||
|
||||
// === Diff View Component ===
|
||||
|
||||
function DiffView({
|
||||
@@ -331,8 +355,7 @@ export function IdentityChangeProposalPanel() {
|
||||
setSnapshots(agentSnapshots);
|
||||
} catch (err) {
|
||||
console.error('[IdentityChangeProposal] Failed to approve:', err);
|
||||
const message = err instanceof Error ? err.message : '审批失败,请重试';
|
||||
setError(`审批失败: ${message}`);
|
||||
setError(parseProposalError(err, 'approval'));
|
||||
} finally {
|
||||
setProcessingId(null);
|
||||
}
|
||||
@@ -349,8 +372,7 @@ export function IdentityChangeProposalPanel() {
|
||||
setProposals(pendingProposals);
|
||||
} catch (err) {
|
||||
console.error('[IdentityChangeProposal] Failed to reject:', err);
|
||||
const message = err instanceof Error ? err.message : '拒绝失败,请重试';
|
||||
setError(`拒绝失败: ${message}`);
|
||||
setError(parseProposalError(err, 'rejection'));
|
||||
} finally {
|
||||
setProcessingId(null);
|
||||
}
|
||||
@@ -367,8 +389,7 @@ export function IdentityChangeProposalPanel() {
|
||||
setSnapshots(agentSnapshots);
|
||||
} catch (err) {
|
||||
console.error('[IdentityChangeProposal] Failed to restore:', err);
|
||||
const message = err instanceof Error ? err.message : '恢复失败,请重试';
|
||||
setError(`恢复失败: ${message}`);
|
||||
setError(parseProposalError(err, 'restore'));
|
||||
} finally {
|
||||
setProcessingId(null);
|
||||
}
|
||||
|
||||
@@ -112,7 +112,7 @@ export function RightPanel() {
|
||||
() => clones.find((clone) => clone.id === currentAgent?.id),
|
||||
[clones, currentAgent?.id]
|
||||
);
|
||||
const focusAreas = selectedClone?.scenarios?.length ? selectedClone.scenarios : ['coding', 'research'];
|
||||
const focusAreas = selectedClone?.scenarios?.length ? selectedClone.scenarios : ['coding', 'writing', 'research', 'product', 'data'];
|
||||
const bootstrapFiles = selectedClone?.bootstrapFiles || [];
|
||||
const gatewayUrl = quickConfig.gatewayUrl || getStoredGatewayUrl();
|
||||
|
||||
@@ -172,8 +172,8 @@ export function RightPanel() {
|
||||
const assistantMsgCount = messages.filter(m => m.role === 'assistant').length;
|
||||
const toolCallCount = messages.filter(m => m.role === 'tool').length;
|
||||
const runtimeSummary = connected ? '已连接' : connectionState === 'connecting' ? '连接中...' : connectionState === 'reconnecting' ? '重连中...' : '未连接';
|
||||
const userNameDisplay = selectedClone?.userName || quickConfig.userName || '未设置';
|
||||
const userAddressing = selectedClone?.nickname || selectedClone?.userName || quickConfig.userName || '未设置';
|
||||
const userNameDisplay = selectedClone?.userName || quickConfig.userName || 'User';
|
||||
const userAddressing = selectedClone?.nickname || selectedClone?.userName || quickConfig.userName || 'User';
|
||||
const localTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone || '系统时区';
|
||||
|
||||
// Extract code blocks from all messages (both from codeBlocks property and content parsing)
|
||||
@@ -342,23 +342,27 @@ export function RightPanel() {
|
||||
>
|
||||
<div className="flex items-start justify-between gap-3">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="w-12 h-12 rounded-full bg-gradient-to-br from-cyan-400 to-blue-500 flex items-center justify-center text-white text-lg font-semibold">
|
||||
<div className="w-12 h-12 rounded-full bg-gradient-to-br from-orange-400 to-red-500 flex items-center justify-center text-white text-lg font-semibold">
|
||||
{selectedClone?.emoji ? (
|
||||
<span className="text-2xl">{selectedClone.emoji}</span>
|
||||
) : (
|
||||
<span>{(selectedClone?.nickname || currentAgent?.name || 'Z').slice(0, 1)}</span>
|
||||
<span>🦞</span>
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
<div className="text-base font-semibold text-gray-900 dark:text-gray-100 flex items-center gap-2">
|
||||
{selectedClone?.name || currentAgent?.name || 'ZCLAW'}
|
||||
{selectedClone?.personality && (
|
||||
{selectedClone?.name || currentAgent?.name || '全能助手'}
|
||||
{selectedClone?.personality ? (
|
||||
<Badge variant="default" className="text-xs ml-1">
|
||||
{getPersonalityById(selectedClone.personality)?.label || selectedClone.personality}
|
||||
</Badge>
|
||||
) : (
|
||||
<Badge variant="default" className="text-xs ml-1">
|
||||
友好亲切
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
<div className="text-sm text-gray-500 dark:text-gray-400">{selectedClone?.role || 'AI coworker'}</div>
|
||||
<div className="text-sm text-gray-500 dark:text-gray-400">{selectedClone?.role || '全能型 AI 助手'}</div>
|
||||
</div>
|
||||
</div>
|
||||
{selectedClone ? (
|
||||
@@ -410,10 +414,10 @@ export function RightPanel() {
|
||||
</div>
|
||||
) : (
|
||||
<div className="space-y-3 text-sm">
|
||||
<AgentRow label="Role" value={selectedClone?.role || '-'} />
|
||||
<AgentRow label="Nickname" value={selectedClone?.nickname || '-'} />
|
||||
<AgentRow label="Role" value={selectedClone?.role || '全能型 AI 助手'} />
|
||||
<AgentRow label="Nickname" value={selectedClone?.nickname || '小龙'} />
|
||||
<AgentRow label="Model" value={selectedClone?.model || currentModel} />
|
||||
<AgentRow label="Emoji" value={selectedClone?.nickname?.slice(0, 1) || '🦞'} />
|
||||
<AgentRow label="Emoji" value={selectedClone?.emoji || '🦞'} />
|
||||
</div>
|
||||
)}
|
||||
</motion.div>
|
||||
|
||||
@@ -25,6 +25,7 @@ import {
|
||||
RefreshCw,
|
||||
} from 'lucide-react';
|
||||
import { useConfigStore } from '../store/configStore';
|
||||
import { useConnectionStore } from '../store/connectionStore';
|
||||
import {
|
||||
adaptSkillsCatalog,
|
||||
type SkillDisplay,
|
||||
@@ -250,6 +251,9 @@ export function SkillMarket({
|
||||
const loadSkillsCatalog = useConfigStore((s) => s.loadSkillsCatalog);
|
||||
const updateSkill = useConfigStore((s) => s.updateSkill);
|
||||
|
||||
// Watch connection state to reload skills when connected
|
||||
const connectionState = useConnectionStore((s) => s.connectionState);
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const [categoryFilter, setCategoryFilter] = useState<CategoryFilter>('all');
|
||||
const [expandedSkillId, setExpandedSkillId] = useState<string | null>(null);
|
||||
@@ -258,10 +262,12 @@ export function SkillMarket({
|
||||
// Adapt skills to display format
|
||||
const skills = useMemo(() => adaptSkillsCatalog(skillsCatalog), [skillsCatalog]);
|
||||
|
||||
// Load skills on mount
|
||||
// Load skills on mount and when connection state changes to 'connected'
|
||||
useEffect(() => {
|
||||
loadSkillsCatalog();
|
||||
}, [loadSkillsCatalog]);
|
||||
if (connectionState === 'connected') {
|
||||
loadSkillsCatalog();
|
||||
}
|
||||
}, [loadSkillsCatalog, connectionState]);
|
||||
|
||||
// Filter skills
|
||||
const filteredSkills = useMemo(() => {
|
||||
|
||||
92
desktop/src/components/WorkflowBuilder/NodePalette.tsx
Normal file
92
desktop/src/components/WorkflowBuilder/NodePalette.tsx
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
 * Node Palette Component
 *
 * Draggable palette of available node types, grouped by category.
 * Items are dragged onto the canvas via the HTML5 drag-and-drop API;
 * the node type travels in the 'application/reactflow' data payload,
 * which the canvas drop handler reads to create the node.
 */

import React, { DragEvent } from 'react';
import type { NodePaletteItem, NodeCategory } from '../../lib/workflow-builder/types';

interface NodePaletteProps {
  /** Palette items grouped by category; empty categories are hidden. */
  categories: Record<NodeCategory, NodePaletteItem[]>;
  /** Invoked when a drag begins, with the node type being dragged. */
  onDragStart: (type: string) => void;
  /** Invoked when the drag ends (dropped or cancelled). */
  onDragEnd: () => void;
}

// NOTE: Tailwind's JIT compiler only emits classes it can find as complete
// string literals in the source. The original `text-${color}-700`
// interpolation produced class names Tailwind never generated, so the
// category headings silently lost their colors. Spell each class out in full.
const categoryLabels: Record<NodeCategory, { label: string; headingClass: string }> = {
  input: { label: 'Input', headingClass: 'text-emerald-700' },
  ai: { label: 'AI & Skills', headingClass: 'text-violet-700' },
  action: { label: 'Actions', headingClass: 'text-amber-700' },
  control: { label: 'Control Flow', headingClass: 'text-orange-700' },
  output: { label: 'Output', headingClass: 'text-blue-700' },
};

export function NodePalette({ categories, onDragStart, onDragEnd }: NodePaletteProps) {
  // Stash the node type in the drag payload so the canvas `onDrop` handler
  // (which reads 'application/reactflow') knows which node to create.
  const handleDragStart = (event: DragEvent, type: string) => {
    event.dataTransfer.setData('application/reactflow', type);
    event.dataTransfer.effectAllowed = 'move';
    onDragStart(type);
  };

  const handleDragEnd = () => {
    onDragEnd();
  };

  return (
    <div className="w-64 bg-white border-r border-gray-200 overflow-y-auto">
      <div className="p-4 border-b border-gray-200">
        <h2 className="font-semibold text-gray-800">Nodes</h2>
        <p className="text-sm text-gray-500">Drag nodes to canvas</p>
      </div>

      <div className="p-2">
        {(Object.keys(categories) as NodeCategory[]).map((category) => {
          const items = categories[category];
          if (items.length === 0) return null;

          const { label, headingClass } = categoryLabels[category];

          return (
            <div key={category} className="mb-4">
              <h3 className={`text-sm font-medium ${headingClass} mb-2 px-2`}>
                {label}
              </h3>

              <div className="space-y-1">
                {items.map((item) => (
                  <div
                    key={item.type}
                    draggable
                    onDragStart={(e) => handleDragStart(e, item.type)}
                    onDragEnd={handleDragEnd}
                    className={`
                      flex items-center gap-3 px-3 py-2 rounded-lg
                      bg-gray-50 hover:bg-gray-100 cursor-grab
                      border border-transparent hover:border-gray-200
                      transition-all duration-150
                      active:cursor-grabbing
                    `}
                  >
                    <span className="text-lg">{item.icon}</span>
                    <div className="flex-1 min-w-0">
                      <div className="font-medium text-gray-700 text-sm">
                        {item.label}
                      </div>
                      <div className="text-xs text-gray-500 truncate">
                        {item.description}
                      </div>
                    </div>
                  </div>
                ))}
              </div>
            </div>
          );
        })}
      </div>
    </div>
  );
}

export default NodePalette;
|
||||
295
desktop/src/components/WorkflowBuilder/PropertyPanel.tsx
Normal file
295
desktop/src/components/WorkflowBuilder/PropertyPanel.tsx
Normal file
@@ -0,0 +1,295 @@
|
||||
/**
|
||||
* Property Panel Component
|
||||
*
|
||||
* Panel for editing node properties.
|
||||
*/
|
||||
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import type { WorkflowNodeData } from '../../lib/workflow-builder/types';
|
||||
|
||||
interface PropertyPanelProps {
|
||||
nodeId: string;
|
||||
nodeData: WorkflowNodeData | undefined;
|
||||
onUpdate: (data: Partial<WorkflowNodeData>) => void;
|
||||
onDelete: () => void;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
export function PropertyPanel({
|
||||
nodeId,
|
||||
nodeData,
|
||||
onUpdate,
|
||||
onDelete,
|
||||
onClose,
|
||||
}: PropertyPanelProps) {
|
||||
const [localData, setLocalData] = useState<Partial<WorkflowNodeData>>({});
|
||||
|
||||
useEffect(() => {
|
||||
if (nodeData) {
|
||||
setLocalData(nodeData);
|
||||
}
|
||||
}, [nodeData]);
|
||||
|
||||
if (!nodeData) return null;
|
||||
|
||||
const handleChange = (field: string, value: unknown) => {
|
||||
const updated = { ...localData, [field]: value };
|
||||
setLocalData(updated);
|
||||
onUpdate({ [field]: value } as Partial<WorkflowNodeData>);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="w-80 bg-white border-l border-gray-200 overflow-y-auto">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between px-4 py-3 border-b border-gray-200">
|
||||
<h2 className="font-semibold text-gray-800">Properties</h2>
|
||||
<button
|
||||
onClick={onClose}
|
||||
className="text-gray-400 hover:text-gray-600"
|
||||
>
|
||||
✕
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Content */}
|
||||
<div className="p-4 space-y-4">
|
||||
{/* Common Fields */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Label
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={localData.label || ''}
|
||||
onChange={(e) => handleChange('label', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Type-specific Fields */}
|
||||
{renderTypeSpecificFields(nodeData.type, localData, handleChange)}
|
||||
|
||||
{/* Delete Button */}
|
||||
<div className="pt-4 border-t border-gray-200">
|
||||
<button
|
||||
onClick={onDelete}
|
||||
className="w-full px-4 py-2 text-red-600 bg-red-50 border border-red-200 rounded-lg hover:bg-red-100"
|
||||
>
|
||||
Delete Node
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function renderTypeSpecificFields(
|
||||
type: string,
|
||||
data: Partial<WorkflowNodeData>,
|
||||
onChange: (field: string, value: unknown) => void
|
||||
) {
|
||||
switch (type) {
|
||||
case 'input':
|
||||
return (
|
||||
<>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Variable Name
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={(data as any).variableName || ''}
|
||||
onChange={(e) => onChange('variableName', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg font-mono"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Default Value
|
||||
</label>
|
||||
<textarea
|
||||
value={(data as any).defaultValue || ''}
|
||||
onChange={(e) => {
|
||||
try {
|
||||
const parsed = JSON.parse(e.target.value);
|
||||
onChange('defaultValue', parsed);
|
||||
} catch {
|
||||
onChange('defaultValue', e.target.value);
|
||||
}
|
||||
}}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg font-mono text-sm"
|
||||
rows={3}
|
||||
placeholder="JSON or string value"
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
case 'llm':
|
||||
return (
|
||||
<>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Template
|
||||
</label>
|
||||
<textarea
|
||||
value={(data as any).template || ''}
|
||||
onChange={(e) => onChange('template', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg font-mono text-sm"
|
||||
rows={6}
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Model Override
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={(data as any).model || ''}
|
||||
onChange={(e) => onChange('model', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg"
|
||||
placeholder="e.g., gpt-4"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Temperature
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
min="0"
|
||||
max="2"
|
||||
step="0.1"
|
||||
value={(data as any).temperature ?? ''}
|
||||
onChange={(e) => onChange('temperature', parseFloat(e.target.value))}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={(data as any).jsonMode || false}
|
||||
onChange={(e) => onChange('jsonMode', e.target.checked)}
|
||||
className="w-4 h-4 text-blue-600 rounded"
|
||||
/>
|
||||
<label className="text-sm text-gray-700">JSON Mode</label>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
case 'skill':
|
||||
return (
|
||||
<>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Skill ID
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={(data as any).skillId || ''}
|
||||
onChange={(e) => onChange('skillId', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg font-mono"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Input Mappings (JSON)
|
||||
</label>
|
||||
<textarea
|
||||
value={JSON.stringify((data as any).inputMappings || {}, null, 2)}
|
||||
onChange={(e) => {
|
||||
try {
|
||||
const parsed = JSON.parse(e.target.value);
|
||||
onChange('inputMappings', parsed);
|
||||
} catch {
|
||||
// Invalid JSON, ignore
|
||||
}
|
||||
}}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg font-mono text-sm"
|
||||
rows={4}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
case 'hand':
|
||||
return (
|
||||
<>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Hand ID
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={(data as any).handId || ''}
|
||||
onChange={(e) => onChange('handId', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg font-mono"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Action
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={(data as any).action || ''}
|
||||
onChange={(e) => onChange('action', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg"
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
case 'export':
|
||||
return (
|
||||
<>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Formats
|
||||
</label>
|
||||
<div className="space-y-2">
|
||||
{['json', 'markdown', 'html', 'pptx', 'pdf'].map((format) => (
|
||||
<label key={format} className="flex items-center gap-2">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={((data as any).formats || []).includes(format)}
|
||||
onChange={(e) => {
|
||||
const formats = (data as any).formats || [];
|
||||
if (e.target.checked) {
|
||||
onChange('formats', [...formats, format]);
|
||||
} else {
|
||||
onChange('formats', formats.filter((f: string) => f !== format));
|
||||
}
|
||||
}}
|
||||
className="w-4 h-4 text-blue-600 rounded"
|
||||
/>
|
||||
<span className="text-sm text-gray-700 capitalize">{format}</span>
|
||||
</label>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Output Directory
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={(data as any).outputDir || ''}
|
||||
onChange={(e) => onChange('outputDir', e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg"
|
||||
placeholder="./output"
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
default:
|
||||
return (
|
||||
<div className="text-sm text-gray-500 italic">
|
||||
No additional properties for this node type.
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export default PropertyPanel;
|
||||
324
desktop/src/components/WorkflowBuilder/WorkflowBuilder.tsx
Normal file
324
desktop/src/components/WorkflowBuilder/WorkflowBuilder.tsx
Normal file
@@ -0,0 +1,324 @@
|
||||
/**
|
||||
* Workflow Builder Component
|
||||
*
|
||||
* Visual workflow editor using React Flow for creating and editing
|
||||
* Pipeline DSL configurations.
|
||||
*/
|
||||
|
||||
import React, { useCallback, useRef, useEffect } from 'react';
|
||||
import {
|
||||
ReactFlow,
|
||||
Controls,
|
||||
Background,
|
||||
MiniMap,
|
||||
BackgroundVariant,
|
||||
Connection,
|
||||
addEdge,
|
||||
useNodesState,
|
||||
useEdgesState,
|
||||
Node,
|
||||
Edge,
|
||||
NodeTypes,
|
||||
Panel,
|
||||
ReactFlowProvider,
|
||||
useReactFlow,
|
||||
} from '@xyflow/react';
|
||||
import '@xyflow/react/dist/style.css';
|
||||
|
||||
import { useWorkflowBuilderStore, nodePaletteItems, paletteCategories } from '../../store/workflowBuilderStore';
|
||||
import type { WorkflowNodeType, WorkflowNodeData } from '../../lib/workflow-builder/types';
|
||||
import { validateCanvas } from '../../lib/workflow-builder/yaml-converter';
|
||||
|
||||
// Import custom node components
|
||||
import { InputNode } from './nodes/InputNode';
|
||||
import { LlmNode } from './nodes/LlmNode';
|
||||
import { SkillNode } from './nodes/SkillNode';
|
||||
import { HandNode } from './nodes/HandNode';
|
||||
import { ConditionNode } from './nodes/ConditionNode';
|
||||
import { ParallelNode } from './nodes/ParallelNode';
|
||||
import { ExportNode } from './nodes/ExportNode';
|
||||
import { HttpNode } from './nodes/HttpNode';
|
||||
import { OrchestrationNode } from './nodes/OrchestrationNode';
|
||||
|
||||
import { NodePalette } from './NodePalette';
|
||||
import { PropertyPanel } from './PropertyPanel';
|
||||
import { WorkflowToolbar } from './WorkflowToolbar';
|
||||
|
||||
// =============================================================================
|
||||
// Node Types Configuration
|
||||
// =============================================================================
|
||||
|
||||
const nodeTypes: NodeTypes = {
|
||||
input: InputNode,
|
||||
llm: LlmNode,
|
||||
skill: SkillNode,
|
||||
hand: HandNode,
|
||||
condition: ConditionNode,
|
||||
parallel: ParallelNode,
|
||||
export: ExportNode,
|
||||
http: HttpNode,
|
||||
orchestration: OrchestrationNode,
|
||||
};
|
||||
|
||||
// =============================================================================
|
||||
// Main Component
|
||||
// =============================================================================
|
||||
|
||||
export function WorkflowBuilderInternal() {
|
||||
const reactFlowWrapper = useRef<HTMLDivElement>(null);
|
||||
const { screenToFlowPosition, fitView } = useReactFlow();
|
||||
|
||||
const {
|
||||
canvas,
|
||||
isDirty,
|
||||
selectedNodeId,
|
||||
validation,
|
||||
addNode,
|
||||
updateNode,
|
||||
deleteNode,
|
||||
addEdge: addStoreEdge,
|
||||
selectNode,
|
||||
saveWorkflow,
|
||||
validate,
|
||||
setDragging,
|
||||
} = useWorkflowBuilderStore();
|
||||
|
||||
// Local state for React Flow
|
||||
const [nodes, setNodes, onNodesChange] = useNodesState([]);
|
||||
const [edges, setEdges, onEdgesChange] = useEdgesState([]);
|
||||
|
||||
// Sync canvas state with React Flow
|
||||
useEffect(() => {
|
||||
if (canvas) {
|
||||
setNodes(canvas.nodes.map(n => ({
|
||||
id: n.id,
|
||||
type: n.type,
|
||||
position: n.position,
|
||||
data: n.data,
|
||||
})));
|
||||
setEdges(canvas.edges.map(e => ({
|
||||
id: e.id,
|
||||
source: e.source,
|
||||
target: e.target,
|
||||
type: e.type || 'default',
|
||||
animated: true,
|
||||
})));
|
||||
} else {
|
||||
setNodes([]);
|
||||
setEdges([]);
|
||||
}
|
||||
}, [canvas?.id]);
|
||||
|
||||
// Handle node changes (position, selection)
|
||||
const handleNodesChange = useCallback(
|
||||
(changes) => {
|
||||
onNodesChange(changes);
|
||||
|
||||
// Sync position changes back to store
|
||||
for (const change of changes) {
|
||||
if (change.type === 'position' && change.position) {
|
||||
const node = nodes.find(n => n.id === change.id);
|
||||
if (node) {
|
||||
// Position updates are handled by React Flow internally
|
||||
}
|
||||
}
|
||||
if (change.type === 'select') {
|
||||
selectNode(change.selected ? change.id : null);
|
||||
}
|
||||
}
|
||||
},
|
||||
[onNodesChange, nodes, selectNode]
|
||||
);
|
||||
|
||||
// Handle edge changes
|
||||
const handleEdgesChange = useCallback(
|
||||
(changes) => {
|
||||
onEdgesChange(changes);
|
||||
},
|
||||
[onEdgesChange]
|
||||
);
|
||||
|
||||
// Handle new connections
|
||||
const onConnect = useCallback(
|
||||
(connection: Connection) => {
|
||||
if (connection.source && connection.target) {
|
||||
addStoreEdge(connection.source, connection.target);
|
||||
setEdges((eds) =>
|
||||
addEdge(
|
||||
{
|
||||
...connection,
|
||||
type: 'default',
|
||||
animated: true,
|
||||
},
|
||||
eds
|
||||
)
|
||||
);
|
||||
}
|
||||
},
|
||||
[addStoreEdge, setEdges]
|
||||
);
|
||||
|
||||
// Handle node click
|
||||
const onNodeClick = useCallback(
|
||||
(_event: React.MouseEvent, node: Node) => {
|
||||
selectNode(node.id);
|
||||
},
|
||||
[selectNode]
|
||||
);
|
||||
|
||||
// Handle pane click (deselect)
|
||||
const onPaneClick = useCallback(() => {
|
||||
selectNode(null);
|
||||
}, [selectNode]);
|
||||
|
||||
// Handle drag over for palette items
|
||||
const onDragOver = useCallback((event: React.DragEvent) => {
|
||||
event.preventDefault();
|
||||
event.dataTransfer.dropEffect = 'move';
|
||||
}, []);
|
||||
|
||||
// Handle drop from palette
|
||||
const onDrop = useCallback(
|
||||
(event: React.DragEvent) => {
|
||||
event.preventDefault();
|
||||
|
||||
const type = event.dataTransfer.getData('application/reactflow') as WorkflowNodeType;
|
||||
if (!type) return;
|
||||
|
||||
const position = screenToFlowPosition({
|
||||
x: event.clientX,
|
||||
y: event.clientY,
|
||||
});
|
||||
|
||||
addNode(type, position);
|
||||
},
|
||||
[screenToFlowPosition, addNode]
|
||||
);
|
||||
|
||||
// Handle keyboard shortcuts
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (event: KeyboardEvent) => {
|
||||
// Delete selected node
|
||||
if ((event.key === 'Delete' || event.key === 'Backspace') && selectedNodeId) {
|
||||
deleteNode(selectedNodeId);
|
||||
}
|
||||
|
||||
// Save workflow
|
||||
if ((event.ctrlKey || event.metaKey) && event.key === 's') {
|
||||
event.preventDefault();
|
||||
saveWorkflow();
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener('keydown', handleKeyDown);
|
||||
return () => window.removeEventListener('keydown', handleKeyDown);
|
||||
}, [selectedNodeId, deleteNode, saveWorkflow]);
|
||||
|
||||
if (!canvas) {
|
||||
return (
|
||||
<div className="flex items-center justify-center h-full bg-gray-50">
|
||||
<div className="text-center">
|
||||
<p className="text-gray-500 mb-4">No workflow loaded</p>
|
||||
<button
|
||||
onClick={() => useWorkflowBuilderStore.getState().createNewWorkflow('New Workflow')}
|
||||
className="px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-600"
|
||||
>
|
||||
Create New Workflow
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex h-full">
|
||||
{/* Node Palette */}
|
||||
<NodePalette
|
||||
categories={paletteCategories}
|
||||
onDragStart={(type) => {
|
||||
setDragging(true);
|
||||
}}
|
||||
onDragEnd={() => {
|
||||
setDragging(false);
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Canvas */}
|
||||
<div className="flex-1 flex flex-col">
|
||||
<WorkflowToolbar
|
||||
workflowName={canvas.name}
|
||||
isDirty={isDirty}
|
||||
validation={validation}
|
||||
onSave={saveWorkflow}
|
||||
onValidate={validate}
|
||||
/>
|
||||
|
||||
<div ref={reactFlowWrapper} className="flex-1">
|
||||
<ReactFlow
|
||||
nodes={nodes}
|
||||
edges={edges}
|
||||
onNodesChange={handleNodesChange}
|
||||
onEdgesChange={handleEdgesChange}
|
||||
onConnect={onConnect}
|
||||
onNodeClick={onNodeClick}
|
||||
onPaneClick={onPaneClick}
|
||||
onDragOver={onDragOver}
|
||||
onDrop={onDrop}
|
||||
nodeTypes={nodeTypes}
|
||||
fitView
|
||||
snapToGrid
|
||||
snapGrid={[15, 15]}
|
||||
defaultEdgeOptions={{
|
||||
animated: true,
|
||||
type: 'smoothstep',
|
||||
}}
|
||||
>
|
||||
<Controls />
|
||||
<MiniMap
|
||||
nodeColor={(node) => {
|
||||
switch (node.type) {
|
||||
case 'input':
|
||||
return '#10b981';
|
||||
case 'llm':
|
||||
return '#8b5cf6';
|
||||
case 'skill':
|
||||
return '#f59e0b';
|
||||
case 'hand':
|
||||
return '#ef4444';
|
||||
case 'export':
|
||||
return '#3b82f6';
|
||||
default:
|
||||
return '#6b7280';
|
||||
}
|
||||
}}
|
||||
maskColor="rgba(0, 0, 0, 0.1)"
|
||||
/>
|
||||
<Background variant={BackgroundVariant.Dots} gap={20} size={1} />
|
||||
</ReactFlow>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Property Panel */}
|
||||
{selectedNodeId && (
|
||||
<PropertyPanel
|
||||
nodeId={selectedNodeId}
|
||||
nodeData={nodes.find(n => n.id === selectedNodeId)?.data as WorkflowNodeData}
|
||||
onUpdate={(data) => updateNode(selectedNodeId, data)}
|
||||
onDelete={() => deleteNode(selectedNodeId)}
|
||||
onClose={() => selectNode(null)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Export with provider
|
||||
export function WorkflowBuilder() {
|
||||
return (
|
||||
<ReactFlowProvider>
|
||||
<WorkflowBuilderInternal />
|
||||
</ReactFlowProvider>
|
||||
);
|
||||
}
|
||||
|
||||
export default WorkflowBuilder;
|
||||
166
desktop/src/components/WorkflowBuilder/WorkflowToolbar.tsx
Normal file
166
desktop/src/components/WorkflowBuilder/WorkflowToolbar.tsx
Normal file
@@ -0,0 +1,166 @@
|
||||
/**
|
||||
* Workflow Toolbar Component
|
||||
*
|
||||
* Toolbar with actions for the workflow builder.
|
||||
*/
|
||||
|
||||
import React, { useState } from 'react';
|
||||
import type { ValidationResult } from '../../lib/workflow-builder/types';
|
||||
import { canvasToYaml } from '../../lib/workflow-builder/yaml-converter';
|
||||
import { useWorkflowBuilderStore } from '../../store/workflowBuilderStore';
|
||||
|
||||
interface WorkflowToolbarProps {
|
||||
workflowName: string;
|
||||
isDirty: boolean;
|
||||
validation: ValidationResult | null;
|
||||
onSave: () => void;
|
||||
onValidate: () => ValidationResult;
|
||||
}
|
||||
|
||||
/**
 * Toolbar for the workflow builder.
 *
 * Shows the workflow name with a dirty indicator, a summary of the latest
 * validation result, and actions to validate, preview the generated
 * pipeline YAML (with copy/download), and save.
 */
export function WorkflowToolbar({
  workflowName,
  isDirty,
  validation,
  onSave,
  onValidate,
}: WorkflowToolbarProps) {
  // Modal visibility and the YAML text rendered inside it.
  const [isPreviewOpen, setIsPreviewOpen] = useState(false);
  const [yamlPreview, setYamlPreview] = useState('');
  const canvas = useWorkflowBuilderStore(state => state.canvas);

  // Convert the current canvas to YAML and open the preview modal.
  // No-op when no canvas is loaded yet.
  const handlePreviewYaml = () => {
    if (canvas) {
      const yaml = canvasToYaml(canvas);
      setYamlPreview(yaml);
      setIsPreviewOpen(true);
    }
  };

  // Copy the previewed YAML to the clipboard. Clipboard access can be
  // rejected (permissions / insecure context), so failures are logged.
  const handleCopyYaml = async () => {
    try {
      await navigator.clipboard.writeText(yamlPreview);
      alert('YAML copied to clipboard!');
    } catch (err) {
      console.error('Failed to copy:', err);
    }
  };

  // Download the previewed YAML via a temporary blob URL; the file name
  // is the slugified workflow name with a .yaml extension.
  const handleDownloadYaml = () => {
    const blob = new Blob([yamlPreview], { type: 'text/yaml' });
    const url = URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.href = url;
    a.download = `${workflowName.replace(/\s+/g, '-').toLowerCase()}.yaml`;
    a.click();
    // Release the blob URL once the download has been triggered.
    URL.revokeObjectURL(url);
  };

  return (
    <>
      <div className="flex items-center justify-between px-4 py-2 bg-white border-b border-gray-200">
        {/* Left: Workflow Name */}
        <div className="flex items-center gap-3">
          <h1 className="font-semibold text-gray-800">{workflowName}</h1>
          {isDirty && (
            <span className="text-sm text-amber-600 flex items-center gap-1">
              <span className="w-2 h-2 bg-amber-400 rounded-full animate-pulse" />
              Unsaved
            </span>
          )}
        </div>

        {/* Center: Validation Status (hidden until validation has run) */}
        {validation && (
          <div className="flex items-center gap-2">
            {validation.valid ? (
              <span className="text-sm text-green-600 flex items-center gap-1">
                ✓ Valid
              </span>
            ) : (
              <span className="text-sm text-red-600 flex items-center gap-1">
                ✕ {validation.errors.length} error(s)
              </span>
            )}
            {validation.warnings.length > 0 && (
              <span className="text-sm text-amber-600">
                {validation.warnings.length} warning(s)
              </span>
            )}
          </div>
        )}

        {/* Right: Actions */}
        <div className="flex items-center gap-2">
          <button
            onClick={onValidate}
            className="px-3 py-1.5 text-sm text-gray-600 hover:text-gray-800 hover:bg-gray-100 rounded-lg"
          >
            Validate
          </button>

          <button
            onClick={handlePreviewYaml}
            className="px-3 py-1.5 text-sm text-gray-600 hover:text-gray-800 hover:bg-gray-100 rounded-lg"
          >
            Preview YAML
          </button>

          {/* Save is disabled while there is nothing to save. */}
          <button
            onClick={onSave}
            disabled={!isDirty}
            className={`
              px-4 py-1.5 text-sm rounded-lg font-medium
              ${isDirty
                ? 'bg-blue-500 text-white hover:bg-blue-600'
                : 'bg-gray-100 text-gray-400 cursor-not-allowed'
              }
            `}
          >
            Save
          </button>
        </div>
      </div>

      {/* YAML Preview Modal */}
      {isPreviewOpen && (
        <div className="fixed inset-0 z-50 flex items-center justify-center bg-black/50">
          <div className="bg-white rounded-xl shadow-xl w-[800px] max-h-[80vh] overflow-hidden">
            {/* Modal Header */}
            <div className="flex items-center justify-between px-4 py-3 border-b border-gray-200">
              <h2 className="font-semibold text-gray-800">Pipeline YAML</h2>
              <div className="flex items-center gap-2">
                <button
                  onClick={handleCopyYaml}
                  className="px-3 py-1.5 text-sm text-gray-600 hover:bg-gray-100 rounded-lg"
                >
                  Copy
                </button>
                <button
                  onClick={handleDownloadYaml}
                  className="px-3 py-1.5 text-sm text-gray-600 hover:bg-gray-100 rounded-lg"
                >
                  Download
                </button>
                <button
                  onClick={() => setIsPreviewOpen(false)}
                  className="px-3 py-1.5 text-sm text-gray-400 hover:text-gray-600"
                >
                  ✕
                </button>
              </div>
            </div>

            {/* YAML Content */}
            <div className="p-4 overflow-y-auto max-h-[60vh]">
              <pre className="text-sm font-mono text-gray-800 whitespace-pre-wrap">
                {yamlPreview}
              </pre>
            </div>
          </div>
        </div>
      )}
    </>
  );
}

export default WorkflowToolbar;
|
||||
21
desktop/src/components/WorkflowBuilder/index.ts
Normal file
21
desktop/src/components/WorkflowBuilder/index.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
/**
 * Workflow Builder Components
 *
 * Barrel file re-exporting all workflow builder components so consumers
 * can import from `components/WorkflowBuilder` directly.
 */

export { WorkflowBuilder, WorkflowBuilderInternal } from './WorkflowBuilder';
export { NodePalette } from './NodePalette';
export { PropertyPanel } from './PropertyPanel';
export { WorkflowToolbar } from './WorkflowToolbar';

// Node components (one per canvas node type)
export { InputNode } from './nodes/InputNode';
export { LlmNode } from './nodes/LlmNode';
export { SkillNode } from './nodes/SkillNode';
export { HandNode } from './nodes/HandNode';
export { ConditionNode } from './nodes/ConditionNode';
export { ParallelNode } from './nodes/ParallelNode';
export { ExportNode } from './nodes/ExportNode';
export { HttpNode } from './nodes/HttpNode';
export { OrchestrationNode } from './nodes/OrchestrationNode';
|
||||
@@ -0,0 +1,79 @@
|
||||
/**
|
||||
* Condition Node Component
|
||||
*
|
||||
* Node for conditional branching.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { ConditionNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
/**
 * Canvas node for conditional branching. Renders one source handle per
 * branch (plus an optional "default" handle) distributed along the right
 * edge, with the condition expression previewed in the body.
 */
export const ConditionNode = memo(({ data, selected }: NodeProps<ConditionNodeData>) => {
  // Total outgoing branches, counting the optional default branch; used
  // to spread the branch handles evenly.
  const branchCount = data.branches.length + (data.hasDefault ? 1 : 0);

  return (
    <div
      className={`
        px-4 py-3 rounded-lg border-2 min-w-[200px]
        bg-orange-50 border-orange-300
        ${selected ? 'border-orange-500 shadow-lg shadow-orange-200' : ''}
      `}
    >
      {/* Input Handle */}
      <Handle
        type="target"
        position={Position.Left}
        className="w-3 h-3 bg-orange-400 border-2 border-white"
      />

      {/* Header */}
      <div className="flex items-center gap-2 mb-2">
        <span className="text-lg">🔀</span>
        <span className="font-medium text-orange-800">{data.label}</span>
      </div>

      {/* Condition Preview (placeholder text when none is configured) */}
      <div className="text-sm text-orange-600 bg-orange-100 rounded px-2 py-1 font-mono mb-2">
        {data.condition || 'No condition'}
      </div>

      {/* Branches — keyed by index, which is safe only while branches are
          purely positional; revisit if branches become reorderable. */}
      <div className="space-y-1">
        {data.branches.map((branch, index) => (
          <div key={index} className="flex items-center justify-between">
            <div className="relative">
              {/* Branch Output Handle, placed at fraction (index+1)/(branchCount+1).
                  NOTE(review): the percentage `top` is relative to the
                  `relative` wrapper div, not the whole node — confirm the
                  handles land where intended. */}
              <Handle
                type="source"
                position={Position.Right}
                id={`branch-${index}`}
                style={{ top: `${((index + 1) / (branchCount + 1)) * 100}%` }}
                className="w-3 h-3 bg-orange-400 border-2 border-white"
              />
            </div>
            <span className="text-xs text-orange-500 truncate max-w-[120px]">
              {branch.label || branch.when}
            </span>
          </div>
        ))}

        {/* Optional default branch, pinned at the bottom edge */}
        {data.hasDefault && (
          <div className="flex items-center justify-between">
            <Handle
              type="source"
              position={Position.Right}
              id="default"
              style={{ top: '100%' }}
              className="w-3 h-3 bg-gray-400 border-2 border-white"
            />
            <span className="text-xs text-gray-500">Default</span>
          </div>
        )}
      </div>
    </div>
  );
});

ConditionNode.displayName = 'ConditionNode';

export default ConditionNode;
|
||||
72
desktop/src/components/WorkflowBuilder/nodes/ExportNode.tsx
Normal file
72
desktop/src/components/WorkflowBuilder/nodes/ExportNode.tsx
Normal file
@@ -0,0 +1,72 @@
|
||||
/**
|
||||
* Export Node Component
|
||||
*
|
||||
* Node for exporting workflow results to various formats.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { ExportNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
/**
 * Human-readable labels for known export formats. Hoisted to module
 * scope so the lookup table is not re-allocated on every render of the
 * memoized component (it was previously created inside the render body).
 */
const formatLabels: Record<string, string> = {
  pptx: 'PowerPoint',
  html: 'HTML',
  pdf: 'PDF',
  markdown: 'Markdown',
  json: 'JSON',
};

/**
 * Export Node Component
 *
 * Canvas node that exports workflow results to one or more formats.
 * Each configured format is rendered as a badge (falling back to the raw
 * format identifier when it has no friendly label); the output directory
 * is shown when set.
 */
export const ExportNode = memo(({ data, selected }: NodeProps<ExportNodeData>) => {
  return (
    <div
      className={`
        px-4 py-3 rounded-lg border-2 min-w-[180px]
        bg-blue-50 border-blue-300
        ${selected ? 'border-blue-500 shadow-lg shadow-blue-200' : ''}
      `}
    >
      {/* Input Handle */}
      <Handle
        type="target"
        position={Position.Left}
        className="w-3 h-3 bg-blue-400 border-2 border-white"
      />

      {/* Output Handle */}
      <Handle
        type="source"
        position={Position.Right}
        className="w-3 h-3 bg-blue-500 border-2 border-white"
      />

      {/* Header */}
      <div className="flex items-center gap-2 mb-2">
        <span className="text-lg">📤</span>
        <span className="font-medium text-blue-800">{data.label}</span>
      </div>

      {/* Format badges */}
      <div className="flex flex-wrap gap-1">
        {data.formats.map((format) => (
          <span
            key={format}
            className="text-xs bg-blue-100 text-blue-700 px-2 py-0.5 rounded"
          >
            {formatLabels[format] || format}
          </span>
        ))}
      </div>

      {/* Output Directory (optional) */}
      {data.outputDir && (
        <div className="text-xs text-blue-500 mt-2 truncate">
          📁 {data.outputDir}
        </div>
      )}
    </div>
  );
});

ExportNode.displayName = 'ExportNode';

export default ExportNode;
|
||||
74
desktop/src/components/WorkflowBuilder/nodes/HandNode.tsx
Normal file
74
desktop/src/components/WorkflowBuilder/nodes/HandNode.tsx
Normal file
@@ -0,0 +1,74 @@
|
||||
/**
|
||||
* Hand Node Component
|
||||
*
|
||||
* Node for executing hand actions.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { HandNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const HandNode = memo(({ data, selected }: NodeProps<HandNodeData>) => {
|
||||
const hasHand = Boolean(data.handId);
|
||||
const hasAction = Boolean(data.action);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`
|
||||
px-4 py-3 rounded-lg border-2 min-w-[180px]
|
||||
bg-rose-50 border-rose-300
|
||||
${selected ? 'border-rose-500 shadow-lg shadow-rose-200' : ''}
|
||||
`}
|
||||
>
|
||||
{/* Input Handle */}
|
||||
<Handle
|
||||
type="target"
|
||||
position={Position.Left}
|
||||
className="w-3 h-3 bg-rose-400 border-2 border-white"
|
||||
/>
|
||||
|
||||
{/* Output Handle */}
|
||||
<Handle
|
||||
type="source"
|
||||
position={Position.Right}
|
||||
className="w-3 h-3 bg-rose-500 border-2 border-white"
|
||||
/>
|
||||
|
||||
{/* Header */}
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<span className="text-lg">✋</span>
|
||||
<span className="font-medium text-rose-800">{data.label}</span>
|
||||
</div>
|
||||
|
||||
{/* Hand Info */}
|
||||
<div className="space-y-1">
|
||||
<div className={`text-sm ${hasHand ? 'text-rose-600' : 'text-rose-400 italic'}`}>
|
||||
{hasHand ? (
|
||||
<span className="font-mono bg-rose-100 px-1.5 py-0.5 rounded">
|
||||
{data.handName || data.handId}
|
||||
</span>
|
||||
) : (
|
||||
'No hand selected'
|
||||
)}
|
||||
</div>
|
||||
|
||||
{hasAction && (
|
||||
<div className="text-xs text-rose-500">
|
||||
Action: <span className="font-mono">{data.action}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Params Count */}
|
||||
{Object.keys(data.params).length > 0 && (
|
||||
<div className="text-xs text-rose-500 mt-1">
|
||||
{Object.keys(data.params).length} param(s)
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
HandNode.displayName = 'HandNode';
|
||||
|
||||
export default HandNode;
|
||||
81
desktop/src/components/WorkflowBuilder/nodes/HttpNode.tsx
Normal file
81
desktop/src/components/WorkflowBuilder/nodes/HttpNode.tsx
Normal file
@@ -0,0 +1,81 @@
|
||||
/**
|
||||
* HTTP Node Component
|
||||
*
|
||||
* Node for making HTTP requests.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { HttpNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
const methodColors: Record<string, string> = {
|
||||
GET: 'bg-green-100 text-green-700',
|
||||
POST: 'bg-blue-100 text-blue-700',
|
||||
PUT: 'bg-yellow-100 text-yellow-700',
|
||||
DELETE: 'bg-red-100 text-red-700',
|
||||
PATCH: 'bg-purple-100 text-purple-700',
|
||||
};
|
||||
|
||||
export const HttpNode = memo(({ data, selected }: NodeProps<HttpNodeData>) => {
|
||||
const hasUrl = Boolean(data.url);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`
|
||||
px-4 py-3 rounded-lg border-2 min-w-[200px]
|
||||
bg-slate-50 border-slate-300
|
||||
${selected ? 'border-slate-500 shadow-lg shadow-slate-200' : ''}
|
||||
`}
|
||||
>
|
||||
{/* Input Handle */}
|
||||
<Handle
|
||||
type="target"
|
||||
position={Position.Left}
|
||||
className="w-3 h-3 bg-slate-400 border-2 border-white"
|
||||
/>
|
||||
|
||||
{/* Output Handle */}
|
||||
<Handle
|
||||
type="source"
|
||||
position={Position.Right}
|
||||
className="w-3 h-3 bg-slate-500 border-2 border-white"
|
||||
/>
|
||||
|
||||
{/* Header */}
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<span className="text-lg">🌐</span>
|
||||
<span className="font-medium text-slate-800">{data.label}</span>
|
||||
</div>
|
||||
|
||||
{/* Method Badge */}
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<span className={`text-xs font-bold px-2 py-0.5 rounded ${methodColors[data.method]}`}>
|
||||
{data.method}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* URL */}
|
||||
<div className={`text-sm font-mono bg-slate-100 rounded px-2 py-1 truncate ${hasUrl ? 'text-slate-600' : 'text-slate-400 italic'}`}>
|
||||
{hasUrl ? data.url : 'No URL specified'}
|
||||
</div>
|
||||
|
||||
{/* Headers Count */}
|
||||
{Object.keys(data.headers).length > 0 && (
|
||||
<div className="text-xs text-slate-500 mt-2">
|
||||
{Object.keys(data.headers).length} header(s)
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Body Indicator */}
|
||||
{data.body && (
|
||||
<div className="text-xs text-slate-500 mt-1">
|
||||
Has body content
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
HttpNode.displayName = 'HttpNode';
|
||||
|
||||
export default HttpNode;
|
||||
54
desktop/src/components/WorkflowBuilder/nodes/InputNode.tsx
Normal file
54
desktop/src/components/WorkflowBuilder/nodes/InputNode.tsx
Normal file
@@ -0,0 +1,54 @@
|
||||
/**
|
||||
* Input Node Component
|
||||
*
|
||||
* Node for defining workflow input variables.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { InputNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
/**
 * Input Node Component
 *
 * Canvas node declaring a workflow input variable. Shows the variable
 * name as a badge and, when configured, its default value (strings are
 * quoted, everything else is JSON-encoded).
 */
export const InputNode = memo(({ data, selected }: NodeProps<InputNodeData>) => {
  const hasDefault = data.defaultValue !== undefined;
  const defaultDisplay =
    typeof data.defaultValue === 'string'
      ? `"${data.defaultValue}"`
      : JSON.stringify(data.defaultValue);

  return (
    <div
      className={`
        px-4 py-3 rounded-lg border-2 min-w-[180px]
        bg-emerald-50 border-emerald-300
        ${selected ? 'border-emerald-500 shadow-lg shadow-emerald-200' : ''}
      `}
    >
      {/* Inputs only feed forward: a single outgoing handle */}
      <Handle
        type="source"
        position={Position.Right}
        className="w-3 h-3 bg-emerald-500 border-2 border-white"
      />

      {/* Icon + label header */}
      <div className="flex items-center gap-2 mb-2">
        <span className="text-lg">📥</span>
        <span className="font-medium text-emerald-800">{data.label}</span>
      </div>

      {/* Variable name badge */}
      <div className="text-sm text-emerald-600">
        <span className="font-mono bg-emerald-100 px-1.5 py-0.5 rounded">
          {data.variableName}
        </span>
      </div>

      {/* Default value, only when one is configured */}
      {hasDefault && (
        <div className="text-xs text-emerald-500 mt-1">
          default: {defaultDisplay}
        </div>
      )}
    </div>
  );
});

InputNode.displayName = 'InputNode';

export default InputNode;
|
||||
70
desktop/src/components/WorkflowBuilder/nodes/LlmNode.tsx
Normal file
70
desktop/src/components/WorkflowBuilder/nodes/LlmNode.tsx
Normal file
@@ -0,0 +1,70 @@
|
||||
/**
|
||||
* LLM Node Component
|
||||
*
|
||||
* Node for LLM generation actions.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { LlmNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
/**
 * LLM Node Component
 *
 * Canvas node for an LLM generation step. Previews the prompt template
 * (truncated to 50 characters), flags JSON mode and template-file usage,
 * and shows the model / temperature overrides when present.
 */
export const LlmNode = memo(({ data, selected }: NodeProps<LlmNodeData>) => {
  // Truncate long templates for the on-node preview; empty templates
  // fall back to a placeholder.
  const rawTemplate = data.template;
  const templatePreview =
    rawTemplate.length > 50 ? `${rawTemplate.slice(0, 50)}...` : rawTemplate || 'No template';
  const showModelRow = Boolean(data.model) || data.temperature !== undefined;

  return (
    <div
      className={`
        px-4 py-3 rounded-lg border-2 min-w-[200px]
        bg-violet-50 border-violet-300
        ${selected ? 'border-violet-500 shadow-lg shadow-violet-200' : ''}
      `}
    >
      {/* Incoming connection on the left, outgoing on the right */}
      <Handle
        type="target"
        position={Position.Left}
        className="w-3 h-3 bg-violet-400 border-2 border-white"
      />
      <Handle
        type="source"
        position={Position.Right}
        className="w-3 h-3 bg-violet-500 border-2 border-white"
      />

      {/* Icon + label header, with a JSON-mode badge when enabled */}
      <div className="flex items-center gap-2 mb-2">
        <span className="text-lg">🤖</span>
        <span className="font-medium text-violet-800">{data.label}</span>
        {data.jsonMode && (
          <span className="text-xs bg-violet-200 text-violet-700 px-1.5 py-0.5 rounded">
            JSON
          </span>
        )}
      </div>

      {/* Template preview (📄 prefix marks a template file reference) */}
      <div className="text-sm text-violet-600 bg-violet-100 rounded px-2 py-1 font-mono">
        {data.isTemplateFile ? '📄 ' : ''}
        {templatePreview}
      </div>

      {/* Model / temperature overrides */}
      {showModelRow && (
        <div className="flex gap-2 mt-2 text-xs text-violet-500">
          {data.model && <span>Model: {data.model}</span>}
          {data.temperature !== undefined && (
            <span>Temp: {data.temperature}</span>
          )}
        </div>
      )}
    </div>
  );
});

LlmNode.displayName = 'LlmNode';

export default LlmNode;
|
||||
@@ -0,0 +1,81 @@
|
||||
/**
|
||||
* Orchestration Node Component
|
||||
*
|
||||
* Node for executing skill orchestration graphs (DAGs).
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { OrchestrationNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
/**
 * Canvas node for executing a skill orchestration graph (DAG). The graph
 * can be configured either by reference (`graphId`) or inline (`graph`);
 * the node previews whichever is set, plus input-mapping count and an
 * optional description.
 */
export const OrchestrationNode = memo(({ data, selected }: NodeProps<OrchestrationNodeData>) => {
  // Exactly one of these is expected to be set; graphId wins in rendering.
  const hasGraphId = Boolean(data.graphId);
  const hasGraph = Boolean(data.graph);
  const inputCount = Object.keys(data.inputMappings).length;

  return (
    <div
      className={`
        px-4 py-3 rounded-lg border-2 min-w-[200px]
        bg-gradient-to-br from-indigo-50 to-purple-50
        border-indigo-300
        ${selected ? 'border-indigo-500 shadow-lg shadow-indigo-200' : ''}
      `}
    >
      {/* Input Handle */}
      <Handle
        type="target"
        position={Position.Left}
        className="w-3 h-3 bg-indigo-400 border-2 border-white"
      />

      {/* Output Handle */}
      <Handle
        type="source"
        position={Position.Right}
        className="w-3 h-3 bg-indigo-500 border-2 border-white"
      />

      {/* Header */}
      <div className="flex items-center gap-2 mb-2">
        <span className="text-lg">🔀</span>
        <span className="font-medium text-indigo-800">{data.label}</span>
      </div>

      {/* Graph Reference: by-id > inline > unconfigured placeholder */}
      <div className={`text-sm mb-2 ${hasGraphId || hasGraph ? 'text-indigo-600' : 'text-indigo-400 italic'}`}>
        {hasGraphId ? (
          <div className="flex items-center gap-1.5 bg-indigo-100 rounded px-2 py-1">
            <span className="text-xs">📋</span>
            <span className="font-mono text-xs">{data.graphId}</span>
          </div>
        ) : hasGraph ? (
          <div className="flex items-center gap-1.5 bg-indigo-100 rounded px-2 py-1">
            <span className="text-xs">📊</span>
            <span className="text-xs">Inline graph</span>
          </div>
        ) : (
          'No graph configured'
        )}
      </div>

      {/* Input Mappings count */}
      {inputCount > 0 && (
        <div className="text-xs text-indigo-500 mt-2">
          {inputCount} input mapping(s)
        </div>
      )}

      {/* Description (clamped to two lines) */}
      {data.description && (
        <div className="text-xs text-indigo-400 mt-2 line-clamp-2">
          {data.description}
        </div>
      )}
    </div>
  );
});

OrchestrationNode.displayName = 'OrchestrationNode';

export default OrchestrationNode;
|
||||
@@ -0,0 +1,55 @@
|
||||
/**
|
||||
* Parallel Node Component
|
||||
*
|
||||
* Node for parallel execution of steps.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { ParallelNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
/**
 * Parallel Node Component
 *
 * Canvas node for fanning out steps over a collection. Displays the
 * `each` expression (with a placeholder default) and the worker cap.
 */
export const ParallelNode = memo(({ data, selected }: NodeProps<ParallelNodeData>) => {
  const eachExpression = data.each || '${inputs.items}';

  return (
    <div
      className={`
        px-4 py-3 rounded-lg border-2 min-w-[180px]
        bg-cyan-50 border-cyan-300
        ${selected ? 'border-cyan-500 shadow-lg shadow-cyan-200' : ''}
      `}
    >
      {/* Incoming connection on the left, outgoing on the right */}
      <Handle
        type="target"
        position={Position.Left}
        className="w-3 h-3 bg-cyan-400 border-2 border-white"
      />
      <Handle
        type="source"
        position={Position.Right}
        className="w-3 h-3 bg-cyan-500 border-2 border-white"
      />

      {/* Icon + label header */}
      <div className="flex items-center gap-2 mb-2">
        <span className="text-lg">⚡</span>
        <span className="font-medium text-cyan-800">{data.label}</span>
      </div>

      {/* Expression the parallel step iterates over */}
      <div className="text-sm text-cyan-600 bg-cyan-100 rounded px-2 py-1 font-mono">
        each: {eachExpression}
      </div>

      {/* Concurrency cap */}
      <div className="text-xs text-cyan-500 mt-2">
        Max workers: {data.maxWorkers}
      </div>
    </div>
  );
});

ParallelNode.displayName = 'ParallelNode';

export default ParallelNode;
|
||||
65
desktop/src/components/WorkflowBuilder/nodes/SkillNode.tsx
Normal file
65
desktop/src/components/WorkflowBuilder/nodes/SkillNode.tsx
Normal file
@@ -0,0 +1,65 @@
|
||||
/**
|
||||
* Skill Node Component
|
||||
*
|
||||
* Node for executing skills.
|
||||
*/
|
||||
|
||||
import React, { memo } from 'react';
|
||||
import { Handle, Position, NodeProps } from '@xyflow/react';
|
||||
import type { SkillNodeData } from '../../../lib/workflow-builder/types';
|
||||
|
||||
export const SkillNode = memo(({ data, selected }: NodeProps<SkillNodeData>) => {
|
||||
const hasSkill = Boolean(data.skillId);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`
|
||||
px-4 py-3 rounded-lg border-2 min-w-[180px]
|
||||
bg-amber-50 border-amber-300
|
||||
${selected ? 'border-amber-500 shadow-lg shadow-amber-200' : ''}
|
||||
`}
|
||||
>
|
||||
{/* Input Handle */}
|
||||
<Handle
|
||||
type="target"
|
||||
position={Position.Left}
|
||||
className="w-3 h-3 bg-amber-400 border-2 border-white"
|
||||
/>
|
||||
|
||||
{/* Output Handle */}
|
||||
<Handle
|
||||
type="source"
|
||||
position={Position.Right}
|
||||
className="w-3 h-3 bg-amber-500 border-2 border-white"
|
||||
/>
|
||||
|
||||
{/* Header */}
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<span className="text-lg">⚡</span>
|
||||
<span className="font-medium text-amber-800">{data.label}</span>
|
||||
</div>
|
||||
|
||||
{/* Skill ID */}
|
||||
<div className={`text-sm ${hasSkill ? 'text-amber-600' : 'text-amber-400 italic'}`}>
|
||||
{hasSkill ? (
|
||||
<span className="font-mono bg-amber-100 px-1.5 py-0.5 rounded">
|
||||
{data.skillName || data.skillId}
|
||||
</span>
|
||||
) : (
|
||||
'No skill selected'
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Input Mappings Count */}
|
||||
{Object.keys(data.inputMappings).length > 0 && (
|
||||
<div className="text-xs text-amber-500 mt-1">
|
||||
{Object.keys(data.inputMappings).length} input mapping(s)
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
SkillNode.displayName = 'SkillNode';
|
||||
|
||||
export default SkillNode;
|
||||
81
desktop/src/constants/api-urls.ts
Normal file
81
desktop/src/constants/api-urls.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
/**
|
||||
* API URL Constants - Single Source of Truth
|
||||
*
|
||||
* All API URLs should reference this file.
|
||||
* Backend (Rust) should use the same values in config.rs
|
||||
*/
|
||||
|
||||
// === LLM Provider URLs ===
|
||||
|
||||
/**
|
||||
* LLM Provider API URLs
|
||||
*/
|
||||
/**
 * Base URLs for every supported LLM provider.
 * Keys are the normalized provider names used by `getProviderUrl`
 * (upper-cased, `-` replaced with `_`).
 */
export const LLM_PROVIDER_URLS = {
  // OpenAI
  OPENAI: 'https://api.openai.com/v1',

  // Anthropic
  ANTHROPIC: 'https://api.anthropic.com',

  // Gemini
  GEMINI: 'https://generativelanguage.googleapis.com/v1beta',

  // DeepSeek
  DEEPSEEK: 'https://api.deepseek.com/v1',

  // Zhipu (BigModel); `_CODING` is the coding-plan endpoint
  ZHIPU: 'https://open.bigmodel.cn/api/paas/v4',
  ZHIPU_CODING: 'https://open.bigmodel.cn/api/coding/paas/v4',

  // Kimi (Moonshot); `_CODING` is the coding-plan endpoint
  KIMI: 'https://api.moonshot.cn/v1',
  KIMI_CODING: 'https://api.kimi.com/coding/v1',

  // Qwen / Bailian (Alibaba DashScope, OpenAI-compatible mode)
  QWEN: 'https://dashscope.aliyuncs.com/compatible-mode/v1',
  QWEN_CODING: 'https://coding.dashscope.aliyuncs.com/v1',

  // Volcengine / Doubao (ByteDance Ark)
  VOLCENGINE: 'https://ark.cn-beijing.volces.com/api/v3',

  // Local runtimes (OpenAI-compatible local servers)
  OLLAMA: 'http://localhost:11434/v1',
  LM_STUDIO: 'http://localhost:1234/v1',
  VLLM: 'http://localhost:8000/v1',
} as const;
|
||||
|
||||
// === ZCLAW Gateway URLs ===
|
||||
|
||||
/**
|
||||
* ZCLAW Gateway default URLs
|
||||
*/
|
||||
/**
 * ZCLAW Gateway default URLs.
 *
 * DEFAULT_* target the primary gateway port (50051); FALLBACK_* target
 * port 4200 — presumably tried when the primary is unreachable; confirm
 * against the connection code.
 */
export const GATEWAY_URLS = {
  DEFAULT_HTTP: 'http://127.0.0.1:50051',
  DEFAULT_WS: 'ws://127.0.0.1:50051/ws',
  FALLBACK_HTTP: 'http://127.0.0.1:4200',
  FALLBACK_WS: 'ws://127.0.0.1:4200/ws',
} as const;
|
||||
|
||||
// === Helper Functions ===
|
||||
|
||||
/**
|
||||
* Get provider URL by name
|
||||
*/
|
||||
export function getProviderUrl(provider: string): string {
|
||||
const key = provider.toUpperCase().replace(/-/g, '_') as keyof typeof LLM_PROVIDER_URLS;
|
||||
return LLM_PROVIDER_URLS[key] || LLM_PROVIDER_URLS.OPENAI;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if URL is a coding plan endpoint
|
||||
*/
|
||||
export function isCodingUrl(url: string): boolean {
|
||||
return url.includes('/coding/') || url.includes('-coding');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if URL is a local endpoint
|
||||
*/
|
||||
export function isLocalUrl(url: string): boolean {
|
||||
return url.includes('localhost') || url.includes('127.0.0.1') || url.includes('[::1]');
|
||||
}
|
||||
79
desktop/src/constants/hands.ts
Normal file
79
desktop/src/constants/hands.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
/**
|
||||
* Hand ID Constants - Single Source of Truth
|
||||
*
|
||||
* All Hand-related constants should reference this file.
|
||||
* Do NOT hardcode Hand IDs elsewhere.
|
||||
*/
|
||||
|
||||
// === Hand IDs (must match backend zclaw-hands) ===
|
||||
|
||||
/**
 * Canonical Hand IDs. Values are the lowercase identifiers used on the
 * wire and must match the backend zclaw-hands registry.
 */
export const HAND_IDS = {
  BROWSER: 'browser',
  RESEARCHER: 'researcher',
  COLLECTOR: 'collector',
  PREDICTOR: 'predictor',
  LEAD: 'lead',
  TRADER: 'trader',
  CLIP: 'clip',
  TWITTER: 'twitter',
  // Additional hands from backend
  SLIDESHOW: 'slideshow',
  SPEECH: 'speech',
  QUIZ: 'quiz',
  WHITEBOARD: 'whiteboard',
} as const;

/** Union of all valid Hand ID string literals (e.g. 'browser' | 'clip' | ...). */
export type HandIdType = typeof HAND_IDS[keyof typeof HAND_IDS];
|
||||
|
||||
// === Hand Categories ===
|
||||
|
||||
/** Category identifiers used to group Hands in the UI. */
export const HAND_CATEGORIES = {
  RESEARCH: 'research',
  DATA: 'data',
  AUTOMATION: 'automation',
  COMMUNICATION: 'communication',
  CONTENT: 'content',
  PRODUCTIVITY: 'productivity',
} as const;

/** Union of all valid Hand category string literals. */
export type HandCategoryType = typeof HAND_CATEGORIES[keyof typeof HAND_CATEGORIES];
|
||||
|
||||
// === Hand ID to Category Mapping ===
|
||||
|
||||
/**
 * Category assignment for each Hand ID. Every entry of HAND_IDS should
 * appear here; IDs without a mapping fall back to PRODUCTIVITY in
 * `getHandCategory`. No Hand currently maps to AUTOMATION.
 */
export const HAND_CATEGORY_MAP: Record<string, HandCategoryType> = {
  [HAND_IDS.BROWSER]: HAND_CATEGORIES.RESEARCH,
  [HAND_IDS.RESEARCHER]: HAND_CATEGORIES.RESEARCH,
  [HAND_IDS.COLLECTOR]: HAND_CATEGORIES.DATA,
  [HAND_IDS.PREDICTOR]: HAND_CATEGORIES.DATA,
  [HAND_IDS.TRADER]: HAND_CATEGORIES.DATA,
  [HAND_IDS.LEAD]: HAND_CATEGORIES.COMMUNICATION,
  [HAND_IDS.TWITTER]: HAND_CATEGORIES.COMMUNICATION,
  [HAND_IDS.CLIP]: HAND_CATEGORIES.CONTENT,
  [HAND_IDS.SLIDESHOW]: HAND_CATEGORIES.CONTENT,
  [HAND_IDS.SPEECH]: HAND_CATEGORIES.CONTENT,
  [HAND_IDS.QUIZ]: HAND_CATEGORIES.PRODUCTIVITY,
  [HAND_IDS.WHITEBOARD]: HAND_CATEGORIES.PRODUCTIVITY,
};
|
||||
|
||||
// === Helper Functions ===
|
||||
|
||||
/**
|
||||
* Get the category for a Hand ID
|
||||
*/
|
||||
export function getHandCategory(handId: string): HandCategoryType {
|
||||
return HAND_CATEGORY_MAP[handId] || HAND_CATEGORIES.PRODUCTIVITY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a Hand ID is valid
|
||||
*/
|
||||
export function isValidHandId(id: string): id is HandIdType {
|
||||
return Object.values(HAND_IDS).includes(id as HandIdType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all Hand IDs as an array
|
||||
*/
|
||||
export function getAllHandIds(): string[] {
|
||||
return Object.values(HAND_IDS);
|
||||
}
|
||||
9
desktop/src/constants/index.ts
Normal file
9
desktop/src/constants/index.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
 * Constants Index - Single Source of Truth
 *
 * Re-export all constants from a single entry point so consumers import
 * from `constants` instead of reaching into individual modules.
 */

export * from './hands';
export * from './models';
export * from './api-urls';
|
||||
112
desktop/src/constants/models.ts
Normal file
112
desktop/src/constants/models.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
/**
 * Model Default Constants - Single Source of Truth
 *
 * All model-related defaults should reference this file.
 * Backend (Rust) should use the same values in kernel_commands.rs
 */

// === Default Model Configuration ===

/**
 * Default model ID when user hasn't configured one.
 * Using gpt-4o-mini as it's cost-effective and capable.
 */
export const DEFAULT_MODEL_ID = 'gpt-4o-mini' as const;

/**
 * Default provider when user hasn't configured one.
 */
export const DEFAULT_PROVIDER = 'openai' as const;

/**
 * Default max tokens for responses.
 */
export const DEFAULT_MAX_TOKENS = 4096 as const;

/**
 * Default sampling temperature for responses.
 */
export const DEFAULT_TEMPERATURE = 0.7 as const;

/**
 * Default base URL for the OpenAI API.
 */
export const DEFAULT_OPENAI_BASE_URL = 'https://api.openai.com/v1' as const;

/**
 * Default base URL for the Anthropic API.
 */
export const DEFAULT_ANTHROPIC_BASE_URL = 'https://api.anthropic.com' as const;
|
||||
|
||||
// === Provider-Specific Defaults ===
|
||||
|
||||
export const PROVIDER_DEFAULTS = {
|
||||
openai: {
|
||||
baseUrl: 'https://api.openai.com/v1',
|
||||
defaultModel: 'gpt-4o-mini',
|
||||
},
|
||||
anthropic: {
|
||||
baseUrl: 'https://api.anthropic.com',
|
||||
defaultModel: 'claude-sonnet-4-20250514',
|
||||
},
|
||||
zhipu: {
|
||||
baseUrl: 'https://open.bigmodel.cn/api/paas/v4',
|
||||
defaultModel: 'glm-4-flash',
|
||||
},
|
||||
zhipu_coding: {
|
||||
baseUrl: 'https://open.bigmodel.cn/api/coding/paas/v4',
|
||||
defaultModel: 'glm-4-flash',
|
||||
},
|
||||
kimi: {
|
||||
baseUrl: 'https://api.moonshot.cn/v1',
|
||||
defaultModel: 'moonshot-v1-8k',
|
||||
},
|
||||
kimi_coding: {
|
||||
baseUrl: 'https://api.kimi.com/coding/v1',
|
||||
defaultModel: 'kimi-for-coding',
|
||||
},
|
||||
qwen: {
|
||||
baseUrl: 'https://dashscope.aliyuncs.com/compatible-mode/v1',
|
||||
defaultModel: 'qwen-turbo',
|
||||
},
|
||||
qwen_coding: {
|
||||
baseUrl: 'https://coding.dashscope.aliyuncs.com/v1',
|
||||
defaultModel: 'qwen3-coder-next',
|
||||
},
|
||||
deepseek: {
|
||||
baseUrl: 'https://api.deepseek.com/v1',
|
||||
defaultModel: 'deepseek-chat',
|
||||
},
|
||||
gemini: {
|
||||
baseUrl: 'https://generativelanguage.googleapis.com/v1beta',
|
||||
defaultModel: 'gemini-2.0-flash',
|
||||
},
|
||||
local: {
|
||||
baseUrl: 'http://localhost:11434/v1',
|
||||
defaultModel: 'llama3',
|
||||
},
|
||||
} as const;
|
||||
|
||||
export type ProviderType = keyof typeof PROVIDER_DEFAULTS;
|
||||
|
||||
// === Helper Functions ===
|
||||
|
||||
/**
|
||||
* Get provider default configuration
|
||||
*/
|
||||
export function getProviderDefaults(provider: string): {
|
||||
baseUrl: string;
|
||||
defaultModel: string;
|
||||
} {
|
||||
return PROVIDER_DEFAULTS[provider as ProviderType] || {
|
||||
baseUrl: DEFAULT_OPENAI_BASE_URL,
|
||||
defaultModel: DEFAULT_MODEL_ID,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a provider is a coding plan provider
|
||||
*/
|
||||
export function isCodingProvider(provider: string): boolean {
|
||||
return provider.endsWith('-coding') || provider === 'zhipu-coding';
|
||||
}
|
||||
@@ -7,6 +7,7 @@
|
||||
*/
|
||||
|
||||
import { tomlUtils, TomlParseError } from './toml-utils';
|
||||
import { DEFAULT_MODEL_ID, DEFAULT_PROVIDER } from '../constants/models';
|
||||
import type {
|
||||
OpenFangConfig,
|
||||
ConfigValidationResult,
|
||||
@@ -74,12 +75,12 @@ const DEFAULT_CONFIG: Partial<OpenFangConfig> = {
|
||||
agent: {
|
||||
defaults: {
|
||||
workspace: '~/.openfang/workspace',
|
||||
default_model: 'gpt-4',
|
||||
default_model: DEFAULT_MODEL_ID,
|
||||
},
|
||||
},
|
||||
llm: {
|
||||
default_provider: 'openai',
|
||||
default_model: 'gpt-4',
|
||||
default_provider: DEFAULT_PROVIDER,
|
||||
default_model: DEFAULT_MODEL_ID,
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -66,11 +66,11 @@ export interface MemorySearchOptions {
|
||||
}
|
||||
|
||||
export interface MemoryStats {
|
||||
total_memories: number;
|
||||
total_entries: number;
|
||||
by_type: Record<string, number>;
|
||||
by_agent: Record<string, number>;
|
||||
oldest_memory: string | null;
|
||||
newest_memory: string | null;
|
||||
oldest_entry: string | null;
|
||||
newest_entry: string | null;
|
||||
storage_size_bytes: number;
|
||||
}
|
||||
|
||||
|
||||
@@ -185,11 +185,11 @@ export function toBackendSearchOptions(options: MemorySearchOptions): BackendSea
|
||||
*/
|
||||
export function toFrontendStats(backend: BackendMemoryStats): MemoryStats {
|
||||
return {
|
||||
totalEntries: backend.total_memories,
|
||||
totalEntries: backend.total_entries,
|
||||
byType: backend.by_type,
|
||||
byAgent: backend.by_agent,
|
||||
oldestEntry: backend.oldest_memory,
|
||||
newestEntry: backend.newest_memory,
|
||||
oldestEntry: backend.oldest_entry,
|
||||
newestEntry: backend.newest_entry,
|
||||
storageSizeBytes: backend.storage_size_bytes ?? 0,
|
||||
};
|
||||
}
|
||||
@@ -325,13 +325,22 @@ const fallbackMemory = {
|
||||
new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
|
||||
);
|
||||
|
||||
// Estimate storage size from serialized data
|
||||
let storageSizeBytes = 0;
|
||||
try {
|
||||
const serialized = JSON.stringify(store.memories);
|
||||
storageSizeBytes = new Blob([serialized]).size;
|
||||
} catch {
|
||||
// Ignore serialization errors
|
||||
}
|
||||
|
||||
return {
|
||||
totalEntries: store.memories.length,
|
||||
byType,
|
||||
byAgent,
|
||||
oldestEntry: sorted[0]?.createdAt ?? null,
|
||||
newestEntry: sorted[sorted.length - 1]?.createdAt ?? null,
|
||||
storageSizeBytes: 0, // localStorage-based fallback doesn't track storage size
|
||||
storageSizeBytes,
|
||||
};
|
||||
},
|
||||
|
||||
@@ -994,10 +1003,10 @@ export const intelligenceClient = {
|
||||
): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await invoke('heartbeat_update_memory_stats', {
|
||||
agentId,
|
||||
taskCount,
|
||||
totalEntries,
|
||||
storageSizeBytes,
|
||||
agent_id: agentId,
|
||||
task_count: taskCount,
|
||||
total_entries: totalEntries,
|
||||
storage_size_bytes: storageSizeBytes,
|
||||
});
|
||||
}
|
||||
// Fallback: store in localStorage for non-Tauri environment
|
||||
@@ -1012,7 +1021,10 @@ export const intelligenceClient = {
|
||||
|
||||
recordCorrection: async (agentId: string, correctionType: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await invoke('heartbeat_record_correction', { agentId, correctionType });
|
||||
await invoke('heartbeat_record_correction', {
|
||||
agent_id: agentId,
|
||||
correction_type: correctionType,
|
||||
});
|
||||
}
|
||||
// Fallback: store in localStorage for non-Tauri environment
|
||||
const key = `zclaw-corrections-${agentId}`;
|
||||
@@ -1021,6 +1033,16 @@ export const intelligenceClient = {
|
||||
counters[correctionType] = (counters[correctionType] || 0) + 1;
|
||||
localStorage.setItem(key, JSON.stringify(counters));
|
||||
},
|
||||
|
||||
recordInteraction: async (agentId: string): Promise<void> => {
|
||||
if (isTauriEnv()) {
|
||||
await invoke('heartbeat_record_interaction', {
|
||||
agent_id: agentId,
|
||||
});
|
||||
}
|
||||
// Fallback: store in localStorage for non-Tauri environment
|
||||
localStorage.setItem(`zclaw-last-interaction-${agentId}`, new Date().toISOString());
|
||||
},
|
||||
},
|
||||
|
||||
compactor: {
|
||||
|
||||
@@ -87,6 +87,12 @@ export interface StreamEventToolEnd {
|
||||
output: unknown;
|
||||
}
|
||||
|
||||
export interface StreamEventIterationStart {
|
||||
type: 'iteration_start';
|
||||
iteration: number;
|
||||
maxIterations: number;
|
||||
}
|
||||
|
||||
export interface StreamEventComplete {
|
||||
type: 'complete';
|
||||
inputTokens: number;
|
||||
@@ -102,6 +108,7 @@ export type StreamChatEvent =
|
||||
| StreamEventDelta
|
||||
| StreamEventToolStart
|
||||
| StreamEventToolEnd
|
||||
| StreamEventIterationStart
|
||||
| StreamEventComplete
|
||||
| StreamEventError;
|
||||
|
||||
@@ -424,6 +431,7 @@ export class KernelClient {
|
||||
break;
|
||||
|
||||
case 'tool_start':
|
||||
console.log('[KernelClient] Tool started:', streamEvent.name, streamEvent.input);
|
||||
if (callbacks.onTool) {
|
||||
callbacks.onTool(
|
||||
streamEvent.name,
|
||||
@@ -434,6 +442,7 @@ export class KernelClient {
|
||||
break;
|
||||
|
||||
case 'tool_end':
|
||||
console.log('[KernelClient] Tool ended:', streamEvent.name, streamEvent.output);
|
||||
if (callbacks.onTool) {
|
||||
callbacks.onTool(
|
||||
streamEvent.name,
|
||||
@@ -443,7 +452,13 @@ export class KernelClient {
|
||||
}
|
||||
break;
|
||||
|
||||
case 'iteration_start':
|
||||
console.log('[KernelClient] Iteration started:', streamEvent.iteration, '/', streamEvent.maxIterations);
|
||||
// Don't need to notify user about iterations
|
||||
break;
|
||||
|
||||
case 'complete':
|
||||
console.log('[KernelClient] Stream complete:', streamEvent.inputTokens, streamEvent.outputTokens);
|
||||
callbacks.onComplete(streamEvent.inputTokens, streamEvent.outputTokens);
|
||||
// Clean up listener
|
||||
if (unlisten) {
|
||||
@@ -453,6 +468,7 @@ export class KernelClient {
|
||||
break;
|
||||
|
||||
case 'error':
|
||||
console.error('[KernelClient] Stream error:', streamEvent.message);
|
||||
callbacks.onError(streamEvent.message);
|
||||
// Clean up listener
|
||||
if (unlisten) {
|
||||
@@ -539,6 +555,236 @@ export class KernelClient {
|
||||
};
|
||||
}
|
||||
|
||||
// === Hands API ===
|
||||
|
||||
/**
|
||||
* List all available hands
|
||||
*/
|
||||
async listHands(): Promise<{
|
||||
hands: {
|
||||
id?: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
requirements_met?: boolean;
|
||||
category?: string;
|
||||
icon?: string;
|
||||
tool_count?: number;
|
||||
tools?: string[];
|
||||
metric_count?: number;
|
||||
metrics?: string[];
|
||||
}[]
|
||||
}> {
|
||||
const hands = await invoke<Array<{
|
||||
id?: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
requirements_met?: boolean;
|
||||
category?: string;
|
||||
icon?: string;
|
||||
tool_count?: number;
|
||||
tools?: string[];
|
||||
metric_count?: number;
|
||||
metrics?: string[];
|
||||
}>>('hand_list');
|
||||
return { hands: hands || [] };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get hand details
|
||||
*/
|
||||
async getHand(name: string): Promise<{
|
||||
id?: string;
|
||||
name?: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
requirements_met?: boolean;
|
||||
category?: string;
|
||||
icon?: string;
|
||||
provider?: string;
|
||||
model?: string;
|
||||
requirements?: { description?: string; name?: string; met?: boolean; satisfied?: boolean; details?: string; hint?: string }[];
|
||||
tools?: string[];
|
||||
metrics?: string[];
|
||||
config?: Record<string, unknown>;
|
||||
tool_count?: number;
|
||||
metric_count?: number;
|
||||
}> {
|
||||
return invoke('hand_get', { name });
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger/execute a hand
|
||||
*/
|
||||
async triggerHand(name: string, params?: Record<string, unknown>): Promise<{ runId: string; status: string }> {
|
||||
const result = await invoke<{ instance_id: string; status: string }>('hand_execute', {
|
||||
id: name,
|
||||
input: params || {},
|
||||
});
|
||||
return { runId: result.instance_id, status: result.status };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get hand run status
|
||||
*/
|
||||
async getHandStatus(name: string, runId: string): Promise<{ status: string; result?: unknown }> {
|
||||
return invoke('hand_run_status', { handName: name, runId });
|
||||
}
|
||||
|
||||
/**
|
||||
* Approve a hand execution
|
||||
*/
|
||||
async approveHand(name: string, runId: string, approved: boolean, reason?: string): Promise<{ status: string }> {
|
||||
return invoke('hand_approve', { handName: name, runId, approved, reason });
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel a hand execution
|
||||
*/
|
||||
async cancelHand(name: string, runId: string): Promise<{ status: string }> {
|
||||
return invoke('hand_cancel', { handName: name, runId });
|
||||
}
|
||||
|
||||
/**
|
||||
* List hand runs (execution history)
|
||||
*/
|
||||
async listHandRuns(name: string, opts?: { limit?: number; offset?: number }): Promise<{
|
||||
runs: {
|
||||
runId?: string;
|
||||
run_id?: string;
|
||||
id?: string;
|
||||
status?: string;
|
||||
startedAt?: string;
|
||||
started_at?: string;
|
||||
completedAt?: string;
|
||||
completed_at?: string;
|
||||
result?: unknown;
|
||||
error?: string;
|
||||
}[]
|
||||
}> {
|
||||
// Hand run history API may not exist yet, return empty array
|
||||
try {
|
||||
return await invoke('hand_run_list', { handName: name, ...opts });
|
||||
} catch {
|
||||
return { runs: [] };
|
||||
}
|
||||
}
|
||||
|
||||
// === Skills API ===
|
||||
|
||||
/**
|
||||
* List all discovered skills
|
||||
*/
|
||||
async listSkills(): Promise<{
|
||||
skills: {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
version: string;
|
||||
capabilities: string[];
|
||||
tags: string[];
|
||||
mode: string;
|
||||
enabled: boolean;
|
||||
triggers: string[];
|
||||
category?: string;
|
||||
}[]
|
||||
}> {
|
||||
const skills = await invoke<Array<{
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
version: string;
|
||||
capabilities: string[];
|
||||
tags: string[];
|
||||
mode: string;
|
||||
enabled: boolean;
|
||||
triggers: string[];
|
||||
category?: string;
|
||||
}>>('skill_list');
|
||||
return { skills: skills || [] };
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh skills from directory
|
||||
*/
|
||||
async refreshSkills(skillDir?: string): Promise<{
|
||||
skills: {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
version: string;
|
||||
capabilities: string[];
|
||||
tags: string[];
|
||||
mode: string;
|
||||
enabled: boolean;
|
||||
triggers: string[];
|
||||
category?: string;
|
||||
}[]
|
||||
}> {
|
||||
const skills = await invoke<Array<{
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
version: string;
|
||||
capabilities: string[];
|
||||
tags: string[];
|
||||
mode: string;
|
||||
enabled: boolean;
|
||||
triggers: string[];
|
||||
category?: string;
|
||||
}>>('skill_refresh', { skillDir: skillDir || null });
|
||||
return { skills: skills || [] };
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a skill
|
||||
*/
|
||||
async executeSkill(id: string, input?: Record<string, unknown>): Promise<{
|
||||
success: boolean;
|
||||
output?: unknown;
|
||||
error?: string;
|
||||
durationMs?: number;
|
||||
}> {
|
||||
return invoke('skill_execute', {
|
||||
id,
|
||||
context: {},
|
||||
input: input || {},
|
||||
});
|
||||
}
|
||||
|
||||
// === Triggers API (stubs for compatibility) ===
|
||||
|
||||
async listTriggers(): Promise<{ triggers?: { id: string; type: string; enabled: boolean }[] }> {
|
||||
return { triggers: [] };
|
||||
}
|
||||
|
||||
async getTrigger(_id: string): Promise<{ id: string; type: string; enabled: boolean } | null> {
|
||||
return null;
|
||||
}
|
||||
|
||||
async createTrigger(_trigger: { type: string; name?: string; enabled?: boolean; config?: Record<string, unknown>; handName?: string; workflowId?: string }): Promise<{ id?: string } | null> {
|
||||
return null;
|
||||
}
|
||||
|
||||
async updateTrigger(_id: string, _updates: { name?: string; enabled?: boolean; config?: Record<string, unknown>; handName?: string; workflowId?: string }): Promise<{ id: string }> {
|
||||
throw new Error('Triggers not implemented');
|
||||
}
|
||||
|
||||
async deleteTrigger(_id: string): Promise<{ status: string }> {
|
||||
throw new Error('Triggers not implemented');
|
||||
}
|
||||
|
||||
// === Approvals API (stubs for compatibility) ===
|
||||
|
||||
async listApprovals(_status?: string): Promise<{ approvals?: unknown[] }> {
|
||||
return { approvals: [] };
|
||||
}
|
||||
|
||||
async respondToApproval(_approvalId: string, _approved: boolean, _reason?: string): Promise<{ status: string }> {
|
||||
throw new Error('Approvals not implemented');
|
||||
}
|
||||
|
||||
/**
|
||||
* REST API compatibility methods
|
||||
*/
|
||||
|
||||
@@ -14,6 +14,8 @@
|
||||
* Part of ZCLAW L4 Self-Evolution capability.
|
||||
*/
|
||||
|
||||
import { DEFAULT_MODEL_ID, DEFAULT_OPENAI_BASE_URL } from '../constants/models';
|
||||
|
||||
// === Types ===
|
||||
|
||||
export type LLMProvider = 'openai' | 'volcengine' | 'gateway' | 'mock';
|
||||
@@ -54,8 +56,8 @@ export interface LLMServiceAdapter {
|
||||
const DEFAULT_CONFIGS: Record<LLMProvider, LLMConfig> = {
|
||||
openai: {
|
||||
provider: 'openai',
|
||||
model: 'gpt-4o-mini',
|
||||
apiBase: 'https://api.openai.com/v1',
|
||||
model: DEFAULT_MODEL_ID,
|
||||
apiBase: DEFAULT_OPENAI_BASE_URL,
|
||||
maxTokens: 2000,
|
||||
temperature: 0.7,
|
||||
timeout: 30000,
|
||||
|
||||
@@ -65,14 +65,22 @@ function extractTriggers(triggers?: ConfigSkillInfo['triggers']): string[] {
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract capabilities from actions
|
||||
* Extract capabilities from actions or capabilities field
|
||||
*/
|
||||
function extractCapabilities(actions?: ConfigSkillInfo['actions']): string[] {
|
||||
if (!actions) return [];
|
||||
function extractCapabilities(skill: ConfigSkillInfo): string[] {
|
||||
// Prefer explicit capabilities field if available
|
||||
if (skill.capabilities && skill.capabilities.length > 0) {
|
||||
return skill.capabilities;
|
||||
}
|
||||
|
||||
return actions
|
||||
.map(a => a.type)
|
||||
.filter((t): t is string => Boolean(t));
|
||||
// Fall back to extracting from actions
|
||||
if (skill.actions) {
|
||||
return skill.actions
|
||||
.map(a => a.type)
|
||||
.filter((t): t is string => Boolean(t));
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -112,7 +120,7 @@ export function adaptSkillInfo(skill: ConfigSkillInfo): UISkillInfo {
|
||||
name: skill.name,
|
||||
description: skill.description || '',
|
||||
triggers: extractTriggers(skill.triggers),
|
||||
capabilities: extractCapabilities(skill.actions),
|
||||
capabilities: extractCapabilities(skill),
|
||||
toolDeps: extractToolDeps(skill.actions),
|
||||
installed: skill.enabled ?? false,
|
||||
category: inferCategory(skill),
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
* - Recommend skills based on recent conversation patterns
|
||||
* - Manage skill installation lifecycle (with user approval)
|
||||
*
|
||||
* Scans the local `skills/` directory for SKILL.md manifests and indexes them.
|
||||
* Dynamically loads skills from the backend Kernel's SkillRegistry.
|
||||
*
|
||||
* Reference: ZCLAW_AGENT_INTELLIGENCE_EVOLUTION.md §6.5.2
|
||||
*/
|
||||
@@ -26,6 +26,20 @@ export interface SkillInfo {
|
||||
installed: boolean;
|
||||
category?: string;
|
||||
path?: string;
|
||||
version?: string;
|
||||
mode?: string;
|
||||
}
|
||||
|
||||
/** Backend skill response format */
|
||||
interface BackendSkillInfo {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
version: string;
|
||||
capabilities: string[];
|
||||
tags: string[];
|
||||
mode: string;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
export interface SkillSuggestion {
|
||||
@@ -51,151 +65,89 @@ export interface ConversationContext {
|
||||
const SKILL_INDEX_KEY = 'zclaw-skill-index';
|
||||
const SKILL_SUGGESTIONS_KEY = 'zclaw-skill-suggestions';
|
||||
|
||||
// === Built-in Skill Registry ===
|
||||
|
||||
/**
|
||||
* Pre-indexed skills from the skills/ directory.
|
||||
* In production, this would be dynamically scanned from SKILL.md files.
|
||||
* For Phase 4, we maintain a static registry that can be refreshed.
|
||||
*/
|
||||
const BUILT_IN_SKILLS: SkillInfo[] = [
|
||||
{
|
||||
id: 'code-review',
|
||||
name: 'Code Review',
|
||||
description: '审查代码、分析代码质量、提供改进建议',
|
||||
triggers: ['审查代码', '代码审查', 'code review', 'PR review', '检查代码'],
|
||||
capabilities: ['代码质量分析', '架构评估', '安全审计', '最佳实践检查'],
|
||||
toolDeps: ['read', 'grep', 'glob'],
|
||||
installed: true,
|
||||
category: 'development',
|
||||
},
|
||||
{
|
||||
id: 'frontend-developer',
|
||||
name: 'Frontend Developer',
|
||||
description: '前端开发专家,擅长 React/Vue/CSS/TypeScript',
|
||||
triggers: ['前端开发', '页面开发', 'UI开发', 'React', 'Vue', 'CSS'],
|
||||
capabilities: ['组件开发', '样式调整', '性能优化', '响应式设计'],
|
||||
toolDeps: ['read', 'write', 'shell'],
|
||||
installed: true,
|
||||
category: 'development',
|
||||
},
|
||||
{
|
||||
id: 'backend-architect',
|
||||
name: 'Backend Architect',
|
||||
description: '后端架构设计、API设计、数据库建模',
|
||||
triggers: ['后端架构', 'API设计', '数据库设计', '系统架构', '微服务'],
|
||||
capabilities: ['架构设计', 'API规范', '数据库建模', '性能优化'],
|
||||
toolDeps: ['read', 'write', 'shell'],
|
||||
installed: true,
|
||||
category: 'development',
|
||||
},
|
||||
{
|
||||
id: 'security-engineer',
|
||||
name: 'Security Engineer',
|
||||
description: '安全工程师,负责安全审计、漏洞检测、合规检查',
|
||||
triggers: ['安全审计', '漏洞检测', '安全检查', 'security', '渗透测试'],
|
||||
capabilities: ['漏洞扫描', '合规检查', '安全加固', '威胁建模'],
|
||||
toolDeps: ['read', 'grep', 'shell'],
|
||||
installed: true,
|
||||
category: 'security',
|
||||
},
|
||||
{
|
||||
id: 'data-analysis',
|
||||
name: 'Data Analysis',
|
||||
description: '数据分析、可视化、报告生成',
|
||||
triggers: ['数据分析', '数据可视化', '报表', '统计', 'analytics'],
|
||||
capabilities: ['数据清洗', '统计分析', '可视化图表', '报告生成'],
|
||||
toolDeps: ['read', 'write', 'shell'],
|
||||
installed: true,
|
||||
category: 'analytics',
|
||||
},
|
||||
{
|
||||
id: 'chinese-writing',
|
||||
name: 'Chinese Writing',
|
||||
description: '中文写作、文案创作、内容优化',
|
||||
triggers: ['写文章', '文案', '写作', '中文创作', '内容优化'],
|
||||
capabilities: ['文案创作', '文章润色', '标题优化', 'SEO写作'],
|
||||
toolDeps: ['read', 'write'],
|
||||
installed: true,
|
||||
category: 'content',
|
||||
},
|
||||
{
|
||||
id: 'devops-automator',
|
||||
name: 'DevOps Automator',
|
||||
description: 'CI/CD、Docker、K8s、自动化部署',
|
||||
triggers: ['DevOps', 'CI/CD', 'Docker', '部署', '自动化', 'K8s'],
|
||||
capabilities: ['CI/CD配置', '容器化', '自动化部署', '监控告警'],
|
||||
toolDeps: ['shell', 'read', 'write'],
|
||||
installed: true,
|
||||
category: 'ops',
|
||||
},
|
||||
{
|
||||
id: 'senior-pm',
|
||||
name: 'Senior PM',
|
||||
description: '项目管理、需求分析、迭代规划',
|
||||
triggers: ['项目管理', '需求分析', '迭代规划', '产品设计', 'PRD'],
|
||||
capabilities: ['需求拆解', '迭代排期', '风险评估', '文档撰写'],
|
||||
toolDeps: ['read', 'write'],
|
||||
installed: true,
|
||||
category: 'management',
|
||||
},
|
||||
{
|
||||
id: 'git',
|
||||
name: 'Git Operations',
|
||||
description: 'Git 版本控制操作、分支管理、冲突解决',
|
||||
triggers: ['git', '版本控制', '分支', '合并', 'commit', 'merge'],
|
||||
capabilities: ['分支管理', '冲突解决', 'rebase', 'cherry-pick'],
|
||||
toolDeps: ['shell'],
|
||||
installed: true,
|
||||
category: 'development',
|
||||
},
|
||||
{
|
||||
id: 'api-tester',
|
||||
name: 'API Tester',
|
||||
description: 'API 测试、接口调试、自动化测试脚本',
|
||||
triggers: ['API测试', '接口测试', '接口调试', 'Postman', 'curl'],
|
||||
capabilities: ['接口调试', '自动化测试', '性能测试', '断言验证'],
|
||||
toolDeps: ['shell', 'read', 'write'],
|
||||
installed: true,
|
||||
category: 'testing',
|
||||
},
|
||||
{
|
||||
id: 'finance-tracker',
|
||||
name: 'Finance Tracker',
|
||||
description: '财务追踪、预算管理、报表分析',
|
||||
triggers: ['财务', '预算', '记账', '报销', '财务报表'],
|
||||
capabilities: ['收支分析', '预算规划', '报表生成', '趋势预测'],
|
||||
toolDeps: ['read', 'write'],
|
||||
installed: true,
|
||||
category: 'business',
|
||||
},
|
||||
{
|
||||
id: 'social-media-strategist',
|
||||
name: 'Social Media Strategist',
|
||||
description: '社交媒体运营策略、内容规划、数据分析',
|
||||
triggers: ['社交媒体', '运营', '小红书', '抖音', '微博', '内容运营'],
|
||||
capabilities: ['内容策划', '发布排期', '数据分析', '竞品监控'],
|
||||
toolDeps: ['read', 'write'],
|
||||
installed: true,
|
||||
category: 'marketing',
|
||||
},
|
||||
];
|
||||
|
||||
// === Skill Discovery Engine ===
|
||||
|
||||
export class SkillDiscoveryEngine {
|
||||
private skills: SkillInfo[] = [];
|
||||
private suggestionHistory: SkillSuggestion[] = [];
|
||||
private loadedFromBackend: boolean = false;
|
||||
|
||||
constructor() {
|
||||
this.loadIndex();
|
||||
this.loadSuggestions();
|
||||
if (this.skills.length === 0) {
|
||||
this.skills = [...BUILT_IN_SKILLS];
|
||||
// Try to load from backend, fallback to cache
|
||||
this.loadFromBackend();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load skills from backend Tauri command.
|
||||
* Falls back to cached skills if backend is unavailable.
|
||||
*/
|
||||
private async loadFromBackend(): Promise<void> {
|
||||
try {
|
||||
// Dynamic import to avoid bundling issues in non-Tauri environments
|
||||
const { invoke } = await import('@tauri-apps/api/core');
|
||||
const backendSkills = await invoke<BackendSkillInfo[]>('skill_list');
|
||||
|
||||
// Convert backend format to frontend format
|
||||
this.skills = backendSkills.map(this.convertFromBackend);
|
||||
this.loadedFromBackend = true;
|
||||
this.saveIndex();
|
||||
console.log(`[SkillDiscovery] Loaded ${this.skills.length} skills from backend`);
|
||||
} catch (error) {
|
||||
console.warn('[SkillDiscovery] Failed to load skills from backend:', error);
|
||||
// Keep using cached skills (loaded in loadIndex)
|
||||
this.loadedFromBackend = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert backend skill format to frontend format.
|
||||
*/
|
||||
private convertFromBackend(backend: BackendSkillInfo): SkillInfo {
|
||||
return {
|
||||
id: backend.id,
|
||||
name: backend.name,
|
||||
description: backend.description,
|
||||
version: backend.version,
|
||||
triggers: backend.tags, // Use tags as triggers
|
||||
capabilities: backend.capabilities,
|
||||
mode: backend.mode,
|
||||
toolDeps: [], // Backend doesn't have this field
|
||||
installed: backend.enabled,
|
||||
category: backend.tags[0] || 'general',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh skills from backend.
|
||||
* Optionally specify a custom directory to scan.
|
||||
*/
|
||||
async refresh(skillDir?: string): Promise<number> {
|
||||
try {
|
||||
const { invoke } = await import('@tauri-apps/api/core');
|
||||
const backendSkills = await invoke<BackendSkillInfo[]>('skill_refresh', {
|
||||
skillDir
|
||||
});
|
||||
|
||||
this.skills = backendSkills.map(this.convertFromBackend);
|
||||
this.loadedFromBackend = true;
|
||||
this.saveIndex();
|
||||
console.log(`[SkillDiscovery] Refreshed ${this.skills.length} skills`);
|
||||
return this.skills.length;
|
||||
} catch (error) {
|
||||
console.error('[SkillDiscovery] Failed to refresh skills:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if skills were loaded from backend.
|
||||
*/
|
||||
isLoadedFromBackend(): boolean {
|
||||
return this.loadedFromBackend;
|
||||
}
|
||||
|
||||
// === Search ===
|
||||
|
||||
/**
|
||||
|
||||
11
desktop/src/lib/workflow-builder/index.ts
Normal file
11
desktop/src/lib/workflow-builder/index.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
/**
|
||||
* Workflow Builder Library
|
||||
*
|
||||
* Provides types, converters, and utilities for building visual workflow editors.
|
||||
*/
|
||||
|
||||
export * from './types';
|
||||
export * from './yaml-converter';
|
||||
|
||||
// Re-export commonly used types from @xyflow/react
|
||||
export type { Node, Edge, Connection } from '@xyflow/react';
|
||||
329
desktop/src/lib/workflow-builder/types.ts
Normal file
329
desktop/src/lib/workflow-builder/types.ts
Normal file
@@ -0,0 +1,329 @@
|
||||
/**
|
||||
* Workflow Builder Types
|
||||
*
|
||||
* Core types for the visual workflow builder that creates Pipeline DSL
|
||||
* configurations through drag-and-drop node composition.
|
||||
*/
|
||||
|
||||
import type { Node, Edge } from '@xyflow/react';
|
||||
|
||||
// =============================================================================
|
||||
// Node Types
|
||||
// =============================================================================
|
||||
|
||||
export type WorkflowNodeType =
|
||||
| 'input'
|
||||
| 'llm'
|
||||
| 'skill'
|
||||
| 'hand'
|
||||
| 'orchestration'
|
||||
| 'condition'
|
||||
| 'parallel'
|
||||
| 'loop'
|
||||
| 'export'
|
||||
| 'http'
|
||||
| 'setVar'
|
||||
| 'delay';
|
||||
|
||||
// =============================================================================
|
||||
// Node Data Types
|
||||
// =============================================================================
|
||||
|
||||
// Base node data that satisfies Record<string, unknown>
|
||||
export interface BaseNodeData extends Record<string, unknown> {
|
||||
label: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
export interface InputNodeData extends BaseNodeData {
|
||||
type: 'input';
|
||||
/** Input variable name */
|
||||
variableName: string;
|
||||
/** Default value for testing */
|
||||
defaultValue?: unknown;
|
||||
/** JSON schema for validation */
|
||||
schema?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export interface LlmNodeData extends BaseNodeData {
|
||||
type: 'llm';
|
||||
/** Template path or inline prompt */
|
||||
template: string;
|
||||
/** Whether template is a file path */
|
||||
isTemplateFile: boolean;
|
||||
/** Model override */
|
||||
model?: string;
|
||||
/** Temperature override */
|
||||
temperature?: number;
|
||||
/** Max tokens override */
|
||||
maxTokens?: number;
|
||||
/** JSON mode for structured output */
|
||||
jsonMode: boolean;
|
||||
}
|
||||
|
||||
export interface SkillNodeData extends BaseNodeData {
|
||||
type: 'skill';
|
||||
/** Skill ID to execute */
|
||||
skillId: string;
|
||||
/** Skill name for display */
|
||||
skillName?: string;
|
||||
/** Input variable mappings */
|
||||
inputMappings: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface HandNodeData extends BaseNodeData {
|
||||
type: 'hand';
|
||||
/** Hand ID */
|
||||
handId: string;
|
||||
/** Hand name for display */
|
||||
handName?: string;
|
||||
/** Action to perform */
|
||||
action: string;
|
||||
/** Action parameters */
|
||||
params: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface OrchestrationNodeData extends BaseNodeData {
|
||||
type: 'orchestration';
|
||||
/** Graph ID reference */
|
||||
graphId?: string;
|
||||
/** Inline graph definition */
|
||||
graph?: Record<string, unknown>;
|
||||
/** Input mappings */
|
||||
inputMappings: Record<string, string>;
|
||||
}
|
||||
|
||||
/** Data for a branching node: routes execution down one of several branches. */
export interface ConditionNodeData extends BaseNodeData {
  type: 'condition';
  /** Condition expression */
  condition: string;
  /** Branch definitions */
  branches: ConditionBranch[];
  /** Has default branch */
  hasDefault: boolean;
}

/** One branch of a condition node. */
export interface ConditionBranch {
  /** Condition expression for this branch */
  when: string;
  /** Label for display */
  label: string;
}

/** Data for a fan-out node that runs work once per item, concurrently. */
export interface ParallelNodeData extends BaseNodeData {
  type: 'parallel';
  /** Expression to iterate over */
  each: string;
  /** Max concurrent workers */
  maxWorkers: number;
}

/** Data for a sequential iteration node. */
export interface LoopNodeData extends BaseNodeData {
  type: 'loop';
  /** Expression to iterate over */
  each: string;
  /** Variable name for current item */
  itemVar: string;
  /** Variable name for index */
  indexVar: string;
}

/** Data for a file-export node. */
export interface ExportNodeData extends BaseNodeData {
  type: 'export';
  /** Export formats */
  formats: ExportFormat[];
  /** Output directory */
  outputDir?: string;
}

/** Data for an HTTP request node. */
export interface HttpNodeData extends BaseNodeData {
  type: 'http';
  /** URL */
  url: string;
  /** HTTP method */
  method: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
  /** Headers */
  headers: Record<string, string>;
  /** Request body expression */
  body?: string;
}

/** Data for a node that assigns a value to a pipeline variable. */
export interface SetVarNodeData extends BaseNodeData {
  type: 'setVar';
  /** Variable name */
  variableName: string;
  /** Value expression */
  value: string;
}

/** Data for a node that pauses execution. */
export interface DelayNodeData extends BaseNodeData {
  type: 'delay';
  /** Delay in milliseconds */
  ms: number;
}

/**
 * Union of every node payload. Each member carries a literal `type` tag,
 * so narrowing on `data.type` selects the concrete shape.
 */
export type WorkflowNodeData =
  | InputNodeData
  | LlmNodeData
  | SkillNodeData
  | HandNodeData
  | OrchestrationNodeData
  | ConditionNodeData
  | ParallelNodeData
  | LoopNodeData
  | ExportNodeData
  | HttpNodeData
  | SetVarNodeData
  | DelayNodeData;
|
||||
|
||||
// =============================================================================
// Canvas Types
// =============================================================================

// Use Record<string, unknown> as base to satisfy React Flow constraints
// The actual data will be one of the WorkflowNodeData union types
export type WorkflowNode = Node<Record<string, unknown>, string>;
export type WorkflowEdge = Edge;

/** A complete visual workflow: nodes, connections, and viewport state. */
export interface WorkflowCanvas {
  /** Unique canvas ID */
  id: string;
  /** Canvas name */
  name: string;
  /** Canvas description */
  description?: string;
  /** Category for organization */
  category?: string;
  /** Nodes in the canvas */
  nodes: WorkflowNode[];
  /** Edges connecting nodes */
  edges: WorkflowEdge[];
  /** Viewport state (pan offset and zoom level) */
  viewport: {
    x: number;
    y: number;
    zoom: number;
  };
  /** Canvas metadata */
  metadata: WorkflowMetadata;
}

/** Bookkeeping information attached to a canvas. */
export interface WorkflowMetadata {
  /** Created timestamp */
  createdAt: string;
  /** Updated timestamp */
  updatedAt: string;
  /** Author */
  author?: string;
  /** Tags for search */
  tags: string[];
  /** Version */
  version: string;
}
|
||||
|
||||
// =============================================================================
// Export Types
// =============================================================================

/** Output formats supported by the export node. */
export type ExportFormat = 'pptx' | 'html' | 'pdf' | 'markdown' | 'json';

// =============================================================================
// Palette Types
// =============================================================================

/** An entry in the node palette that the user can drag onto the canvas. */
export interface NodePaletteItem {
  /** Node type this item creates */
  type: WorkflowNodeType;
  /** Display name shown in the palette */
  label: string;
  /** Short description shown in the palette */
  description: string;
  /** Icon name/identifier */
  icon: string;
  /** Palette grouping bucket */
  category: NodeCategory;
  /** Initial node data applied when the item is dropped */
  defaultData: Partial<WorkflowNodeData>;
}

/** Palette grouping buckets. */
export type NodeCategory =
  | 'input'
  | 'ai'
  | 'action'
  | 'control'
  | 'output';
|
||||
|
||||
// =============================================================================
// Conversion Types
// =============================================================================

/** Top-level shape of a serialized pipeline document. */
export interface PipelineYaml {
  apiVersion: 'zclaw/v1';
  kind: 'Pipeline';
  metadata: {
    name: string;
    description?: string;
    tags?: string[];
  };
  spec: {
    /** Named pipeline inputs with their default values */
    input?: Record<string, unknown>;
    /** Ordered list of execution steps */
    steps: PipelineStepYaml[];
    /** Output name -> expression mappings */
    output?: Record<string, string>;
  };
}

/** One step of a pipeline document. */
export interface PipelineStepYaml {
  id: string;
  name?: string;
  /** Single-key object: the key names the action type, the value is its config */
  action: Record<string, unknown>;
  /** Optional guard expression */
  when?: string;
}

// =============================================================================
// Validation Types
// =============================================================================

/** One problem found while validating a canvas. */
export interface ValidationError {
  /** Offending node id, or 'canvas' for canvas-wide issues */
  nodeId: string;
  /** Offending field on the node, when applicable */
  field?: string;
  /** Human-readable explanation */
  message: string;
  severity: 'error' | 'warning';
}

/** Aggregate result of canvas validation. */
export interface ValidationResult {
  /** True when there are no errors (warnings do not affect validity) */
  valid: boolean;
  errors: ValidationError[];
  warnings: ValidationError[];
}
|
||||
|
||||
// =============================================================================
// Template Types
// =============================================================================

/** A prebuilt workflow that can be instantiated from the template gallery. */
export interface WorkflowTemplate {
  id: string;
  name: string;
  description: string;
  category: string;
  /** Optional preview image reference */
  thumbnail?: string;
  /** Complete canvas payload for this template */
  canvas: WorkflowCanvas;
}

// =============================================================================
// Store Types
// =============================================================================

/** State slice held by the workflow-builder store. */
export interface WorkflowBuilderState {
  /** Current canvas */
  canvas: WorkflowCanvas | null;
  /** All saved workflows */
  workflows: WorkflowCanvas[];
  /** Selected node ID */
  selectedNodeId: string | null;
  /** Is dragging from palette */
  isDragging: boolean;
  /** Is canvas dirty (unsaved changes) */
  isDirty: boolean;
  /** Validation result */
  validation: ValidationResult | null;
  /** Templates */
  templates: WorkflowTemplate[];
  /** Available skills for palette */
  availableSkills: Array<{ id: string; name: string; description: string }>;
  /** Available hands for palette */
  availableHands: Array<{ id: string; name: string; actions: string[] }>;
}
|
||||
803
desktop/src/lib/workflow-builder/yaml-converter.ts
Normal file
803
desktop/src/lib/workflow-builder/yaml-converter.ts
Normal file
@@ -0,0 +1,803 @@
|
||||
/**
|
||||
* YAML Converter for Workflow Builder
|
||||
*
|
||||
* Bidirectional conversion between WorkflowCanvas (visual representation)
|
||||
* and Pipeline YAML (execution format).
|
||||
*/
|
||||
|
||||
import * as yaml from 'js-yaml';
|
||||
import type { Edge } from '@xyflow/react';
|
||||
import dagre from '@dagrejs/dagre';
|
||||
import type {
|
||||
WorkflowCanvas,
|
||||
WorkflowNode,
|
||||
WorkflowNodeData,
|
||||
InputNodeData,
|
||||
LlmNodeData,
|
||||
SkillNodeData,
|
||||
HandNodeData,
|
||||
ConditionNodeData,
|
||||
ParallelNodeData,
|
||||
ExportNodeData,
|
||||
PipelineYaml,
|
||||
PipelineStepYaml,
|
||||
ValidationError,
|
||||
ValidationResult,
|
||||
} from './types';
|
||||
|
||||
// =============================================================================
|
||||
// Canvas to YAML Conversion
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Convert a WorkflowCanvas to Pipeline YAML string
|
||||
*/
|
||||
export function canvasToYaml(canvas: WorkflowCanvas): string {
|
||||
const pipeline: PipelineYaml = {
|
||||
apiVersion: 'zclaw/v1',
|
||||
kind: 'Pipeline',
|
||||
metadata: {
|
||||
name: canvas.name,
|
||||
description: canvas.description,
|
||||
tags: canvas.metadata.tags,
|
||||
},
|
||||
spec: {
|
||||
input: extractInputs(canvas.nodes),
|
||||
steps: nodesToSteps(canvas.nodes, canvas.edges),
|
||||
output: extractOutputs(canvas.nodes),
|
||||
},
|
||||
};
|
||||
|
||||
return yaml.dump(pipeline, {
|
||||
indent: 2,
|
||||
lineWidth: -1,
|
||||
noRefs: true,
|
||||
sortKeys: false,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract input definitions from input nodes
|
||||
*/
|
||||
function extractInputs(nodes: WorkflowNode[]): Record<string, unknown> | undefined {
|
||||
const inputs: Record<string, unknown> = {};
|
||||
|
||||
for (const node of nodes) {
|
||||
if (node.data.type === 'input') {
|
||||
const data = node.data as InputNodeData;
|
||||
inputs[data.variableName] = data.defaultValue ?? null;
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(inputs).length > 0 ? inputs : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract output mappings from the last nodes or explicit output nodes
|
||||
*/
|
||||
function extractOutputs(nodes: WorkflowNode[]): Record<string, string> | undefined {
|
||||
const outputs: Record<string, string> = {};
|
||||
|
||||
for (const node of nodes) {
|
||||
if (node.data.type === 'export') {
|
||||
// Export nodes define outputs
|
||||
outputs[`${node.id}_export`] = `\${steps.${node.id}.output}`;
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(outputs).length > 0 ? outputs : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert nodes and edges to pipeline steps
|
||||
*/
|
||||
function nodesToSteps(nodes: WorkflowNode[], edges: Edge[]): PipelineStepYaml[] {
|
||||
// Topological sort to get execution order
|
||||
const sortedNodes = topologicalSort(nodes, edges);
|
||||
|
||||
return sortedNodes
|
||||
.filter(node => node.data.type !== 'input') // Skip input nodes
|
||||
.map(node => nodeToStep(node))
|
||||
.filter((step): step is PipelineStepYaml => step !== null);
|
||||
}
|
||||
|
||||
/**
 * Convert a single node to a pipeline step.
 *
 * Maps each node type to its single-key action object (e.g. `llm` ->
 * `llm_generate`, `export` -> `file_export`). Returns null for input nodes
 * (they are represented in spec.input) and for unknown node types.
 */
function nodeToStep(node: WorkflowNode): PipelineStepYaml | null {
  const data = node.data;
  const label = data.label as string | undefined;
  // Common step envelope; the action payload is filled per node type below.
  const base: PipelineStepYaml = {
    id: node.id,
    name: label,
    action: {},
  };

  const nodeType = data.type as string;

  switch (nodeType) {
    case 'llm': {
      const llmData = data as LlmNodeData;
      base.action = {
        llm_generate: {
          template: llmData.template,
          // Variable references found in the template become the input map.
          input: mapExpressionsToObject(llmData.template),
          model: llmData.model,
          temperature: llmData.temperature,
          max_tokens: llmData.maxTokens,
          json_mode: llmData.jsonMode,
        },
      };
      break;
    }

    case 'skill': {
      const skillData = data as SkillNodeData;
      base.action = {
        skill: {
          skill_id: skillData.skillId,
          input: skillData.inputMappings,
        },
      };
      break;
    }

    case 'hand': {
      const handData = data as HandNodeData;
      base.action = {
        hand: {
          hand_id: handData.handId,
          hand_action: handData.action,
          params: handData.params,
        },
      };
      break;
    }

    case 'orchestration': {
      const orchData = data as { graphId?: string; graph?: Record<string, unknown>; inputMappings?: Record<string, string> };
      base.action = {
        skill_orchestration: {
          graph_id: orchData.graphId,
          graph: orchData.graph,
          input: orchData.inputMappings,
        },
      };
      break;
    }

    case 'condition': {
      const condData = data as ConditionNodeData;
      base.action = {
        condition: {
          condition: condData.condition,
          branches: condData.branches.map((b: { when: string }) => ({
            when: b.when,
            then: { /* Will be filled by connected nodes */ },
          })),
        },
      };
      break;
    }

    case 'parallel': {
      const parData = data as ParallelNodeData;
      base.action = {
        parallel: {
          each: parData.each,
          step: { /* Will be filled by child nodes */ },
          max_workers: parData.maxWorkers,
        },
      };
      break;
    }

    case 'loop': {
      const loopData = data as { each: string; itemVar: string; indexVar: string };
      base.action = {
        loop: {
          each: loopData.each,
          item_var: loopData.itemVar,
          index_var: loopData.indexVar,
          step: { /* Will be filled by child nodes */ },
        },
      };
      break;
    }

    case 'export': {
      const exportData = data as ExportNodeData;
      base.action = {
        file_export: {
          formats: exportData.formats,
          // NOTE(review): references this step's own id (`steps.<id>.input`);
          // confirm the executor resolves that for the current step.
          input: `\${steps.${node.id}.input}`,
          output_dir: exportData.outputDir,
        },
      };
      break;
    }

    case 'http': {
      const httpData = data as { url: string; method: string; headers: Record<string, string>; body?: string };
      base.action = {
        http_request: {
          url: httpData.url,
          method: httpData.method,
          headers: httpData.headers,
          body: httpData.body,
        },
      };
      break;
    }

    case 'setVar': {
      const varData = data as { variableName: string; value: string };
      base.action = {
        set_var: {
          name: varData.variableName,
          value: varData.value,
        },
      };
      break;
    }

    case 'delay': {
      const delayData = data as { ms: number };
      base.action = {
        delay: {
          ms: delayData.ms,
        },
      };
      break;
    }

    case 'input':
      // Input nodes don't become steps
      return null;

    default:
      // Unknown types are skipped rather than producing a malformed step.
      console.warn(`Unknown node type: ${nodeType}`);
      return null;
  }

  return base;
}
|
||||
|
||||
/**
|
||||
* Topological sort of nodes based on edges
|
||||
*/
|
||||
function topologicalSort(nodes: WorkflowNode[], edges: Edge[]): WorkflowNode[] {
|
||||
const nodeMap = new Map(nodes.map(n => [n.id, n]));
|
||||
const inDegree = new Map<string, number>();
|
||||
const adjacency = new Map<string, string[]>();
|
||||
|
||||
// Initialize
|
||||
for (const node of nodes) {
|
||||
inDegree.set(node.id, 0);
|
||||
adjacency.set(node.id, []);
|
||||
}
|
||||
|
||||
// Build graph
|
||||
for (const edge of edges) {
|
||||
const current = adjacency.get(edge.source) || [];
|
||||
current.push(edge.target);
|
||||
adjacency.set(edge.source, current);
|
||||
|
||||
inDegree.set(edge.target, (inDegree.get(edge.target) || 0) + 1);
|
||||
}
|
||||
|
||||
// Kahn's algorithm
|
||||
const queue: string[] = [];
|
||||
const result: WorkflowNode[] = [];
|
||||
|
||||
for (const [nodeId, degree] of inDegree) {
|
||||
if (degree === 0) {
|
||||
queue.push(nodeId);
|
||||
}
|
||||
}
|
||||
|
||||
while (queue.length > 0) {
|
||||
const nodeId = queue.shift()!;
|
||||
const node = nodeMap.get(nodeId);
|
||||
if (node) {
|
||||
result.push(node);
|
||||
}
|
||||
|
||||
const neighbors = adjacency.get(nodeId) || [];
|
||||
for (const neighbor of neighbors) {
|
||||
const newDegree = (inDegree.get(neighbor) || 0) - 1;
|
||||
inDegree.set(neighbor, newDegree);
|
||||
if (newDegree === 0) {
|
||||
queue.push(neighbor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract variable references from a template string
|
||||
*/
|
||||
function mapExpressionsToObject(template: string): Record<string, string> {
|
||||
const regex = /\$\{([^}]+)\}/g;
|
||||
const matches = template.match(regex) || [];
|
||||
const result: Record<string, string> = {};
|
||||
|
||||
for (const match of matches) {
|
||||
const expr = match.slice(2, -1); // Remove ${ and }
|
||||
const parts = expr.split('.');
|
||||
if (parts.length >= 2) {
|
||||
result[parts[parts.length - 1]] = match;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// YAML to Canvas Conversion
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Parse Pipeline YAML string to WorkflowCanvas
|
||||
*/
|
||||
export function yamlToCanvas(yamlString: string): WorkflowCanvas {
|
||||
const pipeline = yaml.load(yamlString) as PipelineYaml;
|
||||
|
||||
const nodes: WorkflowNode[] = [];
|
||||
const edges: Edge[] = [];
|
||||
|
||||
// Create input nodes from spec.input
|
||||
if (pipeline.spec.input) {
|
||||
let y = 50;
|
||||
for (const [varName, defaultValue] of Object.entries(pipeline.spec.input)) {
|
||||
nodes.push({
|
||||
id: `input_${varName}`,
|
||||
type: 'input',
|
||||
position: { x: 50, y },
|
||||
data: {
|
||||
type: 'input',
|
||||
label: varName,
|
||||
variableName: varName,
|
||||
defaultValue,
|
||||
},
|
||||
});
|
||||
y += 100;
|
||||
}
|
||||
}
|
||||
|
||||
// Convert steps to nodes
|
||||
if (pipeline.spec.steps) {
|
||||
let x = 300;
|
||||
let y = 50;
|
||||
|
||||
for (const step of pipeline.spec.steps) {
|
||||
const node = stepToNode(step, x, y);
|
||||
if (node) {
|
||||
nodes.push(node);
|
||||
y += 150;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Auto-layout with dagre
|
||||
const layoutedNodes = applyDagreLayout(nodes, edges);
|
||||
|
||||
return {
|
||||
id: `workflow_${Date.now()}`,
|
||||
name: pipeline.metadata?.name || 'Imported Workflow',
|
||||
description: pipeline.metadata?.description,
|
||||
category: 'imported',
|
||||
nodes: layoutedNodes,
|
||||
edges,
|
||||
viewport: { x: 0, y: 0, zoom: 1 },
|
||||
metadata: {
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
tags: pipeline.metadata?.tags || [],
|
||||
version: '1.0.0',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a pipeline step to a workflow node
|
||||
*/
|
||||
function stepToNode(step: PipelineStepYaml, x: number, y: number): WorkflowNode | null {
|
||||
const action = step.action;
|
||||
const actionType = Object.keys(action)[0];
|
||||
const actionData = action[actionType];
|
||||
|
||||
const baseData = {
|
||||
label: step.name || step.id,
|
||||
};
|
||||
|
||||
switch (actionType) {
|
||||
case 'llm_generate':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'llm',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'llm',
|
||||
...baseData,
|
||||
template: (actionData as { template?: string }).template || '',
|
||||
isTemplateFile: false,
|
||||
model: (actionData as { model?: string }).model,
|
||||
temperature: (actionData as { temperature?: number }).temperature,
|
||||
maxTokens: (actionData as { max_tokens?: number }).max_tokens,
|
||||
jsonMode: (actionData as { json_mode?: boolean }).json_mode || false,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'skill':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'skill',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'skill',
|
||||
...baseData,
|
||||
skillId: (actionData as { skill_id?: string }).skill_id || '',
|
||||
inputMappings: (actionData as { input?: Record<string, string> }).input || {},
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'hand':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'hand',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'hand',
|
||||
...baseData,
|
||||
handId: (actionData as { hand_id?: string }).hand_id || '',
|
||||
action: (actionData as { hand_action?: string }).hand_action || '',
|
||||
params: (actionData as { params?: Record<string, string> }).params || {},
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'skill_orchestration':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'orchestration',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'orchestration',
|
||||
...baseData,
|
||||
graphId: (actionData as { graph_id?: string }).graph_id,
|
||||
graph: (actionData as { graph?: Record<string, unknown> }).graph,
|
||||
inputMappings: (actionData as { input?: Record<string, string> }).input || {},
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'condition':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'condition',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'condition',
|
||||
...baseData,
|
||||
condition: (actionData as { condition?: string }).condition || '',
|
||||
branches: ((actionData as { branches?: Array<{ when: string }> }).branches || []).map(b => ({
|
||||
when: b.when,
|
||||
label: b.when.slice(0, 20),
|
||||
})),
|
||||
hasDefault: true,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'parallel':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'parallel',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'parallel',
|
||||
...baseData,
|
||||
each: (actionData as { each?: string }).each || '',
|
||||
maxWorkers: (actionData as { max_workers?: number }).max_workers || 4,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'file_export':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'export',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'export',
|
||||
...baseData,
|
||||
formats: (actionData as { formats?: string[] }).formats || [],
|
||||
outputDir: (actionData as { output_dir?: string }).output_dir,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'http_request':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'http',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'http',
|
||||
...baseData,
|
||||
url: (actionData as { url?: string }).url || '',
|
||||
method: ((actionData as { method?: string }).method || 'GET') as 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH',
|
||||
headers: (actionData as { headers?: Record<string, string> }).headers || {},
|
||||
body: (actionData as { body?: string }).body,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'set_var':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'setVar',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'setVar',
|
||||
...baseData,
|
||||
variableName: (actionData as { name?: string }).name || '',
|
||||
value: (actionData as { value?: string }).value || '',
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'delay':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'delay',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'delay',
|
||||
...baseData,
|
||||
ms: (actionData as { ms?: number }).ms || 0,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
default:
|
||||
console.warn(`Unknown action type: ${actionType}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Layout Utilities
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Apply dagre layout to nodes
|
||||
*/
|
||||
export function applyDagreLayout(nodes: WorkflowNode[], edges: Edge[]): WorkflowNode[] {
|
||||
const dagreGraph = new dagre.graphlib.Graph();
|
||||
dagreGraph.setDefaultEdgeLabel(() => ({}));
|
||||
|
||||
dagreGraph.setGraph({
|
||||
rankdir: 'LR',
|
||||
nodesep: 100,
|
||||
ranksep: 150,
|
||||
marginx: 50,
|
||||
marginy: 50,
|
||||
});
|
||||
|
||||
// Add nodes to dagre
|
||||
for (const node of nodes) {
|
||||
dagreGraph.setNode(node.id, {
|
||||
width: 250,
|
||||
height: 100,
|
||||
});
|
||||
}
|
||||
|
||||
// Add edges to dagre
|
||||
for (const edge of edges) {
|
||||
dagreGraph.setEdge(edge.source, edge.target);
|
||||
}
|
||||
|
||||
// Apply layout
|
||||
dagre.layout(dagreGraph);
|
||||
|
||||
// Update node positions
|
||||
return nodes.map(node => {
|
||||
const dagreNode = dagreGraph.node(node.id);
|
||||
if (dagreNode) {
|
||||
return {
|
||||
...node,
|
||||
position: {
|
||||
x: dagreNode.x - dagreNode.width / 2,
|
||||
y: dagreNode.y - dagreNode.height / 2,
|
||||
},
|
||||
};
|
||||
}
|
||||
return node;
|
||||
});
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Validation
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Validate a workflow canvas
|
||||
*/
|
||||
export function validateCanvas(canvas: WorkflowCanvas): ValidationResult {
|
||||
const errors: ValidationError[] = [];
|
||||
const warnings: ValidationError[] = [];
|
||||
|
||||
// Check for empty canvas
|
||||
if (canvas.nodes.length === 0) {
|
||||
errors.push({
|
||||
nodeId: 'canvas',
|
||||
message: 'Workflow is empty',
|
||||
severity: 'error',
|
||||
});
|
||||
return { valid: false, errors, warnings };
|
||||
}
|
||||
|
||||
// Check for input nodes
|
||||
const hasInput = canvas.nodes.some(n => n.data.type === 'input');
|
||||
if (!hasInput) {
|
||||
warnings.push({
|
||||
nodeId: 'canvas',
|
||||
message: 'No input nodes defined',
|
||||
severity: 'warning',
|
||||
});
|
||||
}
|
||||
|
||||
// Check for disconnected nodes
|
||||
const connectedNodeIds = new Set<string>();
|
||||
for (const edge of canvas.edges) {
|
||||
connectedNodeIds.add(edge.source);
|
||||
connectedNodeIds.add(edge.target);
|
||||
}
|
||||
|
||||
for (const node of canvas.nodes) {
|
||||
if (canvas.nodes.length > 1 && !connectedNodeIds.has(node.id) && node.data.type !== 'input') {
|
||||
warnings.push({
|
||||
nodeId: node.id,
|
||||
message: `Node "${node.data.label}" is not connected`,
|
||||
severity: 'warning',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Validate individual nodes
|
||||
for (const node of canvas.nodes) {
|
||||
const nodeErrors = validateNode(node);
|
||||
errors.push(...nodeErrors);
|
||||
}
|
||||
|
||||
// Check for cycles (basic check)
|
||||
if (hasCycle(canvas.nodes, canvas.edges)) {
|
||||
errors.push({
|
||||
nodeId: 'canvas',
|
||||
message: 'Workflow contains a cycle',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a single node
|
||||
*/
|
||||
function validateNode(node: WorkflowNode): ValidationError[] {
|
||||
const errors: ValidationError[] = [];
|
||||
const data = node.data;
|
||||
|
||||
switch (data.type) {
|
||||
case 'llm':
|
||||
if (!data.template) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'template',
|
||||
message: 'Template is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'skill':
|
||||
if (!data.skillId) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'skillId',
|
||||
message: 'Skill ID is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'hand':
|
||||
if (!data.handId) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'handId',
|
||||
message: 'Hand ID is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
if (!data.action) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'action',
|
||||
message: 'Action is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'http':
|
||||
if (!data.url) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'url',
|
||||
message: 'URL is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'input':
|
||||
if (!data.variableName) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'variableName',
|
||||
message: 'Variable name is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the graph has a cycle
|
||||
*/
|
||||
function hasCycle(nodes: WorkflowNode[], edges: Edge[]): boolean {
|
||||
const adjacency = new Map<string, string[]>();
|
||||
const visited = new Set<string>();
|
||||
const recStack = new Set<string>();
|
||||
|
||||
// Build adjacency list
|
||||
for (const node of nodes) {
|
||||
adjacency.set(node.id, []);
|
||||
}
|
||||
for (const edge of edges) {
|
||||
const neighbors = adjacency.get(edge.source) || [];
|
||||
neighbors.push(edge.target);
|
||||
adjacency.set(edge.source, neighbors);
|
||||
}
|
||||
|
||||
// DFS cycle detection
|
||||
function dfs(nodeId: string): boolean {
|
||||
visited.add(nodeId);
|
||||
recStack.add(nodeId);
|
||||
|
||||
const neighbors = adjacency.get(nodeId) || [];
|
||||
for (const neighbor of neighbors) {
|
||||
if (!visited.has(neighbor)) {
|
||||
if (dfs(neighbor)) return true;
|
||||
} else if (recStack.has(neighbor)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
recStack.delete(nodeId);
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const node of nodes) {
|
||||
if (!visited.has(node.id)) {
|
||||
if (dfs(node.id)) return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
@@ -62,9 +62,13 @@ export interface ScheduledTask {
|
||||
export interface SkillInfo {
|
||||
id: string;
|
||||
name: string;
|
||||
path: string;
|
||||
source: 'builtin' | 'extra';
|
||||
path?: string;
|
||||
source?: 'builtin' | 'extra';
|
||||
description?: string;
|
||||
version?: string;
|
||||
capabilities?: string[];
|
||||
tags?: string[];
|
||||
mode?: string;
|
||||
triggers?: Array<{ type: string; pattern?: string }>;
|
||||
actions?: Array<{ type: string; params?: Record<string, unknown> }>;
|
||||
enabled?: boolean;
|
||||
@@ -539,6 +543,8 @@ export type {
|
||||
|
||||
// === Client Injection ===
|
||||
|
||||
import type { KernelClient } from '../lib/kernel-client';
|
||||
|
||||
/**
|
||||
* Helper to create a ConfigStoreClient adapter from a GatewayClient.
|
||||
*/
|
||||
@@ -572,11 +578,135 @@ function createConfigClientFromGateway(client: GatewayClient): ConfigStoreClient
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Helper to create a ConfigStoreClient adapter from a KernelClient.
 *
 * The kernel exposes only part of the configuration surface, so reads are
 * mapped where possible, several methods return empty/stub values, and
 * mutating operations throw.
 */
function createConfigClientFromKernel(client: KernelClient): ConfigStoreClient {
  return {
    // Synthesize a minimal workspace record from the kernel status call;
    // path details are not available here, and failures yield null.
    getWorkspaceInfo: async () => {
      try {
        const status = await client.status();
        return {
          path: '',
          resolvedPath: '',
          exists: status.initialized as boolean,
          fileCount: 0,
          totalSize: 0,
        };
      } catch {
        return null;
      }
    },
    getQuickConfig: async () => ({ quickConfig: {} }),
    saveQuickConfig: async () => null,
    // Map kernel skill records onto the richer shape the config store expects.
    // Best-effort: any failure yields an empty list instead of throwing.
    listSkills: async () => {
      try {
        const result = await client.listSkills();
        if (result?.skills) {
          return {
            skills: result.skills.map((s) => ({
              id: s.id,
              name: s.name,
              description: s.description || '',
              version: s.version,
              // Use capabilities directly
              capabilities: s.capabilities || [],
              tags: s.tags || [],
              mode: s.mode,
              // Map triggers to the expected format
              triggers: (s.triggers || []).map((t: string) => ({
                type: 'keyword',
                pattern: t,
              })),
              // Create actions from capabilities for UI display
              actions: (s.capabilities || []).map((cap: string) => ({
                type: cap,
                params: undefined,
              })),
              enabled: s.enabled ?? true,
              category: s.category,
            })),
          };
        }
        return { skills: [] };
      } catch {
        return { skills: [] };
      }
    },
    // Stub: returns a minimal record derived from the id only.
    getSkill: async (id: string) => {
      return { skill: { id, name: id, description: '' } };
    },
    createSkill: async () => {
      throw new Error('Skill creation not supported in KernelClient');
    },
    updateSkill: async () => {
      throw new Error('Skill update not supported in KernelClient');
    },
    deleteSkill: async () => {
      throw new Error('Skill deletion not supported in KernelClient');
    },
    // Channel and scheduling features are stubbed out for the kernel path.
    listChannels: async () => ({ channels: [] }),
    getChannel: async () => null,
    createChannel: async () => null,
    updateChannel: async () => null,
    deleteChannel: async () => {},
    listScheduledTasks: async () => ({ tasks: [] }),
    createScheduledTask: async () => {
      throw new Error('Scheduled tasks not supported in KernelClient');
    },
    // Expose at most the kernel's default model as a one-element list.
    listModels: async () => {
      try {
        const status = await client.status();
        return {
          models: status.defaultModel ? [{
            id: status.defaultModel as string,
            name: status.defaultModel as string,
            provider: (status.defaultProvider as string) || 'default',
          }] : [],
        };
      } catch {
        return { models: [] };
      }
    },
    getFeishuStatus: async () => null,
  };
}
|
||||
|
||||
/**
|
||||
* Sets the client for the config store.
|
||||
* Called by the coordinator during initialization.
|
||||
*/
|
||||
export function setConfigStoreClient(client: unknown): void {
|
||||
const configClient = createConfigClientFromGateway(client as GatewayClient);
|
||||
let configClient: ConfigStoreClient;
|
||||
|
||||
// Check if it's a KernelClient (has listHands method)
|
||||
if (client && typeof client === 'object' && 'listHands' in client) {
|
||||
configClient = createConfigClientFromKernel(client as KernelClient);
|
||||
} else if (client && typeof client === 'object') {
|
||||
// It's GatewayClient
|
||||
configClient = createConfigClientFromGateway(client as GatewayClient);
|
||||
} else {
|
||||
// Fallback stub client
|
||||
configClient = {
|
||||
getWorkspaceInfo: async () => null,
|
||||
getQuickConfig: async () => null,
|
||||
saveQuickConfig: async () => null,
|
||||
listSkills: async () => ({ skills: [] }),
|
||||
getSkill: async () => null,
|
||||
createSkill: async () => null,
|
||||
updateSkill: async () => null,
|
||||
deleteSkill: async () => {},
|
||||
listChannels: async () => ({ channels: [] }),
|
||||
getChannel: async () => null,
|
||||
createChannel: async () => null,
|
||||
updateChannel: async () => null,
|
||||
deleteChannel: async () => {},
|
||||
listScheduledTasks: async () => ({ tasks: [] }),
|
||||
createScheduledTask: async () => { throw new Error('Not implemented'); },
|
||||
listModels: async () => ({ models: [] }),
|
||||
getFeishuStatus: async () => null,
|
||||
};
|
||||
}
|
||||
|
||||
useConfigStore.getState().setConfigStoreClient(configClient);
|
||||
}
|
||||
|
||||
@@ -261,6 +261,10 @@ export const useConnectionStore = create<ConnectionStore>((set, get) => {
|
||||
// Update the stored client reference
|
||||
set({ client: kernelClient });
|
||||
|
||||
// Re-inject client to all stores so they get the kernel client
|
||||
const { initializeStores } = await import('./index');
|
||||
initializeStores();
|
||||
|
||||
// Connect to internal kernel
|
||||
await kernelClient.connect();
|
||||
|
||||
|
||||
456
desktop/src/store/workflowBuilderStore.ts
Normal file
456
desktop/src/store/workflowBuilderStore.ts
Normal file
@@ -0,0 +1,456 @@
|
||||
/**
|
||||
* Workflow Builder Store
|
||||
*
|
||||
* Zustand store for managing workflow builder state.
|
||||
*/
|
||||
|
||||
import { create } from 'zustand';
|
||||
import { persist } from 'zustand/middleware';
|
||||
import type {
|
||||
WorkflowCanvas,
|
||||
WorkflowNode,
|
||||
WorkflowEdge,
|
||||
WorkflowNodeData,
|
||||
WorkflowTemplate,
|
||||
ValidationResult,
|
||||
NodePaletteItem,
|
||||
WorkflowNodeType,
|
||||
NodeCategory,
|
||||
} from '../lib/workflow-builder/types';
|
||||
import { validateCanvas } from '../lib/workflow-builder/yaml-converter';
|
||||
|
||||
// =============================================================================
|
||||
// Store State
|
||||
// =============================================================================
|
||||
|
||||
/**
 * State and actions for the visual workflow builder.
 *
 * The store holds one "open" canvas being edited plus the list of saved
 * workflows; edits are staged on `canvas` and flushed to `workflows` via
 * `saveWorkflow`. `isDirty` tracks unsaved changes.
 */
interface WorkflowBuilderState {
  // Canvas state
  canvas: WorkflowCanvas | null;          // currently open workflow, or null if none loaded
  workflows: WorkflowCanvas[];            // saved workflows (persisted)

  // Selection — node and edge selection are mutually exclusive
  selectedNodeId: string | null;
  selectedEdgeId: string | null;

  // UI state
  isDragging: boolean;                    // a palette/canvas drag is in progress
  isDirty: boolean;                       // open canvas has unsaved edits
  isPreviewOpen: boolean;                 // YAML/preview panel visibility
  validation: ValidationResult | null;    // result of the last validate() call

  // Templates
  templates: WorkflowTemplate[];

  // Available items for palette (loaded from the backend by the UI)
  availableSkills: Array<{ id: string; name: string; description: string }>;
  availableHands: Array<{ id: string; name: string; actions: string[] }>;

  // Actions — workflow lifecycle
  createNewWorkflow: (name: string, description?: string) => void;
  loadWorkflow: (id: string) => void;
  saveWorkflow: () => void;
  deleteWorkflow: (id: string) => void;

  // Node actions
  addNode: (type: WorkflowNodeType, position: { x: number; y: number }) => void;
  updateNode: (nodeId: string, data: Partial<WorkflowNodeData>) => void;
  deleteNode: (nodeId: string) => void;
  duplicateNode: (nodeId: string) => void;

  // Edge actions
  addEdge: (source: string, target: string) => void;
  deleteEdge: (edgeId: string) => void;

  // Selection actions
  selectNode: (nodeId: string | null) => void;
  selectEdge: (edgeId: string | null) => void;

  // UI actions
  setDragging: (isDragging: boolean) => void;
  setPreviewOpen: (isOpen: boolean) => void;
  validate: () => ValidationResult;

  // Data loading
  setAvailableSkills: (skills: Array<{ id: string; name: string; description: string }>) => void;
  setAvailableHands: (hands: Array<{ id: string; name: string; actions: string[] }>) => void;

  // Canvas metadata — only name/description/category are editable this way
  updateCanvasMetadata: (updates: Partial<Pick<WorkflowCanvas, 'name' | 'description' | 'category'>>) => void;
}
|
||||
|
||||
// =============================================================================
|
||||
// Default Node Data
|
||||
// =============================================================================
|
||||
|
||||
function getDefaultNodeData(type: WorkflowNodeType, _id: string): WorkflowNodeData {
|
||||
const base = { label: type.charAt(0).toUpperCase() + type.slice(1) };
|
||||
|
||||
switch (type) {
|
||||
case 'input':
|
||||
return { type: 'input', ...base, variableName: 'input', schema: undefined };
|
||||
case 'llm':
|
||||
return { type: 'llm', ...base, template: '', isTemplateFile: false, jsonMode: false };
|
||||
case 'skill':
|
||||
return { type: 'skill', ...base, skillId: '', inputMappings: {} };
|
||||
case 'hand':
|
||||
return { type: 'hand', ...base, handId: '', action: '', params: {} };
|
||||
case 'orchestration':
|
||||
return { type: 'orchestration', ...base, inputMappings: {} };
|
||||
case 'condition':
|
||||
return { type: 'condition', ...base, condition: '', branches: [{ when: '', label: 'Branch 1' }], hasDefault: true };
|
||||
case 'parallel':
|
||||
return { type: 'parallel', ...base, each: '${inputs.items}', maxWorkers: 4 };
|
||||
case 'loop':
|
||||
return { type: 'loop', ...base, each: '${inputs.items}', itemVar: 'item', indexVar: 'index' };
|
||||
case 'export':
|
||||
return { type: 'export', ...base, formats: ['json'] };
|
||||
case 'http':
|
||||
return { type: 'http', ...base, url: '', method: 'GET', headers: {} };
|
||||
case 'setVar':
|
||||
return { type: 'setVar', ...base, variableName: 'result', value: '' };
|
||||
case 'delay':
|
||||
return { type: 'delay', ...base, ms: 1000 };
|
||||
default:
|
||||
throw new Error(`Unknown node type: ${type}`);
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Store Implementation
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Zustand store for the workflow builder. Wrapped in `persist` so saved
 * workflows and templates survive reloads; transient editing state
 * (open canvas, selection, flags) is intentionally NOT persisted — see
 * `partialize` at the bottom.
 */
export const useWorkflowBuilderStore = create<WorkflowBuilderState>()(
  persist(
    (set, get) => ({
      // Initial state
      canvas: null,
      workflows: [],
      selectedNodeId: null,
      selectedEdgeId: null,
      isDragging: false,
      isDirty: false,
      isPreviewOpen: false,
      validation: null,
      templates: [],
      availableSkills: [],
      availableHands: [],

      // Workflow actions

      // Opens a brand-new empty canvas; does NOT add it to `workflows`
      // until saveWorkflow() is called.
      // NOTE(review): ids derive from Date.now(); two creations within the
      // same millisecond would collide — confirm acceptable for this UI.
      createNewWorkflow: (name, description) => {
        const canvas: WorkflowCanvas = {
          id: `workflow_${Date.now()}`,
          name,
          description,
          category: 'custom',
          nodes: [],
          edges: [],
          viewport: { x: 0, y: 0, zoom: 1 },
          metadata: {
            createdAt: new Date().toISOString(),
            updatedAt: new Date().toISOString(),
            tags: [],
            version: '1.0.0',
          },
        };
        set({ canvas, isDirty: false, selectedNodeId: null, selectedEdgeId: null, validation: null });
      },

      // Opens a saved workflow by id; silently a no-op if the id is unknown.
      loadWorkflow: (id) => {
        const workflow = get().workflows.find(w => w.id === id);
        if (workflow) {
          set({ canvas: workflow, isDirty: false, selectedNodeId: null, selectedEdgeId: null });
        }
      },

      // Upserts the open canvas into `workflows`, bumping updatedAt and
      // clearing the dirty flag. No-op when nothing is open.
      saveWorkflow: () => {
        const { canvas, workflows } = get();
        if (!canvas) return;

        const updatedCanvas: WorkflowCanvas = {
          ...canvas,
          metadata: {
            ...canvas.metadata,
            updatedAt: new Date().toISOString(),
          },
        };

        const existingIndex = workflows.findIndex(w => w.id === canvas.id);
        let updatedWorkflows: WorkflowCanvas[];

        if (existingIndex >= 0) {
          // Replace in place to keep list order stable.
          updatedWorkflows = [...workflows];
          updatedWorkflows[existingIndex] = updatedCanvas;
        } else {
          updatedWorkflows = [...workflows, updatedCanvas];
        }

        set({ workflows: updatedWorkflows, canvas: updatedCanvas, isDirty: false });
      },

      // Removes a saved workflow; also closes it if it is the open canvas.
      deleteWorkflow: (id) => {
        set(state => ({
          workflows: state.workflows.filter(w => w.id !== id),
          canvas: state.canvas?.id === id ? null : state.canvas,
        }));
      },

      // Node actions

      // Adds a node with type-specific defaults and selects it.
      addNode: (type, position) => {
        const { canvas } = get();
        if (!canvas) return;

        const id = `${type}_${Date.now()}`;
        const node: WorkflowNode = {
          id,
          type,
          position,
          data: getDefaultNodeData(type, id),
        };

        set({
          canvas: { ...canvas, nodes: [...canvas.nodes, node] },
          isDirty: true,
          selectedNodeId: id,
        });
      },

      // Shallow-merges a partial data patch into the matching node.
      updateNode: (nodeId, data) => {
        const { canvas } = get();
        if (!canvas) return;

        const updatedNodes = canvas.nodes.map(node =>
          node.id === nodeId
            ? { ...node, data: { ...node.data, ...data } as WorkflowNodeData }
            : node
        );

        set({ canvas: { ...canvas, nodes: updatedNodes }, isDirty: true });
      },

      // Removes the node and every edge touching it, clearing selection.
      deleteNode: (nodeId) => {
        const { canvas } = get();
        if (!canvas) return;

        const updatedNodes = canvas.nodes.filter(n => n.id !== nodeId);
        const updatedEdges = canvas.edges.filter(e => e.source !== nodeId && e.target !== nodeId);

        set({
          canvas: { ...canvas, nodes: updatedNodes, edges: updatedEdges },
          isDirty: true,
          selectedNodeId: null,
        });
      },

      // Clones a node with a fresh id, " (copy)" label suffix, and a
      // 50px x/y offset; the clone becomes the selection. Edges are not copied.
      duplicateNode: (nodeId) => {
        const { canvas } = get();
        if (!canvas) return;

        const node = canvas.nodes.find(n => n.id === nodeId);
        if (!node) return;

        const newId = `${node.type}_${Date.now()}`;
        const newNode: WorkflowNode = {
          ...node,
          id: newId,
          position: {
            x: node.position.x + 50,
            y: node.position.y + 50,
          },
          data: { ...node.data, label: `${node.data.label} (copy)` } as WorkflowNodeData,
        };

        set({
          canvas: { ...canvas, nodes: [...canvas.nodes, newNode] },
          isDirty: true,
          selectedNodeId: newId,
        });
      },

      // Edge actions

      // Connects source -> target; duplicate connections are ignored.
      addEdge: (source, target) => {
        const { canvas } = get();
        if (!canvas) return;

        // Check if edge already exists
        const exists = canvas.edges.some(e => e.source === source && e.target === target);
        if (exists) return;

        const edge: WorkflowEdge = {
          id: `edge_${source}_${target}`,
          source,
          target,
          type: 'default',
        };

        set({ canvas: { ...canvas, edges: [...canvas.edges, edge] }, isDirty: true });
      },

      deleteEdge: (edgeId) => {
        const { canvas } = get();
        if (!canvas) return;

        set({
          canvas: { ...canvas, edges: canvas.edges.filter(e => e.id !== edgeId) },
          isDirty: true,
        });
      },

      // Selection actions — selecting one kind always clears the other.
      selectNode: (nodeId) => set({ selectedNodeId: nodeId, selectedEdgeId: null }),
      selectEdge: (edgeId) => set({ selectedEdgeId: edgeId, selectedNodeId: null }),

      // UI actions
      setDragging: (isDragging) => set({ isDragging }),
      setPreviewOpen: (isOpen) => set({ isPreviewOpen: isOpen }),

      // Runs structural validation (via the yaml-converter helper), caches
      // the result in `validation`, and returns it. With no open canvas,
      // returns a synthetic error result without touching state.
      validate: () => {
        const { canvas } = get();
        if (!canvas) {
          return { valid: false, errors: [{ nodeId: 'canvas', message: 'No workflow loaded', severity: 'error' as const }], warnings: [] };
        }
        const result = validateCanvas(canvas);
        set({ validation: result });
        return result;
      },

      // Data loading
      setAvailableSkills: (skills) => set({ availableSkills: skills }),
      setAvailableHands: (hands) => set({ availableHands: hands }),

      // Canvas metadata — shallow-merges name/description/category updates.
      updateCanvasMetadata: (updates) => {
        const { canvas } = get();
        if (!canvas) return;
        set({ canvas: { ...canvas, ...updates }, isDirty: true });
      },
    }),
    {
      // localStorage key for the persisted slice.
      name: 'workflow-builder-storage',
      // Persist only durable data; editing/UI state is rebuilt per session.
      partialize: (state) => ({
        workflows: state.workflows,
        templates: state.templates,
      }),
    }
  )
);
|
||||
|
||||
// =============================================================================
|
||||
// Node Palette Items
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Static catalog of node types shown in the builder palette, grouped by
 * category. `defaultData` seeds the inspector form for a freshly dropped
 * node; full runtime defaults come from getDefaultNodeData().
 */
export const nodePaletteItems: NodePaletteItem[] = [
  // Input category
  {
    type: 'input',
    label: 'Input',
    description: 'Define workflow input variables',
    icon: '📥',
    category: 'input',
    defaultData: { variableName: 'input' },
  },

  // AI category
  {
    type: 'llm',
    label: 'LLM Generate',
    description: 'Generate text using LLM',
    icon: '🤖',
    category: 'ai',
    defaultData: { template: '', jsonMode: false },
  },
  {
    type: 'skill',
    label: 'Skill',
    description: 'Execute a skill',
    icon: '⚡',
    category: 'ai',
    defaultData: { skillId: '', inputMappings: {} },
  },
  {
    type: 'orchestration',
    label: 'Skill Orchestration',
    description: 'Execute multiple skills in a DAG',
    icon: '🔀',
    category: 'ai',
    defaultData: { inputMappings: {} },
  },

  // Action category
  {
    type: 'hand',
    label: 'Hand',
    description: 'Execute a hand action',
    icon: '✋',
    category: 'action',
    defaultData: { handId: '', action: '', params: {} },
  },
  {
    type: 'http',
    label: 'HTTP Request',
    description: 'Make an HTTP request',
    icon: '🌐',
    category: 'action',
    defaultData: { url: '', method: 'GET', headers: {} },
  },
  {
    type: 'setVar',
    label: 'Set Variable',
    description: 'Set a variable value',
    icon: '📝',
    category: 'action',
    defaultData: { variableName: '', value: '' },
  },
  {
    type: 'delay',
    label: 'Delay',
    description: 'Pause execution',
    icon: '⏱️',
    category: 'action',
    defaultData: { ms: 1000 },
  },

  // Control category
  // NOTE(review): 'condition' shares the 🔀 icon with 'orchestration' and
  // 'parallel' shares ⚡ with 'skill' — confirm the duplication is intended.
  {
    type: 'condition',
    label: 'Condition',
    description: 'Branch based on condition',
    icon: '🔀',
    category: 'control',
    defaultData: { condition: '', branches: [{ when: '', label: 'Branch' }] },
  },
  {
    type: 'parallel',
    label: 'Parallel',
    description: 'Execute in parallel',
    icon: '⚡',
    category: 'control',
    defaultData: { each: '${inputs.items}', maxWorkers: 4 },
  },
  {
    type: 'loop',
    label: 'Loop',
    description: 'Iterate over items',
    icon: '🔄',
    category: 'control',
    defaultData: { each: '${inputs.items}', itemVar: 'item', indexVar: 'index' },
  },

  // Output category
  {
    type: 'export',
    label: 'Export',
    description: 'Export to file formats',
    icon: '📤',
    category: 'output',
    defaultData: { formats: ['json'] },
  },
];
|
||||
|
||||
// Group palette items by category
|
||||
export const paletteCategories: Record<NodeCategory, NodePaletteItem[]> = {
|
||||
input: nodePaletteItems.filter(i => i.category === 'input'),
|
||||
ai: nodePaletteItems.filter(i => i.category === 'ai'),
|
||||
action: nodePaletteItems.filter(i => i.category === 'action'),
|
||||
control: nodePaletteItems.filter(i => i.category === 'control'),
|
||||
output: nodePaletteItems.filter(i => i.category === 'output'),
|
||||
};
|
||||
@@ -10,6 +10,7 @@
|
||||
import type { Hand, HandStatus, HandParameter } from './hands';
|
||||
import { HAND_DEFINITIONS } from './hands';
|
||||
import type { Workflow, WorkflowRunStatus } from './workflow';
|
||||
import { HAND_CATEGORY_MAP } from '../constants/hands';
|
||||
|
||||
// === Category Types ===
|
||||
|
||||
@@ -42,19 +43,11 @@ export interface CategoryStats {
|
||||
}
|
||||
|
||||
// === Category Mapping for Hands ===
|
||||
// Re-export from constants for backward compatibility
|
||||
export { HAND_CATEGORY_MAP, getHandCategory } from '../constants/hands';
|
||||
|
||||
/**
|
||||
* Maps Hand IDs to their categories
|
||||
*/
|
||||
export const HAND_CATEGORY_MAP: Record<string, CategoryType> = {
|
||||
researcher: 'research',
|
||||
browser: 'research',
|
||||
collector: 'data',
|
||||
predictor: 'data',
|
||||
lead: 'communication',
|
||||
twitter: 'communication',
|
||||
clip: 'content',
|
||||
};
|
||||
// Re-export category type for backward compatibility
|
||||
export type { HandCategoryType } from '../constants/hands';
|
||||
|
||||
/**
|
||||
* Category configurations for UI display
|
||||
|
||||
Reference in New Issue
Block a user