From 91ecaa3ed7c3d7d5a7403d67108287c22c37dbaf Mon Sep 17 00:00:00 2001 From: iven Date: Sat, 11 Apr 2026 09:54:02 +0800 Subject: [PATCH] feat(workflow): add workflow engine module (Phase 4) Implement complete workflow engine with BPMN subset support: Backend (erp-workflow crate): - Token-driven execution engine with exclusive/parallel gateway support - BPMN parser with flow graph validation - Expression evaluator for conditional branching - Process definition CRUD with draft/publish lifecycle - Process instance management (start, suspend, terminate) - Task service (pending, complete, delegate) - PostgreSQL advisory locks for concurrent safety - 5 database tables: process_definitions, process_instances, tokens, tasks, process_variables - 13 API endpoints with RBAC protection - Timeout checker framework (placeholder) Frontend: - Workflow page with 4 tabs (definitions, pending, completed, monitor) - React Flow visual process designer (@xyflow/react) - Process viewer with active node highlighting - 3 API client modules for workflow endpoints - Sidebar menu integration --- Cargo.lock | 5 + apps/web/package.json | 1 + apps/web/pnpm-lock.yaml | 189 ++++++++- apps/web/src/App.tsx | 2 + apps/web/src/api/workflowDefinitions.ts | 89 +++++ apps/web/src/api/workflowInstances.ts | 65 +++ apps/web/src/api/workflowTasks.ts | 61 +++ apps/web/src/layouts/MainLayout.tsx | 2 + apps/web/src/pages/Workflow.tsx | 25 ++ .../web/src/pages/workflow/CompletedTasks.tsx | 56 +++ .../src/pages/workflow/InstanceMonitor.tsx | 78 ++++ apps/web/src/pages/workflow/PendingTasks.tsx | 97 +++++ .../src/pages/workflow/ProcessDefinitions.tsx | 122 ++++++ .../src/pages/workflow/ProcessDesigner.tsx | 243 ++++++++++++ apps/web/src/pages/workflow/ProcessViewer.tsx | 84 ++++ crates/erp-auth/src/service/seed.rs | 11 +- crates/erp-server/Cargo.toml | 1 + crates/erp-server/migration/src/lib.rs | 10 + ...60412_000018_create_process_definitions.rs | 122 ++++++ ...0260412_000019_create_process_instances.rs | 124 
++++++ .../src/m20260412_000020_create_tokens.rs | 90 +++++ .../src/m20260412_000021_create_tasks.rs | 160 ++++++++ ...0260412_000022_create_process_variables.rs | 84 ++++ crates/erp-server/src/main.rs | 8 +- crates/erp-server/src/state.rs | 10 + crates/erp-workflow/Cargo.toml | 20 +- crates/erp-workflow/src/dto.rs | 211 ++++++++++ crates/erp-workflow/src/engine/executor.rs | 371 ++++++++++++++++++ crates/erp-workflow/src/engine/expression.rs | 325 +++++++++++++++ crates/erp-workflow/src/engine/mod.rs | 5 + crates/erp-workflow/src/engine/model.rs | 122 ++++++ crates/erp-workflow/src/engine/parser.rs | 258 ++++++++++++ crates/erp-workflow/src/engine/timeout.rs | 36 ++ crates/erp-workflow/src/entity/mod.rs | 5 + .../src/entity/process_definition.rs | 40 ++ .../src/entity/process_instance.rs | 59 +++ .../src/entity/process_variable.rs | 39 ++ crates/erp-workflow/src/entity/task.rs | 65 +++ crates/erp-workflow/src/entity/token.rs | 34 ++ crates/erp-workflow/src/error.rs | 49 +++ .../src/handler/definition_handler.rs | 124 ++++++ .../src/handler/instance_handler.rs | 112 ++++++ crates/erp-workflow/src/handler/mod.rs | 3 + .../erp-workflow/src/handler/task_handler.rs | 113 ++++++ crates/erp-workflow/src/lib.rs | 15 + crates/erp-workflow/src/module.rs | 121 ++++++ .../src/service/definition_service.rs | 269 +++++++++++++ .../src/service/instance_service.rs | 353 +++++++++++++++++ crates/erp-workflow/src/service/mod.rs | 3 + .../erp-workflow/src/service/task_service.rs | 336 ++++++++++++++++ crates/erp-workflow/src/workflow_state.rs | 11 + 51 files changed, 4826 insertions(+), 12 deletions(-) create mode 100644 apps/web/src/api/workflowDefinitions.ts create mode 100644 apps/web/src/api/workflowInstances.ts create mode 100644 apps/web/src/api/workflowTasks.ts create mode 100644 apps/web/src/pages/Workflow.tsx create mode 100644 apps/web/src/pages/workflow/CompletedTasks.tsx create mode 100644 apps/web/src/pages/workflow/InstanceMonitor.tsx create mode 100644 
apps/web/src/pages/workflow/PendingTasks.tsx create mode 100644 apps/web/src/pages/workflow/ProcessDefinitions.tsx create mode 100644 apps/web/src/pages/workflow/ProcessDesigner.tsx create mode 100644 apps/web/src/pages/workflow/ProcessViewer.tsx create mode 100644 crates/erp-server/migration/src/m20260412_000018_create_process_definitions.rs create mode 100644 crates/erp-server/migration/src/m20260412_000019_create_process_instances.rs create mode 100644 crates/erp-server/migration/src/m20260412_000020_create_tokens.rs create mode 100644 crates/erp-server/migration/src/m20260412_000021_create_tasks.rs create mode 100644 crates/erp-server/migration/src/m20260412_000022_create_process_variables.rs create mode 100644 crates/erp-workflow/src/dto.rs create mode 100644 crates/erp-workflow/src/engine/executor.rs create mode 100644 crates/erp-workflow/src/engine/expression.rs create mode 100644 crates/erp-workflow/src/engine/mod.rs create mode 100644 crates/erp-workflow/src/engine/model.rs create mode 100644 crates/erp-workflow/src/engine/parser.rs create mode 100644 crates/erp-workflow/src/engine/timeout.rs create mode 100644 crates/erp-workflow/src/entity/mod.rs create mode 100644 crates/erp-workflow/src/entity/process_definition.rs create mode 100644 crates/erp-workflow/src/entity/process_instance.rs create mode 100644 crates/erp-workflow/src/entity/process_variable.rs create mode 100644 crates/erp-workflow/src/entity/task.rs create mode 100644 crates/erp-workflow/src/entity/token.rs create mode 100644 crates/erp-workflow/src/error.rs create mode 100644 crates/erp-workflow/src/handler/definition_handler.rs create mode 100644 crates/erp-workflow/src/handler/instance_handler.rs create mode 100644 crates/erp-workflow/src/handler/mod.rs create mode 100644 crates/erp-workflow/src/handler/task_handler.rs create mode 100644 crates/erp-workflow/src/module.rs create mode 100644 crates/erp-workflow/src/service/definition_service.rs create mode 100644 
crates/erp-workflow/src/service/instance_service.rs create mode 100644 crates/erp-workflow/src/service/mod.rs create mode 100644 crates/erp-workflow/src/service/task_service.rs create mode 100644 crates/erp-workflow/src/workflow_state.rs diff --git a/Cargo.lock b/Cargo.lock index b02d3ab..c3f0334 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -898,6 +898,7 @@ dependencies = [ "erp-config", "erp-core", "erp-server-migration", + "erp-workflow", "redis", "sea-orm", "serde", @@ -924,15 +925,19 @@ name = "erp-workflow" version = "0.1.0" dependencies = [ "anyhow", + "async-trait", "axum", "chrono", "erp-core", "sea-orm", "serde", "serde_json", + "thiserror", "tokio", "tracing", + "utoipa", "uuid", + "validator", ] [[package]] diff --git a/apps/web/package.json b/apps/web/package.json index 4622eda..b942b4a 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -11,6 +11,7 @@ }, "dependencies": { "@ant-design/icons": "^6.1.1", + "@xyflow/react": "^12.10.2", "antd": "^6.3.5", "axios": "^1.15.0", "react": "^19.2.4", diff --git a/apps/web/pnpm-lock.yaml b/apps/web/pnpm-lock.yaml index d203a12..07abf07 100644 --- a/apps/web/pnpm-lock.yaml +++ b/apps/web/pnpm-lock.yaml @@ -11,6 +11,9 @@ importers: '@ant-design/icons': specifier: ^6.1.1 version: 6.1.1(react-dom@19.2.5(react@19.2.5))(react@19.2.5) + '@xyflow/react': + specifier: ^12.10.2 + version: 12.10.2(@types/react@19.2.14)(react-dom@19.2.5(react@19.2.5))(react@19.2.5) antd: specifier: ^6.3.5 version: 6.3.5(react-dom@19.2.5(react@19.2.5))(react@19.2.5) @@ -28,7 +31,7 @@ importers: version: 7.14.0(react-dom@19.2.5(react@19.2.5))(react@19.2.5) zustand: specifier: ^5.0.12 - version: 5.0.12(@types/react@19.2.14)(react@19.2.5) + version: 5.0.12(@types/react@19.2.14)(react@19.2.5)(use-sync-external-store@1.6.0(react@19.2.5)) devDependencies: '@eslint/js': specifier: ^9.39.4 @@ -756,6 +759,24 @@ packages: '@tybys/wasm-util@0.10.1': resolution: {integrity: 
sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + '@types/d3-color@3.1.3': + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + + '@types/d3-drag@3.0.7': + resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} + + '@types/d3-interpolate@3.0.4': + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + + '@types/d3-selection@3.0.11': + resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==} + + '@types/d3-transition@3.0.9': + resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==} + + '@types/d3-zoom@3.0.8': + resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==} + '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} @@ -845,6 +866,15 @@ packages: babel-plugin-react-compiler: optional: true + '@xyflow/react@12.10.2': + resolution: {integrity: sha512-CgIi6HwlcHXwlkTpr0fxLv/0sRVNZ8IdwKLzzeCscaYBwpvfcH1QFOCeaTCuEn1FQEs/B8CjnTSjhs8udgmBgQ==} + peerDependencies: + react: '>=17' + react-dom: '>=17' + + '@xyflow/system@0.0.76': + resolution: {integrity: sha512-hvwvnRS1B3REwVDlWexsq7YQaPZeG3/mKo1jv38UmnpWmxihp14bW6VtEOuHEwJX2FvzFw8k77LyKSk/wiZVNA==} + acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: @@ -916,6 +946,9 @@ packages: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} + classcat@5.0.5: + resolution: {integrity: 
sha512-JhZUT7JFcQy/EzW605k/ktHtncoo9vnyW/2GspNYwFlN1C/WmjuV/xtS04e9SOkL2sTdw0VAZ2UGCcQ9lR6p6w==} + clsx@2.1.1: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} engines: {node: '>=6'} @@ -951,6 +984,44 @@ packages: csstype@3.2.3: resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-dispatch@3.0.1: + resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==} + engines: {node: '>=12'} + + d3-drag@3.0.0: + resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==} + engines: {node: '>=12'} + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + d3-selection@3.0.0: + resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + d3-transition@3.0.1: + resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==} + engines: {node: '>=12'} + peerDependencies: + d3-selection: 2 - 3 + + d3-zoom@3.0.0: + resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==} + engines: {node: '>=12'} + 
dayjs@1.11.20: resolution: {integrity: sha512-YbwwqR/uYpeoP4pu043q+LTDLFBLApUP6VxRihdfNTqu4ubqMlGDLd6ErXhEgsyvY0K6nCs7nggYumAN+9uEuQ==} @@ -1544,6 +1615,11 @@ packages: uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + use-sync-external-store@1.6.0: + resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + vite@8.0.8: resolution: {integrity: sha512-dbU7/iLVa8KZALJyLOBOQ88nOXtNG8vxKuOT4I2mD+Ya70KPceF4IAmDsmU0h1Qsn5bPrvsY9HJstCRh3hG6Uw==} engines: {node: ^20.19.0 || >=22.12.0} @@ -1612,6 +1688,21 @@ packages: zod@4.3.6: resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==} + zustand@4.5.7: + resolution: {integrity: sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw==} + engines: {node: '>=12.7.0'} + peerDependencies: + '@types/react': '>=16.8' + immer: '>=9.0.6' + react: '>=16.8' + peerDependenciesMeta: + '@types/react': + optional: true + immer: + optional: true + react: + optional: true + zustand@5.0.12: resolution: {integrity: sha512-i77ae3aZq4dhMlRhJVCYgMLKuSiZAaUPAct2AksxQ+gOtimhGMdXljRT21P5BNpeT4kXlLIckvkPM029OljD7g==} engines: {node: '>=12.20.0'} @@ -2357,6 +2448,27 @@ snapshots: tslib: 2.8.1 optional: true + '@types/d3-color@3.1.3': {} + + '@types/d3-drag@3.0.7': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-interpolate@3.0.4': + dependencies: + '@types/d3-color': 3.1.3 + + '@types/d3-selection@3.0.11': {} + + '@types/d3-transition@3.0.9': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-zoom@3.0.8': + dependencies: + '@types/d3-interpolate': 3.0.4 + '@types/d3-selection': 3.0.11 + '@types/estree@1.0.8': {} '@types/json-schema@7.0.15': {} @@ -2469,6 +2581,29 @@ snapshots: '@rolldown/pluginutils': 
1.0.0-rc.7 vite: 8.0.8(@types/node@24.12.2)(jiti@2.6.1) + '@xyflow/react@12.10.2(@types/react@19.2.14)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)': + dependencies: + '@xyflow/system': 0.0.76 + classcat: 5.0.5 + react: 19.2.5 + react-dom: 19.2.5(react@19.2.5) + zustand: 4.5.7(@types/react@19.2.14)(react@19.2.5) + transitivePeerDependencies: + - '@types/react' + - immer + + '@xyflow/system@0.0.76': + dependencies: + '@types/d3-drag': 3.0.7 + '@types/d3-interpolate': 3.0.4 + '@types/d3-selection': 3.0.11 + '@types/d3-transition': 3.0.9 + '@types/d3-zoom': 3.0.8 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-zoom: 3.0.0 + acorn-jsx@5.3.2(acorn@8.16.0): dependencies: acorn: 8.16.0 @@ -2592,6 +2727,8 @@ snapshots: ansi-styles: 4.3.0 supports-color: 7.2.0 + classcat@5.0.5: {} + clsx@2.1.1: {} color-convert@2.0.1: @@ -2620,6 +2757,42 @@ snapshots: csstype@3.2.3: {} + d3-color@3.1.0: {} + + d3-dispatch@3.0.1: {} + + d3-drag@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-selection: 3.0.0 + + d3-ease@3.0.1: {} + + d3-interpolate@3.0.1: + dependencies: + d3-color: 3.1.0 + + d3-selection@3.0.0: {} + + d3-timer@3.0.1: {} + + d3-transition@3.0.1(d3-selection@3.0.0): + dependencies: + d3-color: 3.1.0 + d3-dispatch: 3.0.1 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-timer: 3.0.1 + + d3-zoom@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + dayjs@1.11.20: {} debug@4.4.3: @@ -3143,6 +3316,10 @@ snapshots: dependencies: punycode: 2.3.1 + use-sync-external-store@1.6.0(react@19.2.5): + dependencies: + react: 19.2.5 + vite@8.0.8(@types/node@24.12.2)(jiti@2.6.1): dependencies: lightningcss: 1.32.0 @@ -3171,7 +3348,15 @@ snapshots: zod@4.3.6: {} - zustand@5.0.12(@types/react@19.2.14)(react@19.2.5): + zustand@4.5.7(@types/react@19.2.14)(react@19.2.5): + dependencies: + use-sync-external-store: 1.6.0(react@19.2.5) optionalDependencies: 
'@types/react': 19.2.14 react: 19.2.5 + + zustand@5.0.12(@types/react@19.2.14)(react@19.2.5)(use-sync-external-store@1.6.0(react@19.2.5)): + optionalDependencies: + '@types/react': 19.2.14 + react: 19.2.5 + use-sync-external-store: 1.6.0(react@19.2.5) diff --git a/apps/web/src/App.tsx b/apps/web/src/App.tsx index ffb59f0..d3d8303 100644 --- a/apps/web/src/App.tsx +++ b/apps/web/src/App.tsx @@ -9,6 +9,7 @@ import Roles from './pages/Roles'; import Users from './pages/Users'; import Organizations from './pages/Organizations'; import Settings from './pages/Settings'; +import Workflow from './pages/Workflow'; import { useAuthStore } from './stores/auth'; import { useAppStore } from './stores/app'; @@ -46,6 +47,7 @@ export default function App() { } /> } /> } /> + } /> } /> diff --git a/apps/web/src/api/workflowDefinitions.ts b/apps/web/src/api/workflowDefinitions.ts new file mode 100644 index 0000000..624e4ab --- /dev/null +++ b/apps/web/src/api/workflowDefinitions.ts @@ -0,0 +1,89 @@ +import client from './client'; +import type { PaginatedResponse } from './users'; + +export interface NodeDef { + id: string; + type: 'StartEvent' | 'EndEvent' | 'UserTask' | 'ServiceTask' | 'ExclusiveGateway' | 'ParallelGateway'; + name: string; + assignee_id?: string; + candidate_groups?: string[]; + service_type?: string; + position?: { x: number; y: number }; +} + +export interface EdgeDef { + id: string; + source: string; + target: string; + condition?: string; + label?: string; +} + +export interface ProcessDefinitionInfo { + id: string; + name: string; + key: string; + version: number; + category?: string; + description?: string; + nodes: NodeDef[]; + edges: EdgeDef[]; + status: string; + created_at: string; + updated_at: string; +} + +export interface CreateProcessDefinitionRequest { + name: string; + key: string; + category?: string; + description?: string; + nodes: NodeDef[]; + edges: EdgeDef[]; +} + +export interface UpdateProcessDefinitionRequest { + name?: string; + 
category?: string; + description?: string; + nodes?: NodeDef[]; + edges?: EdgeDef[]; +} + +export async function listProcessDefinitions(page = 1, pageSize = 20) { + const { data } = await client.get<{ success: boolean; data: PaginatedResponse }>( + '/workflow/definitions', + { params: { page, page_size: pageSize } }, + ); + return data.data; +} + +export async function getProcessDefinition(id: string) { + const { data } = await client.get<{ success: boolean; data: ProcessDefinitionInfo }>( + `/workflow/definitions/${id}`, + ); + return data.data; +} + +export async function createProcessDefinition(req: CreateProcessDefinitionRequest) { + const { data } = await client.post<{ success: boolean; data: ProcessDefinitionInfo }>( + '/workflow/definitions', + req, + ); + return data.data; +} + +export async function updateProcessDefinition(id: string, req: UpdateProcessDefinitionRequest) { + const { data } = await client.put<{ success: boolean; data: ProcessDefinitionInfo }>( + `/workflow/definitions/${id}`, + req, + ); + return data.data; +} + +export async function publishProcessDefinition(id: string) { + const { data } = await client.post<{ success: boolean; data: ProcessDefinitionInfo }>( + `/workflow/definitions/${id}/publish`, + ); + return data.data; +} diff --git a/apps/web/src/api/workflowInstances.ts b/apps/web/src/api/workflowInstances.ts new file mode 100644 index 0000000..73664fe --- /dev/null +++ b/apps/web/src/api/workflowInstances.ts @@ -0,0 +1,65 @@ +import client from './client'; +import type { PaginatedResponse } from './users'; + +export interface TokenInfo { + id: string; + node_id: string; + status: string; + created_at: string; +} + +export interface ProcessInstanceInfo { + id: string; + definition_id: string; + definition_name?: string; + business_key?: string; + status: string; + started_by: string; + started_at: string; + completed_at?: string; + created_at: string; + active_tokens: TokenInfo[]; +} + +export interface StartInstanceRequest { + 
definition_id: string; + business_key?: string; + variables?: Array<{ name: string; var_type?: string; value: unknown }>; +} + +export async function startInstance(req: StartInstanceRequest) { + const { data } = await client.post<{ success: boolean; data: ProcessInstanceInfo }>( + '/workflow/instances', + req, + ); + return data.data; +} + +export async function listInstances(page = 1, pageSize = 20) { + const { data } = await client.get<{ success: boolean; data: PaginatedResponse }>( + '/workflow/instances', + { params: { page, page_size: pageSize } }, + ); + return data.data; +} + +export async function getInstance(id: string) { + const { data } = await client.get<{ success: boolean; data: ProcessInstanceInfo }>( + `/workflow/instances/${id}`, + ); + return data.data; +} + +export async function suspendInstance(id: string) { + const { data } = await client.post<{ success: boolean; data: null }>( + `/workflow/instances/${id}/suspend`, + ); + return data.data; +} + +export async function terminateInstance(id: string) { + const { data } = await client.post<{ success: boolean; data: null }>( + `/workflow/instances/${id}/terminate`, + ); + return data.data; +} diff --git a/apps/web/src/api/workflowTasks.ts b/apps/web/src/api/workflowTasks.ts new file mode 100644 index 0000000..7c77371 --- /dev/null +++ b/apps/web/src/api/workflowTasks.ts @@ -0,0 +1,61 @@ +import client from './client'; +import type { PaginatedResponse } from './users'; + +export interface TaskInfo { + id: string; + instance_id: string; + token_id: string; + node_id: string; + node_name?: string; + assignee_id?: string; + candidate_groups?: unknown; + status: string; + outcome?: string; + form_data?: unknown; + due_date?: string; + completed_at?: string; + created_at: string; + definition_name?: string; + business_key?: string; +} + +export interface CompleteTaskRequest { + outcome: string; + form_data?: Record; +} + +export interface DelegateTaskRequest { + delegate_to: string; +} + +export async 
function listPendingTasks(page = 1, pageSize = 20) { + const { data } = await client.get<{ success: boolean; data: PaginatedResponse }>( + '/workflow/tasks/pending', + { params: { page, page_size: pageSize } }, + ); + return data.data; +} + +export async function listCompletedTasks(page = 1, pageSize = 20) { + const { data } = await client.get<{ success: boolean; data: PaginatedResponse }>( + '/workflow/tasks/completed', + { params: { page, page_size: pageSize } }, + ); + return data.data; +} + +export async function completeTask(id: string, req: CompleteTaskRequest) { + const { data } = await client.post<{ success: boolean; data: TaskInfo }>( + `/workflow/tasks/${id}/complete`, + req, + ); + return data.data; +} + +export async function delegateTask(id: string, req: DelegateTaskRequest) { + const { data } = await client.post<{ success: boolean; data: TaskInfo }>( + `/workflow/tasks/${id}/delegate`, + req, + ); + return data.data; +} diff --git a/apps/web/src/layouts/MainLayout.tsx b/apps/web/src/layouts/MainLayout.tsx index f87602c..e9b58ab 100644 --- a/apps/web/src/layouts/MainLayout.tsx +++ b/apps/web/src/layouts/MainLayout.tsx @@ -8,6 +8,7 @@ import { SettingOutlined, MenuFoldOutlined, MenuUnfoldOutlined, + PartitionOutlined, LogoutOutlined, } from '@ant-design/icons'; import { useNavigate } from 'react-router-dom'; @@ -21,6 +22,7 @@ const menuItems = [ { key: '/users', icon: , label: '用户管理' }, { key: '/roles', icon: , label: '权限管理' }, { key: '/organizations', icon: , label: '组织架构' }, + { key: '/workflow', icon: , label: '工作流' }, { key: '/settings', icon: , label: '系统设置' }, ]; diff --git a/apps/web/src/pages/Workflow.tsx b/apps/web/src/pages/Workflow.tsx new file mode 100644 index 0000000..f9efc64 --- /dev/null +++ b/apps/web/src/pages/Workflow.tsx @@ -0,0 +1,25 @@ +import { useState } from 'react'; +import { Tabs } from 'antd'; +import ProcessDefinitions from './workflow/ProcessDefinitions'; +import PendingTasks from './workflow/PendingTasks'; +import 
CompletedTasks from './workflow/CompletedTasks'; +import InstanceMonitor from './workflow/InstanceMonitor'; + +export default function Workflow() { + const [activeKey, setActiveKey] = useState('definitions'); + + return ( +
+ }, + { key: 'pending', label: '我的待办', children: }, + { key: 'completed', label: '我的已办', children: }, + { key: 'instances', label: '流程监控', children: }, + ]} + /> +
+ ); +} diff --git a/apps/web/src/pages/workflow/CompletedTasks.tsx b/apps/web/src/pages/workflow/CompletedTasks.tsx new file mode 100644 index 0000000..0c65acb --- /dev/null +++ b/apps/web/src/pages/workflow/CompletedTasks.tsx @@ -0,0 +1,56 @@ +import { useEffect, useState } from 'react'; +import { Table, Tag } from 'antd'; +import type { ColumnsType } from 'antd/es/table'; +import { listCompletedTasks, type TaskInfo } from '../../api/workflowTasks'; + +const outcomeLabels: Record = { + approved: { color: 'green', text: '同意' }, + rejected: { color: 'red', text: '拒绝' }, + delegated: { color: 'blue', text: '已委派' }, +}; + +export default function CompletedTasks() { + const [data, setData] = useState([]); + const [total, setTotal] = useState(0); + const [page, setPage] = useState(1); + const [loading, setLoading] = useState(false); + + const fetch = async () => { + setLoading(true); + try { + const res = await listCompletedTasks(page, 20); + setData(res.data); + setTotal(res.total); + } finally { + setLoading(false); + } + }; + + useEffect(() => { fetch(); }, [page]); + + const columns: ColumnsType = [ + { title: '任务名称', dataIndex: 'node_name', key: 'node_name' }, + { title: '流程', dataIndex: 'definition_name', key: 'definition_name' }, + { title: '业务键', dataIndex: 'business_key', key: 'business_key' }, + { + title: '结果', dataIndex: 'outcome', key: 'outcome', width: 100, + render: (o: string) => { + const info = outcomeLabels[o] || { color: 'default', text: o }; + return {info.text}; + }, + }, + { title: '完成时间', dataIndex: 'completed_at', key: 'completed_at', width: 180, + render: (v: string) => v ? 
new Date(v).toLocaleString() : '-', + }, + ]; + + return ( + + ); +} diff --git a/apps/web/src/pages/workflow/InstanceMonitor.tsx b/apps/web/src/pages/workflow/InstanceMonitor.tsx new file mode 100644 index 0000000..d4aa2a1 --- /dev/null +++ b/apps/web/src/pages/workflow/InstanceMonitor.tsx @@ -0,0 +1,78 @@ +import { useEffect, useState } from 'react'; +import { Button, message, Space, Table, Tag } from 'antd'; +import type { ColumnsType } from 'antd/es/table'; +import { + listInstances, + terminateInstance, + type ProcessInstanceInfo, +} from '../../api/workflowInstances'; + +const statusColors: Record = { + running: 'processing', + suspended: 'warning', + completed: 'green', + terminated: 'red', +}; + +export default function InstanceMonitor() { + const [data, setData] = useState([]); + const [total, setTotal] = useState(0); + const [page, setPage] = useState(1); + const [loading, setLoading] = useState(false); + + const fetch = async () => { + setLoading(true); + try { + const res = await listInstances(page, 20); + setData(res.data); + setTotal(res.total); + } finally { + setLoading(false); + } + }; + + useEffect(() => { fetch(); }, [page]); + + const handleTerminate = async (id: string) => { + try { + await terminateInstance(id); + message.success('已终止'); + fetch(); + } catch { + message.error('操作失败'); + } + }; + + const columns: ColumnsType = [ + { title: '流程', dataIndex: 'definition_name', key: 'definition_name' }, + { title: '业务键', dataIndex: 'business_key', key: 'business_key' }, + { + title: '状态', dataIndex: 'status', key: 'status', width: 100, + render: (s: string) => {s}, + }, + { title: '当前节点', key: 'current_nodes', width: 150, + render: (_, record) => record.active_tokens.map(t => t.node_id).join(', ') || '-', + }, + { title: '发起时间', dataIndex: 'started_at', key: 'started_at', width: 180, + render: (v: string) => new Date(v).toLocaleString(), + }, + { + title: '操作', key: 'action', width: 100, + render: (_, record) => ( + record.status === 'running' ? 
( + + ) : null + ), + }, + ]; + + return ( +
+ ); +} diff --git a/apps/web/src/pages/workflow/PendingTasks.tsx b/apps/web/src/pages/workflow/PendingTasks.tsx new file mode 100644 index 0000000..34d8efa --- /dev/null +++ b/apps/web/src/pages/workflow/PendingTasks.tsx @@ -0,0 +1,97 @@ +import { useEffect, useState } from 'react'; +import { Button, message, Modal, Space, Table, Tag } from 'antd'; +import type { ColumnsType } from 'antd/es/table'; +import { + listPendingTasks, + completeTask, + type TaskInfo, +} from '../../api/workflowTasks'; + +const statusColors: Record = { + pending: 'processing', +}; + +export default function PendingTasks() { + const [data, setData] = useState([]); + const [total, setTotal] = useState(0); + const [page, setPage] = useState(1); + const [loading, setLoading] = useState(false); + const [completeModal, setCompleteModal] = useState(null); + const [outcome, setOutcome] = useState('approved'); + + const fetch = async () => { + setLoading(true); + try { + const res = await listPendingTasks(page, 20); + setData(res.data); + setTotal(res.total); + } finally { + setLoading(false); + } + }; + + useEffect(() => { fetch(); }, [page]); + + const handleComplete = async () => { + if (!completeModal) return; + try { + await completeTask(completeModal.id, { outcome }); + message.success('审批完成'); + setCompleteModal(null); + fetch(); + } catch { + message.error('审批失败'); + } + }; + + const columns: ColumnsType = [ + { title: '任务名称', dataIndex: 'node_name', key: 'node_name' }, + { title: '流程', dataIndex: 'definition_name', key: 'definition_name' }, + { title: '业务键', dataIndex: 'business_key', key: 'business_key' }, + { + title: '状态', dataIndex: 'status', key: 'status', width: 100, + render: (s: string) => {s}, + }, + { title: '创建时间', dataIndex: 'created_at', key: 'created_at', width: 180, + render: (v: string) => new Date(v).toLocaleString(), + }, + { + title: '操作', key: 'action', width: 120, + render: (_, record) => ( + + + + ), + }, + ]; + + return ( + <> +
+ setCompleteModal(null)} + > +

任务: {completeModal?.node_name}

+ + + + +
+ + ); +} diff --git a/apps/web/src/pages/workflow/ProcessDefinitions.tsx b/apps/web/src/pages/workflow/ProcessDefinitions.tsx new file mode 100644 index 0000000..545709b --- /dev/null +++ b/apps/web/src/pages/workflow/ProcessDefinitions.tsx @@ -0,0 +1,122 @@ +import { useEffect, useState } from 'react'; +import { Button, message, Modal, Space, Table, Tag } from 'antd'; +import type { ColumnsType } from 'antd/es/table'; +import { + listProcessDefinitions, + createProcessDefinition, + publishProcessDefinition, + type ProcessDefinitionInfo, + type CreateProcessDefinitionRequest, +} from '../../api/workflowDefinitions'; +import ProcessDesigner from './ProcessDesigner'; + +const statusColors: Record = { + draft: 'default', + published: 'green', + deprecated: 'red', +}; + +export default function ProcessDefinitions() { + const [data, setData] = useState([]); + const [total, setTotal] = useState(0); + const [page, setPage] = useState(1); + const [loading, setLoading] = useState(false); + const [designerOpen, setDesignerOpen] = useState(false); + const [editingId, setEditingId] = useState(null); + + const fetch = async () => { + setLoading(true); + try { + const res = await listProcessDefinitions(page, 20); + setData(res.data); + setTotal(res.total); + } finally { + setLoading(false); + } + }; + + useEffect(() => { fetch(); }, [page]); + + const handleCreate = () => { + setEditingId(null); + setDesignerOpen(true); + }; + + const handleEdit = (id: string) => { + setEditingId(id); + setDesignerOpen(true); + }; + + const handlePublish = async (id: string) => { + try { + await publishProcessDefinition(id); + message.success('发布成功'); + fetch(); + } catch { + message.error('发布失败'); + } + }; + + const handleSave = async (req: CreateProcessDefinitionRequest) => { + try { + await createProcessDefinition(req); + message.success('创建成功'); + setDesignerOpen(false); + fetch(); + } catch { + message.error('创建失败'); + } + }; + + const columns: ColumnsType = [ + { title: '名称', dataIndex: 
'name', key: 'name' }, + { title: '编码', dataIndex: 'key', key: 'key' }, + { title: '版本', dataIndex: 'version', key: 'version', width: 80 }, + { title: '分类', dataIndex: 'category', key: 'category', width: 120 }, + { + title: '状态', dataIndex: 'status', key: 'status', width: 100, + render: (s: string) => {s}, + }, + { + title: '操作', key: 'action', width: 200, + render: (_, record) => ( + + {record.status === 'draft' && ( + <> + + + + )} + + ), + }, + ]; + + return ( + <> +
+ +
+
+ setDesignerOpen(false)} + footer={null} + width={1200} + destroyOnClose + > + + + + ); +} diff --git a/apps/web/src/pages/workflow/ProcessDesigner.tsx b/apps/web/src/pages/workflow/ProcessDesigner.tsx new file mode 100644 index 0000000..3104115 --- /dev/null +++ b/apps/web/src/pages/workflow/ProcessDesigner.tsx @@ -0,0 +1,243 @@ +import { useCallback, useMemo, useState } from 'react'; +import { Button, Form, Input, message, Space } from 'antd'; +import { + ReactFlow, + Controls, + Background, + addEdge, + useNodesState, + useEdgesState, + type Connection, + type Node, + type Edge, + BackgroundVariant, + MarkerType, +} from '@xyflow/react'; +import '@xyflow/react/dist/style.css'; +import { + type CreateProcessDefinitionRequest, + type NodeDef, + type EdgeDef, +} from '../../api/workflowDefinitions'; + +const NODE_TYPES_MAP: Record = { + StartEvent: { label: '开始', color: '#52c41a' }, + EndEvent: { label: '结束', color: '#ff4d4f' }, + UserTask: { label: '用户任务', color: '#1890ff' }, + ServiceTask: { label: '服务任务', color: '#722ed1' }, + ExclusiveGateway: { label: '排他网关', color: '#fa8c16' }, + ParallelGateway: { label: '并行网关', color: '#13c2c2' }, +}; + +const PALETTE_ITEMS = Object.entries(NODE_TYPES_MAP).map(([type, info]) => ({ + type, + label: info.label, + color: info.color, +})); + +function createFlowNode(type: string, label: string, position: { x: number; y: number }): Node { + return { + id: `node_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`, + type: 'default', + position, + data: { label: `${label}`, nodeType: type, name: label }, + style: { + background: NODE_TYPES_MAP[type]?.color || '#f0f0f0', + color: '#fff', + padding: '8px 16px', + borderRadius: type.includes('Gateway') ? 0 : type === 'StartEvent' || type === 'EndEvent' ? 50 : 6, + fontSize: 13, + fontWeight: 500, + border: '2px solid rgba(255,255,255,0.3)', + width: type.includes('Gateway') ? 
80 : 140, + textAlign: 'center' as const, + }, + }; +} + +interface ProcessDesignerProps { + definitionId: string | null; + onSave: (req: CreateProcessDefinitionRequest) => void; +} + +export default function ProcessDesigner({ onSave }: ProcessDesignerProps) { + const [form] = Form.useForm(); + const [selectedNode, setSelectedNode] = useState(null); + const [nodes, setNodes, onNodesChange] = useNodesState([ + createFlowNode('StartEvent', '开始', { x: 250, y: 50 }), + createFlowNode('UserTask', '审批', { x: 250, y: 200 }), + createFlowNode('EndEvent', '结束', { x: 250, y: 400 }), + ]); + const [edges, setEdges, onEdgesChange] = useEdgesState([ + { + id: 'e_start_approve', + source: nodes[0].id, + target: nodes[1].id, + markerEnd: { type: MarkerType.ArrowClosed }, + }, + { + id: 'e_approve_end', + source: nodes[1].id, + target: nodes[2].id, + markerEnd: { type: MarkerType.ArrowClosed }, + }, + ]); + + const onConnect = useCallback( + (connection: Connection) => { + setEdges((eds) => + addEdge( + { ...connection, markerEnd: { type: MarkerType.ArrowClosed } }, + eds, + ), + ); + }, + [setEdges], + ); + + const onNodeClick = useCallback((_: React.MouseEvent, node: Node) => { + setSelectedNode(node); + }, []); + + const handleAddNode = (type: string) => { + const info = NODE_TYPES_MAP[type]; + if (!info) return; + const newNode = createFlowNode(type, info.label, { + x: 100 + Math.random() * 400, + y: 100 + Math.random() * 300, + }); + setNodes((nds) => [...nds, newNode]); + }; + + const handleDeleteNode = () => { + if (!selectedNode) return; + setNodes((nds) => nds.filter((n) => n.id !== selectedNode.id)); + setEdges((eds) => + eds.filter((e) => e.source !== selectedNode.id && e.target !== selectedNode.id), + ); + setSelectedNode(null); + }; + + const handleUpdateNodeName = (name: string) => { + if (!selectedNode) return; + setNodes((nds) => + nds.map((n) => + n.id === selectedNode.id + ? 
{ ...n, data: { ...n.data, label: name, name } } + : n, + ), + ); + setSelectedNode((prev) => (prev ? { ...prev, data: { ...prev.data, label: name, name } } : null)); + }; + + const handleSave = () => { + form.validateFields().then((values) => { + const flowNodes: NodeDef[] = nodes.map((n) => ({ + id: n.id, + type: (n.data.nodeType as NodeDef['type']) || 'UserTask', + name: n.data.name || String(n.data.label), + })); + const flowEdges: EdgeDef[] = edges.map((e) => ({ + id: e.id, + source: e.source, + target: e.target, + })); + onSave({ + ...values, + nodes: flowNodes, + edges: flowEdges, + }); + }).catch(() => { + message.error('请填写必要字段'); + }); + }; + + const defaultEdgeOptions = useMemo( + () => ({ + markerEnd: { type: MarkerType.ArrowClosed }, + }), + [], + ); + + return ( +
+ {/* 左侧工具面板 */} +
+

添加节点

+ {PALETTE_ITEMS.map((item) => ( + + ))} + + {selectedNode && ( +
+

节点属性

+ handleUpdateNodeName(e.target.value)} + placeholder="节点名称" + style={{ marginBottom: 8 }} + /> + +
+ )} +
+ + {/* 中间画布 */} +
+ + + + +
+ + {/* 右侧表单 */} +
+
+ + + + + + + + + + + + + + + + + +
+
+ ); +} diff --git a/apps/web/src/pages/workflow/ProcessViewer.tsx b/apps/web/src/pages/workflow/ProcessViewer.tsx new file mode 100644 index 0000000..c9c3820 --- /dev/null +++ b/apps/web/src/pages/workflow/ProcessViewer.tsx @@ -0,0 +1,84 @@ +import { useMemo } from 'react'; +import { + ReactFlow, + Controls, + Background, + BackgroundVariant, + MarkerType, + type Node, + type Edge, +} from '@xyflow/react'; +import '@xyflow/react/dist/style.css'; +import type { NodeDef, EdgeDef } from '../../api/workflowDefinitions'; + +const NODE_TYPE_STYLES: Record = { + StartEvent: { color: '#52c41a', radius: 50, width: 100 }, + EndEvent: { color: '#ff4d4f', radius: 50, width: 100 }, + UserTask: { color: '#1890ff', radius: 6, width: 160 }, + ServiceTask: { color: '#722ed1', radius: 6, width: 160 }, + ExclusiveGateway: { color: '#fa8c16', radius: 0, width: 100 }, + ParallelGateway: { color: '#13c2c2', radius: 0, width: 100 }, +}; + +interface ProcessViewerProps { + nodes: NodeDef[]; + edges: EdgeDef[]; + activeNodeIds?: string[]; +} + +export default function ProcessViewer({ nodes, edges, activeNodeIds = [] }: ProcessViewerProps) { + const flowNodes: Node[] = useMemo(() => + nodes.map((n, i) => { + const style = NODE_TYPE_STYLES[n.type] || NODE_TYPE_STYLES.UserTask; + const isActive = activeNodeIds.includes(n.id); + return { + id: n.id, + type: 'default', + position: n.position || { x: 200, y: i * 120 + 50 }, + data: { label: n.name }, + style: { + background: isActive ? '#fff3cd' : style.color, + color: isActive ? '#856404' : '#fff', + padding: '8px 16px', + borderRadius: style.radius, + fontSize: 13, + fontWeight: 500, + border: isActive ? '3px solid #ffc107' : '2px solid rgba(255,255,255,0.3)', + width: style.width, + textAlign: 'center' as const, + boxShadow: isActive ? 
'0 0 8px rgba(255,193,7,0.5)' : 'none', + }, + }; + }), + [nodes, activeNodeIds], + ); + + const flowEdges: Edge[] = useMemo(() => + edges.map((e) => ({ + id: e.id, + source: e.source, + target: e.target, + label: e.label || e.condition, + markerEnd: { type: MarkerType.ArrowClosed }, + style: { stroke: '#999' }, + })), + [edges], + ); + + return ( +
+ + + + +
+ ); +} diff --git a/crates/erp-auth/src/service/seed.rs b/crates/erp-auth/src/service/seed.rs index 2d62d2e..33ede5a 100644 --- a/crates/erp-auth/src/service/seed.rs +++ b/crates/erp-auth/src/service/seed.rs @@ -101,10 +101,19 @@ const DEFAULT_PERMISSIONS: &[(&str, &str, &str, &str, &str)] = &[ ("theme:update", "编辑主题", "theme", "update", "编辑主题设置"), ("language:list", "查看语言", "language", "list", "查看语言配置"), ("language:update", "编辑语言", "language", "update", "编辑语言配置"), + // Workflow module permissions + ("workflow:create", "创建流程", "workflow", "create", "创建流程定义"), + ("workflow:list", "查看流程", "workflow", "list", "查看流程列表"), + ("workflow:read", "查看流程详情", "workflow", "read", "查看流程定义详情"), + ("workflow:update", "编辑流程", "workflow", "update", "编辑流程定义"), + ("workflow:publish", "发布流程", "workflow", "publish", "发布流程定义"), + ("workflow:start", "发起流程", "workflow", "start", "发起流程实例"), + ("workflow:approve", "审批任务", "workflow", "approve", "审批流程任务"), + ("workflow:delegate", "委派任务", "workflow", "delegate", "委派流程任务"), ]; /// Indices of read-only permissions within DEFAULT_PERMISSIONS. -const READ_PERM_INDICES: &[usize] = &[1, 5, 9, 11, 15, 19, 23, 24, 28, 29, 34, 38]; +const READ_PERM_INDICES: &[usize] = &[1, 5, 9, 11, 15, 19, 23, 24, 28, 29, 34, 38, 41, 42]; /// Seed default auth data for a new tenant.
/// diff --git a/crates/erp-server/Cargo.toml b/crates/erp-server/Cargo.toml index 6243b21..e139d2b 100644 --- a/crates/erp-server/Cargo.toml +++ b/crates/erp-server/Cargo.toml @@ -25,5 +25,6 @@ serde.workspace = true erp-server-migration = { path = "migration" } erp-auth.workspace = true erp-config.workspace = true +erp-workflow.workspace = true anyhow.workspace = true uuid.workspace = true diff --git a/crates/erp-server/migration/src/lib.rs b/crates/erp-server/migration/src/lib.rs index 569c9d5..559fae2 100644 --- a/crates/erp-server/migration/src/lib.rs +++ b/crates/erp-server/migration/src/lib.rs @@ -17,6 +17,11 @@ mod m20260412_000014_create_menus; mod m20260412_000015_create_menu_roles; mod m20260412_000016_create_settings; mod m20260412_000017_create_numbering_rules; +mod m20260412_000018_create_process_definitions; +mod m20260412_000019_create_process_instances; +mod m20260412_000020_create_tokens; +mod m20260412_000021_create_tasks; +mod m20260412_000022_create_process_variables; pub struct Migrator; @@ -41,6 +46,11 @@ impl MigratorTrait for Migrator { Box::new(m20260412_000015_create_menu_roles::Migration), Box::new(m20260412_000016_create_settings::Migration), Box::new(m20260412_000017_create_numbering_rules::Migration), + Box::new(m20260412_000018_create_process_definitions::Migration), + Box::new(m20260412_000019_create_process_instances::Migration), + Box::new(m20260412_000020_create_tokens::Migration), + Box::new(m20260412_000021_create_tasks::Migration), + Box::new(m20260412_000022_create_process_variables::Migration), ] } } diff --git a/crates/erp-server/migration/src/m20260412_000018_create_process_definitions.rs b/crates/erp-server/migration/src/m20260412_000018_create_process_definitions.rs new file mode 100644 index 0000000..bdc5622 --- /dev/null +++ b/crates/erp-server/migration/src/m20260412_000018_create_process_definitions.rs @@ -0,0 +1,122 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + 
+#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(ProcessDefinitions::Table) + .if_not_exists() + .col( + ColumnDef::new(ProcessDefinitions::Id) + .uuid() + .not_null() + .primary_key(), + ) + .col(ColumnDef::new(ProcessDefinitions::TenantId).uuid().not_null()) + .col(ColumnDef::new(ProcessDefinitions::Name).string().not_null()) + .col(ColumnDef::new(ProcessDefinitions::Key).string().not_null()) + .col( + ColumnDef::new(ProcessDefinitions::Version) + .integer() + .not_null() + .default(1), + ) + .col(ColumnDef::new(ProcessDefinitions::Category).string().null()) + .col(ColumnDef::new(ProcessDefinitions::Description).text().null()) + .col( + ColumnDef::new(ProcessDefinitions::Nodes) + .json_binary() + .not_null() + .default(Expr::val("[]")), + ) + .col( + ColumnDef::new(ProcessDefinitions::Edges) + .json_binary() + .not_null() + .default(Expr::val("[]")), + ) + .col( + ColumnDef::new(ProcessDefinitions::Status) + .string() + .not_null() + .default("draft"), + ) + .col( + ColumnDef::new(ProcessDefinitions::CreatedAt) + .timestamp_with_time_zone() + .not_null() + .default(Expr::current_timestamp()), + ) + .col( + ColumnDef::new(ProcessDefinitions::UpdatedAt) + .timestamp_with_time_zone() + .not_null() + .default(Expr::current_timestamp()), + ) + .col(ColumnDef::new(ProcessDefinitions::CreatedBy).uuid().not_null()) + .col(ColumnDef::new(ProcessDefinitions::UpdatedBy).uuid().not_null()) + .col( + ColumnDef::new(ProcessDefinitions::DeletedAt) + .timestamp_with_time_zone() + .null(), + ) + .col( + ColumnDef::new(ProcessDefinitions::VersionField) + .integer() + .not_null() + .default(1), + ) + .to_owned(), + ) + .await?; + + manager + .create_index( + Index::create() + .name("idx_process_definitions_tenant_id") + .table(ProcessDefinitions::Table) + .col(ProcessDefinitions::TenantId) + .to_owned(), + ) + .await?; + + 
manager.get_connection().execute(sea_orm::Statement::from_string( + sea_orm::DatabaseBackend::Postgres, + "CREATE UNIQUE INDEX idx_process_definitions_key_version ON process_definitions (tenant_id, key, version) WHERE deleted_at IS NULL".to_string(), + )).await.map_err(|e| DbErr::Custom(e.to_string()))?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(ProcessDefinitions::Table).to_owned()) + .await + } +} + +#[derive(DeriveIden)] +enum ProcessDefinitions { + Table, + Id, + TenantId, + Name, + Key, + Version, + Category, + Description, + Nodes, + Edges, + Status, + CreatedAt, + UpdatedAt, + CreatedBy, + UpdatedBy, + DeletedAt, + VersionField, +} diff --git a/crates/erp-server/migration/src/m20260412_000019_create_process_instances.rs b/crates/erp-server/migration/src/m20260412_000019_create_process_instances.rs new file mode 100644 index 0000000..c4b7ef4 --- /dev/null +++ b/crates/erp-server/migration/src/m20260412_000019_create_process_instances.rs @@ -0,0 +1,124 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(ProcessInstances::Table) + .if_not_exists() + .col( + ColumnDef::new(ProcessInstances::Id) + .uuid() + .not_null() + .primary_key(), + ) + .col(ColumnDef::new(ProcessInstances::TenantId).uuid().not_null()) + .col(ColumnDef::new(ProcessInstances::DefinitionId).uuid().not_null()) + .col(ColumnDef::new(ProcessInstances::BusinessKey).string().null()) + .col( + ColumnDef::new(ProcessInstances::Status) + .string() + .not_null() + .default("running"), + ) + .col(ColumnDef::new(ProcessInstances::StartedBy).uuid().not_null()) + .col( + ColumnDef::new(ProcessInstances::StartedAt) + .timestamp_with_time_zone() + .not_null() + 
.default(Expr::current_timestamp()), + ) + .col( + ColumnDef::new(ProcessInstances::CompletedAt) + .timestamp_with_time_zone() + .null(), + ) + .col( + ColumnDef::new(ProcessInstances::CreatedAt) + .timestamp_with_time_zone() + .not_null() + .default(Expr::current_timestamp()), + ) + .col( + ColumnDef::new(ProcessInstances::UpdatedAt) + .timestamp_with_time_zone() + .not_null() + .default(Expr::current_timestamp()), + ) + .col(ColumnDef::new(ProcessInstances::CreatedBy).uuid().not_null()) + .col(ColumnDef::new(ProcessInstances::UpdatedBy).uuid().not_null()) + .col( + ColumnDef::new(ProcessInstances::DeletedAt) + .timestamp_with_time_zone() + .null(), + ) + .col( + ColumnDef::new(ProcessInstances::Version) + .integer() + .not_null() + .default(1), + ) + .to_owned(), + ) + .await?; + + manager + .create_index( + Index::create() + .name("idx_instances_tenant_status") + .table(ProcessInstances::Table) + .col(ProcessInstances::TenantId) + .col(ProcessInstances::Status) + .to_owned(), + ) + .await?; + + manager + .create_foreign_key( + ForeignKey::create() + .name("fk_instances_definition") + .from(ProcessInstances::Table, ProcessInstances::DefinitionId) + .to(ProcessDefinitions::Table, ProcessDefinitions::Id) + .to_owned(), + ) + .await?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(ProcessInstances::Table).to_owned()) + .await + } +} + +#[derive(DeriveIden)] +enum ProcessInstances { + Table, + Id, + TenantId, + DefinitionId, + BusinessKey, + Status, + StartedBy, + StartedAt, + CompletedAt, + CreatedAt, + UpdatedAt, + CreatedBy, + UpdatedBy, + DeletedAt, + Version, +} + +#[derive(DeriveIden)] +enum ProcessDefinitions { + Table, + Id, +} diff --git a/crates/erp-server/migration/src/m20260412_000020_create_tokens.rs b/crates/erp-server/migration/src/m20260412_000020_create_tokens.rs new file mode 100644 index 0000000..3b06992 --- /dev/null +++ 
b/crates/erp-server/migration/src/m20260412_000020_create_tokens.rs @@ -0,0 +1,90 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(Tokens::Table) + .if_not_exists() + .col( + ColumnDef::new(Tokens::Id) + .uuid() + .not_null() + .primary_key(), + ) + .col(ColumnDef::new(Tokens::TenantId).uuid().not_null()) + .col(ColumnDef::new(Tokens::InstanceId).uuid().not_null()) + .col(ColumnDef::new(Tokens::NodeId).string().not_null()) + .col( + ColumnDef::new(Tokens::Status) + .string() + .not_null() + .default("active"), + ) + .col( + ColumnDef::new(Tokens::CreatedAt) + .timestamp_with_time_zone() + .not_null() + .default(Expr::current_timestamp()), + ) + .col( + ColumnDef::new(Tokens::ConsumedAt) + .timestamp_with_time_zone() + .null(), + ) + .to_owned(), + ) + .await?; + + manager + .create_index( + Index::create() + .name("idx_tokens_instance") + .table(Tokens::Table) + .col(Tokens::InstanceId) + .to_owned(), + ) + .await?; + + manager + .create_foreign_key( + ForeignKey::create() + .name("fk_tokens_instance") + .from(Tokens::Table, Tokens::InstanceId) + .to(ProcessInstances::Table, ProcessInstances::Id) + .to_owned(), + ) + .await?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(Tokens::Table).to_owned()) + .await + } +} + +#[derive(DeriveIden)] +enum Tokens { + Table, + Id, + TenantId, + InstanceId, + NodeId, + Status, + CreatedAt, + ConsumedAt, +} + +#[derive(DeriveIden)] +enum ProcessInstances { + Table, + Id, +} diff --git a/crates/erp-server/migration/src/m20260412_000021_create_tasks.rs b/crates/erp-server/migration/src/m20260412_000021_create_tasks.rs new file mode 100644 index 0000000..ea4fee7 --- /dev/null +++ 
b/crates/erp-server/migration/src/m20260412_000021_create_tasks.rs @@ -0,0 +1,160 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(Tasks::Table) + .if_not_exists() + .col( + ColumnDef::new(Tasks::Id) + .uuid() + .not_null() + .primary_key(), + ) + .col(ColumnDef::new(Tasks::TenantId).uuid().not_null()) + .col(ColumnDef::new(Tasks::InstanceId).uuid().not_null()) + .col(ColumnDef::new(Tasks::TokenId).uuid().not_null()) + .col(ColumnDef::new(Tasks::NodeId).string().not_null()) + .col(ColumnDef::new(Tasks::NodeName).string().null()) + .col(ColumnDef::new(Tasks::AssigneeId).uuid().null()) + .col(ColumnDef::new(Tasks::CandidateGroups).json_binary().null()) + .col( + ColumnDef::new(Tasks::Status) + .string() + .not_null() + .default("pending"), + ) + .col(ColumnDef::new(Tasks::Outcome).string().null()) + .col(ColumnDef::new(Tasks::FormData).json_binary().null()) + .col( + ColumnDef::new(Tasks::DueDate) + .timestamp_with_time_zone() + .null(), + ) + .col( + ColumnDef::new(Tasks::CompletedAt) + .timestamp_with_time_zone() + .null(), + ) + .col( + ColumnDef::new(Tasks::CreatedAt) + .timestamp_with_time_zone() + .not_null() + .default(Expr::current_timestamp()), + ) + .col( + ColumnDef::new(Tasks::UpdatedAt) + .timestamp_with_time_zone() + .not_null() + .default(Expr::current_timestamp()), + ) + .col(ColumnDef::new(Tasks::CreatedBy).uuid().not_null()) + .col(ColumnDef::new(Tasks::UpdatedBy).uuid().not_null()) + .col( + ColumnDef::new(Tasks::DeletedAt) + .timestamp_with_time_zone() + .null(), + ) + .col( + ColumnDef::new(Tasks::Version) + .integer() + .not_null() + .default(1), + ) + .to_owned(), + ) + .await?; + + manager + .create_index( + Index::create() + .name("idx_tasks_assignee") + .table(Tasks::Table) + .col(Tasks::TenantId) + 
.col(Tasks::AssigneeId) + .col(Tasks::Status) + .to_owned(), + ) + .await?; + + manager + .create_index( + Index::create() + .name("idx_tasks_instance") + .table(Tasks::Table) + .col(Tasks::InstanceId) + .to_owned(), + ) + .await?; + + manager + .create_foreign_key( + ForeignKey::create() + .name("fk_tasks_instance") + .from(Tasks::Table, Tasks::InstanceId) + .to(ProcessInstances::Table, ProcessInstances::Id) + .to_owned(), + ) + .await?; + + manager + .create_foreign_key( + ForeignKey::create() + .name("fk_tasks_token") + .from(Tasks::Table, Tasks::TokenId) + .to(Tokens::Table, Tokens::Id) + .to_owned(), + ) + .await?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(Tasks::Table).to_owned()) + .await + } +} + +#[derive(DeriveIden)] +enum Tasks { + Table, + Id, + TenantId, + InstanceId, + TokenId, + NodeId, + NodeName, + AssigneeId, + CandidateGroups, + Status, + Outcome, + FormData, + DueDate, + CompletedAt, + CreatedAt, + UpdatedAt, + CreatedBy, + UpdatedBy, + DeletedAt, + Version, +} + +#[derive(DeriveIden)] +enum ProcessInstances { + Table, + Id, +} + +#[derive(DeriveIden)] +enum Tokens { + Table, + Id, +} diff --git a/crates/erp-server/migration/src/m20260412_000022_create_process_variables.rs b/crates/erp-server/migration/src/m20260412_000022_create_process_variables.rs new file mode 100644 index 0000000..4dad07a --- /dev/null +++ b/crates/erp-server/migration/src/m20260412_000022_create_process_variables.rs @@ -0,0 +1,84 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(ProcessVariables::Table) + .if_not_exists() + .col( + ColumnDef::new(ProcessVariables::Id) + .uuid() + .not_null() + .primary_key(), + ) + 
.col(ColumnDef::new(ProcessVariables::TenantId).uuid().not_null()) + .col(ColumnDef::new(ProcessVariables::InstanceId).uuid().not_null()) + .col(ColumnDef::new(ProcessVariables::Name).string().not_null()) + .col( + ColumnDef::new(ProcessVariables::VarType) + .string() + .not_null() + .default("string"), + ) + .col(ColumnDef::new(ProcessVariables::ValueString).text().null()) + .col(ColumnDef::new(ProcessVariables::ValueNumber).double().null()) + .col(ColumnDef::new(ProcessVariables::ValueBoolean).boolean().null()) + .col( + ColumnDef::new(ProcessVariables::ValueDate) + .timestamp_with_time_zone() + .null(), + ) + .to_owned(), + ) + .await?; + + manager.get_connection().execute(sea_orm::Statement::from_string( + sea_orm::DatabaseBackend::Postgres, + "CREATE UNIQUE INDEX idx_process_variables_instance_name ON process_variables (instance_id, name)".to_string(), + )).await.map_err(|e| DbErr::Custom(e.to_string()))?; + + manager + .create_foreign_key( + ForeignKey::create() + .name("fk_variables_instance") + .from(ProcessVariables::Table, ProcessVariables::InstanceId) + .to(ProcessInstances::Table, ProcessInstances::Id) + .to_owned(), + ) + .await?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(ProcessVariables::Table).to_owned()) + .await + } +} + +#[derive(DeriveIden)] +enum ProcessVariables { + Table, + Id, + TenantId, + InstanceId, + Name, + VarType, + ValueString, + ValueNumber, + ValueBoolean, + ValueDate, +} + +#[derive(DeriveIden)] +enum ProcessInstances { + Table, + Id, +} diff --git a/crates/erp-server/src/main.rs b/crates/erp-server/src/main.rs index 7e51ca5..9005b02 100644 --- a/crates/erp-server/src/main.rs +++ b/crates/erp-server/src/main.rs @@ -105,10 +105,15 @@ async fn main() -> anyhow::Result<()> { let config_module = erp_config::ConfigModule::new(); tracing::info!(module = config_module.name(), version = config_module.version(), "Config module initialized"); + // 
Initialize workflow module + let workflow_module = erp_workflow::WorkflowModule::new(); + tracing::info!(module = workflow_module.name(), version = workflow_module.version(), "Workflow module initialized"); + // Initialize module registry and register modules let registry = ModuleRegistry::new() .register(auth_module) - .register(config_module); + .register(config_module) + .register(workflow_module); tracing::info!(module_count = registry.modules().len(), "Modules registered"); // Register event handlers @@ -146,6 +151,7 @@ async fn main() -> anyhow::Result<()> { // Protected routes (JWT authentication required) let protected_routes = erp_auth::AuthModule::protected_routes() .merge(erp_config::ConfigModule::protected_routes()) + .merge(erp_workflow::WorkflowModule::protected_routes()) .layer(middleware::from_fn(move |req, next| { let secret = jwt_secret.clone(); async move { jwt_auth_middleware_fn(secret, req, next).await } diff --git a/crates/erp-server/src/state.rs b/crates/erp-server/src/state.rs index b87cf11..19ea561 100644 --- a/crates/erp-server/src/state.rs +++ b/crates/erp-server/src/state.rs @@ -60,3 +60,13 @@ impl FromRef for erp_config::ConfigState { } } } + +/// Allow erp-workflow handlers to extract their required state without depending on erp-server. 
+impl FromRef for erp_workflow::WorkflowState { + fn from_ref(state: &AppState) -> Self { + Self { + db: state.db.clone(), + event_bus: state.event_bus.clone(), + } + } +} diff --git a/crates/erp-workflow/Cargo.toml b/crates/erp-workflow/Cargo.toml index 7a47033..98cf556 100644 --- a/crates/erp-workflow/Cargo.toml +++ b/crates/erp-workflow/Cargo.toml @@ -5,12 +5,16 @@ [dependencies] erp-core.workspace = true -tokio.workspace = true -serde.workspace = true -serde_json.workspace = true -uuid.workspace = true -chrono.workspace = true -axum.workspace = true -sea-orm.workspace = true -tracing.workspace = true +tokio = { workspace = true, features = ["full"] } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +uuid = { workspace = true, features = ["v7", "serde"] } +chrono = { workspace = true, features = ["serde"] } +axum = { workspace = true } +sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls", "with-uuid", "with-chrono", "with-json"] } +tracing = { workspace = true } anyhow.workspace = true +thiserror.workspace = true +utoipa = { workspace = true, features = ["uuid", "chrono"] } +async-trait.workspace = true +validator.workspace = true diff --git a/crates/erp-workflow/src/dto.rs b/crates/erp-workflow/src/dto.rs new file mode 100644 index 0000000..8b12804 --- /dev/null +++ b/crates/erp-workflow/src/dto.rs @@ -0,0 +1,211 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; +use uuid::Uuid; +use validator::Validate; + +// --- 流程图节点/边定义 --- + +/// BPMN 节点类型 +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, PartialEq)] +#[serde(rename_all = "PascalCase")] +pub enum NodeType { + StartEvent, + EndEvent, + UserTask, + ServiceTask, + ExclusiveGateway, + ParallelGateway, +} + +/// 流程图节点定义 +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct NodeDef { + pub id: String, + #[serde(rename 
= "type")] + pub node_type: NodeType, + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub assignee_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub candidate_groups: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub service_type: Option, + /// 前端渲染位置 + #[serde(skip_serializing_if = "Option::is_none")] + pub position: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct NodePosition { + pub x: f64, + pub y: f64, +} + +/// 流程图连线定义 +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct EdgeDef { + pub id: String, + pub source: String, + pub target: String, + /// 条件表达式(排他网关分支) + #[serde(skip_serializing_if = "Option::is_none")] + pub condition: Option, + /// 前端渲染标签 + #[serde(skip_serializing_if = "Option::is_none")] + pub label: Option, +} + +/// 完整流程图 +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct FlowDiagram { + pub nodes: Vec, + pub edges: Vec, +} + +// --- 流程定义 DTOs --- + +#[derive(Debug, Serialize, ToSchema)] +pub struct ProcessDefinitionResp { + pub id: Uuid, + pub name: String, + pub key: String, + pub version: i32, + #[serde(skip_serializing_if = "Option::is_none")] + pub category: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + pub nodes: serde_json::Value, + pub edges: serde_json::Value, + pub status: String, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Deserialize, Validate, ToSchema)] +pub struct CreateProcessDefinitionReq { + #[validate(length(min = 1, max = 200, message = "流程名称不能为空"))] + pub name: String, + #[validate(length(min = 1, max = 100, message = "流程编码不能为空"))] + pub key: String, + pub category: Option, + pub description: Option, + pub nodes: Vec, + pub edges: Vec, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub struct 
UpdateProcessDefinitionReq { + pub name: Option, + pub category: Option, + pub description: Option, + pub nodes: Option>, + pub edges: Option>, +} + +// --- 流程实例 DTOs --- + +#[derive(Debug, Serialize, ToSchema)] +pub struct ProcessInstanceResp { + pub id: Uuid, + pub definition_id: Uuid, + pub definition_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub business_key: Option, + pub status: String, + pub started_by: Uuid, + pub started_at: DateTime, + #[serde(skip_serializing_if = "Option::is_none")] + pub completed_at: Option>, + pub created_at: DateTime, + /// 当前活跃的 token 位置 + pub active_tokens: Vec, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub struct StartInstanceReq { + pub definition_id: Uuid, + pub business_key: Option, + /// 初始流程变量 + pub variables: Option>, +} + +// --- Token DTOs --- + +#[derive(Debug, Serialize, ToSchema)] +pub struct TokenResp { + pub id: Uuid, + pub node_id: String, + pub status: String, + pub created_at: DateTime, +} + +// --- 任务 DTOs --- + +#[derive(Debug, Serialize, ToSchema)] +pub struct TaskResp { + pub id: Uuid, + pub instance_id: Uuid, + pub token_id: Uuid, + pub node_id: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub node_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub assignee_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub candidate_groups: Option, + pub status: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub outcome: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub form_data: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub due_date: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub completed_at: Option>, + pub created_at: DateTime, + /// 流程定义名称(用于列表展示) + #[serde(skip_serializing_if = "Option::is_none")] + pub definition_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub business_key: Option, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub struct 
CompleteTaskReq { + pub outcome: String, + pub form_data: Option, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub struct DelegateTaskReq { + pub delegate_to: Uuid, +} + +// --- 流程变量 DTOs --- + +#[derive(Debug, Serialize, ToSchema)] +pub struct ProcessVariableResp { + pub id: Uuid, + pub name: String, + pub var_type: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_string: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_number: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_boolean: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_date: Option>, +} + +#[derive(Debug, Clone, Deserialize, ToSchema)] +pub struct SetVariableReq { + pub name: String, + pub var_type: Option, + pub value: serde_json::Value, +} diff --git a/crates/erp-workflow/src/engine/executor.rs b/crates/erp-workflow/src/engine/executor.rs new file mode 100644 index 0000000..c07b128 --- /dev/null +++ b/crates/erp-workflow/src/engine/executor.rs @@ -0,0 +1,371 @@ +use std::collections::HashMap; + +use chrono::Utc; +use sea_orm::{ + ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter, Set, ConnectionTrait, + PaginatorTrait, +}; +use uuid::Uuid; + +use crate::dto::NodeType; +use crate::engine::expression::ExpressionEvaluator; +use crate::engine::model::FlowGraph; +use crate::entity::{token, process_instance}; +use crate::error::{WorkflowError, WorkflowResult}; + +/// Token 驱动的流程执行引擎。 +/// +/// 核心职责: +/// - 在流程启动时,于 StartEvent 创建第一个 token +/// - 在任务完成时推进 token 到下一个节点 +/// - 处理网关分支/汇合逻辑 +/// - 在 EndEvent 完成实例 +pub struct FlowExecutor; + +impl FlowExecutor { + /// 启动流程:在 StartEvent 的后继节点创建 token。 + /// + /// 返回创建的 token ID 列表。 + pub async fn start( + instance_id: Uuid, + tenant_id: Uuid, + graph: &FlowGraph, + variables: &HashMap, + txn: &impl ConnectionTrait, + ) -> WorkflowResult> { + let start_id = graph + .start_node_id + .as_ref() + .ok_or_else(|| WorkflowError::InvalidDiagram("流程图没有开始事件".to_string()))?; 
+ + // 获取 StartEvent 的出边,推进到后继节点 + let outgoing = graph.get_outgoing_edges(start_id); + if outgoing.is_empty() { + return Err(WorkflowError::InvalidDiagram( + "开始事件没有出边".to_string(), + )); + } + + // StartEvent 只有一条出边 + let first_edge = &outgoing[0]; + let target_node_id = &first_edge.target; + + Self::create_token_at_node( + instance_id, + tenant_id, + target_node_id, + graph, + variables, + txn, + ) + .await + } + + /// 推进 token:消费当前 token,在下一节点创建新 token。 + /// + /// 返回新创建的 token ID 列表。 + pub async fn advance( + token_id: Uuid, + instance_id: Uuid, + tenant_id: Uuid, + graph: &FlowGraph, + variables: &HashMap, + txn: &impl ConnectionTrait, + ) -> WorkflowResult> { + // 读取当前 token + let current_token = token::Entity::find_by_id(token_id) + .one(txn) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .ok_or_else(|| WorkflowError::NotFound(format!("Token 不存在: {token_id}")))?; + + if current_token.status != "active" { + return Err(WorkflowError::InvalidState(format!( + "Token 状态不是 active: {}", + current_token.status + ))); + } + + let node_id = current_token.node_id.clone(); + + // 消费当前 token + let mut active: token::ActiveModel = current_token.into(); + active.status = Set("consumed".to_string()); + active.consumed_at = Set(Some(Utc::now())); + active.update(txn).await.map_err(|e| WorkflowError::Validation(e.to_string()))?; + + // 获取当前节点的出边 + let outgoing = graph.get_outgoing_edges(&node_id); + let current_node = graph.nodes.get(&node_id) + .ok_or_else(|| WorkflowError::InvalidDiagram(format!("节点不存在: {node_id}")))?; + + match current_node.node_type { + NodeType::ExclusiveGateway => { + // 排他网关:求值条件,选择一条分支 + Self::advance_exclusive_gateway( + instance_id, + tenant_id, + &outgoing, + graph, + variables, + txn, + ) + .await + } + NodeType::ParallelGateway => { + // 并行网关:为每条出边创建 token + Self::advance_parallel_gateway( + instance_id, + tenant_id, + &outgoing, + graph, + variables, + txn, + ) + .await + } + _ => { + // 普通节点:沿出边前进 + if outgoing.is_empty() 
{ + // 没有出边(理论上只有 EndEvent 会到这里) + Ok(vec![]) + } else { + let mut new_tokens = Vec::new(); + for edge in &outgoing { + let tokens = Self::create_token_at_node( + instance_id, + tenant_id, + &edge.target, + graph, + variables, + txn, + ) + .await?; + new_tokens.extend(tokens); + } + Ok(new_tokens) + } + } + } + } + + /// 排他网关分支:求值条件,选择第一个满足条件的分支。 + async fn advance_exclusive_gateway( + instance_id: Uuid, + tenant_id: Uuid, + outgoing: &[&crate::engine::model::FlowEdge], + graph: &FlowGraph, + variables: &HashMap, + txn: &impl ConnectionTrait, + ) -> WorkflowResult> { + let mut default_target: Option<&str> = None; + let mut matched_target: Option<&str> = None; + + for edge in outgoing { + if let Some(condition) = &edge.condition { + match ExpressionEvaluator::eval(condition, variables) { + Ok(true) => { + matched_target = Some(&edge.target); + break; + } + Ok(false) => continue, + Err(_) => continue, // 条件求值失败,跳过 + } + } else { + // 无条件的边作为默认分支 + default_target = Some(&edge.target); + } + } + + let target = matched_target + .or(default_target) + .ok_or_else(|| WorkflowError::ExpressionError( + "排他网关没有匹配的条件分支".to_string(), + ))?; + + Self::create_token_at_node(instance_id, tenant_id, target, graph, variables, txn).await + } + + /// 并行网关分支:为每条出边创建 token。 + async fn advance_parallel_gateway( + instance_id: Uuid, + tenant_id: Uuid, + outgoing: &[&crate::engine::model::FlowEdge], + graph: &FlowGraph, + variables: &HashMap, + txn: &impl ConnectionTrait, + ) -> WorkflowResult> { + let mut new_tokens = Vec::new(); + for edge in outgoing { + let tokens = Self::create_token_at_node( + instance_id, + tenant_id, + &edge.target, + graph, + variables, + txn, + ) + .await?; + new_tokens.extend(tokens); + } + Ok(new_tokens) + } + + /// 在指定节点创建 token,并根据节点类型执行相应逻辑。 + fn create_token_at_node<'a>( + instance_id: Uuid, + tenant_id: Uuid, + node_id: &'a str, + graph: &'a FlowGraph, + variables: &'a HashMap, + txn: &'a impl ConnectionTrait, + ) -> std::pin::Pin>> + Send + 'a>> { + 
Box::pin(async move { + let node = graph.nodes.get(node_id) + .ok_or_else(|| WorkflowError::InvalidDiagram(format!("节点不存在: {node_id}")))?; + + match node.node_type { + NodeType::EndEvent => { + // 到达 EndEvent,不创建新 token + // 检查实例是否所有 token 都完成 + Self::check_instance_completion(instance_id, tenant_id, txn).await?; + Ok(vec![]) + } + NodeType::ParallelGateway + if Self::is_join_gateway(node_id, graph) => + { + // 并行网关汇合:等待所有入边 token 到达 + Self::handle_join_gateway( + instance_id, + tenant_id, + node_id, + graph, + variables, + txn, + ) + .await + } + _ => { + // UserTask / ServiceTask / 网关(分支)等:创建活跃 token + let new_token_id = Uuid::now_v7(); + let now = Utc::now(); + + let token_model = token::ActiveModel { + id: Set(new_token_id), + tenant_id: Set(tenant_id), + instance_id: Set(instance_id), + node_id: Set(node_id.to_string()), + status: Set("active".to_string()), + created_at: Set(now), + consumed_at: Set(None), + }; + token_model + .insert(txn) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + Ok(vec![new_token_id]) + } + } + }) + } + + /// 判断并行网关是否是汇合模式(入边数 > 出边数,或者入边数 > 1)。 + fn is_join_gateway(node_id: &str, graph: &FlowGraph) -> bool { + let incoming = graph.get_incoming_edges(node_id); + incoming.len() > 1 + } + + /// 处理并行网关汇合逻辑。 + /// + /// 当所有入边的源节点都有已消费的 token 时,创建新 token 推进到后继。 + async fn handle_join_gateway( + instance_id: Uuid, + tenant_id: Uuid, + node_id: &str, + graph: &FlowGraph, + variables: &HashMap, + txn: &impl ConnectionTrait, + ) -> WorkflowResult> { + let incoming = graph.get_incoming_edges(node_id); + + // 检查所有入边的源节点是否都有已消费/已完成的 token + for edge in &incoming { + let has_consumed = token::Entity::find() + .filter(token::Column::InstanceId.eq(instance_id)) + .filter(token::Column::NodeId.eq(&edge.source)) + .filter(token::Column::Status.is_in(["consumed", "active"])) + .one(txn) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + if has_consumed.is_none() { + // 还有分支没有到达,等待 + return Ok(vec![]); + } + + 
// 检查是否还有活跃的 token(来自其他分支) + let has_active = token::Entity::find() + .filter(token::Column::InstanceId.eq(instance_id)) + .filter(token::Column::NodeId.eq(&edge.source)) + .filter(token::Column::Status.eq("active")) + .one(txn) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + if has_active.is_some() { + // 还有分支在执行中,等待 + return Ok(vec![]); + } + } + + // 所有分支都完成了,沿出边继续 + let outgoing = graph.get_outgoing_edges(node_id); + let mut new_tokens = Vec::new(); + for edge in &outgoing { + let tokens = Self::create_token_at_node( + instance_id, + tenant_id, + &edge.target, + graph, + variables, + txn, + ) + .await?; + new_tokens.extend(tokens); + } + Ok(new_tokens) + } + + /// 检查实例是否所有 token 都已完成,如果是则完成实例。 + async fn check_instance_completion( + instance_id: Uuid, + tenant_id: Uuid, + txn: &impl ConnectionTrait, + ) -> WorkflowResult<()> { + let active_count = token::Entity::find() + .filter(token::Column::InstanceId.eq(instance_id)) + .filter(token::Column::Status.eq("active")) + .count(txn) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + if active_count == 0 { + // 所有 token 都完成,标记实例完成 + let instance = process_instance::Entity::find_by_id(instance_id) + .one(txn) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? 
+ .filter(|i| i.tenant_id == tenant_id && i.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("流程实例不存在: {instance_id}")))?; + + let mut active: process_instance::ActiveModel = instance.into(); + active.status = Set("completed".to_string()); + active.completed_at = Set(Some(Utc::now())); + active.updated_at = Set(Utc::now()); + active.update(txn).await.map_err(|e| WorkflowError::Validation(e.to_string()))?; + } + + Ok(()) + } +} diff --git a/crates/erp-workflow/src/engine/expression.rs b/crates/erp-workflow/src/engine/expression.rs new file mode 100644 index 0000000..fd641ee --- /dev/null +++ b/crates/erp-workflow/src/engine/expression.rs @@ -0,0 +1,325 @@ +use std::collections::HashMap; + +use crate::error::{WorkflowError, WorkflowResult}; + +/// 简单表达式求值器。 +/// +/// 支持的比较运算符:>, >=, <, <=, ==, != +/// 支持 && 和 || 逻辑运算。 +/// 操作数可以是变量名(从 variables map 查找)或字面量(数字、字符串)。 +/// +/// 示例: +/// - `amount > 1000` +/// - `status == "approved"` +/// - `score >= 60 && attendance > 80` +pub struct ExpressionEvaluator; + +impl ExpressionEvaluator { + /// 求值单个条件表达式。 + /// + /// 表达式格式: `{left} {op} {right}` 或复合表达式 `{expr1} && {expr2}` + pub fn eval(expr: &str, variables: &HashMap) -> WorkflowResult { + let expr = expr.trim(); + + // 处理逻辑 OR + if let Some(idx) = Self::find_logical_op(expr, "||") { + let left = &expr[..idx]; + let right = &expr[idx + 2..]; + return Ok(Self::eval(left, variables)? || Self::eval(right, variables)?); + } + + // 处理逻辑 AND + if let Some(idx) = Self::find_logical_op(expr, "&&") { + let left = &expr[..idx]; + let right = &expr[idx + 2..]; + return Ok(Self::eval(left, variables)? 
&& Self::eval(right, variables)?); + } + + // 处理单个比较表达式 + Self::eval_comparison(expr, variables) + } + + /// 查找逻辑运算符位置,跳过引号内的内容。 + fn find_logical_op(expr: &str, op: &str) -> Option { + let mut in_string = false; + let mut string_char = ' '; + let chars: Vec = expr.chars().collect(); + let op_chars: Vec = op.chars().collect(); + let op_len = op_chars.len(); + + for i in 0..chars.len().saturating_sub(op_len - 1) { + let c = chars[i]; + + if !in_string && (c == '"' || c == '\'') { + in_string = true; + string_char = c; + continue; + } + if in_string && c == string_char { + in_string = false; + continue; + } + + if in_string { + continue; + } + + if chars[i..].starts_with(&op_chars) { + return Some(i); + } + } + None + } + + /// 求值单个比较表达式。 + fn eval_comparison(expr: &str, variables: &HashMap) -> WorkflowResult { + let operators = [">=", "<=", "!=", "==", ">", "<"]; + + for op in &operators { + if let Some(idx) = Self::find_comparison_op(expr, op) { + let left = expr[..idx].trim(); + let right = expr[idx + op.len()..].trim(); + + let left_val = Self::resolve_value(left, variables)?; + let right_val = Self::resolve_value(right, variables)?; + + return Self::compare(&left_val, &right_val, op); + } + } + + Err(WorkflowError::ExpressionError(format!( + "无法解析表达式: '{}'", + expr + ))) + } + + /// 查找比较运算符位置,跳过引号内的内容。 + fn find_comparison_op(expr: &str, op: &str) -> Option { + let mut in_string = false; + let mut string_char = ' '; + let bytes = expr.as_bytes(); + let op_bytes = op.as_bytes(); + let op_len = op_bytes.len(); + + for i in 0..bytes.len().saturating_sub(op_len - 1) { + let c = bytes[i] as char; + + if !in_string && (c == '"' || c == '\'') { + in_string = true; + string_char = c; + continue; + } + if in_string && c == string_char { + in_string = false; + continue; + } + + if in_string { + continue; + } + + if bytes[i..].starts_with(op_bytes) { + // 确保不是被嵌在其他运算符里(如 != 中的 =) + // 对于 > 和 < 检查后面不是 = 或 > + if op == ">" || op == "<" { + if i + op_len < bytes.len() { + let 
next = bytes[i + op_len] as char; + if next == '=' || (op == ">" && next == '>') { + continue; + } + } + // 也检查前面不是 ! 或 = 或 < 或 > + if i > 0 { + let prev = bytes[i - 1] as char; + if prev == '!' || prev == '=' || prev == '<' || prev == '>' { + continue; + } + } + } + // 对于 ==, >=, <=, != 确保前面不是 ! 或 = (避免匹配到 == 中的第二个 =) + // 这已经通过从长到短匹配处理了 + return Some(i); + } + } + None + } + + /// 解析值:字符串字面量、数字字面量或变量引用。 + fn resolve_value( + token: &str, + variables: &HashMap, + ) -> WorkflowResult { + let token = token.trim(); + + // 字符串字面量 + if (token.starts_with('"') && token.ends_with('"')) + || (token.starts_with('\'') && token.ends_with('\'')) + { + return Ok(serde_json::Value::String( + token[1..token.len() - 1].to_string(), + )); + } + + // 数字字面量 + if let Ok(n) = token.parse::() { + return Ok(serde_json::Value::Number(n.into())); + } + if let Ok(f) = token.parse::() { + if let Some(n) = serde_json::Number::from_f64(f) { + return Ok(serde_json::Value::Number(n)); + } + } + + // 布尔字面量 + if token == "true" { + return Ok(serde_json::Value::Bool(true)); + } + if token == "false" { + return Ok(serde_json::Value::Bool(false)); + } + + // 变量引用 + if let Some(val) = variables.get(token) { + return Ok(val.clone()); + } + + Err(WorkflowError::ExpressionError(format!( + "未知的变量或值: '{}'", + token + ))) + } + + /// 比较两个 JSON 值。 + fn compare( + left: &serde_json::Value, + right: &serde_json::Value, + op: &str, + ) -> WorkflowResult { + match op { + "==" => Ok(Self::values_equal(left, right)), + "!=" => Ok(!Self::values_equal(left, right)), + ">" => Ok(Self::values_compare(left, right)? == std::cmp::Ordering::Greater), + ">=" => Ok(Self::values_compare(left, right)? != std::cmp::Ordering::Less), + "<" => Ok(Self::values_compare(left, right)? == std::cmp::Ordering::Less), + "<=" => Ok(Self::values_compare(left, right)? 
!= std::cmp::Ordering::Greater), + _ => Err(WorkflowError::ExpressionError(format!( + "不支持的比较运算符: '{}'", + op + ))), + } + } + + fn values_equal(left: &serde_json::Value, right: &serde_json::Value) -> bool { + // 数值比较:允许整数和浮点数互比 + if left.is_number() && right.is_number() { + return left.as_f64() == right.as_f64(); + } + left == right + } + + fn values_compare( + left: &serde_json::Value, + right: &serde_json::Value, + ) -> WorkflowResult { + if left.is_number() && right.is_number() { + let l = left.as_f64().unwrap_or(0.0); + let r = right.as_f64().unwrap_or(0.0); + return Ok(l.partial_cmp(&r).unwrap_or(std::cmp::Ordering::Equal)); + } + + if let (Some(l), Some(r)) = (left.as_str(), right.as_str()) { + return Ok(l.cmp(r)); + } + + Err(WorkflowError::ExpressionError(format!( + "无法比较 {:?} 和 {:?}", + left, right + ))) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + fn make_vars() -> HashMap { + let mut m = HashMap::new(); + m.insert("amount".to_string(), json!(1500)); + m.insert("status".to_string(), json!("approved")); + m.insert("score".to_string(), json!(85)); + m.insert("name".to_string(), json!("Alice")); + m.insert("active".to_string(), json!(true)); + m + } + + #[test] + fn test_number_greater_than() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("amount > 1000", &vars).unwrap()); + assert!(!ExpressionEvaluator::eval("amount > 2000", &vars).unwrap()); + } + + #[test] + fn test_number_less_than() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("amount < 2000", &vars).unwrap()); + assert!(!ExpressionEvaluator::eval("amount < 1000", &vars).unwrap()); + } + + #[test] + fn test_number_equals() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("amount == 1500", &vars).unwrap()); + assert!(!ExpressionEvaluator::eval("amount == 1000", &vars).unwrap()); + } + + #[test] + fn test_string_equals() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("status == \"approved\"", 
&vars).unwrap()); + assert!(!ExpressionEvaluator::eval("status == \"rejected\"", &vars).unwrap()); + } + + #[test] + fn test_string_not_equals() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("status != \"rejected\"", &vars).unwrap()); + } + + #[test] + fn test_greater_or_equal() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("amount >= 1500", &vars).unwrap()); + assert!(ExpressionEvaluator::eval("amount >= 1000", &vars).unwrap()); + assert!(!ExpressionEvaluator::eval("amount >= 2000", &vars).unwrap()); + } + + #[test] + fn test_logical_and() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("amount > 1000 && score > 80", &vars).unwrap()); + assert!(!ExpressionEvaluator::eval("amount > 2000 && score > 80", &vars).unwrap()); + } + + #[test] + fn test_logical_or() { + let vars = make_vars(); + assert!(ExpressionEvaluator::eval("amount > 2000 || score > 80", &vars).unwrap()); + assert!(!ExpressionEvaluator::eval("amount > 2000 || score > 90", &vars).unwrap()); + } + + #[test] + fn test_unknown_variable() { + let vars = make_vars(); + let result = ExpressionEvaluator::eval("unknown > 0", &vars); + assert!(result.is_err()); + } + + #[test] + fn test_invalid_expression() { + let vars = make_vars(); + let result = ExpressionEvaluator::eval("justavariable", &vars); + assert!(result.is_err()); + } +} diff --git a/crates/erp-workflow/src/engine/mod.rs b/crates/erp-workflow/src/engine/mod.rs new file mode 100644 index 0000000..6bbe114 --- /dev/null +++ b/crates/erp-workflow/src/engine/mod.rs @@ -0,0 +1,5 @@ +pub mod expression; +pub mod executor; +pub mod model; +pub mod parser; +pub mod timeout; diff --git a/crates/erp-workflow/src/engine/model.rs b/crates/erp-workflow/src/engine/model.rs new file mode 100644 index 0000000..98dfd80 --- /dev/null +++ b/crates/erp-workflow/src/engine/model.rs @@ -0,0 +1,122 @@ +use std::collections::HashMap; + +use crate::dto::{EdgeDef, NodeDef, NodeType}; + +/// 内存中的流程图模型,用于执行引擎。 
+#[derive(Debug, Clone)] +pub struct FlowGraph { + /// node_id → FlowNode + pub nodes: HashMap, + /// edge_id → FlowEdge + pub edges: HashMap, + /// node_id → 从该节点出发的边列表 + pub outgoing: HashMap>, + /// node_id → 到达该节点的边列表 + pub incoming: HashMap>, + /// StartEvent 的 node_id + pub start_node_id: Option, + /// 所有 EndEvent 的 node_id + pub end_node_ids: Vec, +} + +/// 内存中的节点模型。 +#[derive(Debug, Clone)] +pub struct FlowNode { + pub id: String, + pub node_type: NodeType, + pub name: String, + pub assignee_id: Option, + pub candidate_groups: Option>, + pub service_type: Option, +} + +/// 内存中的边模型。 +#[derive(Debug, Clone)] +pub struct FlowEdge { + pub id: String, + pub source: String, + pub target: String, + pub condition: Option, + pub label: Option, +} + +impl FlowGraph { + /// 从 DTO 节点和边列表构建 FlowGraph。 + pub fn build(nodes: &[NodeDef], edges: &[EdgeDef]) -> Self { + let mut graph = FlowGraph { + nodes: HashMap::new(), + edges: HashMap::new(), + outgoing: HashMap::new(), + incoming: HashMap::new(), + start_node_id: None, + end_node_ids: Vec::new(), + }; + + for n in nodes { + let flow_node = FlowNode { + id: n.id.clone(), + node_type: n.node_type.clone(), + name: n.name.clone(), + assignee_id: n.assignee_id, + candidate_groups: n.candidate_groups.clone(), + service_type: n.service_type.clone(), + }; + + if n.node_type == NodeType::StartEvent { + graph.start_node_id = Some(n.id.clone()); + } + if n.node_type == NodeType::EndEvent { + graph.end_node_ids.push(n.id.clone()); + } + + graph.nodes.insert(n.id.clone(), flow_node); + graph.outgoing.insert(n.id.clone(), Vec::new()); + graph.incoming.insert(n.id.clone(), Vec::new()); + } + + for e in edges { + graph.edges.insert(e.id.clone(), FlowEdge { + id: e.id.clone(), + source: e.source.clone(), + target: e.target.clone(), + condition: e.condition.clone(), + label: e.label.clone(), + }); + + if let Some(out) = graph.outgoing.get_mut(&e.source) { + out.push(e.id.clone()); + } + if let Some(inc) = 
graph.incoming.get_mut(&e.target) { + inc.push(e.id.clone()); + } + } + + graph + } + + /// 获取节点的出边。 + pub fn get_outgoing_edges(&self, node_id: &str) -> Vec<&FlowEdge> { + self.outgoing + .get(node_id) + .map(|edge_ids| { + edge_ids + .iter() + .filter_map(|eid| self.edges.get(eid)) + .collect() + }) + .unwrap_or_default() + } + + /// 获取节点的入边。 + pub fn get_incoming_edges(&self, node_id: &str) -> Vec<&FlowEdge> { + self.incoming + .get(node_id) + .map(|edge_ids| { + edge_ids + .iter() + .filter_map(|eid| self.edges.get(eid)) + .collect() + }) + .unwrap_or_default() + } +} diff --git a/crates/erp-workflow/src/engine/parser.rs b/crates/erp-workflow/src/engine/parser.rs new file mode 100644 index 0000000..d28ea0d --- /dev/null +++ b/crates/erp-workflow/src/engine/parser.rs @@ -0,0 +1,258 @@ +use crate::dto::{EdgeDef, NodeDef, NodeType}; +use crate::engine::model::FlowGraph; +use crate::error::{WorkflowError, WorkflowResult}; + +/// 解析节点和边列表为 FlowGraph 并验证合法性。 +pub fn parse_and_validate(nodes: &[NodeDef], edges: &[EdgeDef]) -> WorkflowResult { + // 基本检查:至少有一个节点 + if nodes.is_empty() { + return Err(WorkflowError::InvalidDiagram("流程图不能为空".to_string())); + } + + // 检查恰好 1 个 StartEvent + let start_count = nodes.iter().filter(|n| n.node_type == NodeType::StartEvent).count(); + if start_count == 0 { + return Err(WorkflowError::InvalidDiagram( + "流程图必须包含一个开始事件".to_string(), + )); + } + if start_count > 1 { + return Err(WorkflowError::InvalidDiagram( + "流程图只能包含一个开始事件".to_string(), + )); + } + + // 检查至少 1 个 EndEvent + let end_count = nodes.iter().filter(|n| n.node_type == NodeType::EndEvent).count(); + if end_count == 0 { + return Err(WorkflowError::InvalidDiagram( + "流程图必须包含至少一个结束事件".to_string(), + )); + } + + // 检查节点 ID 唯一性 + let node_ids: std::collections::HashSet<&str> = + nodes.iter().map(|n| n.id.as_str()).collect(); + if node_ids.len() != nodes.len() { + return Err(WorkflowError::InvalidDiagram( + "节点 ID 不能重复".to_string(), + )); + } + + // 检查边引用的节点存在 + for e in edges { + 
if !node_ids.contains(e.source.as_str()) { + return Err(WorkflowError::InvalidDiagram(format!( + "连线 {} 的源节点 {} 不存在", + e.id, e.source + ))); + } + if !node_ids.contains(e.target.as_str()) { + return Err(WorkflowError::InvalidDiagram(format!( + "连线 {} 的目标节点 {} 不存在", + e.id, e.target + ))); + } + } + + // 构建图 + let graph = FlowGraph::build(nodes, edges); + + // 检查 StartEvent 没有入边 + if let Some(start_id) = &graph.start_node_id { + if !graph.get_incoming_edges(start_id).is_empty() { + return Err(WorkflowError::InvalidDiagram( + "开始事件不能有入边".to_string(), + )); + } + if graph.get_outgoing_edges(start_id).is_empty() { + return Err(WorkflowError::InvalidDiagram( + "开始事件必须有出边".to_string(), + )); + } + } + + // 检查 EndEvent 没有出边 + for end_id in &graph.end_node_ids { + if !graph.get_outgoing_edges(end_id).is_empty() { + return Err(WorkflowError::InvalidDiagram( + "结束事件不能有出边".to_string(), + )); + } + } + + // 检查网关至少有一个入边和一个出边(排除 start/end) + for node in nodes { + match &node.node_type { + NodeType::ExclusiveGateway | NodeType::ParallelGateway => { + let inc = graph.get_incoming_edges(&node.id); + let out = graph.get_outgoing_edges(&node.id); + if inc.is_empty() { + return Err(WorkflowError::InvalidDiagram(format!( + "网关 '{}' 必须有至少一条入边", + node.name + ))); + } + if out.is_empty() { + return Err(WorkflowError::InvalidDiagram(format!( + "网关 '{}' 必须有至少一条出边", + node.name + ))); + } + // 排他网关的出边应该有条件(第一条可以无条件作为默认分支) + if node.node_type == NodeType::ExclusiveGateway && out.len() > 1 { + let with_condition: Vec<_> = out.iter().filter(|e| e.condition.is_some()).collect(); + if with_condition.is_empty() { + return Err(WorkflowError::InvalidDiagram(format!( + "排他网关 '{}' 有多条出边但没有条件表达式", + node.name + ))); + } + } + } + _ => {} + } + } + + Ok(graph) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::dto::NodePosition; + + fn make_start() -> NodeDef { + NodeDef { + id: "start".to_string(), + node_type: NodeType::StartEvent, + name: "开始".to_string(), + assignee_id: None, + 
candidate_groups: None, + service_type: None, + position: Some(NodePosition { x: 100.0, y: 100.0 }), + } + } + + fn make_end() -> NodeDef { + NodeDef { + id: "end".to_string(), + node_type: NodeType::EndEvent, + name: "结束".to_string(), + assignee_id: None, + candidate_groups: None, + service_type: None, + position: Some(NodePosition { x: 100.0, y: 300.0 }), + } + } + + fn make_user_task(id: &str, name: &str) -> NodeDef { + NodeDef { + id: id.to_string(), + node_type: NodeType::UserTask, + name: name.to_string(), + assignee_id: None, + candidate_groups: None, + service_type: None, + position: None, + } + } + + fn make_edge(id: &str, source: &str, target: &str) -> EdgeDef { + EdgeDef { + id: id.to_string(), + source: source.to_string(), + target: target.to_string(), + condition: None, + label: None, + } + } + + #[test] + fn test_valid_linear_flow() { + let nodes = vec![make_start(), make_user_task("task1", "审批"), make_end()]; + let edges = vec![ + make_edge("e1", "start", "task1"), + make_edge("e2", "task1", "end"), + ]; + let result = parse_and_validate(&nodes, &edges); + assert!(result.is_ok()); + let graph = result.unwrap(); + assert_eq!(graph.start_node_id, Some("start".to_string())); + assert_eq!(graph.end_node_ids, vec!["end".to_string()]); + } + + #[test] + fn test_no_start_event() { + let nodes = vec![make_user_task("task1", "审批"), make_end()]; + let edges = vec![make_edge("e1", "task1", "end")]; + let result = parse_and_validate(&nodes, &edges); + assert!(result.is_err()); + let msg = result.unwrap_err().to_string(); + assert!(msg.contains("开始事件")); + } + + #[test] + fn test_no_end_event() { + let nodes = vec![make_start(), make_user_task("task1", "审批")]; + let edges = vec![make_edge("e1", "start", "task1")]; + let result = parse_and_validate(&nodes, &edges); + assert!(result.is_err()); + let msg = result.unwrap_err().to_string(); + assert!(msg.contains("结束事件")); + } + + #[test] + fn test_duplicate_node_id() { + let nodes = vec![ + make_start(), + NodeDef { 
+ id: "start".to_string(), // 重复 ID + node_type: NodeType::EndEvent, + name: "结束".to_string(), + assignee_id: None, + candidate_groups: None, + service_type: None, + position: None, + }, + ]; + let edges = vec![]; + let result = parse_and_validate(&nodes, &edges); + assert!(result.is_err()); + } + + #[test] + fn test_end_event_with_outgoing() { + let nodes = vec![make_start(), make_end()]; + let edges = vec![ + make_edge("e1", "start", "end"), + make_edge("e2", "end", "start"), // 结束事件有出边 + ]; + let result = parse_and_validate(&nodes, &edges); + assert!(result.is_err()); + } + + #[test] + fn test_exclusive_gateway_without_conditions() { + let nodes = vec![ + make_start(), + NodeDef { + id: "gw1".to_string(), + node_type: NodeType::ExclusiveGateway, + name: "判断".to_string(), + assignee_id: None, + candidate_groups: None, + service_type: None, + position: None, + }, + make_end(), + ]; + let edges = vec![ + make_edge("e1", "start", "gw1"), + make_edge("e2", "gw1", "end"), + make_edge("e3", "gw1", "end"), // 两条出边无条件 + ]; + let result = parse_and_validate(&nodes, &edges); + assert!(result.is_err()); + } +} diff --git a/crates/erp-workflow/src/engine/timeout.rs b/crates/erp-workflow/src/engine/timeout.rs new file mode 100644 index 0000000..54d979d --- /dev/null +++ b/crates/erp-workflow/src/engine/timeout.rs @@ -0,0 +1,36 @@ +// 超时检查框架 — 占位实现 +// +// 当前版本仅提供接口定义,实际超时检查逻辑将在后续迭代中实现。 +// Task 表的 due_date 字段已支持设置超时时间。 + +use chrono::Utc; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter}; +use uuid::Uuid; + +use crate::entity::task; +use crate::error::WorkflowResult; + +/// 超时检查服务(占位)。 +pub struct TimeoutChecker; + +impl TimeoutChecker { + /// 查询已超时但未完成的任务列表。 + /// + /// 返回 due_date < now 且 status = 'pending' 的任务 ID。 + pub async fn find_overdue_tasks( + tenant_id: Uuid, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult> { + let now = Utc::now(); + let overdue = task::Entity::find() + .filter(task::Column::TenantId.eq(tenant_id)) + 
.filter(task::Column::Status.eq("pending")) + .filter(task::Column::DueDate.lt(now)) + .filter(task::Column::DeletedAt.is_null()) + .all(db) + .await + .map_err(|e| crate::error::WorkflowError::Validation(e.to_string()))?; + + Ok(overdue.iter().map(|t| t.id).collect()) + } +} diff --git a/crates/erp-workflow/src/entity/mod.rs b/crates/erp-workflow/src/entity/mod.rs new file mode 100644 index 0000000..c268c74 --- /dev/null +++ b/crates/erp-workflow/src/entity/mod.rs @@ -0,0 +1,5 @@ +pub mod process_definition; +pub mod process_instance; +pub mod token; +pub mod task; +pub mod process_variable; diff --git a/crates/erp-workflow/src/entity/process_definition.rs b/crates/erp-workflow/src/entity/process_definition.rs new file mode 100644 index 0000000..d0b13f8 --- /dev/null +++ b/crates/erp-workflow/src/entity/process_definition.rs @@ -0,0 +1,40 @@ +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "process_definitions")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub tenant_id: Uuid, + pub name: String, + pub key: String, + pub version: i32, + #[serde(skip_serializing_if = "Option::is_none")] + pub category: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + pub nodes: serde_json::Value, + pub edges: serde_json::Value, + pub status: String, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, + pub created_by: Uuid, + pub updated_by: Uuid, + #[serde(skip_serializing_if = "Option::is_none")] + pub deleted_at: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::process_instance::Entity")] + ProcessInstance, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::ProcessInstance.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git 
a/crates/erp-workflow/src/entity/process_instance.rs b/crates/erp-workflow/src/entity/process_instance.rs new file mode 100644 index 0000000..49b052f --- /dev/null +++ b/crates/erp-workflow/src/entity/process_instance.rs @@ -0,0 +1,59 @@ +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "process_instances")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub tenant_id: Uuid, + pub definition_id: Uuid, + #[serde(skip_serializing_if = "Option::is_none")] + pub business_key: Option, + pub status: String, + pub started_by: Uuid, + pub started_at: DateTimeUtc, + #[serde(skip_serializing_if = "Option::is_none")] + pub completed_at: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, + pub created_by: Uuid, + pub updated_by: Uuid, + #[serde(skip_serializing_if = "Option::is_none")] + pub deleted_at: Option, + pub version: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::process_definition::Entity", + from = "Column::DefinitionId", + to = "super::process_definition::Column::Id" + )] + ProcessDefinition, + #[sea_orm(has_many = "super::token::Entity")] + Token, + #[sea_orm(has_many = "super::task::Entity")] + Task, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::ProcessDefinition.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Token.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Task.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/erp-workflow/src/entity/process_variable.rs b/crates/erp-workflow/src/entity/process_variable.rs new file mode 100644 index 0000000..8febcb0 --- /dev/null +++ b/crates/erp-workflow/src/entity/process_variable.rs @@ -0,0 +1,39 @@ +use sea_orm::entity::prelude::*; +use 
serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "process_variables")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub tenant_id: Uuid, + pub instance_id: Uuid, + pub name: String, + pub var_type: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_string: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_number: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_boolean: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub value_date: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::process_instance::Entity", + from = "Column::InstanceId", + to = "super::process_instance::Column::Id" + )] + ProcessInstance, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::ProcessInstance.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/erp-workflow/src/entity/task.rs b/crates/erp-workflow/src/entity/task.rs new file mode 100644 index 0000000..ddb8c2d --- /dev/null +++ b/crates/erp-workflow/src/entity/task.rs @@ -0,0 +1,65 @@ +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "tasks")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub tenant_id: Uuid, + pub instance_id: Uuid, + pub token_id: Uuid, + pub node_id: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub node_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub assignee_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub candidate_groups: Option, + pub status: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub outcome: Option, + #[serde(skip_serializing_if = 
"Option::is_none")] + pub form_data: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub due_date: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub completed_at: Option, + pub created_at: DateTimeUtc, + pub updated_at: DateTimeUtc, + pub created_by: Uuid, + pub updated_by: Uuid, + #[serde(skip_serializing_if = "Option::is_none")] + pub deleted_at: Option, + pub version: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::process_instance::Entity", + from = "Column::InstanceId", + to = "super::process_instance::Column::Id" + )] + ProcessInstance, + #[sea_orm( + belongs_to = "super::token::Entity", + from = "Column::TokenId", + to = "super::token::Column::Id" + )] + Token, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::ProcessInstance.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Token.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/erp-workflow/src/entity/token.rs b/crates/erp-workflow/src/entity/token.rs new file mode 100644 index 0000000..4075a7d --- /dev/null +++ b/crates/erp-workflow/src/entity/token.rs @@ -0,0 +1,34 @@ +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)] +#[sea_orm(table_name = "tokens")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: Uuid, + pub tenant_id: Uuid, + pub instance_id: Uuid, + pub node_id: String, + pub status: String, + pub created_at: DateTimeUtc, + #[serde(skip_serializing_if = "Option::is_none")] + pub consumed_at: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::process_instance::Entity", + from = "Column::InstanceId", + to = "super::process_instance::Column::Id" + )] + ProcessInstance, +} + +impl Related for Entity { + fn to() 
-> RelationDef {
        Relation::ProcessInstance.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// ===== file: crates/erp-workflow/src/error.rs (new) =====
use erp_core::error::AppError;

/// Workflow module error types.
#[derive(Debug, thiserror::Error)]
pub enum WorkflowError {
    #[error("验证失败: {0}")]
    Validation(String),

    #[error("资源未找到: {0}")]
    NotFound(String),

    #[error("流程定义已存在: {0}")]
    DuplicateDefinition(String),

    #[error("流程图无效: {0}")]
    InvalidDiagram(String),

    #[error("流程状态错误: {0}")]
    InvalidState(String),

    #[error("表达式求值失败: {0}")]
    ExpressionError(String),
}

/// Unwrap a sea-orm transaction error so `?` works inside
/// `db.transaction(...)` closures returning `WorkflowError`.
impl From<sea_orm::TransactionError<WorkflowError>> for WorkflowError {
    fn from(err: sea_orm::TransactionError<WorkflowError>) -> Self {
        match err {
            // NOTE(review): a connection failure is a server-side fault but is
            // surfaced here as Validation (→ 4xx via AppError). Consider a
            // dedicated Internal/Database variant — confirm with API conventions.
            sea_orm::TransactionError::Connection(err) => {
                WorkflowError::Validation(err.to_string())
            }
            sea_orm::TransactionError::Transaction(inner) => inner,
        }
    }
}

/// Map module errors onto the application-wide error type.
impl From<WorkflowError> for AppError {
    fn from(err: WorkflowError) -> Self {
        match err {
            // All validation-flavoured variants collapse to AppError::Validation.
            WorkflowError::Validation(s)
            | WorkflowError::InvalidDiagram(s)
            | WorkflowError::InvalidState(s)
            | WorkflowError::ExpressionError(s) => AppError::Validation(s),
            WorkflowError::NotFound(s) => AppError::NotFound(s),
            WorkflowError::DuplicateDefinition(s) => AppError::Conflict(s),
        }
    }
}

pub type WorkflowResult<T> = Result<T, WorkflowError>;

// ===== file: crates/erp-workflow/src/handler/definition_handler.rs (new) =====
use axum::Extension;
use axum::extract::{FromRef, Path, Query, State};
use axum::response::Json;
use validator::Validate;

use erp_core::error::AppError;
use erp_core::rbac::require_permission;
use
erp_core::types::{ApiResponse, PaginatedResponse, Pagination, TenantContext}; +use uuid::Uuid; + +use crate::dto::{CreateProcessDefinitionReq, ProcessDefinitionResp, UpdateProcessDefinitionReq}; +use crate::service::definition_service::DefinitionService; +use crate::workflow_state::WorkflowState; + +/// GET /api/v1/workflow/definitions +pub async fn list_definitions( + State(state): State, + Extension(ctx): Extension, + Query(pagination): Query, +) -> Result>>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:list")?; + + let (defs, total) = DefinitionService::list(ctx.tenant_id, &pagination, &state.db).await?; + + let page = pagination.page.unwrap_or(1); + let page_size = pagination.limit(); + let total_pages = (total + page_size - 1) / page_size; + + Ok(Json(ApiResponse::ok(PaginatedResponse { + data: defs, + total, + page, + page_size, + total_pages, + }))) +} + +/// POST /api/v1/workflow/definitions +pub async fn create_definition( + State(state): State, + Extension(ctx): Extension, + Json(req): Json, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:create")?; + req.validate() + .map_err(|e| AppError::Validation(e.to_string()))?; + + let resp = DefinitionService::create( + ctx.tenant_id, + ctx.user_id, + &req, + &state.db, + &state.event_bus, + ) + .await?; + + Ok(Json(ApiResponse::ok(resp))) +} + +/// GET /api/v1/workflow/definitions/{id} +pub async fn get_definition( + State(state): State, + Extension(ctx): Extension, + Path(id): Path, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:read")?; + + let resp = DefinitionService::get_by_id(id, ctx.tenant_id, &state.db).await?; + Ok(Json(ApiResponse::ok(resp))) +} + +/// PUT /api/v1/workflow/definitions/{id} +pub async fn update_definition( + State(state): State, + 
Extension(ctx): Extension, + Path(id): Path, + Json(req): Json, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:update")?; + + let resp = + DefinitionService::update(id, ctx.tenant_id, ctx.user_id, &req, &state.db).await?; + Ok(Json(ApiResponse::ok(resp))) +} + +/// POST /api/v1/workflow/definitions/{id}/publish +pub async fn publish_definition( + State(state): State, + Extension(ctx): Extension, + Path(id): Path, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:publish")?; + + let resp = DefinitionService::publish( + id, + ctx.tenant_id, + ctx.user_id, + &state.db, + &state.event_bus, + ) + .await?; + + Ok(Json(ApiResponse::ok(resp))) +} diff --git a/crates/erp-workflow/src/handler/instance_handler.rs b/crates/erp-workflow/src/handler/instance_handler.rs new file mode 100644 index 0000000..5e95c21 --- /dev/null +++ b/crates/erp-workflow/src/handler/instance_handler.rs @@ -0,0 +1,112 @@ +use axum::Extension; +use axum::extract::{FromRef, Path, Query, State}; +use axum::response::Json; + +use erp_core::error::AppError; +use erp_core::rbac::require_permission; +use erp_core::types::{ApiResponse, PaginatedResponse, Pagination, TenantContext}; +use uuid::Uuid; + +use crate::dto::{ProcessInstanceResp, StartInstanceReq}; +use crate::service::instance_service::InstanceService; +use crate::workflow_state::WorkflowState; + +/// POST /api/v1/workflow/instances +pub async fn start_instance( + State(state): State, + Extension(ctx): Extension, + Json(req): Json, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:start")?; + + let resp = InstanceService::start( + ctx.tenant_id, + ctx.user_id, + &req, + &state.db, + &state.event_bus, + ) + .await?; + + Ok(Json(ApiResponse::ok(resp))) +} + +/// GET /api/v1/workflow/instances +pub 
async fn list_instances( + State(state): State, + Extension(ctx): Extension, + Query(pagination): Query, +) -> Result>>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:list")?; + + let (instances, total) = + InstanceService::list(ctx.tenant_id, &pagination, &state.db).await?; + + let page = pagination.page.unwrap_or(1); + let page_size = pagination.limit(); + let total_pages = (total + page_size - 1) / page_size; + + Ok(Json(ApiResponse::ok(PaginatedResponse { + data: instances, + total, + page, + page_size, + total_pages, + }))) +} + +/// GET /api/v1/workflow/instances/{id} +pub async fn get_instance( + State(state): State, + Extension(ctx): Extension, + Path(id): Path, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:read")?; + + let resp = InstanceService::get_by_id(id, ctx.tenant_id, &state.db).await?; + Ok(Json(ApiResponse::ok(resp))) +} + +/// POST /api/v1/workflow/instances/{id}/suspend +pub async fn suspend_instance( + State(state): State, + Extension(ctx): Extension, + Path(id): Path, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:update")?; + + InstanceService::suspend(id, ctx.tenant_id, ctx.user_id, &state.db).await?; + Ok(Json(ApiResponse::ok(()))) +} + +/// POST /api/v1/workflow/instances/{id}/terminate +pub async fn terminate_instance( + State(state): State, + Extension(ctx): Extension, + Path(id): Path, +) -> Result>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:update")?; + + InstanceService::terminate(id, ctx.tenant_id, ctx.user_id, &state.db).await?; + Ok(Json(ApiResponse::ok(()))) +} diff --git a/crates/erp-workflow/src/handler/mod.rs b/crates/erp-workflow/src/handler/mod.rs new file mode 100644 index 0000000..72a6d01 --- /dev/null 
+++ b/crates/erp-workflow/src/handler/mod.rs @@ -0,0 +1,3 @@ +pub mod definition_handler; +pub mod instance_handler; +pub mod task_handler; diff --git a/crates/erp-workflow/src/handler/task_handler.rs b/crates/erp-workflow/src/handler/task_handler.rs new file mode 100644 index 0000000..0c664d2 --- /dev/null +++ b/crates/erp-workflow/src/handler/task_handler.rs @@ -0,0 +1,113 @@ +use axum::Extension; +use axum::extract::{FromRef, Path, Query, State}; +use axum::response::Json; + +use erp_core::error::AppError; +use erp_core::rbac::require_permission; +use erp_core::types::{ApiResponse, PaginatedResponse, Pagination, TenantContext}; +use uuid::Uuid; + +use crate::dto::{CompleteTaskReq, DelegateTaskReq, TaskResp}; +use crate::service::task_service::TaskService; +use crate::workflow_state::WorkflowState; + +/// GET /api/v1/workflow/tasks/pending +pub async fn list_pending_tasks( + State(state): State, + Extension(ctx): Extension, + Query(pagination): Query, +) -> Result>>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:approve")?; + + let (tasks, total) = + TaskService::list_pending(ctx.tenant_id, ctx.user_id, &pagination, &state.db).await?; + + let page = pagination.page.unwrap_or(1); + let page_size = pagination.limit(); + let total_pages = (total + page_size - 1) / page_size; + + Ok(Json(ApiResponse::ok(PaginatedResponse { + data: tasks, + total, + page, + page_size, + total_pages, + }))) +} + +/// GET /api/v1/workflow/tasks/completed +pub async fn list_completed_tasks( + State(state): State, + Extension(ctx): Extension, + Query(pagination): Query, +) -> Result>>, AppError> +where + WorkflowState: FromRef, + S: Clone + Send + Sync + 'static, +{ + require_permission(&ctx, "workflow:approve")?; + + let (tasks, total) = + TaskService::list_completed(ctx.tenant_id, ctx.user_id, &pagination, &state.db).await?; + + let page = pagination.page.unwrap_or(1); + let page_size = pagination.limit(); + let 
total_pages = (total + page_size - 1) / page_size;

    Ok(Json(ApiResponse::ok(PaginatedResponse {
        data: tasks,
        total,
        page,
        page_size,
        total_pages,
    })))
}

/// POST /api/v1/workflow/tasks/{id}/complete
///
/// Completes a task with the submitted outcome/form data; the engine then
/// advances the owning process instance.
pub async fn complete_task<S>(
    State(state): State<WorkflowState>,
    Extension(ctx): Extension<TenantContext>,
    Path(id): Path<Uuid>,
    Json(req): Json<CompleteTaskReq>,
) -> Result<Json<ApiResponse<TaskResp>>, AppError>
where
    WorkflowState: FromRef<S>,
    S: Clone + Send + Sync + 'static,
{
    require_permission(&ctx, "workflow:approve")?;

    let resp = TaskService::complete(
        id,
        ctx.tenant_id,
        ctx.user_id,
        &req,
        &state.db,
        &state.event_bus,
    )
    .await?;

    Ok(Json(ApiResponse::ok(resp)))
}

/// POST /api/v1/workflow/tasks/{id}/delegate
///
/// Reassigns a task to another user. Guarded by the distinct
/// `workflow:delegate` permission (not `workflow:approve`).
pub async fn delegate_task<S>(
    State(state): State<WorkflowState>,
    Extension(ctx): Extension<TenantContext>,
    Path(id): Path<Uuid>,
    Json(req): Json<DelegateTaskReq>,
) -> Result<Json<ApiResponse<TaskResp>>, AppError>
where
    WorkflowState: FromRef<S>,
    S: Clone + Send + Sync + 'static,
{
    require_permission(&ctx, "workflow:delegate")?;

    let resp =
        TaskService::delegate(id, ctx.tenant_id, ctx.user_id, &req, &state.db).await?;

    Ok(Json(ApiResponse::ok(resp)))
}

// ===== file: crates/erp-workflow/src/lib.rs =====
// erp-workflow: workflow engine module (Phase 4)
//
// Provides process definitions, process-instance management, task approval,
// a token-driven execution engine, and support for the visual designer.

pub mod dto;
pub mod engine;
pub mod entity;
pub mod error;
pub mod handler;
pub mod module;
pub mod service;
pub mod workflow_state;

pub use module::WorkflowModule;
pub use workflow_state::WorkflowState;

// ===== file: crates/erp-workflow/src/module.rs (new) =====
use axum::Router;
use axum::routing::{get, post};
use uuid::Uuid;

use erp_core::error::AppResult;
use erp_core::events::EventBus;
use erp_core::module::ErpModule;

use
crate::handler::{ + definition_handler, instance_handler, task_handler, +}; + +/// Workflow module implementing the `ErpModule` trait. +/// +/// Manages workflow definitions, process instances, tasks, +/// and the token-driven execution engine. +pub struct WorkflowModule; + +impl WorkflowModule { + pub fn new() -> Self { + Self + } + + /// Build protected (authenticated) routes for the workflow module. + pub fn protected_routes() -> Router + where + crate::workflow_state::WorkflowState: axum::extract::FromRef, + S: Clone + Send + Sync + 'static, + { + Router::new() + // Definition routes + .route( + "/workflow/definitions", + get(definition_handler::list_definitions) + .post(definition_handler::create_definition), + ) + .route( + "/workflow/definitions/{id}", + get(definition_handler::get_definition) + .put(definition_handler::update_definition), + ) + .route( + "/workflow/definitions/{id}/publish", + post(definition_handler::publish_definition), + ) + // Instance routes + .route( + "/workflow/instances", + post(instance_handler::start_instance) + .get(instance_handler::list_instances), + ) + .route( + "/workflow/instances/{id}", + get(instance_handler::get_instance), + ) + .route( + "/workflow/instances/{id}/suspend", + post(instance_handler::suspend_instance), + ) + .route( + "/workflow/instances/{id}/terminate", + post(instance_handler::terminate_instance), + ) + // Task routes + .route( + "/workflow/tasks/pending", + get(task_handler::list_pending_tasks), + ) + .route( + "/workflow/tasks/completed", + get(task_handler::list_completed_tasks), + ) + .route( + "/workflow/tasks/{id}/complete", + post(task_handler::complete_task), + ) + .route( + "/workflow/tasks/{id}/delegate", + post(task_handler::delegate_task), + ) + } +} + +impl Default for WorkflowModule { + fn default() -> Self { + Self::new() + } +} + +#[async_trait::async_trait] +impl ErpModule for WorkflowModule { + fn name(&self) -> &str { + "workflow" + } + + fn version(&self) -> &str { + 
env!("CARGO_PKG_VERSION") + } + + fn dependencies(&self) -> Vec<&str> { + vec!["auth"] + } + + fn register_routes(&self, router: Router) -> Router { + router + } + + fn register_event_handlers(&self, _bus: &EventBus) {} + + async fn on_tenant_created(&self, _tenant_id: Uuid) -> AppResult<()> { + Ok(()) + } + + async fn on_tenant_deleted(&self, _tenant_id: Uuid) -> AppResult<()> { + Ok(()) + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } +} diff --git a/crates/erp-workflow/src/service/definition_service.rs b/crates/erp-workflow/src/service/definition_service.rs new file mode 100644 index 0000000..f0c1251 --- /dev/null +++ b/crates/erp-workflow/src/service/definition_service.rs @@ -0,0 +1,269 @@ +use chrono::Utc; +use sea_orm::{ + ActiveModelTrait, ColumnTrait, EntityTrait, PaginatorTrait, QueryFilter, Set, +}; +use uuid::Uuid; + +use crate::dto::{ + CreateProcessDefinitionReq, ProcessDefinitionResp, UpdateProcessDefinitionReq, +}; +use crate::engine::parser; +use crate::entity::process_definition; +use crate::error::{WorkflowError, WorkflowResult}; +use erp_core::events::EventBus; +use erp_core::types::Pagination; + +/// 流程定义 CRUD 服务。 +pub struct DefinitionService; + +impl DefinitionService { + /// 分页查询流程定义列表。 + pub async fn list( + tenant_id: Uuid, + pagination: &Pagination, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult<(Vec, u64)> { + let paginator = process_definition::Entity::find() + .filter(process_definition::Column::TenantId.eq(tenant_id)) + .filter(process_definition::Column::DeletedAt.is_null()) + .paginate(db, pagination.limit()); + + let total = paginator + .num_items() + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let page_index = pagination.page.unwrap_or(1).saturating_sub(1) as u64; + let models = paginator + .fetch_page(page_index) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let resps: Vec = models.iter().map(Self::model_to_resp).collect(); + Ok((resps, total)) + } + + /// 
获取单个流程定义。 + pub async fn get_by_id( + id: Uuid, + tenant_id: Uuid, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult { + let model = process_definition::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|m| m.tenant_id == tenant_id && m.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("流程定义不存在: {id}")))?; + + Ok(Self::model_to_resp(&model)) + } + + /// 创建流程定义。 + pub async fn create( + tenant_id: Uuid, + operator_id: Uuid, + req: &CreateProcessDefinitionReq, + db: &sea_orm::DatabaseConnection, + event_bus: &EventBus, + ) -> WorkflowResult { + // 验证流程图合法性 + parser::parse_and_validate(&req.nodes, &req.edges)?; + + let now = Utc::now(); + let id = Uuid::now_v7(); + let nodes_json = serde_json::to_value(&req.nodes) + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + let edges_json = serde_json::to_value(&req.edges) + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let model = process_definition::ActiveModel { + id: Set(id), + tenant_id: Set(tenant_id), + name: Set(req.name.clone()), + key: Set(req.key.clone()), + version: Set(1), + category: Set(req.category.clone()), + description: Set(req.description.clone()), + nodes: Set(nodes_json), + edges: Set(edges_json), + status: Set("draft".to_string()), + created_at: Set(now), + updated_at: Set(now), + created_by: Set(operator_id), + updated_by: Set(operator_id), + deleted_at: Set(None), + }; + model + .insert(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + event_bus.publish(erp_core::events::DomainEvent::new( + "process_definition.created", + tenant_id, + serde_json::json!({ "definition_id": id, "key": req.key }), + )); + + Ok(ProcessDefinitionResp { + id, + name: req.name.clone(), + key: req.key.clone(), + version: 1, + category: req.category.clone(), + description: req.description.clone(), + nodes: serde_json::to_value(&req.nodes).unwrap_or_default(), + edges: 
serde_json::to_value(&req.edges).unwrap_or_default(), + status: "draft".to_string(), + created_at: now, + updated_at: now, + }) + } + + /// 更新流程定义(仅 draft 状态可编辑)。 + pub async fn update( + id: Uuid, + tenant_id: Uuid, + operator_id: Uuid, + req: &UpdateProcessDefinitionReq, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult { + let model = process_definition::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|m| m.tenant_id == tenant_id && m.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("流程定义不存在: {id}")))?; + + if model.status != "draft" { + return Err(WorkflowError::InvalidState( + "只有 draft 状态的流程定义可以编辑".to_string(), + )); + } + + let mut active: process_definition::ActiveModel = model.into(); + + if let Some(name) = &req.name { + active.name = Set(name.clone()); + } + if let Some(category) = &req.category { + active.category = Set(Some(category.clone())); + } + if let Some(description) = &req.description { + active.description = Set(Some(description.clone())); + } + if let Some(nodes) = &req.nodes { + // 验证新流程图 + if let Some(edges) = &req.edges { + parser::parse_and_validate(nodes, edges)?; + } + let nodes_json = serde_json::to_value(nodes) + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + active.nodes = Set(nodes_json); + } + if let Some(edges) = &req.edges { + let edges_json = serde_json::to_value(edges) + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + active.edges = Set(edges_json); + } + + active.updated_at = Set(Utc::now()); + active.updated_by = Set(operator_id); + + let updated = active + .update(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + Ok(Self::model_to_resp(&updated)) + } + + /// 发布流程定义(draft → published)。 + pub async fn publish( + id: Uuid, + tenant_id: Uuid, + operator_id: Uuid, + db: &sea_orm::DatabaseConnection, + event_bus: &EventBus, + ) -> WorkflowResult { + let model = 
process_definition::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|m| m.tenant_id == tenant_id && m.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("流程定义不存在: {id}")))?; + + if model.status != "draft" { + return Err(WorkflowError::InvalidState( + "只有 draft 状态的流程定义可以发布".to_string(), + )); + } + + // 验证流程图 + let nodes: Vec = serde_json::from_value(model.nodes.clone()) + .map_err(|e| WorkflowError::InvalidDiagram(format!("节点数据无效: {e}")))?; + let edges: Vec = serde_json::from_value(model.edges.clone()) + .map_err(|e| WorkflowError::InvalidDiagram(format!("连线数据无效: {e}")))?; + parser::parse_and_validate(&nodes, &edges)?; + + let mut active: process_definition::ActiveModel = model.into(); + active.status = Set("published".to_string()); + active.updated_at = Set(Utc::now()); + active.updated_by = Set(operator_id); + + let updated = active + .update(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + event_bus.publish(erp_core::events::DomainEvent::new( + "process_definition.published", + tenant_id, + serde_json::json!({ "definition_id": id }), + )); + + Ok(Self::model_to_resp(&updated)) + } + + /// 软删除流程定义。 + pub async fn delete( + id: Uuid, + tenant_id: Uuid, + operator_id: Uuid, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult<()> { + let model = process_definition::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? 
+ .filter(|m| m.tenant_id == tenant_id && m.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("流程定义不存在: {id}")))?; + + let mut active: process_definition::ActiveModel = model.into(); + active.deleted_at = Set(Some(Utc::now())); + active.updated_at = Set(Utc::now()); + active.updated_by = Set(operator_id); + active + .update(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + Ok(()) + } + + fn model_to_resp(m: &process_definition::Model) -> ProcessDefinitionResp { + ProcessDefinitionResp { + id: m.id, + name: m.name.clone(), + key: m.key.clone(), + version: m.version, + category: m.category.clone(), + description: m.description.clone(), + nodes: m.nodes.clone(), + edges: m.edges.clone(), + status: m.status.clone(), + created_at: m.created_at, + updated_at: m.updated_at, + } + } +} diff --git a/crates/erp-workflow/src/service/instance_service.rs b/crates/erp-workflow/src/service/instance_service.rs new file mode 100644 index 0000000..5bded70 --- /dev/null +++ b/crates/erp-workflow/src/service/instance_service.rs @@ -0,0 +1,353 @@ +use std::collections::HashMap; + +use chrono::Utc; +use sea_orm::{ + ActiveModelTrait, ColumnTrait, EntityTrait, PaginatorTrait, QueryFilter, Set, + TransactionTrait, ConnectionTrait, +}; +use uuid::Uuid; + +use crate::dto::{ProcessInstanceResp, StartInstanceReq, TokenResp}; +use crate::engine::executor::FlowExecutor; +use crate::engine::parser; +use crate::entity::{process_definition, process_instance, process_variable, token}; +use crate::error::{WorkflowError, WorkflowResult}; +use erp_core::events::EventBus; +use erp_core::types::Pagination; + +/// 流程实例服务。 +pub struct InstanceService; + +impl InstanceService { + /// 启动流程实例。 + pub async fn start( + tenant_id: Uuid, + operator_id: Uuid, + req: &StartInstanceReq, + db: &sea_orm::DatabaseConnection, + event_bus: &EventBus, + ) -> WorkflowResult { + // 查找流程定义 + let definition = process_definition::Entity::find_by_id(req.definition_id) + .one(db) + 
.await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|d| d.tenant_id == tenant_id && d.deleted_at.is_none()) + .ok_or_else(|| { + WorkflowError::NotFound(format!("流程定义不存在: {}", req.definition_id)) + })?; + + if definition.status != "published" { + return Err(WorkflowError::InvalidState( + "只能启动已发布的流程定义".to_string(), + )); + } + + // 解析流程图 + let nodes: Vec = serde_json::from_value(definition.nodes.clone()) + .map_err(|e| WorkflowError::InvalidDiagram(format!("节点数据无效: {e}")))?; + let edges: Vec = serde_json::from_value(definition.edges.clone()) + .map_err(|e| WorkflowError::InvalidDiagram(format!("连线数据无效: {e}")))?; + let graph = parser::parse_and_validate(&nodes, &edges)?; + + // 准备流程变量 + let mut variables = HashMap::new(); + if let Some(vars) = &req.variables { + for v in vars { + let var_type = v.var_type.as_deref().unwrap_or("string"); + variables.insert(v.name.clone(), v.value.clone()); + } + } + + let instance_id = Uuid::now_v7(); + let now = Utc::now(); + + // 在事务中创建实例、变量和 token + let instance_id_clone = instance_id; + let tenant_id_clone = tenant_id; + let operator_id_clone = operator_id; + let business_key = req.business_key.clone(); + let definition_id = definition.id; + let definition_name = definition.name.clone(); + let vars_to_save = req.variables.clone(); + + db.transaction::<_, (), WorkflowError>(|txn| { + let graph = graph.clone(); + let variables = variables.clone(); + Box::pin(async move { + // 创建流程实例 + let instance = process_instance::ActiveModel { + id: Set(instance_id_clone), + tenant_id: Set(tenant_id_clone), + definition_id: Set(definition_id), + business_key: Set(business_key), + status: Set("running".to_string()), + started_by: Set(operator_id_clone), + started_at: Set(now), + completed_at: Set(None), + created_at: Set(now), + updated_at: Set(now), + created_by: Set(operator_id_clone), + updated_by: Set(operator_id_clone), + deleted_at: Set(None), + version: Set(1), + }; + instance.insert(txn).await.map_err(|e| 
WorkflowError::Validation(e.to_string()))?; + + // 保存初始变量 + if let Some(vars) = vars_to_save { + for v in vars { + Self::save_variable( + instance_id_clone, + tenant_id_clone, + &v.name, + v.var_type.as_deref().unwrap_or("string"), + &v.value, + txn, + ) + .await?; + } + } + + // 启动执行引擎 + FlowExecutor::start( + instance_id_clone, + tenant_id_clone, + &graph, + &variables, + txn, + ) + .await?; + + Ok(()) + }) + }) + .await?; + + event_bus.publish(erp_core::events::DomainEvent::new( + "process_instance.started", + tenant_id, + serde_json::json!({ "instance_id": instance_id, "definition_id": definition.id }), + )); + + // 查询创建后的实例(包含 token) + let instance = process_instance::Entity::find_by_id(instance_id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .ok_or_else(|| WorkflowError::NotFound(format!("流程实例不存在: {instance_id}")))?; + + let active_tokens = Self::get_active_tokens(instance_id, db).await?; + + Ok(ProcessInstanceResp { + id: instance.id, + definition_id: instance.definition_id, + definition_name: Some(definition_name), + business_key: instance.business_key, + status: instance.status, + started_by: instance.started_by, + started_at: instance.started_at, + completed_at: instance.completed_at, + created_at: instance.created_at, + active_tokens, + }) + } + + /// 分页查询流程实例。 + pub async fn list( + tenant_id: Uuid, + pagination: &Pagination, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult<(Vec, u64)> { + let paginator = process_instance::Entity::find() + .filter(process_instance::Column::TenantId.eq(tenant_id)) + .filter(process_instance::Column::DeletedAt.is_null()) + .paginate(db, pagination.limit()); + + let total = paginator + .num_items() + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let page_index = pagination.page.unwrap_or(1).saturating_sub(1) as u64; + let models = paginator + .fetch_page(page_index) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let mut resps = 
Vec::new(); + for m in &models { + let active_tokens = Self::get_active_tokens(m.id, db).await.unwrap_or_default(); + let def_name = process_definition::Entity::find_by_id(m.definition_id) + .one(db) + .await + .ok() + .flatten() + .map(|d| d.name); + resps.push(ProcessInstanceResp { + id: m.id, + definition_id: m.definition_id, + definition_name: def_name, + business_key: m.business_key.clone(), + status: m.status.clone(), + started_by: m.started_by, + started_at: m.started_at, + completed_at: m.completed_at, + created_at: m.created_at, + active_tokens, + }); + } + + Ok((resps, total)) + } + + /// 获取单个流程实例详情。 + pub async fn get_by_id( + id: Uuid, + tenant_id: Uuid, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult { + let instance = process_instance::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|i| i.tenant_id == tenant_id && i.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("流程实例不存在: {id}")))?; + + let def_name = process_definition::Entity::find_by_id(instance.definition_id) + .one(db) + .await + .ok() + .flatten() + .map(|d| d.name); + + let active_tokens = Self::get_active_tokens(id, db).await?; + + Ok(ProcessInstanceResp { + id: instance.id, + definition_id: instance.definition_id, + definition_name: def_name, + business_key: instance.business_key, + status: instance.status, + started_by: instance.started_by, + started_at: instance.started_at, + completed_at: instance.completed_at, + created_at: instance.created_at, + active_tokens, + }) + } + + /// 挂起流程实例。 + pub async fn suspend( + id: Uuid, + tenant_id: Uuid, + operator_id: Uuid, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult<()> { + Self::change_status(id, tenant_id, operator_id, "running", "suspended", db).await + } + + /// 终止流程实例。 + pub async fn terminate( + id: Uuid, + tenant_id: Uuid, + operator_id: Uuid, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult<()> { + Self::change_status(id, 
tenant_id, operator_id, "running", "terminated", db).await
    }

    /// Guarded状态 transition: updates the instance status to `to_status`
    /// only when its current status equals `from_status`.
    async fn change_status(
        id: Uuid,
        tenant_id: Uuid,
        operator_id: Uuid,
        from_status: &str,
        to_status: &str,
        db: &sea_orm::DatabaseConnection,
    ) -> WorkflowResult<()> {
        let instance = process_instance::Entity::find_by_id(id)
            .one(db)
            .await
            .map_err(|e| WorkflowError::Validation(e.to_string()))?
            .filter(|i| i.tenant_id == tenant_id && i.deleted_at.is_none())
            .ok_or_else(|| WorkflowError::NotFound(format!("流程实例不存在: {id}")))?;

        if instance.status != from_status {
            return Err(WorkflowError::InvalidState(format!(
                "流程实例状态不是 {},无法变更为 {}",
                from_status, to_status
            )));
        }

        // NOTE(review): terminate/suspend only flips the instance row; active
        // tokens and pending tasks are left untouched — confirm the executor
        // and task service re-check instance status before acting on them.
        let mut active: process_instance::ActiveModel = instance.into();
        active.status = Set(to_status.to_string());
        active.updated_at = Set(Utc::now());
        active.updated_by = Set(operator_id);
        active
            .update(db)
            .await
            .map_err(|e| WorkflowError::Validation(e.to_string()))?;

        Ok(())
    }

    /// Active tokens of an instance (instance_id is globally unique,
    /// so no tenant filter is applied here).
    pub async fn get_active_tokens(
        instance_id: Uuid,
        db: &sea_orm::DatabaseConnection,
    ) -> WorkflowResult<Vec<TokenResp>> {
        let tokens = token::Entity::find()
            .filter(token::Column::InstanceId.eq(instance_id))
            .filter(token::Column::Status.eq("active"))
            .all(db)
            .await
            .map_err(|e| WorkflowError::Validation(e.to_string()))?;

        Ok(tokens
            .iter()
            .map(|t| TokenResp {
                id: t.id,
                node_id: t.node_id.clone(),
                status: t.status.clone(),
                created_at: t.created_at,
            })
            .collect())
    }

    /// Persist one process variable, storing the value in the typed column
    /// matching `var_type`. Unknown types fall back to the raw JSON text
    /// in `value_string` so nothing is silently lost.
    pub async fn save_variable(
        instance_id: Uuid,
        tenant_id: Uuid,
        name: &str,
        var_type: &str,
        value: &serde_json::Value,
        txn: &impl ConnectionTrait,
    ) -> WorkflowResult<()> {
        let id = Uuid::now_v7();

        let (value_string, value_number, value_boolean, value_date): (
            Option<String>,
            Option<f64>,
            Option<bool>,
            Option<chrono::DateTime<chrono::Utc>>,
        ) = match var_type {
            "string" => (value.as_str().map(|s| s.to_string()), None, None, None),
            "number" => (None, value.as_f64(), None, None),
            "boolean" => (None, None, value.as_bool(), None),
            // FIX: "date" previously fell through to the catch-all and the
            // computed date was discarded because value_date was hard-coded
            // to Set(None) below. Parse RFC 3339 strings into UTC here.
            "date" => (
                None,
                None,
                None,
                value
                    .as_str()
                    .and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok())
                    .map(|dt| dt.with_timezone(&chrono::Utc)),
            ),
            _ => (Some(value.to_string()), None, None, None),
        };

        let model = process_variable::ActiveModel {
            id: Set(id),
            tenant_id: Set(tenant_id),
            instance_id: Set(instance_id),
            name: Set(name.to_string()),
            var_type: Set(var_type.to_string()),
            value_string: Set(value_string),
            value_number: Set(value_number),
            value_boolean: Set(value_boolean),
            // FIX: was `Set(None)`, dropping any date value.
            value_date: Set(value_date),
        };
        model
            .insert(txn)
            .await
            .map_err(|e| WorkflowError::Validation(e.to_string()))?;

        Ok(())
    }
}

// ===== file: crates/erp-workflow/src/service/mod.rs (new) =====
pub mod definition_service;
pub mod instance_service;
pub mod task_service;

// ===== file: crates/erp-workflow/src/service/task_service.rs (new) =====
use std::collections::HashMap;

use chrono::Utc;
use sea_orm::{
    ActiveModelTrait, ColumnTrait, EntityTrait, PaginatorTrait, QueryFilter, Set,
    TransactionTrait,
};
use uuid::Uuid;

use crate::dto::{CompleteTaskReq, DelegateTaskReq, TaskResp};
use crate::engine::executor::FlowExecutor;
use crate::engine::parser;
use crate::entity::{process_definition, process_instance, task};
use crate::error::{WorkflowError, WorkflowResult};
use erp_core::events::EventBus;
use erp_core::types::Pagination;

/// Task service: pending/completed queries, complete, delegate.
pub struct TaskService;

impl TaskService {
    /// Pending tasks assigned to the given user (paginated).
    pub async fn list_pending(
        tenant_id: Uuid,
        assignee_id: Uuid,
        pagination: &Pagination,
        db: &sea_orm::DatabaseConnection,
    ) -> WorkflowResult<(Vec<TaskResp>, u64)> {
        let paginator = task::Entity::find()
            .filter(task::Column::TenantId.eq(tenant_id))
            .filter(task::Column::AssigneeId.eq(assignee_id))
            .filter(task::Column::Status.eq("pending"))
.filter(task::Column::DeletedAt.is_null()) + .paginate(db, pagination.limit()); + + let total = paginator + .num_items() + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let page_index = pagination.page.unwrap_or(1).saturating_sub(1) as u64; + let models = paginator + .fetch_page(page_index) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let mut resps = Vec::new(); + for m in &models { + let mut resp = Self::model_to_resp(m); + // 附加实例信息 + if let Some(inst) = process_instance::Entity::find_by_id(m.instance_id) + .one(db) + .await + .ok() + .flatten() + { + resp.business_key = inst.business_key; + if let Some(def) = process_definition::Entity::find_by_id(inst.definition_id) + .one(db) + .await + .ok() + .flatten() + { + resp.definition_name = Some(def.name); + } + } + resps.push(resp); + } + + Ok((resps, total)) + } + + /// 查询当前用户的已办任务。 + pub async fn list_completed( + tenant_id: Uuid, + assignee_id: Uuid, + pagination: &Pagination, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult<(Vec, u64)> { + let paginator = task::Entity::find() + .filter(task::Column::TenantId.eq(tenant_id)) + .filter(task::Column::AssigneeId.eq(assignee_id)) + .filter(task::Column::Status.is_in(["approved", "rejected", "delegated"])) + .filter(task::Column::DeletedAt.is_null()) + .paginate(db, pagination.limit()); + + let total = paginator + .num_items() + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let page_index = pagination.page.unwrap_or(1).saturating_sub(1) as u64; + let models = paginator + .fetch_page(page_index) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + let mut resps = Vec::new(); + for m in &models { + let mut resp = Self::model_to_resp(m); + if let Some(inst) = process_instance::Entity::find_by_id(m.instance_id) + .one(db) + .await + .ok() + .flatten() + { + resp.business_key = inst.business_key; + if let Some(def) = 
process_definition::Entity::find_by_id(inst.definition_id) + .one(db) + .await + .ok() + .flatten() + { + resp.definition_name = Some(def.name); + } + } + resps.push(resp); + } + + Ok((resps, total)) + } + + /// 完成任务:更新任务状态 + 推进 token。 + pub async fn complete( + id: Uuid, + tenant_id: Uuid, + operator_id: Uuid, + req: &CompleteTaskReq, + db: &sea_orm::DatabaseConnection, + event_bus: &EventBus, + ) -> WorkflowResult { + let task_model = task::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|t| t.tenant_id == tenant_id && t.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("任务不存在: {id}")))?; + + if task_model.status != "pending" { + return Err(WorkflowError::InvalidState( + "任务状态不是 pending,无法完成".to_string(), + )); + } + + let instance_id = task_model.instance_id; + let token_id = task_model.token_id; + + // 获取流程定义和流程图 + let instance = process_instance::Entity::find_by_id(instance_id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|i| i.tenant_id == tenant_id && i.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("流程实例不存在: {instance_id}")))?; + + let definition = process_definition::Entity::find_by_id(instance.definition_id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? 
+ .filter(|d| d.tenant_id == tenant_id && d.deleted_at.is_none()) + .ok_or_else(|| { + WorkflowError::NotFound(format!("流程定义不存在: {}", instance.definition_id)) + })?; + + let nodes: Vec = + serde_json::from_value(definition.nodes.clone()).map_err(|e| { + WorkflowError::InvalidDiagram(format!("节点数据无效: {e}")) + })?; + let edges: Vec = + serde_json::from_value(definition.edges.clone()).map_err(|e| { + WorkflowError::InvalidDiagram(format!("连线数据无效: {e}")) + })?; + let graph = parser::parse_and_validate(&nodes, &edges)?; + + // 准备变量(从 req.form_data 中提取) + let mut variables = HashMap::new(); + if let Some(form) = &req.form_data { + if let Some(obj) = form.as_object() { + for (k, v) in obj { + variables.insert(k.clone(), v.clone()); + } + } + } + + // 在事务中更新任务 + 推进 token + let now = Utc::now(); + let outcome = req.outcome.clone(); + let form_data = req.form_data.clone(); + db.transaction::<_, (), WorkflowError>(|txn| { + let graph = graph.clone(); + let variables = variables.clone(); + let task_model = task_model.clone(); + Box::pin(async move { + // 更新任务状态 + let mut active: task::ActiveModel = task_model.clone().into(); + active.status = Set("completed".to_string()); + active.outcome = Set(Some(outcome)); + active.form_data = Set(form_data); + active.completed_at = Set(Some(now)); + active.updated_at = Set(now); + active.updated_by = Set(operator_id); + active + .update(txn) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + // 推进 token + FlowExecutor::advance( + token_id, + instance_id, + tenant_id, + &graph, + &variables, + txn, + ) + .await?; + + Ok(()) + }) + }) + .await?; + + event_bus.publish(erp_core::events::DomainEvent::new( + "task.completed", + tenant_id, + serde_json::json!({ "task_id": id, "outcome": req.outcome }), + )); + + // 重新查询任务 + let updated = task::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? 
+ .ok_or_else(|| WorkflowError::NotFound(format!("任务不存在: {id}")))?; + + Ok(Self::model_to_resp(&updated)) + } + + /// 委派任务给其他人。 + pub async fn delegate( + id: Uuid, + tenant_id: Uuid, + operator_id: Uuid, + req: &DelegateTaskReq, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult { + let task_model = task::Entity::find_by_id(id) + .one(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))? + .filter(|t| t.tenant_id == tenant_id && t.deleted_at.is_none()) + .ok_or_else(|| WorkflowError::NotFound(format!("任务不存在: {id}")))?; + + if task_model.status != "pending" { + return Err(WorkflowError::InvalidState( + "任务状态不是 pending,无法委派".to_string(), + )); + } + + let mut active: task::ActiveModel = task_model.into(); + active.assignee_id = Set(Some(req.delegate_to)); + active.updated_at = Set(Utc::now()); + active.updated_by = Set(operator_id); + + let updated = active + .update(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + Ok(Self::model_to_resp(&updated)) + } + + /// 创建任务记录(由执行引擎调用)。 + pub async fn create_task( + instance_id: Uuid, + tenant_id: Uuid, + token_id: Uuid, + node_id: &str, + node_name: Option<&str>, + assignee_id: Option, + candidate_groups: Option>, + db: &sea_orm::DatabaseConnection, + ) -> WorkflowResult { + let id = Uuid::now_v7(); + let now = Utc::now(); + let system_user = Uuid::nil(); + + let model = task::ActiveModel { + id: Set(id), + tenant_id: Set(tenant_id), + instance_id: Set(instance_id), + token_id: Set(token_id), + node_id: Set(node_id.to_string()), + node_name: Set(node_name.map(|s| s.to_string())), + assignee_id: Set(assignee_id), + candidate_groups: Set(candidate_groups.map(|g| serde_json::to_value(g).unwrap_or_default())), + status: Set("pending".to_string()), + outcome: Set(None), + form_data: Set(None), + due_date: Set(None), + completed_at: Set(None), + created_at: Set(now), + updated_at: Set(now), + created_by: Set(system_user), + updated_by: Set(system_user), + deleted_at: Set(None), + 
version: Set(1), + }; + model + .insert(db) + .await + .map_err(|e| WorkflowError::Validation(e.to_string()))?; + + Ok(id) + } + + fn model_to_resp(m: &task::Model) -> TaskResp { + TaskResp { + id: m.id, + instance_id: m.instance_id, + token_id: m.token_id, + node_id: m.node_id.clone(), + node_name: m.node_name.clone(), + assignee_id: m.assignee_id, + candidate_groups: m.candidate_groups.clone(), + status: m.status.clone(), + outcome: m.outcome.clone(), + form_data: m.form_data.clone(), + due_date: m.due_date, + completed_at: m.completed_at, + created_at: m.created_at, + definition_name: None, + business_key: None, + } + } +} diff --git a/crates/erp-workflow/src/workflow_state.rs b/crates/erp-workflow/src/workflow_state.rs new file mode 100644 index 0000000..8c0c5e2 --- /dev/null +++ b/crates/erp-workflow/src/workflow_state.rs @@ -0,0 +1,11 @@ +use erp_core::events::EventBus; +use sea_orm::DatabaseConnection; + +/// Workflow-specific state extracted from the server's AppState via `FromRef`. +/// +/// Contains the database connection and event bus needed by workflow handlers. +#[derive(Clone)] +pub struct WorkflowState { + pub db: DatabaseConnection, + pub event_bus: EventBus, +}