feat: 新增技能编排引擎和工作流构建器组件
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
refactor: 统一Hands系统常量到单个源文件 refactor: 更新Hands中文名称和描述 fix: 修复技能市场在连接状态变化时重新加载 fix: 修复身份变更提案的错误处理逻辑 docs: 更新多个功能文档的验证状态和实现位置 docs: 更新Hands系统文档 test: 添加测试文件验证工作区路径
This commit is contained in:
11
desktop/src/lib/workflow-builder/index.ts
Normal file
11
desktop/src/lib/workflow-builder/index.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
/**
 * Workflow Builder Library
 *
 * Barrel module: re-exports the workflow-builder type definitions and the
 * YAML <-> canvas converters so consumers can import from a single path.
 *
 * Provides types, converters, and utilities for building visual workflow editors.
 */

export * from './types';
export * from './yaml-converter';

// Re-export commonly used types from @xyflow/react
// (lets downstream modules use these shapes without importing React Flow directly)
export type { Node, Edge, Connection } from '@xyflow/react';
|
||||
329
desktop/src/lib/workflow-builder/types.ts
Normal file
329
desktop/src/lib/workflow-builder/types.ts
Normal file
@@ -0,0 +1,329 @@
|
||||
/**
 * Workflow Builder Types
 *
 * Core types for the visual workflow builder that creates Pipeline DSL
 * configurations through drag-and-drop node composition.
 */

import type { Node, Edge } from '@xyflow/react';

// =============================================================================
// Node Types
// =============================================================================

// Every node kind the canvas supports. Must stay in sync with the
// WorkflowNodeData union below (one data interface per variant).
export type WorkflowNodeType =
  | 'input'
  | 'llm'
  | 'skill'
  | 'hand'
  | 'orchestration'
  | 'condition'
  | 'parallel'
  | 'loop'
  | 'export'
  | 'http'
  | 'setVar'
  | 'delay';
|
||||
|
||||
// =============================================================================
// Node Data Types
// =============================================================================

// Base node data that satisfies Record<string, unknown>
// (React Flow requires node data assignable to Record<string, unknown>)
export interface BaseNodeData extends Record<string, unknown> {
  label: string;
  description?: string;
}

export interface InputNodeData extends BaseNodeData {
  type: 'input';
  /** Input variable name */
  variableName: string;
  /** Default value for testing */
  defaultValue?: unknown;
  /** JSON schema for validation */
  schema?: Record<string, unknown>;
}

export interface LlmNodeData extends BaseNodeData {
  type: 'llm';
  /** Template path or inline prompt */
  template: string;
  /** Whether template is a file path */
  isTemplateFile: boolean;
  /** Model override */
  model?: string;
  /** Temperature override */
  temperature?: number;
  /** Max tokens override */
  maxTokens?: number;
  /** JSON mode for structured output */
  jsonMode: boolean;
}

export interface SkillNodeData extends BaseNodeData {
  type: 'skill';
  /** Skill ID to execute */
  skillId: string;
  /** Skill name for display */
  skillName?: string;
  /** Input variable mappings */
  inputMappings: Record<string, string>;
}

export interface HandNodeData extends BaseNodeData {
  type: 'hand';
  /** Hand ID */
  handId: string;
  /** Hand name for display */
  handName?: string;
  /** Action to perform */
  action: string;
  /** Action parameters */
  params: Record<string, string>;
}

export interface OrchestrationNodeData extends BaseNodeData {
  type: 'orchestration';
  /** Graph ID reference */
  graphId?: string;
  /** Inline graph definition */
  graph?: Record<string, unknown>;
  /** Input mappings */
  inputMappings: Record<string, string>;
}

export interface ConditionNodeData extends BaseNodeData {
  type: 'condition';
  /** Condition expression */
  condition: string;
  /** Branch definitions */
  branches: ConditionBranch[];
  /** Has default branch */
  hasDefault: boolean;
}

// A single branch of a condition node (not node data itself).
export interface ConditionBranch {
  /** Condition expression for this branch */
  when: string;
  /** Label for display */
  label: string;
}

export interface ParallelNodeData extends BaseNodeData {
  type: 'parallel';
  /** Expression to iterate over */
  each: string;
  /** Max concurrent workers */
  maxWorkers: number;
}

export interface LoopNodeData extends BaseNodeData {
  type: 'loop';
  /** Expression to iterate over */
  each: string;
  /** Variable name for current item */
  itemVar: string;
  /** Variable name for index */
  indexVar: string;
}

export interface ExportNodeData extends BaseNodeData {
  type: 'export';
  /** Export formats */
  formats: ExportFormat[];
  /** Output directory */
  outputDir?: string;
}

export interface HttpNodeData extends BaseNodeData {
  type: 'http';
  /** URL */
  url: string;
  /** HTTP method */
  method: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
  /** Headers */
  headers: Record<string, string>;
  /** Request body expression */
  body?: string;
}

export interface SetVarNodeData extends BaseNodeData {
  type: 'setVar';
  /** Variable name */
  variableName: string;
  /** Value expression */
  value: string;
}

export interface DelayNodeData extends BaseNodeData {
  type: 'delay';
  /** Delay in milliseconds */
  ms: number;
}

// Discriminated union over all node payloads, keyed by the `type` literal.
export type WorkflowNodeData =
  | InputNodeData
  | LlmNodeData
  | SkillNodeData
  | HandNodeData
  | OrchestrationNodeData
  | ConditionNodeData
  | ParallelNodeData
  | LoopNodeData
  | ExportNodeData
  | HttpNodeData
  | SetVarNodeData
  | DelayNodeData;
|
||||
|
||||
// =============================================================================
// Canvas Types
// =============================================================================

// Use Record<string, unknown> as base to satisfy React Flow constraints
// The actual data will be one of the WorkflowNodeData union types
// NOTE(review): this erases the discriminated union at the type level, so
// consumers must cast node.data back to WorkflowNodeData themselves.
export type WorkflowNode = Node<Record<string, unknown>, string>;
export type WorkflowEdge = Edge;

export interface WorkflowCanvas {
  /** Unique canvas ID */
  id: string;
  /** Canvas name */
  name: string;
  /** Canvas description */
  description?: string;
  /** Category for organization */
  category?: string;
  /** Nodes in the canvas */
  nodes: WorkflowNode[];
  /** Edges connecting nodes */
  edges: WorkflowEdge[];
  /** Viewport state */
  viewport: {
    x: number;
    y: number;
    zoom: number;
  };
  /** Canvas metadata */
  metadata: WorkflowMetadata;
}

export interface WorkflowMetadata {
  /** Created timestamp (ISO-8601 string; see yamlToCanvas) */
  createdAt: string;
  /** Updated timestamp (ISO-8601 string) */
  updatedAt: string;
  /** Author */
  author?: string;
  /** Tags for search */
  tags: string[];
  /** Version */
  version: string;
}

// =============================================================================
// Export Types
// =============================================================================

// File formats an export node can emit.
export type ExportFormat = 'pptx' | 'html' | 'pdf' | 'markdown' | 'json';
|
||||
|
||||
// =============================================================================
// Palette Types
// =============================================================================

// One draggable entry in the node palette sidebar.
export interface NodePaletteItem {
  /** Node type created when this item is dropped on the canvas */
  type: WorkflowNodeType;
  /** Display label */
  label: string;
  /** Short description shown in the palette */
  description: string;
  /** Icon identifier */
  icon: string;
  /** Palette grouping */
  category: NodeCategory;
  /** Initial data applied to the newly created node */
  defaultData: Partial<WorkflowNodeData>;
}

export type NodeCategory =
  | 'input'
  | 'ai'
  | 'action'
  | 'control'
  | 'output';

// =============================================================================
// Conversion Types
// =============================================================================

// Shape of the serialized Pipeline DSL document (see yaml-converter).
export interface PipelineYaml {
  apiVersion: 'zclaw/v1';
  kind: 'Pipeline';
  metadata: {
    name: string;
    description?: string;
    tags?: string[];
  };
  spec: {
    /** Pipeline input variables: name -> default value */
    input?: Record<string, unknown>;
    /** Ordered execution steps */
    steps: PipelineStepYaml[];
    /** Output mappings: name -> expression */
    output?: Record<string, string>;
  };
}

export interface PipelineStepYaml {
  id: string;
  name?: string;
  /** Single-key object naming the action type (e.g. llm_generate, skill) */
  action: Record<string, unknown>;
  /** Optional guard expression */
  when?: string;
}

// =============================================================================
// Validation Types
// =============================================================================

export interface ValidationError {
  /** Offending node id, or 'canvas' for canvas-level problems */
  nodeId: string;
  /** Offending field, when the error concerns one specific field */
  field?: string;
  message: string;
  severity: 'error' | 'warning';
}

export interface ValidationResult {
  /** True when no errors (warnings do not affect validity) */
  valid: boolean;
  errors: ValidationError[];
  warnings: ValidationError[];
}

// =============================================================================
// Template Types
// =============================================================================

// A prebuilt workflow users can instantiate from the template gallery.
export interface WorkflowTemplate {
  id: string;
  name: string;
  description: string;
  category: string;
  thumbnail?: string;
  canvas: WorkflowCanvas;
}

// =============================================================================
// Store Types
// =============================================================================

// State shape for the workflow-builder store.
export interface WorkflowBuilderState {
  /** Current canvas */
  canvas: WorkflowCanvas | null;
  /** All saved workflows */
  workflows: WorkflowCanvas[];
  /** Selected node ID */
  selectedNodeId: string | null;
  /** Is dragging from palette */
  isDragging: boolean;
  /** Is canvas dirty (unsaved changes) */
  isDirty: boolean;
  /** Validation result */
  validation: ValidationResult | null;
  /** Templates */
  templates: WorkflowTemplate[];
  /** Available skills for palette */
  availableSkills: Array<{ id: string; name: string; description: string }>;
  /** Available hands for palette */
  availableHands: Array<{ id: string; name: string; actions: string[] }>;
}
|
||||
803
desktop/src/lib/workflow-builder/yaml-converter.ts
Normal file
803
desktop/src/lib/workflow-builder/yaml-converter.ts
Normal file
@@ -0,0 +1,803 @@
|
||||
/**
|
||||
* YAML Converter for Workflow Builder
|
||||
*
|
||||
* Bidirectional conversion between WorkflowCanvas (visual representation)
|
||||
* and Pipeline YAML (execution format).
|
||||
*/
|
||||
|
||||
import * as yaml from 'js-yaml';
|
||||
import type { Edge } from '@xyflow/react';
|
||||
import dagre from '@dagrejs/dagre';
|
||||
import type {
|
||||
WorkflowCanvas,
|
||||
WorkflowNode,
|
||||
WorkflowNodeData,
|
||||
InputNodeData,
|
||||
LlmNodeData,
|
||||
SkillNodeData,
|
||||
HandNodeData,
|
||||
ConditionNodeData,
|
||||
ParallelNodeData,
|
||||
ExportNodeData,
|
||||
PipelineYaml,
|
||||
PipelineStepYaml,
|
||||
ValidationError,
|
||||
ValidationResult,
|
||||
} from './types';
|
||||
|
||||
// =============================================================================
|
||||
// Canvas to YAML Conversion
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Convert a WorkflowCanvas to Pipeline YAML string
|
||||
*/
|
||||
export function canvasToYaml(canvas: WorkflowCanvas): string {
|
||||
const pipeline: PipelineYaml = {
|
||||
apiVersion: 'zclaw/v1',
|
||||
kind: 'Pipeline',
|
||||
metadata: {
|
||||
name: canvas.name,
|
||||
description: canvas.description,
|
||||
tags: canvas.metadata.tags,
|
||||
},
|
||||
spec: {
|
||||
input: extractInputs(canvas.nodes),
|
||||
steps: nodesToSteps(canvas.nodes, canvas.edges),
|
||||
output: extractOutputs(canvas.nodes),
|
||||
},
|
||||
};
|
||||
|
||||
return yaml.dump(pipeline, {
|
||||
indent: 2,
|
||||
lineWidth: -1,
|
||||
noRefs: true,
|
||||
sortKeys: false,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract input definitions from input nodes
|
||||
*/
|
||||
function extractInputs(nodes: WorkflowNode[]): Record<string, unknown> | undefined {
|
||||
const inputs: Record<string, unknown> = {};
|
||||
|
||||
for (const node of nodes) {
|
||||
if (node.data.type === 'input') {
|
||||
const data = node.data as InputNodeData;
|
||||
inputs[data.variableName] = data.defaultValue ?? null;
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(inputs).length > 0 ? inputs : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract output mappings from the last nodes or explicit output nodes
|
||||
*/
|
||||
function extractOutputs(nodes: WorkflowNode[]): Record<string, string> | undefined {
|
||||
const outputs: Record<string, string> = {};
|
||||
|
||||
for (const node of nodes) {
|
||||
if (node.data.type === 'export') {
|
||||
// Export nodes define outputs
|
||||
outputs[`${node.id}_export`] = `\${steps.${node.id}.output}`;
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(outputs).length > 0 ? outputs : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert nodes and edges to pipeline steps
|
||||
*/
|
||||
function nodesToSteps(nodes: WorkflowNode[], edges: Edge[]): PipelineStepYaml[] {
|
||||
// Topological sort to get execution order
|
||||
const sortedNodes = topologicalSort(nodes, edges);
|
||||
|
||||
return sortedNodes
|
||||
.filter(node => node.data.type !== 'input') // Skip input nodes
|
||||
.map(node => nodeToStep(node))
|
||||
.filter((step): step is PipelineStepYaml => step !== null);
|
||||
}
|
||||
|
||||
/**
 * Convert a single node to a pipeline step
 *
 * Maps each visual node type to its Pipeline DSL action (snake_case keys).
 * Input nodes return null (they are represented by spec.input); unknown
 * types log a warning and return null so one bad node does not abort
 * serialization.
 *
 * NOTE(review): condition/parallel/loop emit placeholder `then`/`step`
 * bodies — nesting of child steps is not wired up here yet; confirm where
 * that is supposed to happen.
 */
function nodeToStep(node: WorkflowNode): PipelineStepYaml | null {
  const data = node.data;
  const label = data.label as string | undefined;
  // name is undefined when the node has no label; the step id is always set.
  const base: PipelineStepYaml = {
    id: node.id,
    name: label,
    action: {},
  };

  const nodeType = data.type as string;

  switch (nodeType) {
    case 'llm': {
      const llmData = data as LlmNodeData;
      base.action = {
        llm_generate: {
          template: llmData.template,
          // Derive the input map from ${...} references inside the template.
          input: mapExpressionsToObject(llmData.template),
          model: llmData.model,
          temperature: llmData.temperature,
          max_tokens: llmData.maxTokens,
          json_mode: llmData.jsonMode,
        },
      };
      break;
    }

    case 'skill': {
      const skillData = data as SkillNodeData;
      base.action = {
        skill: {
          skill_id: skillData.skillId,
          input: skillData.inputMappings,
        },
      };
      break;
    }

    case 'hand': {
      const handData = data as HandNodeData;
      base.action = {
        hand: {
          hand_id: handData.handId,
          hand_action: handData.action,
          params: handData.params,
        },
      };
      break;
    }

    case 'orchestration': {
      const orchData = data as { graphId?: string; graph?: Record<string, unknown>; inputMappings?: Record<string, string> };
      base.action = {
        skill_orchestration: {
          graph_id: orchData.graphId,
          graph: orchData.graph,
          input: orchData.inputMappings,
        },
      };
      break;
    }

    case 'condition': {
      const condData = data as ConditionNodeData;
      base.action = {
        condition: {
          condition: condData.condition,
          branches: condData.branches.map((b: { when: string }) => ({
            when: b.when,
            then: { /* Will be filled by connected nodes */ },
          })),
        },
      };
      break;
    }

    case 'parallel': {
      const parData = data as ParallelNodeData;
      base.action = {
        parallel: {
          each: parData.each,
          step: { /* Will be filled by child nodes */ },
          max_workers: parData.maxWorkers,
        },
      };
      break;
    }

    case 'loop': {
      const loopData = data as { each: string; itemVar: string; indexVar: string };
      base.action = {
        loop: {
          each: loopData.each,
          item_var: loopData.itemVar,
          index_var: loopData.indexVar,
          step: { /* Will be filled by child nodes */ },
        },
      };
      break;
    }

    case 'export': {
      const exportData = data as ExportNodeData;
      base.action = {
        file_export: {
          formats: exportData.formats,
          input: `\${steps.${node.id}.input}`,
          output_dir: exportData.outputDir,
        },
      };
      break;
    }

    case 'http': {
      const httpData = data as { url: string; method: string; headers: Record<string, string>; body?: string };
      base.action = {
        http_request: {
          url: httpData.url,
          method: httpData.method,
          headers: httpData.headers,
          body: httpData.body,
        },
      };
      break;
    }

    case 'setVar': {
      const varData = data as { variableName: string; value: string };
      base.action = {
        set_var: {
          name: varData.variableName,
          value: varData.value,
        },
      };
      break;
    }

    case 'delay': {
      const delayData = data as { ms: number };
      base.action = {
        delay: {
          ms: delayData.ms,
        },
      };
      break;
    }

    case 'input':
      // Input nodes don't become steps
      return null;

    default:
      console.warn(`Unknown node type: ${nodeType}`);
      return null;
  }

  return base;
}
|
||||
|
||||
/**
|
||||
* Topological sort of nodes based on edges
|
||||
*/
|
||||
function topologicalSort(nodes: WorkflowNode[], edges: Edge[]): WorkflowNode[] {
|
||||
const nodeMap = new Map(nodes.map(n => [n.id, n]));
|
||||
const inDegree = new Map<string, number>();
|
||||
const adjacency = new Map<string, string[]>();
|
||||
|
||||
// Initialize
|
||||
for (const node of nodes) {
|
||||
inDegree.set(node.id, 0);
|
||||
adjacency.set(node.id, []);
|
||||
}
|
||||
|
||||
// Build graph
|
||||
for (const edge of edges) {
|
||||
const current = adjacency.get(edge.source) || [];
|
||||
current.push(edge.target);
|
||||
adjacency.set(edge.source, current);
|
||||
|
||||
inDegree.set(edge.target, (inDegree.get(edge.target) || 0) + 1);
|
||||
}
|
||||
|
||||
// Kahn's algorithm
|
||||
const queue: string[] = [];
|
||||
const result: WorkflowNode[] = [];
|
||||
|
||||
for (const [nodeId, degree] of inDegree) {
|
||||
if (degree === 0) {
|
||||
queue.push(nodeId);
|
||||
}
|
||||
}
|
||||
|
||||
while (queue.length > 0) {
|
||||
const nodeId = queue.shift()!;
|
||||
const node = nodeMap.get(nodeId);
|
||||
if (node) {
|
||||
result.push(node);
|
||||
}
|
||||
|
||||
const neighbors = adjacency.get(nodeId) || [];
|
||||
for (const neighbor of neighbors) {
|
||||
const newDegree = (inDegree.get(neighbor) || 0) - 1;
|
||||
inDegree.set(neighbor, newDegree);
|
||||
if (newDegree === 0) {
|
||||
queue.push(neighbor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract variable references from a template string
|
||||
*/
|
||||
function mapExpressionsToObject(template: string): Record<string, string> {
|
||||
const regex = /\$\{([^}]+)\}/g;
|
||||
const matches = template.match(regex) || [];
|
||||
const result: Record<string, string> = {};
|
||||
|
||||
for (const match of matches) {
|
||||
const expr = match.slice(2, -1); // Remove ${ and }
|
||||
const parts = expr.split('.');
|
||||
if (parts.length >= 2) {
|
||||
result[parts[parts.length - 1]] = match;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// YAML to Canvas Conversion
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Parse Pipeline YAML string to WorkflowCanvas
|
||||
*/
|
||||
export function yamlToCanvas(yamlString: string): WorkflowCanvas {
|
||||
const pipeline = yaml.load(yamlString) as PipelineYaml;
|
||||
|
||||
const nodes: WorkflowNode[] = [];
|
||||
const edges: Edge[] = [];
|
||||
|
||||
// Create input nodes from spec.input
|
||||
if (pipeline.spec.input) {
|
||||
let y = 50;
|
||||
for (const [varName, defaultValue] of Object.entries(pipeline.spec.input)) {
|
||||
nodes.push({
|
||||
id: `input_${varName}`,
|
||||
type: 'input',
|
||||
position: { x: 50, y },
|
||||
data: {
|
||||
type: 'input',
|
||||
label: varName,
|
||||
variableName: varName,
|
||||
defaultValue,
|
||||
},
|
||||
});
|
||||
y += 100;
|
||||
}
|
||||
}
|
||||
|
||||
// Convert steps to nodes
|
||||
if (pipeline.spec.steps) {
|
||||
let x = 300;
|
||||
let y = 50;
|
||||
|
||||
for (const step of pipeline.spec.steps) {
|
||||
const node = stepToNode(step, x, y);
|
||||
if (node) {
|
||||
nodes.push(node);
|
||||
y += 150;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Auto-layout with dagre
|
||||
const layoutedNodes = applyDagreLayout(nodes, edges);
|
||||
|
||||
return {
|
||||
id: `workflow_${Date.now()}`,
|
||||
name: pipeline.metadata?.name || 'Imported Workflow',
|
||||
description: pipeline.metadata?.description,
|
||||
category: 'imported',
|
||||
nodes: layoutedNodes,
|
||||
edges,
|
||||
viewport: { x: 0, y: 0, zoom: 1 },
|
||||
metadata: {
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
tags: pipeline.metadata?.tags || [],
|
||||
version: '1.0.0',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a pipeline step to a workflow node
|
||||
*/
|
||||
function stepToNode(step: PipelineStepYaml, x: number, y: number): WorkflowNode | null {
|
||||
const action = step.action;
|
||||
const actionType = Object.keys(action)[0];
|
||||
const actionData = action[actionType];
|
||||
|
||||
const baseData = {
|
||||
label: step.name || step.id,
|
||||
};
|
||||
|
||||
switch (actionType) {
|
||||
case 'llm_generate':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'llm',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'llm',
|
||||
...baseData,
|
||||
template: (actionData as { template?: string }).template || '',
|
||||
isTemplateFile: false,
|
||||
model: (actionData as { model?: string }).model,
|
||||
temperature: (actionData as { temperature?: number }).temperature,
|
||||
maxTokens: (actionData as { max_tokens?: number }).max_tokens,
|
||||
jsonMode: (actionData as { json_mode?: boolean }).json_mode || false,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'skill':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'skill',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'skill',
|
||||
...baseData,
|
||||
skillId: (actionData as { skill_id?: string }).skill_id || '',
|
||||
inputMappings: (actionData as { input?: Record<string, string> }).input || {},
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'hand':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'hand',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'hand',
|
||||
...baseData,
|
||||
handId: (actionData as { hand_id?: string }).hand_id || '',
|
||||
action: (actionData as { hand_action?: string }).hand_action || '',
|
||||
params: (actionData as { params?: Record<string, string> }).params || {},
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'skill_orchestration':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'orchestration',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'orchestration',
|
||||
...baseData,
|
||||
graphId: (actionData as { graph_id?: string }).graph_id,
|
||||
graph: (actionData as { graph?: Record<string, unknown> }).graph,
|
||||
inputMappings: (actionData as { input?: Record<string, string> }).input || {},
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'condition':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'condition',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'condition',
|
||||
...baseData,
|
||||
condition: (actionData as { condition?: string }).condition || '',
|
||||
branches: ((actionData as { branches?: Array<{ when: string }> }).branches || []).map(b => ({
|
||||
when: b.when,
|
||||
label: b.when.slice(0, 20),
|
||||
})),
|
||||
hasDefault: true,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'parallel':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'parallel',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'parallel',
|
||||
...baseData,
|
||||
each: (actionData as { each?: string }).each || '',
|
||||
maxWorkers: (actionData as { max_workers?: number }).max_workers || 4,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'file_export':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'export',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'export',
|
||||
...baseData,
|
||||
formats: (actionData as { formats?: string[] }).formats || [],
|
||||
outputDir: (actionData as { output_dir?: string }).output_dir,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'http_request':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'http',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'http',
|
||||
...baseData,
|
||||
url: (actionData as { url?: string }).url || '',
|
||||
method: ((actionData as { method?: string }).method || 'GET') as 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH',
|
||||
headers: (actionData as { headers?: Record<string, string> }).headers || {},
|
||||
body: (actionData as { body?: string }).body,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'set_var':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'setVar',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'setVar',
|
||||
...baseData,
|
||||
variableName: (actionData as { name?: string }).name || '',
|
||||
value: (actionData as { value?: string }).value || '',
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
case 'delay':
|
||||
return {
|
||||
id: step.id,
|
||||
type: 'delay',
|
||||
position: { x, y },
|
||||
data: {
|
||||
type: 'delay',
|
||||
...baseData,
|
||||
ms: (actionData as { ms?: number }).ms || 0,
|
||||
} as WorkflowNodeData,
|
||||
};
|
||||
|
||||
default:
|
||||
console.warn(`Unknown action type: ${actionType}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Layout Utilities
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Apply dagre layout to nodes
|
||||
*/
|
||||
export function applyDagreLayout(nodes: WorkflowNode[], edges: Edge[]): WorkflowNode[] {
|
||||
const dagreGraph = new dagre.graphlib.Graph();
|
||||
dagreGraph.setDefaultEdgeLabel(() => ({}));
|
||||
|
||||
dagreGraph.setGraph({
|
||||
rankdir: 'LR',
|
||||
nodesep: 100,
|
||||
ranksep: 150,
|
||||
marginx: 50,
|
||||
marginy: 50,
|
||||
});
|
||||
|
||||
// Add nodes to dagre
|
||||
for (const node of nodes) {
|
||||
dagreGraph.setNode(node.id, {
|
||||
width: 250,
|
||||
height: 100,
|
||||
});
|
||||
}
|
||||
|
||||
// Add edges to dagre
|
||||
for (const edge of edges) {
|
||||
dagreGraph.setEdge(edge.source, edge.target);
|
||||
}
|
||||
|
||||
// Apply layout
|
||||
dagre.layout(dagreGraph);
|
||||
|
||||
// Update node positions
|
||||
return nodes.map(node => {
|
||||
const dagreNode = dagreGraph.node(node.id);
|
||||
if (dagreNode) {
|
||||
return {
|
||||
...node,
|
||||
position: {
|
||||
x: dagreNode.x - dagreNode.width / 2,
|
||||
y: dagreNode.y - dagreNode.height / 2,
|
||||
},
|
||||
};
|
||||
}
|
||||
return node;
|
||||
});
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Validation
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Validate a workflow canvas
|
||||
*/
|
||||
export function validateCanvas(canvas: WorkflowCanvas): ValidationResult {
|
||||
const errors: ValidationError[] = [];
|
||||
const warnings: ValidationError[] = [];
|
||||
|
||||
// Check for empty canvas
|
||||
if (canvas.nodes.length === 0) {
|
||||
errors.push({
|
||||
nodeId: 'canvas',
|
||||
message: 'Workflow is empty',
|
||||
severity: 'error',
|
||||
});
|
||||
return { valid: false, errors, warnings };
|
||||
}
|
||||
|
||||
// Check for input nodes
|
||||
const hasInput = canvas.nodes.some(n => n.data.type === 'input');
|
||||
if (!hasInput) {
|
||||
warnings.push({
|
||||
nodeId: 'canvas',
|
||||
message: 'No input nodes defined',
|
||||
severity: 'warning',
|
||||
});
|
||||
}
|
||||
|
||||
// Check for disconnected nodes
|
||||
const connectedNodeIds = new Set<string>();
|
||||
for (const edge of canvas.edges) {
|
||||
connectedNodeIds.add(edge.source);
|
||||
connectedNodeIds.add(edge.target);
|
||||
}
|
||||
|
||||
for (const node of canvas.nodes) {
|
||||
if (canvas.nodes.length > 1 && !connectedNodeIds.has(node.id) && node.data.type !== 'input') {
|
||||
warnings.push({
|
||||
nodeId: node.id,
|
||||
message: `Node "${node.data.label}" is not connected`,
|
||||
severity: 'warning',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Validate individual nodes
|
||||
for (const node of canvas.nodes) {
|
||||
const nodeErrors = validateNode(node);
|
||||
errors.push(...nodeErrors);
|
||||
}
|
||||
|
||||
// Check for cycles (basic check)
|
||||
if (hasCycle(canvas.nodes, canvas.edges)) {
|
||||
errors.push({
|
||||
nodeId: 'canvas',
|
||||
message: 'Workflow contains a cycle',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a single node
|
||||
*/
|
||||
function validateNode(node: WorkflowNode): ValidationError[] {
|
||||
const errors: ValidationError[] = [];
|
||||
const data = node.data;
|
||||
|
||||
switch (data.type) {
|
||||
case 'llm':
|
||||
if (!data.template) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'template',
|
||||
message: 'Template is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'skill':
|
||||
if (!data.skillId) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'skillId',
|
||||
message: 'Skill ID is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'hand':
|
||||
if (!data.handId) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'handId',
|
||||
message: 'Hand ID is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
if (!data.action) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'action',
|
||||
message: 'Action is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'http':
|
||||
if (!data.url) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'url',
|
||||
message: 'URL is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
|
||||
case 'input':
|
||||
if (!data.variableName) {
|
||||
errors.push({
|
||||
nodeId: node.id,
|
||||
field: 'variableName',
|
||||
message: 'Variable name is required',
|
||||
severity: 'error',
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the graph has a cycle
|
||||
*/
|
||||
function hasCycle(nodes: WorkflowNode[], edges: Edge[]): boolean {
|
||||
const adjacency = new Map<string, string[]>();
|
||||
const visited = new Set<string>();
|
||||
const recStack = new Set<string>();
|
||||
|
||||
// Build adjacency list
|
||||
for (const node of nodes) {
|
||||
adjacency.set(node.id, []);
|
||||
}
|
||||
for (const edge of edges) {
|
||||
const neighbors = adjacency.get(edge.source) || [];
|
||||
neighbors.push(edge.target);
|
||||
adjacency.set(edge.source, neighbors);
|
||||
}
|
||||
|
||||
// DFS cycle detection
|
||||
function dfs(nodeId: string): boolean {
|
||||
visited.add(nodeId);
|
||||
recStack.add(nodeId);
|
||||
|
||||
const neighbors = adjacency.get(nodeId) || [];
|
||||
for (const neighbor of neighbors) {
|
||||
if (!visited.has(neighbor)) {
|
||||
if (dfs(neighbor)) return true;
|
||||
} else if (recStack.has(neighbor)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
recStack.delete(nodeId);
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const node of nodes) {
|
||||
if (!visited.has(node.id)) {
|
||||
if (dfs(node.id)) return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
Reference in New Issue
Block a user