Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
- 新增 66 个 @reserved 标注 (已有 22 个) - 覆盖: agent/butler/classroom/hand/mcp/pipeline/skill/trigger/viking/zclaw 等模块 - MCP 命令增加 @connected 注释说明前端接入路径 - @reserved 总数: 89 (含 identity_init)
323 lines
11 KiB
Rust
323 lines
11 KiB
Rust
//! Pipeline CRUD commands (Create / Update / Delete).
|
|
|
|
use std::collections::HashMap;
|
|
use std::sync::Arc;
|
|
use tauri::State;
|
|
use serde::{Deserialize, Serialize};
|
|
use serde_json::Value;
|
|
|
|
use zclaw_pipeline::{
|
|
Pipeline,
|
|
PipelineMetadata,
|
|
PipelineSpec,
|
|
PipelineStep,
|
|
Action,
|
|
ErrorStrategy,
|
|
};
|
|
|
|
use super::{PipelineState, PipelineInfo};
|
|
use super::helpers::{get_pipelines_directory, pipeline_to_info};
|
|
|
|
/// Create pipeline request
///
/// Payload the frontend sends to `pipeline_create`; fields are camelCased
/// on the wire via the serde rename below.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreatePipelineRequest {
    /// Human-readable pipeline name; trimmed and slugified into the pipeline id.
    pub name: String,
    /// Optional free-text description stored in the pipeline metadata.
    pub description: Option<String>,
    /// Ordered workflow steps, materialized into `PipelineStep`s on create.
    pub steps: Vec<WorkflowStepInput>,
}
|
|
|
|
/// Update pipeline request
///
/// PATCH-style payload for `pipeline_update`: every field is optional, and a
/// `None` keeps the existing value (there is no way to clear a field to empty).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdatePipelineRequest {
    /// New display name; `None` keeps the current display name.
    pub name: Option<String>,
    /// New description; `None` keeps the current description.
    pub description: Option<String>,
    /// Full replacement step list; `None` keeps the existing steps unchanged.
    pub steps: Option<Vec<WorkflowStepInput>>,
}
|
|
|
|
/// Workflow step input from frontend
///
/// One step of a pipeline as edited in the UI. `action_type` selects which of
/// the optional fields below are consulted when the step is converted into an
/// `Action` (see the match in `pipeline_create` / `pipeline_update`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct WorkflowStepInput {
    /// Action type discriminator (P2-12: enables non-Hand action types).
    /// Known values: "llm_generate", "parallel", "condition"; anything else
    /// (including `None`) falls back to a Hand action.
    pub action_type: Option<String>,
    /// Hand to invoke; also used as the parallel loop body's hand.
    pub hand_name: String,
    /// Optional step name; doubles as the step id (falls back to "step-N").
    pub name: Option<String>,
    /// Step parameters; values are stringified before being passed along.
    pub params: Option<HashMap<String, Value>>,
    /// Condition expression (for action_type = "condition").
    pub condition: Option<String>,
    /// LLM generation template (for action_type = "llm_generate")
    pub template: Option<String>,
    /// Parallel collection path (for action_type = "parallel")
    pub each: Option<String>,
    /// Condition branches (for action_type = "condition")
    // NOTE(review): this field is currently never read when building the
    // Condition action — confirm intended wiring with the frontend.
    pub branches: Option<HashMap<String, Value>>,
}
|
|
|
|
/// Create a new pipeline as a YAML file
|
|
// @reserved: pipeline workflow management
|
|
// @connected
|
|
#[tauri::command]
|
|
pub async fn pipeline_create(
|
|
state: State<'_, Arc<PipelineState>>,
|
|
request: CreatePipelineRequest,
|
|
) -> Result<PipelineInfo, String> {
|
|
let name = request.name.trim().to_string();
|
|
if name.is_empty() {
|
|
return Err("Pipeline name cannot be empty".to_string());
|
|
}
|
|
|
|
let pipelines_dir = get_pipelines_directory()?;
|
|
if !pipelines_dir.exists() {
|
|
std::fs::create_dir_all(&pipelines_dir)
|
|
.map_err(|e| format!("Failed to create pipelines directory: {}", e))?;
|
|
}
|
|
|
|
// Generate pipeline ID from name
|
|
let pipeline_id = name.to_lowercase()
|
|
.replace(' ', "-")
|
|
.replace(|c: char| !c.is_alphanumeric() && c != '-', "");
|
|
|
|
let file_path = pipelines_dir.join(format!("{}.yaml", pipeline_id));
|
|
if file_path.exists() {
|
|
return Err(format!("Pipeline file already exists: {}", file_path.display()));
|
|
}
|
|
|
|
// P2-12: Build PipelineSteps with proper action type from WorkflowStepInput
|
|
let steps: Vec<PipelineStep> = request.steps.into_iter().enumerate().map(|(i, s)| {
|
|
let step_id = s.name.clone().unwrap_or_else(|| format!("step-{}", i + 1));
|
|
let params_map: HashMap<String, String> = s.params
|
|
.unwrap_or_default()
|
|
.into_iter()
|
|
.map(|(k, v)| (k, v.to_string()))
|
|
.collect();
|
|
|
|
let action = match s.action_type.as_deref().unwrap_or("hand") {
|
|
"llm_generate" => Action::LlmGenerate {
|
|
template: s.template.unwrap_or_default(),
|
|
input: params_map,
|
|
model: None,
|
|
temperature: None,
|
|
max_tokens: None,
|
|
json_mode: false,
|
|
},
|
|
"parallel" => Action::Parallel {
|
|
each: s.each.unwrap_or_else(|| "item".to_string()),
|
|
step: Box::new(PipelineStep {
|
|
id: format!("{}-body", step_id),
|
|
action: Action::Hand {
|
|
hand_id: s.hand_name.clone(),
|
|
hand_action: "execute".to_string(),
|
|
params: params_map,
|
|
},
|
|
description: None,
|
|
when: None,
|
|
retry: None,
|
|
timeout_secs: None,
|
|
}),
|
|
max_workers: None,
|
|
},
|
|
"condition" => Action::Condition {
|
|
condition: s.condition.unwrap_or_default(),
|
|
branches: vec![],
|
|
default: None,
|
|
},
|
|
_ => Action::Hand {
|
|
hand_id: s.hand_name.clone(),
|
|
hand_action: "execute".to_string(),
|
|
params: params_map,
|
|
},
|
|
};
|
|
|
|
PipelineStep {
|
|
id: step_id,
|
|
action,
|
|
description: s.name,
|
|
when: None,
|
|
retry: None,
|
|
timeout_secs: None,
|
|
}
|
|
}).collect();
|
|
|
|
let pipeline = Pipeline {
|
|
api_version: "zclaw/v1".to_string(),
|
|
kind: "Pipeline".to_string(),
|
|
metadata: PipelineMetadata {
|
|
name: pipeline_id.clone(),
|
|
display_name: Some(name),
|
|
description: request.description,
|
|
category: None,
|
|
industry: None,
|
|
tags: vec![],
|
|
icon: None,
|
|
author: None,
|
|
version: "1.0.0".to_string(),
|
|
annotations: None,
|
|
},
|
|
spec: PipelineSpec {
|
|
inputs: vec![],
|
|
steps,
|
|
outputs: HashMap::new(),
|
|
on_error: ErrorStrategy::Stop,
|
|
timeout_secs: 0,
|
|
max_workers: 4,
|
|
},
|
|
};
|
|
|
|
// Serialize to YAML
|
|
let yaml_content = serde_yaml::to_string(&pipeline)
|
|
.map_err(|e| format!("Failed to serialize pipeline: {}", e))?;
|
|
|
|
std::fs::write(&file_path, yaml_content)
|
|
.map_err(|e| format!("Failed to write pipeline file: {}", e))?;
|
|
|
|
// Register in state
|
|
let mut state_pipelines = state.pipelines.write().await;
|
|
let mut state_paths = state.pipeline_paths.write().await;
|
|
state_pipelines.insert(pipeline_id.clone(), pipeline.clone());
|
|
state_paths.insert(pipeline_id, file_path);
|
|
|
|
Ok(pipeline_to_info(&pipeline))
|
|
}
|
|
|
|
/// Update an existing pipeline
///
/// PATCH semantics: `None` request fields keep the existing value. Only the
/// display name, description, and step list are updatable; the pipeline id
/// (and therefore the YAML filename in `pipeline_paths`) never changes here.
///
/// Lock order matters: read locks on both maps are held while the updated
/// pipeline is built and serialized, then explicitly dropped before the
/// blocking file write, and the write lock is re-acquired afterwards.
// @reserved: pipeline workflow management
// @connected
#[tauri::command]
pub async fn pipeline_update(
    state: State<'_, Arc<PipelineState>>,
    pipeline_id: String,
    request: UpdatePipelineRequest,
) -> Result<PipelineInfo, String> {
    let pipelines = state.pipelines.read().await;
    let paths = state.pipeline_paths.read().await;

    let existing = pipelines.get(&pipeline_id)
        .ok_or_else(|| format!("Pipeline not found: {}", pipeline_id))?;
    // Clone the path so it outlives the read guard dropped below.
    let file_path = paths.get(&pipeline_id)
        .ok_or_else(|| format!("Pipeline file path not found: {}", pipeline_id))?
        .clone();

    // Build updated pipeline
    let updated_metadata = PipelineMetadata {
        // `.or(...)` keeps the old value when the request field is None.
        display_name: request.name.or(existing.metadata.display_name.clone()),
        description: request.description.or(existing.metadata.description.clone()),
        ..existing.metadata.clone()
    };

    // P2-12: Build PipelineSteps with proper action type (mirrors pipeline_create logic)
    let updated_steps = match request.steps {
        Some(steps) => steps.into_iter().enumerate().map(|(i, s)| {
            // Step id falls back to a 1-based positional name when unnamed.
            let step_id = s.name.clone().unwrap_or_else(|| format!("step-{}", i + 1));
            // NOTE(review): Value::to_string() emits JSON text, so string
            // params keep their quotes — confirm consumers expect that.
            let params_map: HashMap<String, String> = s.params
                .unwrap_or_default()
                .into_iter()
                .map(|(k, v)| (k, v.to_string()))
                .collect();

            let action = match s.action_type.as_deref().unwrap_or("hand") {
                "llm_generate" => Action::LlmGenerate {
                    template: s.template.unwrap_or_default(),
                    input: params_map,
                    model: None,
                    temperature: None,
                    max_tokens: None,
                    json_mode: false,
                },
                "parallel" => Action::Parallel {
                    // Fan out over `each`; body is a Hand action with the same params.
                    each: s.each.unwrap_or_else(|| "item".to_string()),
                    step: Box::new(PipelineStep {
                        id: format!("{}-body", step_id),
                        action: Action::Hand {
                            hand_id: s.hand_name.clone(),
                            hand_action: "execute".to_string(),
                            params: params_map,
                        },
                        description: None,
                        when: None,
                        retry: None,
                        timeout_secs: None,
                    }),
                    max_workers: None,
                },
                "condition" => Action::Condition {
                    condition: s.condition.unwrap_or_default(),
                    // NOTE(review): s.branches is ignored (same as in
                    // pipeline_create) — confirm intended behavior.
                    branches: vec![],
                    default: None,
                },
                // Unknown / absent action types fall back to a Hand invocation.
                _ => Action::Hand {
                    hand_id: s.hand_name.clone(),
                    hand_action: "execute".to_string(),
                    params: params_map,
                },
            };

            PipelineStep {
                id: step_id,
                action,
                description: s.name,
                when: None,
                retry: None,
                timeout_secs: None,
            }
        }).collect(),
        // No step list in the request: keep the existing steps untouched.
        None => existing.spec.steps.clone(),
    };

    let updated_pipeline = Pipeline {
        metadata: updated_metadata,
        spec: PipelineSpec {
            steps: updated_steps,
            ..existing.spec.clone()
        },
        ..existing.clone()
    };

    // Write to file
    let yaml_content = serde_yaml::to_string(&updated_pipeline)
        .map_err(|e| format!("Failed to serialize pipeline: {}", e))?;

    // Drop read locks before write
    drop(pipelines);
    drop(paths);

    std::fs::write(file_path, yaml_content)
        .map_err(|e| format!("Failed to write pipeline file: {}", e))?;

    // Update state
    let mut state_pipelines = state.pipelines.write().await;
    state_pipelines.insert(pipeline_id.clone(), updated_pipeline.clone());

    Ok(pipeline_to_info(&updated_pipeline))
}
|
|
|
|
/// Delete a pipeline
|
|
// @connected
|
|
#[tauri::command]
|
|
pub async fn pipeline_delete(
|
|
state: State<'_, Arc<PipelineState>>,
|
|
pipeline_id: String,
|
|
) -> Result<(), String> {
|
|
let paths = state.pipeline_paths.read().await;
|
|
|
|
let file_path = paths.get(&pipeline_id)
|
|
.ok_or_else(|| format!("Pipeline not found: {}", pipeline_id))?;
|
|
|
|
let path = file_path.clone();
|
|
drop(paths);
|
|
|
|
// Remove file
|
|
if path.exists() {
|
|
std::fs::remove_file(&path)
|
|
.map_err(|e| format!("Failed to delete pipeline file: {}", e))?;
|
|
}
|
|
|
|
// Remove from state
|
|
let mut state_pipelines = state.pipelines.write().await;
|
|
let mut state_paths = state.pipeline_paths.write().await;
|
|
state_pipelines.remove(&pipeline_id);
|
|
state_paths.remove(&pipeline_id);
|
|
|
|
Ok(())
|
|
}
|