feat(pipeline): implement Pipeline DSL system for automated workflows
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
Add complete Pipeline DSL system including:
- Rust backend (zclaw-pipeline crate) with parser, executor, and state management
- Frontend components: PipelinesPanel, PipelineResultPreview, ClassroomPreviewer
- Pipeline recommender for Agent conversation integration
- 5 pipeline templates: education, marketing, legal, research, productivity
- Documentation for Pipeline DSL architecture
Pipeline DSL enables declarative workflow definitions with:
- YAML-based configuration
- Expression resolution (${inputs.topic}, ${steps.step1.output})
- LLM integration, parallel execution, file export
- Agent smart recommendations in conversations
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
33
crates/zclaw-pipeline/Cargo.toml
Normal file
33
crates/zclaw-pipeline/Cargo.toml
Normal file
@@ -0,0 +1,33 @@
|
||||
[package]
|
||||
name = "zclaw-pipeline"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
description = "Pipeline DSL and execution engine for ZCLAW"
|
||||
|
||||
[dependencies]
|
||||
# Workspace dependencies
|
||||
tokio = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
serde_yaml = "0.9"
|
||||
thiserror = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
reqwest = { workspace = true }
|
||||
|
||||
# Internal crates
|
||||
zclaw-types = { workspace = true }
|
||||
zclaw-runtime = { workspace = true }
|
||||
zclaw-kernel = { workspace = true }
|
||||
zclaw-skills = { workspace = true }
|
||||
zclaw-hands = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio-test = "0.4"
|
||||
161
crates/zclaw-pipeline/src/actions/export.rs
Normal file
161
crates/zclaw-pipeline/src/actions/export.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
//! File export action
|
||||
|
||||
use std::path::PathBuf;
|
||||
use serde_json::Value;
|
||||
use tokio::fs;
|
||||
|
||||
use crate::types::ExportFormat;
|
||||
use super::ActionError;
|
||||
|
||||
/// Export files in specified formats
|
||||
pub async fn export_files(
|
||||
formats: &[ExportFormat],
|
||||
data: &Value,
|
||||
output_dir: Option<&str>,
|
||||
) -> Result<Value, ActionError> {
|
||||
let dir = output_dir
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|| std::env::temp_dir());
|
||||
|
||||
// Ensure directory exists
|
||||
fs::create_dir_all(&dir).await
|
||||
.map_err(|e| ActionError::Export(format!("Failed to create directory: {}", e)))?;
|
||||
|
||||
let mut paths = Vec::new();
|
||||
let timestamp = chrono::Utc::now().format("%Y%m%d_%H%M%S");
|
||||
|
||||
for format in formats {
|
||||
let filename = format!("output_{}.{}", timestamp, format.extension());
|
||||
let path = dir.join(&filename);
|
||||
|
||||
match format {
|
||||
ExportFormat::Json => {
|
||||
let content = serde_json::to_string_pretty(data)
|
||||
.map_err(|e| ActionError::Export(format!("JSON serialization error: {}", e)))?;
|
||||
fs::write(&path, content).await
|
||||
.map_err(|e| ActionError::Export(format!("Write error: {}", e)))?;
|
||||
}
|
||||
ExportFormat::Markdown => {
|
||||
let content = render_markdown(data);
|
||||
fs::write(&path, content).await
|
||||
.map_err(|e| ActionError::Export(format!("Write error: {}", e)))?;
|
||||
}
|
||||
ExportFormat::Html => {
|
||||
let content = render_html(data);
|
||||
fs::write(&path, content).await
|
||||
.map_err(|e| ActionError::Export(format!("Write error: {}", e)))?;
|
||||
}
|
||||
ExportFormat::Pptx => {
|
||||
// Will integrate with zclaw-kernel export
|
||||
return Err(ActionError::Export("PPTX export requires kernel integration".to_string()));
|
||||
}
|
||||
ExportFormat::Pdf => {
|
||||
return Err(ActionError::Export("PDF export not yet implemented".to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
paths.push(serde_json::json!({
|
||||
"format": format.extension(),
|
||||
"path": path.to_string_lossy(),
|
||||
"filename": filename,
|
||||
}));
|
||||
}
|
||||
|
||||
Ok(Value::Array(paths))
|
||||
}
|
||||
|
||||
/// Render data to markdown
|
||||
fn render_markdown(data: &Value) -> String {
|
||||
let mut md = String::new();
|
||||
|
||||
if let Some(title) = data.get("title").and_then(|v| v.as_str()) {
|
||||
md.push_str(&format!("# {}\n\n", title));
|
||||
}
|
||||
|
||||
if let Some(description) = data.get("description").and_then(|v| v.as_str()) {
|
||||
md.push_str(&format!("{}\n\n", description));
|
||||
}
|
||||
|
||||
if let Some(outline) = data.get("outline") {
|
||||
md.push_str("## 大纲\n\n");
|
||||
if let Some(items) = outline.get("items").and_then(|v| v.as_array()) {
|
||||
for (i, item) in items.iter().enumerate() {
|
||||
if let Some(text) = item.get("title").and_then(|v| v.as_str()) {
|
||||
md.push_str(&format!("{}. {}\n", i + 1, text));
|
||||
}
|
||||
}
|
||||
md.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(scenes) = data.get("scenes").and_then(|v| v.as_array()) {
|
||||
md.push_str("## 场景\n\n");
|
||||
for scene in scenes {
|
||||
if let Some(title) = scene.get("title").and_then(|v| v.as_str()) {
|
||||
md.push_str(&format!("### {}\n\n", title));
|
||||
}
|
||||
if let Some(content) = scene.get("content").and_then(|v| v.as_str()) {
|
||||
md.push_str(&format!("{}\n\n", content));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
md
|
||||
}
|
||||
|
||||
/// Render data to HTML
///
/// Builds a standalone, styled HTML document from the same fields as
/// `render_markdown`: `title`, `description`, `outline.items[].title`
/// (ordered list) and `scenes[].{title,content}` (styled sections).
/// Unknown fields are ignored.
///
/// NOTE(review): values are interpolated without HTML-escaping — acceptable
/// only if upstream data is trusted; confirm before exposing to user input.
fn render_html(data: &Value) -> String {
    // Static document head: charset/viewport metadata plus inline styling.
    let mut html = String::from(r#"<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Export</title>
<style>
body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; max-width: 800px; margin: 0 auto; padding: 20px; }
h1 { color: #333; }
h2 { color: #555; border-bottom: 1px solid #eee; padding-bottom: 10px; }
h3 { color: #666; }
.scene { margin: 20px 0; padding: 15px; background: #f9f9f9; border-radius: 8px; }
</style>
</head>
<body>
"#);

    if let Some(title) = data.get("title").and_then(|v| v.as_str()) {
        html.push_str(&format!("<h1>{}</h1>", title));
    }

    if let Some(description) = data.get("description").and_then(|v| v.as_str()) {
        html.push_str(&format!("<p>{}</p>", description));
    }

    // "大纲" (outline) section: numbered list of item titles.
    if let Some(outline) = data.get("outline") {
        html.push_str("<h2>大纲</h2><ol>");
        if let Some(items) = outline.get("items").and_then(|v| v.as_array()) {
            for item in items {
                if let Some(text) = item.get("title").and_then(|v| v.as_str()) {
                    html.push_str(&format!("<li>{}</li>", text));
                }
            }
        }
        html.push_str("</ol>");
    }

    // "场景" (scenes) section: one styled <div> per scene.
    if let Some(scenes) = data.get("scenes").and_then(|v| v.as_array()) {
        html.push_str("<h2>场景</h2>");
        for scene in scenes {
            html.push_str("<div class=\"scene\">");
            if let Some(title) = scene.get("title").and_then(|v| v.as_str()) {
                html.push_str(&format!("<h3>{}</h3>", title));
            }
            if let Some(content) = scene.get("content").and_then(|v| v.as_str()) {
                html.push_str(&format!("<p>{}</p>", content));
            }
            html.push_str("</div>");
        }
    }

    html.push_str("</body></html>");
    html
}
|
||||
21
crates/zclaw-pipeline/src/actions/hand.rs
Normal file
21
crates/zclaw-pipeline/src/actions/hand.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! Hand execution action
|
||||
|
||||
use std::collections::HashMap;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::ActionError;
|
||||
|
||||
/// Execute a hand action
|
||||
pub async fn execute_hand(
|
||||
hand_id: &str,
|
||||
action: &str,
|
||||
params: HashMap<String, Value>,
|
||||
) -> Result<Value, ActionError> {
|
||||
// This will be implemented by injecting the hand registry
|
||||
// For now, return an error indicating it needs configuration
|
||||
|
||||
Err(ActionError::Hand(format!(
|
||||
"Hand '{}' action '{}' requires hand registry configuration",
|
||||
hand_id, action
|
||||
)))
|
||||
}
|
||||
61
crates/zclaw-pipeline/src/actions/http.rs
Normal file
61
crates/zclaw-pipeline/src/actions/http.rs
Normal file
@@ -0,0 +1,61 @@
|
||||
//! HTTP request action
|
||||
|
||||
use std::collections::HashMap;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::ActionError;
|
||||
|
||||
/// Execute HTTP request
|
||||
pub async fn http_request(
|
||||
url: &str,
|
||||
method: &str,
|
||||
headers: &HashMap<String, String>,
|
||||
body: Option<&Value>,
|
||||
) -> Result<Value, ActionError> {
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.build()
|
||||
.map_err(|e| ActionError::Http(e.to_string()))?;
|
||||
|
||||
let mut request = match method.to_uppercase().as_str() {
|
||||
"GET" => client.get(url),
|
||||
"POST" => client.post(url),
|
||||
"PUT" => client.put(url),
|
||||
"DELETE" => client.delete(url),
|
||||
"PATCH" => client.patch(url),
|
||||
"HEAD" => client.head(url),
|
||||
_ => return Err(ActionError::Http(format!("Unsupported HTTP method: {}", method))),
|
||||
};
|
||||
|
||||
for (key, value) in headers {
|
||||
request = request.header(key, value);
|
||||
}
|
||||
|
||||
if let Some(body) = body {
|
||||
request = request.json(body);
|
||||
}
|
||||
|
||||
let response = request.send()
|
||||
.await
|
||||
.map_err(|e| ActionError::Http(e.to_string()))?;
|
||||
|
||||
let status = response.status();
|
||||
let headers_out: HashMap<String, String> = response.headers()
|
||||
.iter()
|
||||
.filter_map(|(k, v)| Some((k.to_string(), v.to_str().ok()?.to_string())))
|
||||
.collect();
|
||||
|
||||
let body = response.text()
|
||||
.await
|
||||
.map_err(|e| ActionError::Http(e.to_string()))?;
|
||||
|
||||
// Try to parse as JSON, fallback to string
|
||||
let body_value = serde_json::from_str(&body).unwrap_or(Value::String(body));
|
||||
|
||||
Ok(serde_json::json!({
|
||||
"status": status.as_u16(),
|
||||
"status_text": status.canonical_reason().unwrap_or(""),
|
||||
"headers": headers_out,
|
||||
"body": body_value,
|
||||
}))
|
||||
}
|
||||
28
crates/zclaw-pipeline/src/actions/llm.rs
Normal file
28
crates/zclaw-pipeline/src/actions/llm.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
//! LLM generation action
|
||||
|
||||
use std::collections::HashMap;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::ActionError;
|
||||
|
||||
/// Execute LLM generation
|
||||
pub async fn execute_llm_generation(
|
||||
driver: &dyn super::LlmActionDriver,
|
||||
template: &str,
|
||||
input: HashMap<String, Value>,
|
||||
model: Option<String>,
|
||||
temperature: Option<f32>,
|
||||
max_tokens: Option<u32>,
|
||||
json_mode: bool,
|
||||
) -> Result<Value, ActionError> {
|
||||
driver.generate(
|
||||
template.to_string(),
|
||||
input,
|
||||
model,
|
||||
temperature,
|
||||
max_tokens,
|
||||
json_mode,
|
||||
)
|
||||
.await
|
||||
.map_err(ActionError::Llm)
|
||||
}
|
||||
379
crates/zclaw-pipeline/src/actions/mod.rs
Normal file
379
crates/zclaw-pipeline/src/actions/mod.rs
Normal file
@@ -0,0 +1,379 @@
|
||||
//! Pipeline actions module
|
||||
//!
|
||||
//! Built-in actions that can be used in pipelines.
|
||||
|
||||
mod llm;
|
||||
mod parallel;
|
||||
mod render;
|
||||
mod export;
|
||||
mod http;
|
||||
mod skill;
|
||||
mod hand;
|
||||
|
||||
pub use llm::*;
|
||||
pub use parallel::*;
|
||||
pub use render::*;
|
||||
pub use export::*;
|
||||
pub use http::*;
|
||||
pub use skill::*;
|
||||
pub use hand::*;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use serde_json::Value;
|
||||
use async_trait::async_trait;
|
||||
|
||||
use crate::types::ExportFormat;
|
||||
|
||||
/// Action execution error
///
/// Error type shared by every built-in pipeline action; each variant carries
/// a human-readable message or the wrapped source error.
#[derive(Debug, thiserror::Error)]
pub enum ActionError {
    /// LLM generation failed or no LLM driver is configured.
    #[error("LLM error: {0}")]
    Llm(String),

    /// Skill execution failed or no skill registry is configured.
    #[error("Skill error: {0}")]
    Skill(String),

    /// Hand execution failed or no hand registry is configured.
    #[error("Hand error: {0}")]
    Hand(String),

    /// Rendering failed (e.g. required fields missing from the payload).
    #[error("Render error: {0}")]
    Render(String),

    /// File export failed (directory creation, serialization, or write).
    #[error("Export error: {0}")]
    Export(String),

    /// HTTP client construction, send, or body-read failure.
    #[error("HTTP error: {0}")]
    Http(String),

    /// Underlying filesystem error (converted automatically via `From`).
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    /// JSON (de)serialization error (converted automatically via `From`).
    #[error("JSON error: {0}")]
    Json(#[from] serde_json::Error),

    /// A prompt template path could not be read.
    #[error("Template not found: {0}")]
    TemplateNotFound(String),

    /// Input failed validation before the action could run.
    #[error("Invalid input: {0}")]
    InvalidInput(String),
}
|
||||
|
||||
/// Action registry - holds references to all action executors
///
/// Central dispatch point for pipeline actions. All drivers are optional and
/// injected via the `with_*` builder methods; invoking an action whose
/// driver is missing returns the corresponding `ActionError` variant.
pub struct ActionRegistry {
    /// LLM driver (injected from runtime)
    llm_driver: Option<Arc<dyn LlmActionDriver>>,

    /// Skill registry (injected from kernel)
    skill_registry: Option<Arc<dyn SkillActionDriver>>,

    /// Hand registry (injected from kernel)
    hand_registry: Option<Arc<dyn HandActionDriver>>,

    /// Base directory for prompt template files loaded by `execute_llm`.
    template_dir: Option<std::path::PathBuf>,
}
|
||||
|
||||
impl ActionRegistry {
|
||||
/// Create a new action registry
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
llm_driver: None,
|
||||
skill_registry: None,
|
||||
hand_registry: None,
|
||||
template_dir: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set LLM driver
|
||||
pub fn with_llm_driver(mut self, driver: Arc<dyn LlmActionDriver>) -> Self {
|
||||
self.llm_driver = Some(driver);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set skill registry
|
||||
pub fn with_skill_registry(mut self, registry: Arc<dyn SkillActionDriver>) -> Self {
|
||||
self.skill_registry = Some(registry);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set hand registry
|
||||
pub fn with_hand_registry(mut self, registry: Arc<dyn HandActionDriver>) -> Self {
|
||||
self.hand_registry = Some(registry);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set template directory
|
||||
pub fn with_template_dir(mut self, dir: std::path::PathBuf) -> Self {
|
||||
self.template_dir = Some(dir);
|
||||
self
|
||||
}
|
||||
|
||||
/// Execute LLM generation
|
||||
pub async fn execute_llm(
|
||||
&self,
|
||||
template: &str,
|
||||
input: HashMap<String, Value>,
|
||||
model: Option<String>,
|
||||
temperature: Option<f32>,
|
||||
max_tokens: Option<u32>,
|
||||
json_mode: bool,
|
||||
) -> Result<Value, ActionError> {
|
||||
if let Some(driver) = &self.llm_driver {
|
||||
// Load template if it's a file path
|
||||
let prompt = if template.ends_with(".md") || template.contains('/') {
|
||||
self.load_template(template)?
|
||||
} else {
|
||||
template.to_string()
|
||||
};
|
||||
|
||||
driver.generate(prompt, input, model, temperature, max_tokens, json_mode)
|
||||
.await
|
||||
.map_err(ActionError::Llm)
|
||||
} else {
|
||||
Err(ActionError::Llm("LLM driver not configured".to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute a skill
|
||||
pub async fn execute_skill(
|
||||
&self,
|
||||
skill_id: &str,
|
||||
input: HashMap<String, Value>,
|
||||
) -> Result<Value, ActionError> {
|
||||
if let Some(registry) = &self.skill_registry {
|
||||
registry.execute(skill_id, input)
|
||||
.await
|
||||
.map_err(ActionError::Skill)
|
||||
} else {
|
||||
Err(ActionError::Skill("Skill registry not configured".to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute a hand action
|
||||
pub async fn execute_hand(
|
||||
&self,
|
||||
hand_id: &str,
|
||||
action: &str,
|
||||
params: HashMap<String, Value>,
|
||||
) -> Result<Value, ActionError> {
|
||||
if let Some(registry) = &self.hand_registry {
|
||||
registry.execute(hand_id, action, params)
|
||||
.await
|
||||
.map_err(ActionError::Hand)
|
||||
} else {
|
||||
Err(ActionError::Hand("Hand registry not configured".to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Render classroom
|
||||
pub async fn render_classroom(&self, data: &Value) -> Result<Value, ActionError> {
|
||||
// This will integrate with the classroom renderer
|
||||
// For now, return the data as-is
|
||||
Ok(data.clone())
|
||||
}
|
||||
|
||||
/// Export files
|
||||
pub async fn export_files(
|
||||
&self,
|
||||
formats: &[ExportFormat],
|
||||
data: &Value,
|
||||
output_dir: Option<&str>,
|
||||
) -> Result<Value, ActionError> {
|
||||
let mut paths = Vec::new();
|
||||
|
||||
let dir = output_dir
|
||||
.map(std::path::PathBuf::from)
|
||||
.unwrap_or_else(|| std::env::temp_dir());
|
||||
|
||||
for format in formats {
|
||||
let path = self.export_single(format, data, &dir).await?;
|
||||
paths.push(path);
|
||||
}
|
||||
|
||||
Ok(serde_json::to_value(paths).unwrap_or(Value::Null))
|
||||
}
|
||||
|
||||
async fn export_single(
|
||||
&self,
|
||||
format: &ExportFormat,
|
||||
data: &Value,
|
||||
dir: &std::path::Path,
|
||||
) -> Result<String, ActionError> {
|
||||
let filename = format!("output_{}.{}", chrono::Utc::now().format("%Y%m%d_%H%M%S"), format.extension());
|
||||
let path = dir.join(&filename);
|
||||
|
||||
match format {
|
||||
ExportFormat::Json => {
|
||||
let content = serde_json::to_string_pretty(data)?;
|
||||
tokio::fs::write(&path, content).await?;
|
||||
}
|
||||
ExportFormat::Markdown => {
|
||||
let content = self.render_markdown(data)?;
|
||||
tokio::fs::write(&path, content).await?;
|
||||
}
|
||||
ExportFormat::Html => {
|
||||
let content = self.render_html(data)?;
|
||||
tokio::fs::write(&path, content).await?;
|
||||
}
|
||||
ExportFormat::Pptx => {
|
||||
// Will integrate with pptx exporter
|
||||
return Err(ActionError::Export("PPTX export not yet implemented".to_string()));
|
||||
}
|
||||
ExportFormat::Pdf => {
|
||||
return Err(ActionError::Export("PDF export not yet implemented".to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(path.to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
/// Make HTTP request
|
||||
pub async fn http_request(
|
||||
&self,
|
||||
url: &str,
|
||||
method: &str,
|
||||
headers: &HashMap<String, String>,
|
||||
body: Option<&Value>,
|
||||
) -> Result<Value, ActionError> {
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
let mut request = match method.to_uppercase().as_str() {
|
||||
"GET" => client.get(url),
|
||||
"POST" => client.post(url),
|
||||
"PUT" => client.put(url),
|
||||
"DELETE" => client.delete(url),
|
||||
"PATCH" => client.patch(url),
|
||||
_ => return Err(ActionError::Http(format!("Unsupported HTTP method: {}", method))),
|
||||
};
|
||||
|
||||
for (key, value) in headers {
|
||||
request = request.header(key, value);
|
||||
}
|
||||
|
||||
if let Some(body) = body {
|
||||
request = request.json(body);
|
||||
}
|
||||
|
||||
let response = request.send()
|
||||
.await
|
||||
.map_err(|e| ActionError::Http(e.to_string()))?;
|
||||
|
||||
let status = response.status();
|
||||
let body = response.text()
|
||||
.await
|
||||
.map_err(|e| ActionError::Http(e.to_string()))?;
|
||||
|
||||
Ok(serde_json::json!({
|
||||
"status": status.as_u16(),
|
||||
"body": body,
|
||||
}))
|
||||
}
|
||||
|
||||
/// Load a template file
|
||||
fn load_template(&self, path: &str) -> Result<String, ActionError> {
|
||||
let template_path = if let Some(dir) = &self.template_dir {
|
||||
dir.join(path)
|
||||
} else {
|
||||
std::path::PathBuf::from(path)
|
||||
};
|
||||
|
||||
std::fs::read_to_string(&template_path)
|
||||
.map_err(|_| ActionError::TemplateNotFound(path.to_string()))
|
||||
}
|
||||
|
||||
/// Render data to markdown
|
||||
fn render_markdown(&self, data: &Value) -> Result<String, ActionError> {
|
||||
// Simple markdown rendering
|
||||
let mut md = String::new();
|
||||
|
||||
if let Some(title) = data.get("title").and_then(|v| v.as_str()) {
|
||||
md.push_str(&format!("# {}\n\n", title));
|
||||
}
|
||||
|
||||
if let Some(items) = data.get("items").and_then(|v| v.as_array()) {
|
||||
for item in items {
|
||||
if let Some(text) = item.as_str() {
|
||||
md.push_str(&format!("- {}\n", text));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(md)
|
||||
}
|
||||
|
||||
/// Render data to HTML
|
||||
fn render_html(&self, data: &Value) -> Result<String, ActionError> {
|
||||
let mut html = String::from("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>Export</title></head><body>");
|
||||
|
||||
if let Some(title) = data.get("title").and_then(|v| v.as_str()) {
|
||||
html.push_str(&format!("<h1>{}</h1>", title));
|
||||
}
|
||||
|
||||
if let Some(items) = data.get("items").and_then(|v| v.as_array()) {
|
||||
html.push_str("<ul>");
|
||||
for item in items {
|
||||
if let Some(text) = item.as_str() {
|
||||
html.push_str(&format!("<li>{}</li>", text));
|
||||
}
|
||||
}
|
||||
html.push_str("</ul>");
|
||||
}
|
||||
|
||||
html.push_str("</body></html>");
|
||||
Ok(html)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExportFormat {
    /// File extension (without the dot) used when writing this format to
    /// disk and when labeling export results.
    fn extension(&self) -> &'static str {
        match self {
            ExportFormat::Pptx => "pptx",
            ExportFormat::Html => "html",
            ExportFormat::Pdf => "pdf",
            ExportFormat::Markdown => "md",
            ExportFormat::Json => "json",
        }
    }
}
|
||||
|
||||
impl Default for ActionRegistry {
    /// Equivalent to [`ActionRegistry::new`]: a registry with no drivers.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
/// LLM action driver trait
///
/// Implemented by the runtime to service LLM-generation actions. Errors are
/// plain strings; the registry wraps them in `ActionError::Llm`.
#[async_trait]
pub trait LlmActionDriver: Send + Sync {
    /// Generate a completion for `prompt` with the given template inputs and
    /// optional model / sampling overrides. `json_mode` requests structured
    /// JSON output — exact semantics depend on the implementation.
    async fn generate(
        &self,
        prompt: String,
        input: HashMap<String, Value>,
        model: Option<String>,
        temperature: Option<f32>,
        max_tokens: Option<u32>,
        json_mode: bool,
    ) -> Result<Value, String>;
}
|
||||
|
||||
/// Skill action driver trait
///
/// Implemented by the kernel's skill registry. Errors are plain strings;
/// the registry wraps them in `ActionError::Skill`.
#[async_trait]
pub trait SkillActionDriver: Send + Sync {
    /// Execute the skill identified by `skill_id` with the given inputs.
    async fn execute(
        &self,
        skill_id: &str,
        input: HashMap<String, Value>,
    ) -> Result<Value, String>;
}
|
||||
|
||||
/// Hand action driver trait
///
/// Implemented by the kernel's hand registry. Errors are plain strings;
/// the registry wraps them in `ActionError::Hand`.
#[async_trait]
pub trait HandActionDriver: Send + Sync {
    /// Execute `action` on the hand identified by `hand_id` with `params`.
    async fn execute(
        &self,
        hand_id: &str,
        action: &str,
        params: HashMap<String, Value>,
    ) -> Result<Value, String>;
}
|
||||
33
crates/zclaw-pipeline/src/actions/parallel.rs
Normal file
33
crates/zclaw-pipeline/src/actions/parallel.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
//! Parallel execution action
|
||||
|
||||
use futures::stream::{self, StreamExt};
|
||||
use serde_json::Value;
|
||||
|
||||
use super::ActionError;
|
||||
|
||||
/// Execute steps in parallel
|
||||
pub async fn execute_parallel<F, Fut>(
|
||||
items: &[Value],
|
||||
max_workers: usize,
|
||||
executor: F,
|
||||
) -> Result<Vec<Value>, ActionError>
|
||||
where
|
||||
F: Fn(Value, usize) -> Fut,
|
||||
Fut: std::future::Future<Output = Result<Value, ActionError>>,
|
||||
{
|
||||
let results: Vec<Result<Value, ActionError>> = stream::iter(items.iter().enumerate())
|
||||
.map(|(index, item)| {
|
||||
let item = item.clone();
|
||||
executor(item, index)
|
||||
})
|
||||
.buffer_unordered(max_workers)
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
let mut outputs = Vec::new();
|
||||
for result in results {
|
||||
outputs.push(result?);
|
||||
}
|
||||
|
||||
Ok(outputs)
|
||||
}
|
||||
32
crates/zclaw-pipeline/src/actions/render.rs
Normal file
32
crates/zclaw-pipeline/src/actions/render.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
//! Classroom render action
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use super::ActionError;
|
||||
|
||||
/// Render classroom data
|
||||
pub async fn render_classroom(data: &Value) -> Result<Value, ActionError> {
|
||||
// This will integrate with the classroom renderer
|
||||
// For now, validate and pass through
|
||||
|
||||
let title = data.get("title")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or_else(|| ActionError::Render("Missing 'title' field".to_string()))?;
|
||||
|
||||
let outline = data.get("outline")
|
||||
.ok_or_else(|| ActionError::Render("Missing 'outline' field".to_string()))?;
|
||||
|
||||
let scenes = data.get("scenes")
|
||||
.ok_or_else(|| ActionError::Render("Missing 'scenes' field".to_string()))?;
|
||||
|
||||
// Generate classroom ID
|
||||
let classroom_id = uuid::Uuid::new_v4().to_string();
|
||||
|
||||
Ok(serde_json::json!({
|
||||
"id": classroom_id,
|
||||
"title": title,
|
||||
"outline": outline,
|
||||
"scenes": scenes,
|
||||
"preview_url": format!("/classroom/{}", classroom_id),
|
||||
}))
|
||||
}
|
||||
20
crates/zclaw-pipeline/src/actions/skill.rs
Normal file
20
crates/zclaw-pipeline/src/actions/skill.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
//! Skill execution action
|
||||
|
||||
use std::collections::HashMap;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::ActionError;
|
||||
|
||||
/// Execute a skill by ID
|
||||
pub async fn execute_skill(
|
||||
skill_id: &str,
|
||||
input: HashMap<String, Value>,
|
||||
) -> Result<Value, ActionError> {
|
||||
// This will be implemented by injecting the skill registry
|
||||
// For now, return an error indicating it needs configuration
|
||||
|
||||
Err(ActionError::Skill(format!(
|
||||
"Skill '{}' execution requires skill registry configuration",
|
||||
skill_id
|
||||
)))
|
||||
}
|
||||
428
crates/zclaw-pipeline/src/executor.rs
Normal file
428
crates/zclaw-pipeline/src/executor.rs
Normal file
@@ -0,0 +1,428 @@
|
||||
//! Pipeline Executor
|
||||
//!
|
||||
//! Executes pipelines step by step, managing state and calling actions.
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
use tokio::sync::RwLock;
|
||||
use serde_json::Value;
|
||||
use uuid::Uuid;
|
||||
use chrono::Utc;
|
||||
use futures::stream::{self, StreamExt};
|
||||
use futures::future::{BoxFuture, FutureExt};
|
||||
|
||||
use crate::types::{Pipeline, PipelineRun, PipelineProgress, RunStatus, PipelineStep, Action};
|
||||
use crate::state::{ExecutionContext, StateError};
|
||||
use crate::actions::ActionRegistry;
|
||||
|
||||
/// Pipeline execution errors
///
/// Top-level error type returned by the executor; action failures are
/// flattened to strings, state and I/O errors convert via `From`.
#[derive(Debug, thiserror::Error)]
pub enum ExecuteError {
    /// Expression resolution or output extraction failed in the
    /// execution context.
    #[error("State error: {0}")]
    State(#[from] StateError),

    /// An action failed; carries the stringified action error.
    #[error("Action error: {0}")]
    Action(String),

    /// A referenced step id does not exist in the pipeline.
    #[error("Step not found: {0}")]
    StepNotFound(String),

    /// Execution exceeded its time budget.
    #[error("Timeout exceeded")]
    Timeout,

    /// The run's cancellation flag was set.
    #[error("Cancelled")]
    Cancelled,

    /// A required condition evaluated to false.
    #[error("Condition not met: {0}")]
    ConditionNotMet(String),

    /// Underlying filesystem error (converted automatically via `From`).
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
}
|
||||
|
||||
/// Pipeline executor
///
/// Drives pipelines step by step, tracking each run's live state so it can
/// be observed and cancelled while in flight.
pub struct PipelineExecutor {
    /// Action registry
    action_registry: Arc<ActionRegistry>,

    /// Active runs (run_id -> run state)
    runs: RwLock<HashMap<String, PipelineRun>>,

    /// Cancellation flags (run_id -> requested); checked before each step.
    cancellations: RwLock<HashMap<String, bool>>,
}
|
||||
|
||||
impl PipelineExecutor {
|
||||
/// Create a new executor
|
||||
pub fn new(action_registry: Arc<ActionRegistry>) -> Self {
|
||||
Self {
|
||||
action_registry,
|
||||
runs: RwLock::new(HashMap::new()),
|
||||
cancellations: RwLock::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute a pipeline
|
||||
pub async fn execute(
|
||||
&self,
|
||||
pipeline: &Pipeline,
|
||||
inputs: HashMap<String, Value>,
|
||||
) -> Result<PipelineRun, ExecuteError> {
|
||||
let run_id = Uuid::new_v4().to_string();
|
||||
let pipeline_id = pipeline.metadata.name.clone();
|
||||
|
||||
// Create run record
|
||||
let run = PipelineRun {
|
||||
id: run_id.clone(),
|
||||
pipeline_id: pipeline_id.clone(),
|
||||
status: RunStatus::Running,
|
||||
inputs: serde_json::to_value(&inputs).unwrap_or(Value::Null),
|
||||
current_step: None,
|
||||
step_results: HashMap::new(),
|
||||
outputs: None,
|
||||
error: None,
|
||||
started_at: Utc::now(),
|
||||
ended_at: None,
|
||||
};
|
||||
|
||||
// Store run
|
||||
self.runs.write().await.insert(run_id.clone(), run);
|
||||
|
||||
// Create execution context
|
||||
let mut context = ExecutionContext::new(inputs);
|
||||
|
||||
// Execute steps
|
||||
let result = self.execute_steps(pipeline, &mut context, &run_id).await;
|
||||
|
||||
// Update run state
|
||||
let mut runs = self.runs.write().await;
|
||||
if let Some(run) = runs.get_mut(&run_id) {
|
||||
match result {
|
||||
Ok(outputs) => {
|
||||
run.status = RunStatus::Completed;
|
||||
run.outputs = Some(serde_json::to_value(&outputs).unwrap_or(Value::Null));
|
||||
}
|
||||
Err(e) => {
|
||||
run.status = RunStatus::Failed;
|
||||
run.error = Some(e.to_string());
|
||||
}
|
||||
}
|
||||
run.ended_at = Some(Utc::now());
|
||||
return Ok(run.clone());
|
||||
}
|
||||
|
||||
Err(ExecuteError::Action("Run not found after execution".to_string()))
|
||||
}
|
||||
|
||||
    /// Execute pipeline steps
    ///
    /// Runs every step of `pipeline` in declaration order, resolving step
    /// conditions and recording each step's output both in the execution
    /// context and in the stored `PipelineRun`. Checks the cancellation
    /// flag before each step and aborts with `ExecuteError::Cancelled`
    /// when it is set. Returns the pipeline outputs extracted from the
    /// final context.
    async fn execute_steps(
        &self,
        pipeline: &Pipeline,
        context: &mut ExecutionContext,
        run_id: &str,
    ) -> Result<HashMap<String, Value>, ExecuteError> {
        let total_steps = pipeline.spec.steps.len();

        for (idx, step) in pipeline.spec.steps.iter().enumerate() {
            // Check cancellation (flag is set externally; missing entry
            // means "not cancelled").
            if *self.cancellations.read().await.get(run_id).unwrap_or(&false) {
                return Err(ExecuteError::Cancelled);
            }

            // Update current step so observers of the run see progress.
            if let Some(run) = self.runs.write().await.get_mut(run_id) {
                run.current_step = Some(step.id.clone());
            }

            // Check condition: a false `when` clause skips the step entirely.
            if let Some(condition) = &step.when {
                let should_execute = self.evaluate_condition(condition, context)?;
                if !should_execute {
                    tracing::info!("Skipping step {} (condition not met)", step.id);
                    continue;
                }
            }

            tracing::info!("Executing step {} ({}/{})", step.id, idx + 1, total_steps);

            // Execute action
            let result = self.execute_action(&step.action, context).await?;

            // Store result in the context so later steps can reference it.
            context.set_output(&step.id, result.clone());

            // Update step results in run
            if let Some(run) = self.runs.write().await.get_mut(run_id) {
                run.step_results.insert(step.id.clone(), result);
            }
        }

        // Extract outputs declared by the pipeline spec from the context.
        Ok(context.extract_outputs(&pipeline.spec.outputs)
            .map_err(ExecuteError::State)?)
    }
|
||||
|
||||
    /// Execute a single action (returns BoxFuture for recursion support)
    ///
    /// `Sequential` and `Condition` actions recurse back into this method,
    /// so the async body must be boxed — a plain `async fn` would have an
    /// infinitely-sized future type.
    ///
    /// Returns the action's result value; errors from the action registry
    /// are flattened into `ExecuteError::Action(String)`.
    fn execute_action<'a>(
        &'a self,
        action: &'a Action,
        context: &'a mut ExecutionContext,
    ) -> BoxFuture<'a, Result<Value, ExecuteError>> {
        async move {
            match action {
                // LLM generation: resolve `${...}` expressions in the input
                // map, then delegate to the registry.
                Action::LlmGenerate { template, input, model, temperature, max_tokens, json_mode } => {
                    let resolved_input = context.resolve_map(input)?;
                    self.action_registry.execute_llm(
                        template,
                        resolved_input,
                        model.clone(),
                        *temperature,
                        *max_tokens,
                        *json_mode,
                    ).await.map_err(|e| ExecuteError::Action(e.to_string()))
                }

                // Fan-out: `each` must resolve to a JSON array; `step` runs
                // once per element with at most `max_workers` (default 4)
                // futures in flight.
                Action::Parallel { each, step, max_workers } => {
                    let items = context.resolve(each)?;
                    let items_array = items.as_array()
                        .ok_or_else(|| ExecuteError::Action("Parallel 'each' must resolve to an array".to_string()))?;

                    let workers = max_workers.unwrap_or(4);
                    let results = self.execute_parallel(step, items_array.clone(), workers).await?;

                    Ok(Value::Array(results))
                }

                // Sub-pipeline: each inner step's output is recorded under
                // its id; the last step's output becomes this action's result.
                Action::Sequential { steps } => {
                    let mut last_result = Value::Null;
                    for step in steps {
                        last_result = self.execute_action(&step.action, context).await?;
                        context.set_output(&step.id, last_result.clone());
                    }
                    Ok(last_result)
                }

                // Branching: the first branch whose `when` evaluates truthy
                // wins; falls back to `default`, then to Null.
                Action::Condition { branches, default, .. } => {
                    for branch in branches {
                        if self.evaluate_condition(&branch.when, context)? {
                            return self.execute_action(&branch.then.action, context).await;
                        }
                    }

                    if let Some(default_step) = default {
                        return self.execute_action(&default_step.action, context).await;
                    }

                    Ok(Value::Null)
                }

                // Invoke a registered Skill with resolved inputs.
                Action::Skill { skill_id, input } => {
                    let resolved_input = context.resolve_map(input)?;
                    self.action_registry.execute_skill(skill_id, resolved_input)
                        .await
                        .map_err(|e| ExecuteError::Action(e.to_string()))
                }

                // Invoke a Hand (external integration) with resolved params.
                Action::Hand { hand_id, hand_action, params } => {
                    let resolved_params = context.resolve_map(params)?;
                    self.action_registry.execute_hand(hand_id, hand_action, resolved_params)
                        .await
                        .map_err(|e| ExecuteError::Action(e.to_string()))
                }

                Action::ClassroomRender { input } => {
                    let data = context.resolve(input)?;
                    self.action_registry.render_classroom(&data)
                        .await
                        .map_err(|e| ExecuteError::Action(e.to_string()))
                }

                // File export: `output_dir` is an optional expression; a
                // resolution that is not a string silently falls back to the
                // registry's default directory (dir == None).
                Action::FileExport { formats, input, output_dir } => {
                    let data = context.resolve(input)?;
                    let dir = match output_dir {
                        Some(s) => {
                            let resolved = context.resolve(s)?;
                            resolved.as_str().map(|s| s.to_string())
                        }
                        None => None,
                    };

                    self.action_registry.export_files(formats, &data, dir.as_deref())
                        .await
                        .map_err(|e| ExecuteError::Action(e.to_string()))
                }

                // HTTP call: URL and body are expressions; headers pass
                // through unresolved.
                Action::HttpRequest { url, method, headers, body } => {
                    let resolved_url = context.resolve(url)?;
                    let url_str = resolved_url.as_str()
                        .ok_or_else(|| ExecuteError::Action("URL must be a string".to_string()))?;

                    let resolved_body = match body {
                        Some(b) => Some(context.resolve(b)?),
                        None => None,
                    };

                    self.action_registry.http_request(
                        url_str,
                        method,
                        headers,
                        resolved_body.as_ref(),
                    ).await
                    .map_err(|e| ExecuteError::Action(e.to_string()))
                }

                // Store a resolved value under `vars.<name>`; also returns it
                // so a SetVar step has a usable output.
                Action::SetVar { name, value } => {
                    let resolved = context.resolve(value)?;
                    context.set_var(name, resolved.clone());
                    Ok(resolved)
                }

                // Non-blocking sleep; yields Null.
                Action::Delay { ms } => {
                    tokio::time::sleep(tokio::time::Duration::from_millis(*ms)).await;
                    Ok(Value::Null)
                }
            }
        }.boxed()
    }
|
||||
|
||||
/// Execute parallel steps
|
||||
async fn execute_parallel(
|
||||
&self,
|
||||
step: &PipelineStep,
|
||||
items: Vec<Value>,
|
||||
max_workers: usize,
|
||||
) -> Result<Vec<Value>, ExecuteError> {
|
||||
let action_registry = self.action_registry.clone();
|
||||
let action = step.action.clone();
|
||||
|
||||
let results: Vec<Result<Value, ExecuteError>> = stream::iter(items.into_iter().enumerate())
|
||||
.map(|(index, item)| {
|
||||
let action_registry = action_registry.clone();
|
||||
let action = action.clone();
|
||||
|
||||
async move {
|
||||
// Create child context with loop variables
|
||||
let mut child_ctx = ExecutionContext::new(HashMap::new());
|
||||
child_ctx.set_loop_context(item, index);
|
||||
|
||||
// Execute the step's action
|
||||
let executor = PipelineExecutor::new(action_registry);
|
||||
executor.execute_action(&action, &mut child_ctx).await
|
||||
}
|
||||
})
|
||||
.buffer_unordered(max_workers)
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
let mut outputs = Vec::new();
|
||||
for result in results {
|
||||
outputs.push(result?);
|
||||
}
|
||||
|
||||
Ok(outputs)
|
||||
}
|
||||
|
||||
/// Evaluate a condition expression
|
||||
fn evaluate_condition(&self, condition: &str, context: &ExecutionContext) -> Result<bool, ExecuteError> {
|
||||
let resolved = context.resolve(condition)?;
|
||||
|
||||
// If resolved to a boolean, return it
|
||||
if let Value::Bool(b) = resolved {
|
||||
return Ok(b);
|
||||
}
|
||||
|
||||
// Check for comparison operators
|
||||
let condition = condition.trim();
|
||||
|
||||
// Equality check
|
||||
if let Some(eq_pos) = condition.find("==") {
|
||||
let left = condition[..eq_pos].trim();
|
||||
let right = condition[eq_pos + 2..].trim();
|
||||
|
||||
let left_val = context.resolve(left)?;
|
||||
let right_val = context.resolve(right)?;
|
||||
|
||||
return Ok(left_val == right_val);
|
||||
}
|
||||
|
||||
// Inequality check
|
||||
if let Some(ne_pos) = condition.find("!=") {
|
||||
let left = condition[..ne_pos].trim();
|
||||
let right = condition[ne_pos + 2..].trim();
|
||||
|
||||
let left_val = context.resolve(left)?;
|
||||
let right_val = context.resolve(right)?;
|
||||
|
||||
return Ok(left_val != right_val);
|
||||
}
|
||||
|
||||
// Default: treat as truthy check
|
||||
Ok(!resolved.is_null())
|
||||
}
|
||||
|
||||
/// Get run status
|
||||
pub async fn get_run(&self, run_id: &str) -> Option<PipelineRun> {
|
||||
self.runs.read().await.get(run_id).cloned()
|
||||
}
|
||||
|
||||
/// Get run progress
|
||||
pub async fn get_progress(&self, run_id: &str) -> Option<PipelineProgress> {
|
||||
let run = self.runs.read().await.get(run_id)?.clone();
|
||||
|
||||
let (current_step, percentage) = if run.step_results.is_empty() {
|
||||
("starting".to_string(), 0)
|
||||
} else if let Some(step) = &run.current_step {
|
||||
(step.clone(), 50)
|
||||
} else {
|
||||
("completed".to_string(), 100)
|
||||
};
|
||||
|
||||
Some(PipelineProgress {
|
||||
run_id: run.id,
|
||||
current_step,
|
||||
message: run.current_step.clone().unwrap_or_default(),
|
||||
percentage,
|
||||
status: run.status,
|
||||
})
|
||||
}
|
||||
|
||||
/// Cancel a run
|
||||
pub async fn cancel(&self, run_id: &str) {
|
||||
self.cancellations.write().await.insert(run_id.to_string(), true);
|
||||
}
|
||||
|
||||
/// List all runs
|
||||
pub async fn list_runs(&self) -> Vec<PipelineRun> {
|
||||
self.runs.read().await.values().cloned().collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // Boolean literals must round-trip through evaluate_condition.
    #[test]
    fn test_evaluate_condition_bool() {
        let registry = Arc::new(ActionRegistry::new());
        let executor = PipelineExecutor::new(registry);
        let ctx = ExecutionContext::new(HashMap::new());

        assert!(executor.evaluate_condition("true", &ctx).unwrap());
        assert!(!executor.evaluate_condition("false", &ctx).unwrap());
    }

    // Equality against a single-quoted string literal: the quoted side must
    // compare equal to the raw input value "video".
    #[test]
    fn test_evaluate_condition_equality() {
        let registry = Arc::new(ActionRegistry::new());
        let executor = PipelineExecutor::new(registry);
        let ctx = ExecutionContext::new(
            vec![("type".to_string(), json!("video"))]
                .into_iter()
                .collect()
        );

        assert!(executor.evaluate_condition("${inputs.type} == 'video'", &ctx).unwrap());
        assert!(!executor.evaluate_condition("${inputs.type} == 'text'", &ctx).unwrap());
    }
}
|
||||
56
crates/zclaw-pipeline/src/lib.rs
Normal file
56
crates/zclaw-pipeline/src/lib.rs
Normal file
@@ -0,0 +1,56 @@
|
||||
//! ZCLAW Pipeline Engine
|
||||
//!
|
||||
//! Declarative pipeline system for multi-step automation workflows.
|
||||
//! Pipelines orchestrate Skills and Hands to accomplish complex tasks.
|
||||
//!
|
||||
//! # Architecture
|
||||
//!
|
||||
//! ```text
|
||||
//! Pipeline YAML → Parser → Pipeline struct → Executor → Output
|
||||
//! ↓
|
||||
//! ExecutionContext (state)
|
||||
//! ```
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! ```yaml
|
||||
//! apiVersion: zclaw/v1
|
||||
//! kind: Pipeline
|
||||
//! metadata:
|
||||
//! name: classroom-generator
|
||||
//! displayName: 互动课堂生成器
|
||||
//! category: education
|
||||
//! spec:
|
||||
//! inputs:
|
||||
//! - name: topic
|
||||
//! type: string
|
||||
//! required: true
|
||||
//! steps:
|
||||
//! - id: parse
|
||||
//! action: llm.generate
|
||||
//! template: skills/classroom/parse.md
|
||||
//! output: parsed
|
||||
//! - id: render
|
||||
//! action: classroom.render
|
||||
//! input: ${steps.parse.output}
|
||||
//! output: result
|
||||
//! outputs:
|
||||
//! classroom_id: ${steps.render.output.id}
|
||||
//! ```
|
||||
|
||||
pub mod types;
|
||||
pub mod parser;
|
||||
pub mod state;
|
||||
pub mod executor;
|
||||
pub mod actions;
|
||||
|
||||
pub use types::*;
|
||||
pub use parser::*;
|
||||
pub use state::*;
|
||||
pub use executor::*;
|
||||
pub use actions::ActionRegistry;
|
||||
|
||||
/// Convenience function to parse pipeline YAML
///
/// Thin wrapper over [`parser::PipelineParser::parse`], which validates the
/// declared API version and kind plus step/input uniqueness in addition to
/// YAML syntax.
///
/// # Errors
/// Returns [`parser::ParseError`] for YAML syntax errors or any failed
/// structural validation.
pub fn parse_pipeline_yaml(yaml: &str) -> Result<Pipeline, parser::ParseError> {
    parser::PipelineParser::parse(yaml)
}
|
||||
211
crates/zclaw-pipeline/src/parser.rs
Normal file
211
crates/zclaw-pipeline/src/parser.rs
Normal file
@@ -0,0 +1,211 @@
|
||||
//! Pipeline DSL Parser
|
||||
//!
|
||||
//! Parses YAML pipeline definitions into Pipeline structs.
|
||||
|
||||
use std::path::Path;
|
||||
use serde_yaml;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::types::{Pipeline, API_VERSION};
|
||||
|
||||
/// Parser errors
|
||||
#[derive(Debug, Error)]
|
||||
pub enum ParseError {
|
||||
#[error("IO error: {0}")]
|
||||
Io(#[from] std::io::Error),
|
||||
|
||||
#[error("YAML parse error: {0}")]
|
||||
Yaml(#[from] serde_yaml::Error),
|
||||
|
||||
#[error("Invalid API version: expected '{expected}', got '{actual}'")]
|
||||
InvalidVersion { expected: String, actual: String },
|
||||
|
||||
#[error("Invalid kind: expected 'Pipeline', got '{0}'")]
|
||||
InvalidKind(String),
|
||||
|
||||
#[error("Missing required field: {0}")]
|
||||
MissingField(String),
|
||||
|
||||
#[error("Invalid action type: {0}")]
|
||||
InvalidAction(String),
|
||||
|
||||
#[error("Validation error: {0}")]
|
||||
Validation(String),
|
||||
}
|
||||
|
||||
/// Pipeline parser
///
/// Stateless namespace for parsing and validating pipeline definitions.
pub struct PipelineParser;

impl PipelineParser {
    /// Parse a pipeline from YAML string
    ///
    /// Beyond YAML deserialization this validates, in order:
    /// the declared API version equals [`API_VERSION`]; `kind` is
    /// `"Pipeline"`; `metadata.name` is non-empty; at least one step exists;
    /// step ids are unique; input names are unique.
    pub fn parse(yaml: &str) -> Result<Pipeline, ParseError> {
        let pipeline: Pipeline = serde_yaml::from_str(yaml)?;

        // Validate API version
        if pipeline.api_version != API_VERSION {
            return Err(ParseError::InvalidVersion {
                expected: API_VERSION.to_string(),
                actual: pipeline.api_version.clone(),
            });
        }

        // Validate kind
        if pipeline.kind != "Pipeline" {
            return Err(ParseError::InvalidKind(pipeline.kind.clone()));
        }

        // Validate required fields
        if pipeline.metadata.name.is_empty() {
            return Err(ParseError::MissingField("metadata.name".to_string()));
        }

        if pipeline.spec.steps.is_empty() {
            return Err(ParseError::Validation("Pipeline must have at least one step".to_string()));
        }

        // Validate step IDs are unique (HashSet::insert returns false on dup)
        let mut seen_ids = std::collections::HashSet::new();
        for step in &pipeline.spec.steps {
            if !seen_ids.insert(&step.id) {
                return Err(ParseError::Validation(
                    format!("Duplicate step ID: {}", step.id)
                ));
            }
        }

        // Validate input names are unique
        let mut seen_inputs = std::collections::HashSet::new();
        for input in &pipeline.spec.inputs {
            if !seen_inputs.insert(&input.name) {
                return Err(ParseError::Validation(
                    format!("Duplicate input name: {}", input.name)
                ));
            }
        }

        Ok(pipeline)
    }

    /// Parse a pipeline from file
    pub fn parse_file(path: &Path) -> Result<Pipeline, ParseError> {
        let content = std::fs::read_to_string(path)?;
        Self::parse(&content)
    }

    /// Parse and validate all pipelines in a directory
    ///
    /// A missing directory yields an empty list. Files that fail to parse are
    /// logged (warn) and skipped rather than aborting the whole scan — one
    /// broken template does not disable the rest.
    pub fn parse_directory(dir: &Path) -> Result<Vec<(String, Pipeline)>, ParseError> {
        let mut pipelines = Vec::new();

        if !dir.exists() {
            return Ok(pipelines);
        }

        for entry in std::fs::read_dir(dir)? {
            let entry = entry?;
            let path = entry.path();

            // Only .yaml / .yml files are considered templates.
            if path.extension().map(|e| e == "yaml" || e == "yml").unwrap_or(false) {
                match Self::parse_file(&path) {
                    Ok(pipeline) => {
                        // Key each template by its file stem so callers can
                        // look pipelines up by name.
                        let filename = path.file_stem()
                            .map(|s| s.to_string_lossy().to_string())
                            .unwrap_or_default();
                        pipelines.push((filename, pipeline));
                    }
                    Err(e) => {
                        tracing::warn!("Failed to parse pipeline {:?}: {}", path, e);
                    }
                }
            }
        }

        Ok(pipelines)
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Minimal well-formed pipeline: one step, camelCase top-level keys.
    #[test]
    fn test_parse_valid_pipeline() {
        let yaml = r#"
apiVersion: zclaw/v1
kind: Pipeline
metadata:
  name: test-pipeline
spec:
  steps:
    - id: step1
      action:
        type: llm_generate
        template: "test"
"#;
        let pipeline = PipelineParser::parse(yaml).unwrap();
        assert_eq!(pipeline.metadata.name, "test-pipeline");
    }

    // Wrong apiVersion must be rejected before any other validation.
    #[test]
    fn test_parse_invalid_version() {
        let yaml = r#"
apiVersion: invalid/v1
kind: Pipeline
metadata:
  name: test
spec:
  steps: []
"#;
        let result = PipelineParser::parse(yaml);
        assert!(matches!(result, Err(ParseError::InvalidVersion { .. })));
    }

    // kind must be exactly "Pipeline".
    #[test]
    fn test_parse_invalid_kind() {
        let yaml = r#"
apiVersion: zclaw/v1
kind: NotPipeline
metadata:
  name: test
spec:
  steps: []
"#;
        let result = PipelineParser::parse(yaml);
        assert!(matches!(result, Err(ParseError::InvalidKind(_))));
    }

    // A pipeline with zero steps is a validation error.
    #[test]
    fn test_parse_empty_steps() {
        let yaml = r#"
apiVersion: zclaw/v1
kind: Pipeline
metadata:
  name: test
spec:
  steps: []
"#;
        let result = PipelineParser::parse(yaml);
        assert!(matches!(result, Err(ParseError::Validation(_))));
    }

    // Repeated step ids are a validation error.
    #[test]
    fn test_parse_duplicate_step_ids() {
        let yaml = r#"
apiVersion: zclaw/v1
kind: Pipeline
metadata:
  name: test
spec:
  steps:
    - id: step1
      action:
        type: llm_generate
        template: "test"
    - id: step1
      action:
        type: llm_generate
        template: "test2"
"#;
        let result = PipelineParser::parse(yaml);
        assert!(matches!(result, Err(ParseError::Validation(_))));
    }
}
|
||||
377
crates/zclaw-pipeline/src/state.rs
Normal file
377
crates/zclaw-pipeline/src/state.rs
Normal file
@@ -0,0 +1,377 @@
|
||||
//! Pipeline execution state management
|
||||
//!
|
||||
//! Manages state during pipeline execution, including:
|
||||
//! - Input parameters
|
||||
//! - Step outputs
|
||||
//! - Loop variables (item, index)
|
||||
//! - Custom variables
|
||||
|
||||
use std::collections::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use regex::Regex;
|
||||
|
||||
/// Execution context for a running pipeline
///
/// Holds all mutable state an executing pipeline can read through `${...}`
/// expressions. Cloning the context clones all state (including the compiled
/// regex), which is what parallel child contexts rely on being cheap enough.
#[derive(Debug, Clone)]
pub struct ExecutionContext {
    /// Pipeline input values, addressed as `${inputs.<name>}`
    inputs: HashMap<String, Value>,

    /// Step outputs (step_id -> output), addressed as `${steps.<id>...}`
    steps_output: HashMap<String, Value>,

    /// Custom variables (set by set_var action), addressed as `${vars.<name>}`
    variables: HashMap<String, Value>,

    /// Loop context (item, index for parallel/each); None outside a loop
    loop_context: Option<LoopContext>,

    /// Compiled matcher for `${...}` expressions (built once in `new`)
    expr_regex: Regex,
}
|
||||
|
||||
/// Loop context for parallel/each iterations
///
/// Forms a linked stack via `parent` so nested loops can shadow and then
/// restore the outer `item`/`index` (see `set_loop_context` /
/// `clear_loop_context`).
#[derive(Debug, Clone)]
pub struct LoopContext {
    /// Current item, addressed as `${item}` / `${item.<field>}`
    pub item: Value,
    /// Current zero-based index, addressed as `${index}`
    pub index: usize,
    /// Parent loop context (for nested loops)
    pub parent: Option<Box<LoopContext>>,
}
|
||||
|
||||
impl ExecutionContext {
|
||||
/// Create a new execution context with inputs
|
||||
pub fn new(inputs: HashMap<String, Value>) -> Self {
|
||||
Self {
|
||||
inputs,
|
||||
steps_output: HashMap::new(),
|
||||
variables: HashMap::new(),
|
||||
loop_context: None,
|
||||
expr_regex: Regex::new(r"\$\{([^}]+)\}").unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create from JSON value
|
||||
pub fn from_value(inputs: Value) -> Self {
|
||||
let inputs_map = if let Value::Object(obj) = inputs {
|
||||
obj.into_iter().collect()
|
||||
} else {
|
||||
HashMap::new()
|
||||
};
|
||||
Self::new(inputs_map)
|
||||
}
|
||||
|
||||
/// Get an input value
|
||||
pub fn get_input(&self, name: &str) -> Option<&Value> {
|
||||
self.inputs.get(name)
|
||||
}
|
||||
|
||||
/// Set a step output
|
||||
pub fn set_output(&mut self, step_id: &str, value: Value) {
|
||||
self.steps_output.insert(step_id.to_string(), value);
|
||||
}
|
||||
|
||||
/// Get a step output
|
||||
pub fn get_output(&self, step_id: &str) -> Option<&Value> {
|
||||
self.steps_output.get(step_id)
|
||||
}
|
||||
|
||||
/// Set a variable
|
||||
pub fn set_var(&mut self, name: &str, value: Value) {
|
||||
self.variables.insert(name.to_string(), value);
|
||||
}
|
||||
|
||||
/// Get a variable
|
||||
pub fn get_var(&self, name: &str) -> Option<&Value> {
|
||||
self.variables.get(name)
|
||||
}
|
||||
|
||||
/// Set loop context
|
||||
pub fn set_loop_context(&mut self, item: Value, index: usize) {
|
||||
self.loop_context = Some(LoopContext {
|
||||
item,
|
||||
index,
|
||||
parent: self.loop_context.take().map(Box::new),
|
||||
});
|
||||
}
|
||||
|
||||
/// Clear loop context
|
||||
pub fn clear_loop_context(&mut self) {
|
||||
if let Some(ctx) = self.loop_context.take() {
|
||||
self.loop_context = ctx.parent.map(|b| *b);
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve an expression to a value
|
||||
///
|
||||
/// Supported expressions:
|
||||
/// - `${inputs.topic}` - Input parameter
|
||||
/// - `${steps.step_id.output}` - Step output
|
||||
/// - `${steps.step_id.output.field}` - Nested field access
|
||||
/// - `${item}` - Current loop item
|
||||
/// - `${index}` - Current loop index
|
||||
/// - `${var.name}` - Custom variable
|
||||
pub fn resolve(&self, expr: &str) -> Result<Value, StateError> {
|
||||
// If not an expression, return as-is
|
||||
if !expr.contains("${") {
|
||||
return Ok(Value::String(expr.to_string()));
|
||||
}
|
||||
|
||||
// Replace all expressions
|
||||
let result = self.expr_regex.replace_all(expr, |caps: ®ex::Captures| {
|
||||
let path = &caps[1];
|
||||
match self.resolve_path(path) {
|
||||
Ok(value) => value_to_string(&value),
|
||||
Err(_) => caps[0].to_string(), // Keep original if not found
|
||||
}
|
||||
});
|
||||
|
||||
// If the result is a valid JSON value, parse it
|
||||
if result.starts_with('{') || result.starts_with('[') || result.starts_with('"') {
|
||||
if let Ok(value) = serde_json::from_str(&result) {
|
||||
return Ok(value);
|
||||
}
|
||||
}
|
||||
|
||||
// If the entire string was an expression, try to return the actual value
|
||||
if expr.starts_with("${") && expr.ends_with("}") {
|
||||
let path = &expr[2..expr.len()-1];
|
||||
return self.resolve_path(path);
|
||||
}
|
||||
|
||||
Ok(Value::String(result.to_string()))
|
||||
}
|
||||
|
||||
/// Resolve a path like "inputs.topic" or "steps.step1.output.field"
|
||||
fn resolve_path(&self, path: &str) -> Result<Value, StateError> {
|
||||
let parts: Vec<&str> = path.split('.').collect();
|
||||
if parts.is_empty() {
|
||||
return Err(StateError::InvalidPath(path.to_string()));
|
||||
}
|
||||
|
||||
let first = parts[0];
|
||||
let rest = &parts[1..];
|
||||
|
||||
match first {
|
||||
"inputs" => self.resolve_from_map(&self.inputs, rest, path),
|
||||
"steps" => self.resolve_from_map(&self.steps_output, rest, path),
|
||||
"vars" | "var" => self.resolve_from_map(&self.variables, rest, path),
|
||||
"item" => {
|
||||
if let Some(ctx) = &self.loop_context {
|
||||
if rest.is_empty() {
|
||||
Ok(ctx.item.clone())
|
||||
} else {
|
||||
self.resolve_from_value(&ctx.item, rest, path)
|
||||
}
|
||||
} else {
|
||||
Err(StateError::VariableNotFound("item".to_string()))
|
||||
}
|
||||
}
|
||||
"index" => {
|
||||
if let Some(ctx) = &self.loop_context {
|
||||
Ok(Value::Number(ctx.index.into()))
|
||||
} else {
|
||||
Err(StateError::VariableNotFound("index".to_string()))
|
||||
}
|
||||
}
|
||||
_ => Err(StateError::InvalidPath(path.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a path from a map
|
||||
fn resolve_from_map(
|
||||
&self,
|
||||
map: &HashMap<String, Value>,
|
||||
path_parts: &[&str],
|
||||
full_path: &str,
|
||||
) -> Result<Value, StateError> {
|
||||
if path_parts.is_empty() {
|
||||
return Err(StateError::InvalidPath(full_path.to_string()));
|
||||
}
|
||||
|
||||
let key = path_parts[0];
|
||||
let value = map.get(key)
|
||||
.ok_or_else(|| StateError::VariableNotFound(key.to_string()))?;
|
||||
|
||||
if path_parts.len() == 1 {
|
||||
Ok(value.clone())
|
||||
} else {
|
||||
self.resolve_from_value(value, &path_parts[1..], full_path)
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a path from a value (nested access)
|
||||
fn resolve_from_value(
|
||||
&self,
|
||||
value: &Value,
|
||||
path_parts: &[&str],
|
||||
full_path: &str,
|
||||
) -> Result<Value, StateError> {
|
||||
let mut current = value;
|
||||
|
||||
for part in path_parts {
|
||||
current = match current {
|
||||
Value::Object(map) => map.get(*part)
|
||||
.ok_or_else(|| StateError::FieldNotFound(part.to_string()))?,
|
||||
Value::Array(arr) => {
|
||||
// Try to parse as index
|
||||
if let Ok(idx) = part.parse::<usize>() {
|
||||
arr.get(idx)
|
||||
.ok_or_else(|| StateError::IndexOutOfBounds(idx))?
|
||||
} else {
|
||||
return Err(StateError::InvalidPath(full_path.to_string()));
|
||||
}
|
||||
}
|
||||
_ => return Err(StateError::InvalidPath(full_path.to_string())),
|
||||
};
|
||||
}
|
||||
|
||||
Ok(current.clone())
|
||||
}
|
||||
|
||||
/// Resolve multiple expressions in a map
|
||||
pub fn resolve_map(&self, input: &HashMap<String, String>) -> Result<HashMap<String, Value>, StateError> {
|
||||
let mut result = HashMap::new();
|
||||
for (key, expr) in input {
|
||||
let value = self.resolve(expr)?;
|
||||
result.insert(key.clone(), value);
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Get all step outputs
|
||||
pub fn all_outputs(&self) -> &HashMap<String, Value> {
|
||||
&self.steps_output
|
||||
}
|
||||
|
||||
/// Extract final outputs from the context
|
||||
pub fn extract_outputs(&self, output_defs: &HashMap<String, String>) -> Result<HashMap<String, Value>, StateError> {
|
||||
let mut outputs = HashMap::new();
|
||||
for (name, expr) in output_defs {
|
||||
let value = self.resolve(expr)?;
|
||||
outputs.insert(name.clone(), value);
|
||||
}
|
||||
Ok(outputs)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a value to string for template replacement
|
||||
fn value_to_string(value: &Value) -> String {
|
||||
match value {
|
||||
Value::String(s) => s.clone(),
|
||||
Value::Number(n) => n.to_string(),
|
||||
Value::Bool(b) => b.to_string(),
|
||||
Value::Null => String::new(),
|
||||
Value::Array(arr) => {
|
||||
serde_json::to_string(arr).unwrap_or_default()
|
||||
}
|
||||
Value::Object(obj) => {
|
||||
serde_json::to_string(obj).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// State errors
///
/// Failures produced while resolving `${...}` expressions against an
/// [`ExecutionContext`].
#[derive(Debug, thiserror::Error)]
pub enum StateError {
    /// The path's root segment is unknown, or a segment cannot be applied
    /// to the value it reached (e.g. field access on a number).
    #[error("Invalid path: {0}")]
    InvalidPath(String),

    /// The root-level name (input, step id, variable, item/index) is absent.
    #[error("Variable not found: {0}")]
    VariableNotFound(String),

    /// An object traversal segment matched no key.
    #[error("Field not found: {0}")]
    FieldNotFound(String),

    /// A numeric segment exceeded the array's length.
    #[error("Index out of bounds: {0}")]
    IndexOutOfBounds(usize),

    /// Reserved for type-mismatch failures.
    #[error("Type error: {0}")]
    TypeError(String),
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // A whole-string expression returns the typed input value.
    #[test]
    fn test_resolve_input() {
        let ctx = ExecutionContext::new(
            vec![("topic".to_string(), json!("physics"))]
                .into_iter()
                .collect()
        );

        let result = ctx.resolve("${inputs.topic}").unwrap();
        assert_eq!(result, json!("physics"));
    }

    // `${steps.id.output.field}` addresses fields of the stored step value.
    #[test]
    fn test_resolve_step_output() {
        let mut ctx = ExecutionContext::new(HashMap::new());
        ctx.set_output("step1", json!({"result": "hello", "count": 42}));

        let result = ctx.resolve("${steps.step1.output.result}").unwrap();
        assert_eq!(result, json!("hello"));

        let count = ctx.resolve("${steps.step1.output.count}").unwrap();
        assert_eq!(count, json!(42));
    }

    // Loop variables: `${item}`, `${index}`, and nested `${item.field}`.
    #[test]
    fn test_resolve_loop_context() {
        let mut ctx = ExecutionContext::new(HashMap::new());
        ctx.set_loop_context(json!({"name": "item1"}), 2);

        let item = ctx.resolve("${item}").unwrap();
        assert_eq!(item, json!({"name": "item1"}));

        let index = ctx.resolve("${index}").unwrap();
        assert_eq!(index, json!(2));

        let name = ctx.resolve("${item.name}").unwrap();
        assert_eq!(name, json!("item1"));
    }

    // Numeric path segments index into arrays.
    #[test]
    fn test_resolve_array_access() {
        let mut ctx = ExecutionContext::new(HashMap::new());
        ctx.set_output("step1", json!({"items": ["a", "b", "c"]}));

        let result = ctx.resolve("${steps.step1.output.items.0}").unwrap();
        assert_eq!(result, json!("a"));

        let result = ctx.resolve("${steps.step1.output.items.2}").unwrap();
        assert_eq!(result, json!("c"));
    }

    // Expressions embedded in literal text interpolate into one string.
    #[test]
    fn test_resolve_mixed_string() {
        let ctx = ExecutionContext::new(
            vec![("name".to_string(), json!("World"))]
                .into_iter()
                .collect()
        );

        let result = ctx.resolve("Hello, ${inputs.name}!").unwrap();
        assert_eq!(result, json!("Hello, World!"));
    }

    // Output definitions map final names to step-output expressions.
    #[test]
    fn test_extract_outputs() {
        let mut ctx = ExecutionContext::new(HashMap::new());
        ctx.set_output("render", json!({"id": "classroom-123", "url": "/preview"}));

        let outputs = vec![
            ("classroom_id".to_string(), "${steps.render.output.id}".to_string()),
            ("preview_url".to_string(), "${steps.render.output.url}".to_string()),
        ].into_iter().collect();

        let result = ctx.extract_outputs(&outputs).unwrap();
        assert_eq!(result.get("classroom_id").unwrap(), &json!("classroom-123"));
        assert_eq!(result.get("preview_url").unwrap(), &json!("/preview"));
    }
}
|
||||
496
crates/zclaw-pipeline/src/types.rs
Normal file
496
crates/zclaw-pipeline/src/types.rs
Normal file
@@ -0,0 +1,496 @@
|
||||
//! Pipeline type definitions
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Pipeline version identifier
///
/// Incoming documents must declare exactly this value; the parser rejects
/// anything else with `ParseError::InvalidVersion`.
pub const API_VERSION: &str = "zclaw/v1";
|
||||
|
||||
/// A complete pipeline definition
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Pipeline {
|
||||
/// API version (must be "zclaw/v1")
|
||||
pub api_version: String,
|
||||
|
||||
/// Resource kind (must be "Pipeline")
|
||||
pub kind: String,
|
||||
|
||||
/// Pipeline metadata
|
||||
pub metadata: PipelineMetadata,
|
||||
|
||||
/// Pipeline specification
|
||||
pub spec: PipelineSpec,
|
||||
}
|
||||
|
||||
/// Pipeline metadata
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PipelineMetadata {
|
||||
/// Unique identifier (e.g., "classroom-generator")
|
||||
pub name: String,
|
||||
|
||||
/// Human-readable display name
|
||||
#[serde(default)]
|
||||
pub display_name: Option<String>,
|
||||
|
||||
/// Category for grouping (e.g., "education", "marketing")
|
||||
#[serde(default)]
|
||||
pub category: Option<String>,
|
||||
|
||||
/// Description of what this pipeline does
|
||||
#[serde(default)]
|
||||
pub description: Option<String>,
|
||||
|
||||
/// Tags for search/filtering
|
||||
#[serde(default)]
|
||||
pub tags: Vec<String>,
|
||||
|
||||
/// Icon (emoji or icon name)
|
||||
#[serde(default)]
|
||||
pub icon: Option<String>,
|
||||
|
||||
/// Author information
|
||||
#[serde(default)]
|
||||
pub author: Option<String>,
|
||||
|
||||
/// Version string
|
||||
#[serde(default = "default_version")]
|
||||
pub version: String,
|
||||
}
|
||||
|
||||
/// Serde default for [`PipelineMetadata::version`].
fn default_version() -> String {
    String::from("1.0.0")
}
|
||||
|
||||
/// Pipeline specification
///
/// NOTE(review): unlike the DSL's top-level keys, these field names appear
/// to use snake_case YAML keys (`on_error`, `timeout_secs`) — confirm the
/// intended key convention against the shipped templates.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineSpec {
    /// Input parameters definition (may be empty)
    #[serde(default)]
    pub inputs: Vec<PipelineInput>,

    /// Execution steps (the parser requires at least one)
    pub steps: Vec<PipelineStep>,

    /// Output mappings: output name -> `${...}` expression
    #[serde(default)]
    pub outputs: HashMap<String, String>,

    /// Error handling strategy (defaults via `ErrorStrategy::default()`)
    #[serde(default)]
    pub on_error: ErrorStrategy,

    /// Timeout in seconds (0 = no timeout)
    #[serde(default)]
    pub timeout_secs: u64,

    /// Maximum parallel workers (defaults to 4)
    #[serde(default = "default_max_workers")]
    pub max_workers: usize,
}
|
||||
|
||||
/// Serde default for `PipelineSpec::max_workers`.
fn default_max_workers() -> usize {
    4
}
|
||||
|
||||
/// Input parameter definition
///
/// Describes one pipeline input: its type, whether it is required, and the
/// UI hints (label, placeholder, options) a frontend can use to render a form.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineInput {
    /// Parameter name (key used in the execution context's input map)
    pub name: String,

    /// Parameter type; YAML key is `type` (renamed to avoid the keyword)
    #[serde(rename = "type", default)]
    pub input_type: InputType,

    /// Is this parameter required? (defaults to false)
    #[serde(default)]
    pub required: bool,

    /// Human-readable label
    #[serde(default)]
    pub label: Option<String>,

    /// Placeholder text for input
    #[serde(default)]
    pub placeholder: Option<String>,

    /// Default value (any JSON value)
    #[serde(default)]
    pub default: Option<serde_json::Value>,

    /// Options for select/multi-select types; empty for other types
    #[serde(default)]
    pub options: Vec<String>,

    /// Validation rules; None means no constraints
    #[serde(default)]
    pub validation: Option<ValidationRules>,
}
|
||||
|
||||
/// Input parameter type.
///
/// Serialized in snake_case, so YAML values are e.g. `multi_select`.
/// Defaults to `string` when the `type` key is omitted.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum InputType {
    #[default]
    String,
    Number,
    Boolean,
    Select,
    MultiSelect,
    File,
    Text, // Multi-line text
}
|
||||
|
||||
/// Validation rules for input
///
/// All fields are optional; an absent field imposes no constraint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidationRules {
    /// Minimum length (for strings)
    #[serde(default)]
    pub min_length: Option<usize>,

    /// Maximum length (for strings)
    #[serde(default)]
    pub max_length: Option<usize>,

    /// Minimum value (for numbers)
    #[serde(default)]
    pub min: Option<f64>,

    /// Maximum value (for numbers)
    #[serde(default)]
    pub max: Option<f64>,

    /// Regex pattern (for strings)
    #[serde(default)]
    pub pattern: Option<String>,
}
|
||||
|
||||
/// A single step in the pipeline
///
/// Pairs an [`Action`] with metadata controlling when and how it runs
/// (conditional execution, retries, and a per-step timeout).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineStep {
    /// Unique step identifier (referenced in expressions as
    /// `${steps.<id>.output}`)
    pub id: String,

    /// Action to perform
    pub action: Action,

    /// Human-readable description
    #[serde(default)]
    pub description: Option<String>,

    /// Condition for execution (expression). NOTE(review): presumably
    /// the step is skipped when this evaluates falsy — confirm against
    /// the executor.
    #[serde(default)]
    pub when: Option<String>,

    /// Retry configuration
    #[serde(default)]
    pub retry: Option<RetryConfig>,

    /// Timeout in seconds (overrides pipeline timeout)
    #[serde(default)]
    pub timeout_secs: Option<u64>,
}
|
||||
|
||||
/// Action types
///
/// Internally tagged for serde: the YAML selects a variant with a
/// `type:` key (e.g. `type: llm_generate`, as in the test below) and
/// the remaining keys become that variant's fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Action {
    /// LLM generation
    LlmGenerate {
        /// Template path or inline prompt
        template: String,

        /// Input variables (expressions)
        #[serde(default)]
        input: HashMap<String, String>,

        /// Model override
        #[serde(default)]
        model: Option<String>,

        /// Temperature override
        #[serde(default)]
        temperature: Option<f32>,

        /// Max tokens override
        #[serde(default)]
        max_tokens: Option<u32>,

        /// JSON mode (structured output); defaults to false
        #[serde(default)]
        json_mode: bool,
    },

    /// Parallel execution
    Parallel {
        /// Expression to iterate over
        each: String,

        /// Step to execute for each item (boxed because `Action` and
        /// `PipelineStep` are mutually recursive)
        step: Box<PipelineStep>,

        /// Maximum concurrent workers; NOTE(review): presumably falls
        /// back to the pipeline-level default when `None` — confirm.
        #[serde(default)]
        max_workers: Option<usize>,
    },

    /// Sequential execution (sub-pipeline)
    Sequential {
        /// Steps to execute in sequence
        steps: Vec<PipelineStep>,
    },

    /// Condition branching
    Condition {
        /// Condition expression
        condition: String,

        /// Branches; NOTE(review): presumably evaluated in order with
        /// the first matching `when` winning — confirm in executor.
        branches: Vec<ConditionBranch>,

        /// Default branch taken when no branch matches (optional)
        #[serde(default)]
        default: Option<Box<PipelineStep>>,
    },

    /// Skill execution
    Skill {
        /// Skill ID
        skill_id: String,

        /// Input variables (expressions)
        #[serde(default)]
        input: HashMap<String, String>,
    },

    /// Hand execution
    Hand {
        /// Hand ID
        hand_id: String,

        /// Action to perform on the hand
        hand_action: String,

        /// Input parameters
        #[serde(default)]
        params: HashMap<String, String>,
    },

    /// Classroom render
    ClassroomRender {
        /// Input data (expression)
        input: String,
    },

    /// File export
    FileExport {
        /// Formats to export
        formats: Vec<ExportFormat>,

        /// Input data (expression)
        input: String,

        /// Output directory (optional)
        #[serde(default)]
        output_dir: Option<String>,
    },

    /// HTTP request
    HttpRequest {
        /// URL (can be an expression)
        url: String,

        /// HTTP method; defaults to "GET" when omitted
        #[serde(default = "default_http_method")]
        method: String,

        /// Headers
        #[serde(default)]
        headers: HashMap<String, String>,

        /// Request body (expression)
        #[serde(default)]
        body: Option<String>,
    },

    /// Set variable
    SetVar {
        /// Variable name
        name: String,

        /// Value (expression)
        value: String,
    },

    /// Delay/sleep
    Delay {
        /// Duration in milliseconds
        ms: u64,
    },
}
|
||||
|
||||
/// Default HTTP method for `Action::HttpRequest` when the YAML gives none.
fn default_http_method() -> String {
    String::from("GET")
}
|
||||
|
||||
/// Export format
///
/// Serialized in lowercase (e.g. `pptx`, `markdown`), matching the
/// values accepted in pipeline YAML.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum ExportFormat {
    Pptx,
    Html,
    Pdf,
    Markdown,
    Json,
}
|
||||
|
||||
/// Condition branch
///
/// One arm of an `Action::Condition`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConditionBranch {
    /// Condition expression (e.g., "${inputs.type} == 'video'")
    pub when: String,

    /// Step to execute when `when` matches
    pub then: PipelineStep,
}
|
||||
|
||||
/// Retry configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetryConfig {
    /// Maximum retry attempts; defaults to 3 when omitted
    #[serde(default = "default_max_retries")]
    pub max_attempts: usize,

    /// Delay between retries in milliseconds; defaults to 0
    /// (i.e. immediate retry) when omitted
    #[serde(default)]
    pub delay_ms: u64,

    /// Exponential backoff multiplier; NOTE(review): presumably applied
    /// to `delay_ms` between attempts — confirm exact semantics in the
    /// executor.
    #[serde(default)]
    pub backoff: Option<f32>,
}
|
||||
|
||||
/// Default `max_attempts` for `RetryConfig` when the YAML gives none.
fn default_max_retries() -> usize {
    const DEFAULT_ATTEMPTS: usize = 3;
    DEFAULT_ATTEMPTS
}
|
||||
|
||||
/// Error handling strategy
///
/// Serialized in snake_case; defaults to `Stop` when omitted.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum ErrorStrategy {
    /// Stop on first error (the default)
    #[default]
    Stop,
    /// Continue with next step
    Continue,
    /// Retry the step
    Retry,
}
|
||||
|
||||
/// Pipeline run status
///
/// Serialized in lowercase; the `Display` impl for this enum emits the
/// same lowercase strings, keeping serialized state and log output
/// consistent.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum RunStatus {
    Pending,
    Running,
    Completed,
    Failed,
    Cancelled,
}
|
||||
|
||||
impl std::fmt::Display for RunStatus {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
RunStatus::Pending => write!(f, "pending"),
|
||||
RunStatus::Running => write!(f, "running"),
|
||||
RunStatus::Completed => write!(f, "completed"),
|
||||
RunStatus::Failed => write!(f, "failed"),
|
||||
RunStatus::Cancelled => write!(f, "cancelled"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Pipeline run information
///
/// Snapshot of one execution of a pipeline: its inputs, per-step
/// results, final outputs, status, and timing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineRun {
    /// Unique run ID
    pub id: String,

    /// Pipeline ID this run belongs to
    pub pipeline_id: String,

    /// Run status
    pub status: RunStatus,

    /// Input values supplied for this run
    pub inputs: serde_json::Value,

    /// Current step (if running)
    pub current_step: Option<String>,

    /// Step results, keyed by step ID
    pub step_results: HashMap<String, serde_json::Value>,

    /// Final outputs; NOTE(review): presumably `None` until the run
    /// completes — confirm against the executor.
    pub outputs: Option<serde_json::Value>,

    /// Error message (if failed)
    pub error: Option<String>,

    /// Start time (UTC)
    pub started_at: chrono::DateTime<chrono::Utc>,

    /// End time (UTC); `None` while the run is still in progress
    pub ended_at: Option<chrono::DateTime<chrono::Utc>>,
}
|
||||
|
||||
/// Progress information for a running pipeline
///
/// A progress snapshot for one in-flight run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineProgress {
    /// Run ID
    pub run_id: String,

    /// Current step ID
    pub current_step: String,

    /// Step description / human-readable progress message
    pub message: String,

    /// Percentage complete (0-100)
    pub percentage: u8,

    /// Status of the run
    pub status: RunStatus,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Deserializes a minimal pipeline YAML document and checks the
    /// parsed metadata plus the sizes of the input/step collections.
    /// NOTE(review): the YAML indentation below was reconstructed from
    /// a whitespace-stripped rendering — verify it matches the
    /// committed file.
    #[test]
    fn test_pipeline_deserialize() {
        let yaml = r#"
apiVersion: zclaw/v1
kind: Pipeline
metadata:
  name: test-pipeline
  display_name: Test Pipeline
  category: test
spec:
  inputs:
    - name: topic
      type: string
      required: true
  steps:
    - id: step1
      action:
        type: llm_generate
        template: "Hello {{topic}}"
  outputs:
    result: ${steps.step1.output}
"#;
        let pipeline: Pipeline = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(pipeline.metadata.name, "test-pipeline");
        assert_eq!(pipeline.spec.inputs.len(), 1);
        assert_eq!(pipeline.spec.steps.len(), 1);
    }
}
|
||||
Reference in New Issue
Block a user