feat(kernel): add multi-skill orchestration bridge + true parallel execution
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

- Kernel orchestration bridge: execute_orchestration, auto_compose_skills,
  validate_orchestration methods on Kernel struct
- True parallel execution: replace sequential for-loop with tokio::JoinSet
  for concurrent node execution within parallel groups
- Tauri commands: orchestration_execute (auto-compose or pre-defined graph),
  orchestration_validate (dry-run validation)
- Full type conversions: OrchestrationRequest/Response with camelCase serde

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-04-04 09:18:26 +08:00
parent 1399054547
commit f4ed1b33e0
6 changed files with 422 additions and 37 deletions

View File

@@ -13,6 +13,7 @@ pub mod chat;
pub mod hand;
pub mod lifecycle;
pub mod mcp;
pub mod orchestration;
pub mod scheduled_task;
pub mod skill;
pub mod trigger;

View File

@@ -0,0 +1,273 @@
//! Skill orchestration Tauri commands
//!
//! Exposes the multi-skill orchestration engine to the frontend,
//! enabling parallel/serial/conditional execution of skill graphs.
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tauri::State;
use super::KernelState;
// ============================================================================
// Request/Response Types
// ============================================================================
/// Orchestration execution request
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OrchestrationRequest {
/// Skill IDs to orchestrate (for auto-compose)
pub skill_ids: Vec<String>,
/// Optional pre-defined graph (overrides auto-compose)
pub graph: Option<OrchestrationGraphInput>,
/// Input data for the orchestration
pub inputs: HashMap<String, Value>,
/// Agent context
pub agent_id: String,
pub session_id: String,
}
/// Graph input for pre-defined orchestration
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OrchestrationGraphInput {
pub id: String,
pub name: String,
#[serde(default)]
pub description: String,
pub nodes: Vec<NodeInput>,
#[serde(default)]
pub edges: Vec<EdgeInput>,
#[serde(default)]
pub on_error: Option<String>,
#[serde(default)]
pub timeout_secs: Option<u64>,
}
/// Node input for orchestration graph
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NodeInput {
pub id: String,
pub skill_id: String,
#[serde(default)]
pub description: String,
#[serde(default)]
pub input_mappings: HashMap<String, String>,
#[serde(default)]
pub when: Option<String>,
#[serde(default)]
pub skip_on_error: Option<bool>,
#[serde(default)]
pub timeout_secs: Option<u64>,
}
/// Edge input for orchestration graph
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EdgeInput {
pub from_node: String,
pub to_node: String,
#[serde(default)]
pub field_mapping: HashMap<String, String>,
#[serde(default)]
pub condition: Option<String>,
}
/// Orchestration execution result for frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OrchestrationResponse {
pub success: bool,
pub output: Value,
pub node_count: usize,
pub completed_nodes: usize,
pub failed_nodes: usize,
pub duration_ms: u64,
pub error: Option<String>,
pub node_results: HashMap<String, NodeResultResponse>,
}
/// Node execution result for frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NodeResultResponse {
pub success: bool,
pub output: Value,
pub error: Option<String>,
pub duration_ms: u64,
pub retries: u32,
pub skipped: bool,
}
/// Validation response for frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ValidationResponse {
pub valid: bool,
pub errors: Vec<ValidationErrorInfo>,
}
/// Validation error info
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ValidationErrorInfo {
pub code: String,
pub message: String,
pub location: Option<String>,
}
// ============================================================================
// Type Conversions
// ============================================================================
impl TryFrom<OrchestrationGraphInput> for zclaw_skills::orchestration::SkillGraph {
    type Error = String;

    /// Convert the wire-format graph into the engine's `SkillGraph`.
    ///
    /// Currently infallible in practice (always returns `Ok`), but kept as
    /// `TryFrom` so future validation can surface errors through `?`.
    fn try_from(input: OrchestrationGraphInput) -> Result<Self, Self::Error> {
        use zclaw_skills::orchestration::{ErrorStrategy, SkillEdge, SkillGraph, SkillNode};

        // Retries are not exposed through the wire format, so they stay
        // disabled on every converted node.
        let mut nodes = Vec::with_capacity(input.nodes.len());
        for n in input.nodes {
            nodes.push(SkillNode {
                id: n.id,
                skill_id: zclaw_types::SkillId::new(&n.skill_id),
                description: n.description,
                input_mappings: n.input_mappings,
                retry: None,
                timeout_secs: n.timeout_secs,
                when: n.when,
                skip_on_error: n.skip_on_error.unwrap_or(false),
            });
        }

        let mut edges = Vec::with_capacity(input.edges.len());
        for e in input.edges {
            edges.push(SkillEdge {
                from_node: e.from_node,
                to_node: e.to_node,
                field_mapping: e.field_mapping,
                condition: e.condition,
            });
        }

        // Unknown or absent strategy strings fall back to Stop.
        let on_error = match input.on_error.as_deref() {
            Some("continue") => ErrorStrategy::Continue,
            Some("retry") => ErrorStrategy::Retry,
            _ => ErrorStrategy::Stop,
        };

        Ok(SkillGraph {
            id: input.id,
            name: input.name,
            description: input.description,
            nodes,
            edges,
            input_schema: None,
            output_mapping: HashMap::new(),
            on_error,
            // 300 s default when the request does not set an overall timeout.
            timeout_secs: input.timeout_secs.unwrap_or(300),
        })
    }
}
impl From<zclaw_skills::orchestration::OrchestrationResult> for OrchestrationResponse {
fn from(result: zclaw_skills::orchestration::OrchestrationResult) -> Self {
let completed = result.node_results.values().filter(|r| r.success).count();
let failed = result.node_results.values().filter(|r| !r.success).count();
let node_results = result.node_results.into_iter().map(|(id, nr)| {
(id, NodeResultResponse {
success: nr.success,
output: nr.output,
error: nr.error,
duration_ms: nr.duration_ms,
retries: nr.retries,
skipped: nr.skipped,
})
}).collect();
Self {
success: result.success,
output: result.output,
node_count: completed + failed,
completed_nodes: completed,
failed_nodes: failed,
duration_ms: result.duration_ms,
error: result.error,
node_results,
}
}
}
// ============================================================================
// Tauri Commands
// ============================================================================
/// Execute a skill orchestration
///
/// Either auto-composes a graph from skill_ids, or uses a pre-defined graph.
/// Executes with true parallel execution within each dependency level.
/// Execute a skill orchestration.
///
/// Either auto-composes a graph from `skill_ids`, or uses a pre-defined
/// graph when one is supplied (the graph wins). Executes with true
/// parallel execution within each dependency level.
///
/// # Errors
/// Returns a `String` error when the kernel is uninitialized, when neither
/// `skill_ids` nor `graph` is provided, or when composition/execution fails.
#[tauri::command]
pub async fn orchestration_execute(
    state: State<'_, KernelState>,
    request: OrchestrationRequest,
) -> Result<OrchestrationResponse, String> {
    let kernel_lock = state.lock().await;
    let kernel = kernel_lock
        .as_ref()
        .ok_or_else(|| "Kernel not initialized".to_string())?;

    // Prefer an explicitly supplied graph; otherwise auto-compose one from
    // the requested skill ids.
    let graph = match request.graph {
        Some(graph_input) => graph_input.try_into()?,
        None if request.skill_ids.is_empty() => {
            return Err("Must provide either skill_ids or graph".to_string());
        }
        None => {
            let skill_ids: Vec<zclaw_types::SkillId> = request
                .skill_ids
                .iter()
                .map(|s| zclaw_types::SkillId::new(s))
                .collect();
            kernel
                .auto_compose_skills(&skill_ids)
                .await
                .map_err(|e| format!("Auto-compose failed: {}", e))?
        }
    };

    // Build the skill context; network and file access are enabled and the
    // graph's overall timeout is reused for the context.
    let context = zclaw_skills::SkillContext {
        agent_id: request.agent_id,
        session_id: request.session_id,
        working_dir: None,
        env: HashMap::new(),
        timeout_secs: graph.timeout_secs,
        network_allowed: true,
        file_access_allowed: true,
        llm: None,
    };

    let result = kernel
        .execute_orchestration(&graph, request.inputs, &context)
        .await
        .map_err(|e| format!("Orchestration failed: {}", e))?;

    Ok(result.into())
}
/// Validate an orchestration graph without executing it
#[tauri::command]
pub async fn orchestration_validate(
state: State<'_, KernelState>,
graph: OrchestrationGraphInput,
) -> Result<ValidationResponse, String> {
let kernel_lock = state.lock().await;
let kernel = kernel_lock.as_ref()
.ok_or_else(|| "Kernel not initialized".to_string())?;
let skill_graph: zclaw_skills::orchestration::SkillGraph = graph.try_into()?;
let errors = kernel.validate_orchestration(&skill_graph).await;
let valid = errors.is_empty();
let error_infos = errors.into_iter().map(|e| ValidationErrorInfo {
code: e.code,
message: e.message,
location: e.location,
}).collect();
Ok(ValidationResponse { valid, errors: error_infos })
}