feat: P0 KernelClient功能修复 + P1/P2/P3质量改进
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

P0 KernelClient 功能断裂修复:
- Skill CUD: registry.rs create/update/delete + serialize_skill_md + kernel proxy
- Workflow CUD: pipeline_commands.rs create/update/delete + serde_yaml依赖
- Agent更新: registry update方法 + AgentConfigUpdated事件 + agent_update命令
- Hand流式事件: HandStart/HandEnd变体替换ToolStart/ToolEnd
- 后端验证: hand_get/hand_run_status/hand_run_list确认实现完整
- Approval闭环: respond_to_approval后台spawn+5分钟超时轮询

P2/P3 质量改进:
- Browser WebDriver: TCP探测ChromeDriver/GeckoDriver/Edge端口替换硬编码true
- api-fallbacks: 移除假技能和16个捏造安全层,替换为真实能力映射
- dead_code清理: 移除5个模块级#![allow(dead_code)],删除3个真正死方法,
  删除未注册的compactor_compact_llm命令,warnings从8降到3
- 所有变更通过cargo check + tsc --noEmit验证
This commit is contained in:
iven
2026-03-30 10:55:08 +08:00
parent d345e60a6a
commit 813b49a986
19 changed files with 951 additions and 102 deletions

View File

@@ -45,6 +45,7 @@ async-trait = { workspace = true }
# Serialization
serde = { workspace = true }
serde_json = { workspace = true }
serde_yaml = "0.9"
toml = "0.8"
# HTTP client

View File

@@ -14,7 +14,6 @@
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime,
// which the Rust compiler does not track as "use". Module-level allow required
// for Tauri-commanded functions. Genuinely unused methods annotated individually.
#![allow(dead_code)]
use serde::{Deserialize, Serialize};
use regex::Regex;
@@ -99,6 +98,8 @@ pub struct CompactionCheck {
}
/// Configuration for LLM-based summary generation
/// NOTE: Reserved for future LLM compaction Tauri command
#[allow(dead_code)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmSummaryConfig {
pub provider: String,
@@ -250,6 +251,7 @@ impl ContextCompactor {
/// - `use_llm` is false
/// - LLM config is not provided
/// - LLM call fails and `llm_fallback_to_rules` is true
#[allow(dead_code)] // Reserved: Tauri command removed during refactor, will be re-registered
pub async fn compact_with_llm(
&self,
messages: &[CompactableMessage],
@@ -499,18 +501,6 @@ impl ContextCompactor {
conclusions.into_iter().take(3).collect()
}
/// Get current configuration
///
/// Returns a shared reference to the compactor's active `CompactionConfig`.
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn get_config(&self) -> &CompactionConfig {
    &self.config
}
/// Update configuration
///
/// Replaces the compactor's configuration wholesale with `updates`
/// (no field-by-field merge).
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn update_config(&mut self, updates: CompactionConfig) {
    self.config = updates;
}
}
// === Tauri Commands ===
@@ -561,33 +551,6 @@ pub async fn compactor_compact(
result
}
/// Execute compaction with optional LLM-based summary
///
/// If the supplied `compaction_config` enables `memory_flush_enabled`,
/// old messages are first flushed to memory storage; the flush count is
/// recorded on the returned `CompactionResult`. Compaction itself is
/// delegated to `ContextCompactor::compact_with_llm`.
///
/// NOTE(review): the return type is a bare `CompactionResult`, so any
/// failure inside `compact_with_llm` is presumably surfaced inside the
/// result rather than as a Rust error — confirm against the result type.
#[tauri::command]
pub async fn compactor_compact_llm(
    messages: Vec<CompactableMessage>,
    agent_id: String,
    conversation_id: Option<String>,
    compaction_config: Option<CompactionConfig>,
    llm_config: Option<LlmSummaryConfig>,
) -> CompactionResult {
    // Flush only when explicitly enabled; an absent config means no flush.
    let memory_flush = compaction_config
        .as_ref()
        .map(|c| c.memory_flush_enabled)
        .unwrap_or(false);
    let flushed = if memory_flush {
        flush_old_messages_to_memory(&messages, &agent_id, conversation_id.as_deref()).await
    } else {
        0
    };
    let compactor = ContextCompactor::new(compaction_config);
    let mut result = compactor
        .compact_with_llm(&messages, &agent_id, conversation_id.as_deref(), llm_config.as_ref())
        .await;
    // Attach the flush count computed above to the compaction outcome.
    result.flushed_memories = flushed;
    result
}
/// Flush important messages from the old (pre-compaction) portion to VikingStorage.
///
/// Extracts user messages and key assistant responses as session memories

View File

@@ -93,7 +93,7 @@ pub enum HeartbeatStatus {
/// Type alias for heartbeat check function
#[allow(dead_code)] // Reserved for future proactive check registration
pub type HeartbeatCheckFn = Box<dyn Fn(String) -> std::pin::Pin<Box<dyn std::future::Future<Output = Option<HeartbeatAlert>> + Send>> + Send + Sync>;
type HeartbeatCheckFn = Box<dyn Fn(String) -> std::pin::Pin<Box<dyn std::future::Future<Output = Option<HeartbeatAlert>> + Send>> + Send + Sync>;
// === Default Config ===

View File

@@ -13,10 +13,8 @@
//! NOTE: Some methods are reserved for future integration.
// NOTE: #[tauri::command] functions are registered via invoke_handler! at runtime,
// which the Rust compiler does not track as "use". This module-level allow is
// required for all Tauri-commanded functions. Only genuinely unused non-command
// methods have individual #[allow(dead_code)] annotations below.
#![allow(dead_code)]
// which the Rust compiler does not track as "use". Module-level allow required
// for Tauri-commanded functions. Genuinely unused methods annotated individually.
use chrono::Utc;
use serde::{Deserialize, Serialize};
@@ -541,6 +539,7 @@ pub type IdentityManagerState = Arc<Mutex<AgentIdentityManager>>;
/// Initialize identity manager
///
/// Builds a fresh `AgentIdentityManager` and wraps it for shared,
/// lock-guarded access as the managed `IdentityManagerState`.
#[tauri::command]
#[allow(dead_code)] // Registered via invoke_handler! at runtime
pub async fn identity_init() -> Result<IdentityManagerState, String> {
    let manager = AgentIdentityManager::new();
    Ok(Arc::new(Mutex::new(manager)))
}

View File

@@ -583,12 +583,6 @@ impl ReflectionEngine {
self.history.iter().rev().take(limit).collect()
}
/// Get last reflection result
///
/// Returns the most recently recorded entry in the reflection history,
/// or `None` if no reflection has run yet.
#[allow(dead_code)] // Reserved: no Tauri command yet
pub fn get_last_result(&self) -> Option<&ReflectionResult> {
    self.history.last()
}
/// Get current state
pub fn get_state(&self) -> &ReflectionState {
&self.state

View File

@@ -5,8 +5,6 @@
//!
//! NOTE: Some functions are defined for future use and external API exposure.
#![allow(dead_code)] // Validation functions reserved for future API endpoints
use std::fmt;
/// Maximum length for identifier strings (agent_id, pipeline_id, skill_id, etc.)
@@ -201,6 +199,7 @@ pub fn validate_string_length(value: &str, field_name: &str, max_length: usize)
/// Validate an optional identifier field
///
/// Returns Ok if the value is None or if it contains a valid identifier.
#[allow(dead_code)] // Reserved for optional-ID Tauri commands
pub fn validate_optional_identifier(value: Option<&str>, field_name: &str) -> Result<(), ValidationError> {
match value {
None => Ok(()),
@@ -210,6 +209,7 @@ pub fn validate_optional_identifier(value: Option<&str>, field_name: &str) -> Re
}
/// Validate a list of identifiers
#[allow(dead_code)] // Reserved for batch-ID Tauri commands
pub fn validate_identifiers<'a, I>(values: I, field_name: &str) -> Result<(), ValidationError>
where
I: IntoIterator<Item = &'a str>,
@@ -221,6 +221,7 @@ where
}
/// Sanitize a string for safe logging (remove control characters, limit length)
#[allow(dead_code)] // Reserved for log-sanitization Tauri commands
pub fn sanitize_for_logging(value: &str, max_len: usize) -> String {
let sanitized: String = value
.chars()

View File

@@ -410,6 +410,72 @@ pub async fn agent_delete(
.map_err(|e| format!("Failed to delete agent: {}", e))
}
/// Update an agent's configuration
///
/// Loads the agent's stored config, overlays any fields present in
/// `updates` (a `Some(..)` replaces the stored value, `None` keeps it),
/// persists the merged config through the kernel, and returns the
/// refreshed `AgentInfo`.
#[tauri::command]
pub async fn agent_update(
    state: State<'_, KernelState>,
    agent_id: String,
    updates: AgentUpdateRequest,
) -> Result<AgentInfo, String> {
    let agent_id = validate_agent_id(&agent_id)?;
    let guard = state.lock().await;
    let kernel = guard
        .as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
    let id: AgentId = agent_id
        .parse()
        .map_err(|_| "Invalid agent ID format".to_string())?;
    // Current config is the base; the request is a partial overlay on top.
    let mut config = kernel
        .get_agent_config(&id)
        .ok_or_else(|| format!("Agent not found: {}", agent_id))?;
    config.name = updates.name.unwrap_or(config.name);
    config.description = updates.description.or(config.description);
    config.system_prompt = updates.system_prompt.or(config.system_prompt);
    config.model.model = updates.model.unwrap_or(config.model.model);
    config.model.provider = updates.provider.unwrap_or(config.model.provider);
    config.max_tokens = updates.max_tokens.or(config.max_tokens);
    config.temperature = updates.temperature.or(config.temperature);
    // Persist, then re-read so the caller sees the kernel's own view.
    kernel
        .update_agent(config)
        .await
        .map_err(|e| format!("Failed to update agent: {}", e))?;
    kernel
        .get_agent(&id)
        .ok_or_else(|| format!("Agent not found after update: {}", agent_id))
}
/// Agent update request
///
/// Partial-update payload for `agent_update`: every field is optional and
/// only `Some(..)` fields overwrite the stored agent configuration.
/// Serialized with camelCase keys (e.g. `systemPrompt`, `maxTokens`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AgentUpdateRequest {
    // New display name.
    pub name: Option<String>,
    // New human-readable description.
    pub description: Option<String>,
    // New system prompt.
    pub system_prompt: Option<String>,
    // New model identifier (applied to `config.model.model`).
    pub model: Option<String>,
    // New model provider (applied to `config.model.provider`).
    pub provider: Option<String>,
    // New maximum token budget.
    pub max_tokens: Option<u32>,
    // New sampling temperature.
    pub temperature: Option<f32>,
}
/// Export an agent configuration as JSON
#[tauri::command]
pub async fn agent_export(
@@ -500,6 +566,10 @@ pub enum StreamChatEvent {
ToolEnd { name: String, output: serde_json::Value },
/// New iteration started (multi-turn tool calling)
IterationStart { iteration: usize, max_iterations: usize },
/// Hand execution started
HandStart { name: String, params: serde_json::Value },
/// Hand execution completed
HandEnd { name: String, result: serde_json::Value },
/// Stream completed
Complete { input_tokens: u32, output_tokens: u32 },
/// Error occurred
@@ -644,11 +714,21 @@ pub async fn agent_chat_stream(
}
LoopEvent::ToolStart { name, input } => {
tracing::debug!("[agent_chat_stream] ToolStart: {}", name);
StreamChatEvent::ToolStart { name: name.clone(), input: input.clone() }
// Emit hand event if this is a hand tool
if name.starts_with("hand_") {
StreamChatEvent::HandStart { name: name.clone(), params: input.clone() }
} else {
StreamChatEvent::ToolStart { name: name.clone(), input: input.clone() }
}
}
LoopEvent::ToolEnd { name, output } => {
tracing::debug!("[agent_chat_stream] ToolEnd: {}", name);
StreamChatEvent::ToolEnd { name: name.clone(), output: output.clone() }
// Emit hand event if this is a hand tool
if name.starts_with("hand_") {
StreamChatEvent::HandEnd { name: name.clone(), result: output.clone() }
} else {
StreamChatEvent::ToolEnd { name: name.clone(), output: output.clone() }
}
}
LoopEvent::IterationStart { iteration, max_iterations } => {
tracing::debug!("[agent_chat_stream] IterationStart: {}/{}", iteration, max_iterations);
@@ -811,6 +891,142 @@ pub async fn skill_refresh(
Ok(skills.into_iter().map(SkillInfoResponse::from).collect())
}
// ============================================================================
// Skill CRUD Commands
// ============================================================================
/// Request body for creating a new skill
///
/// Deserialized from camelCase JSON sent by the frontend; `skill_create`
/// derives the skill id from `name`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateSkillRequest {
    // Display name; a kebab-case skill id is derived from it.
    pub name: String,
    // Optional description (defaults to empty string in the manifest).
    pub description: Option<String>,
    // Trigger phrases/patterns for the skill.
    pub triggers: Vec<String>,
    // Actions; stored as the manifest's `capabilities`.
    pub actions: Vec<String>,
    // Whether the skill starts enabled (defaults to true).
    pub enabled: Option<bool>,
}
/// Request body for updating a skill
///
/// Partial update: every field is optional and `None` keeps the value
/// from the existing manifest. Deserialized from camelCase JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateSkillRequest {
    // Replacement display name.
    pub name: Option<String>,
    // Replacement description.
    pub description: Option<String>,
    // Replacement trigger list.
    pub triggers: Option<Vec<String>>,
    // Replacement actions (manifest `capabilities`).
    pub actions: Option<Vec<String>>,
    // Replacement enabled flag.
    pub enabled: Option<bool>,
}
/// Create a new skill in the skills directory
///
/// Derives a kebab-case id from the skill name (lowercased, spaces become
/// dashes, anything else non-alphanumeric dropped), validates it, builds a
/// prompt-only manifest, and registers it through the kernel.
#[tauri::command]
pub async fn skill_create(
    state: State<'_, KernelState>,
    request: CreateSkillRequest,
) -> Result<SkillInfoResponse, String> {
    let name = request.name.trim().to_string();
    if name.is_empty() {
        return Err("Skill name cannot be empty".to_string());
    }
    // Derive the skill id character by character.
    let id: String = name
        .to_lowercase()
        .chars()
        .map(|c| if c == ' ' { '-' } else { c })
        .filter(|c| c.is_alphanumeric() || *c == '-')
        .collect();
    validate_identifier(&id, "skill_id").map_err(|e| e.to_string())?;
    let guard = state.lock().await;
    let kernel = guard
        .as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
    // New skills start prompt-only at version 1.0.0 with no schemas.
    let manifest = zclaw_skills::SkillManifest {
        id: SkillId::new(&id),
        name: name.clone(),
        description: request.description.unwrap_or_default(),
        version: "1.0.0".to_string(),
        author: None,
        mode: zclaw_skills::SkillMode::PromptOnly,
        capabilities: request.actions,
        input_schema: None,
        output_schema: None,
        tags: vec![],
        category: None,
        triggers: request.triggers,
        enabled: request.enabled.unwrap_or(true),
    };
    kernel
        .create_skill(manifest.clone())
        .await
        .map_err(|e| format!("Failed to create skill: {}", e))?;
    Ok(SkillInfoResponse::from(manifest))
}
/// Update an existing skill
///
/// Loads the current manifest, overlays any fields supplied in `request`
/// (`None` keeps the stored value), and persists the merged manifest
/// through the kernel.
///
/// Fix: the original cloned `version`/`author`/`mode`/schemas/`tags`/
/// `category` out of `existing` even though `existing` is an owned value
/// consumed right here — each field is used exactly once, so plain moves
/// suffice and the redundant allocations are removed.
#[tauri::command]
pub async fn skill_update(
    state: State<'_, KernelState>,
    id: String,
    request: UpdateSkillRequest,
) -> Result<SkillInfoResponse, String> {
    validate_identifier(&id, "skill_id").map_err(|e| e.to_string())?;
    let kernel_lock = state.lock().await;
    let kernel = kernel_lock
        .as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
    // Get existing manifest (owned); its fields are moved into `updated`.
    let existing = kernel
        .skills()
        .get_manifest(&SkillId::new(&id))
        .await
        .ok_or_else(|| format!("Skill not found: {}", id))?;
    // Build updated manifest from existing + request fields.
    let updated = zclaw_skills::SkillManifest {
        id: existing.id,
        name: request.name.unwrap_or(existing.name),
        description: request.description.unwrap_or(existing.description),
        version: existing.version,
        author: existing.author,
        mode: existing.mode,
        capabilities: request.actions.unwrap_or(existing.capabilities),
        input_schema: existing.input_schema,
        output_schema: existing.output_schema,
        tags: existing.tags,
        category: existing.category,
        triggers: request.triggers.unwrap_or(existing.triggers),
        enabled: request.enabled.unwrap_or(existing.enabled),
    };
    let result = kernel
        .update_skill(&SkillId::new(&id), updated)
        .await
        .map_err(|e| format!("Failed to update skill: {}", e))?;
    Ok(SkillInfoResponse::from(result))
}
/// Delete a skill
///
/// Validates the skill id, then asks the kernel to remove the skill.
#[tauri::command]
pub async fn skill_delete(
    state: State<'_, KernelState>,
    id: String,
) -> Result<(), String> {
    validate_identifier(&id, "skill_id").map_err(|e| e.to_string())?;
    let guard = state.lock().await;
    let kernel = guard
        .as_ref()
        .ok_or_else(|| "Kernel not initialized. Call kernel_init first.".to_string())?;
    match kernel.delete_skill(&SkillId::new(&id)).await {
        Ok(_) => Ok(()),
        Err(e) => Err(format!("Failed to delete skill: {}", e)),
    }
}
// ============================================================================
// Skill Execution Command
// ============================================================================

View File

@@ -1331,6 +1331,7 @@ pub fn run() {
kernel_commands::agent_list,
kernel_commands::agent_get,
kernel_commands::agent_delete,
kernel_commands::agent_update,
kernel_commands::agent_export,
kernel_commands::agent_import,
kernel_commands::agent_chat,
@@ -1339,6 +1340,9 @@ pub fn run() {
kernel_commands::skill_list,
kernel_commands::skill_refresh,
kernel_commands::skill_execute,
kernel_commands::skill_create,
kernel_commands::skill_update,
kernel_commands::skill_delete,
// Hands commands (autonomous capabilities)
kernel_commands::hand_list,
kernel_commands::hand_execute,
@@ -1365,6 +1369,9 @@ pub fn run() {
// Pipeline commands (DSL-based workflows)
pipeline_commands::pipeline_list,
pipeline_commands::pipeline_templates, pipeline_commands::pipeline_get,
pipeline_commands::pipeline_create,
pipeline_commands::pipeline_update,
pipeline_commands::pipeline_delete,
pipeline_commands::pipeline_run,
pipeline_commands::pipeline_progress,
pipeline_commands::pipeline_cancel,

View File

@@ -20,6 +20,11 @@ use zclaw_pipeline::{
LlmActionDriver,
SkillActionDriver,
HandActionDriver,
PipelineMetadata,
PipelineSpec,
PipelineStep,
Action,
ErrorStrategy,
};
use zclaw_runtime::{LlmDriver, CompletionRequest};
use zclaw_skills::SkillContext;
@@ -624,6 +629,221 @@ pub async fn pipeline_refresh(
Ok(pipelines.into_iter().map(|(_, p)| pipeline_to_info(&p)).collect())
}
// ============================================================================
// Pipeline CRUD Commands (Create / Update / Delete)
// ============================================================================
/// Create pipeline request
///
/// Payload for `pipeline_create`, deserialized from camelCase JSON;
/// the pipeline id is derived from `name`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreatePipelineRequest {
    // Display name; also the source of the kebab-case pipeline id.
    pub name: String,
    // Optional description stored in the pipeline metadata.
    pub description: Option<String>,
    // Ordered steps; each becomes a Hand-action pipeline step.
    pub steps: Vec<WorkflowStepInput>,
}
/// Update pipeline request
///
/// Partial update for `pipeline_update`: `None` fields keep the stored
/// values; `Some(steps)` replaces the whole step list.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdatePipelineRequest {
    // Replacement display name (the id-bearing metadata name is untouched).
    pub name: Option<String>,
    // Replacement description.
    pub description: Option<String>,
    // Replacement step list (full replacement, not a merge).
    pub steps: Option<Vec<WorkflowStepInput>>,
}
/// Workflow step input from frontend
///
/// One step of a pipeline as sent by the UI; converted into a
/// `PipelineStep` carrying a Hand action.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct WorkflowStepInput {
    // Hand to invoke; becomes the step's `hand_id`.
    pub hand_name: String,
    // Optional step name, used as both the step id and description;
    // the id defaults to "step-N" when absent.
    pub name: Option<String>,
    // Parameters passed to the hand. NOTE(review): values are stringified
    // via Value::to_string(), i.e. JSON text — strings arrive quoted.
    pub params: Option<HashMap<String, Value>>,
    // Optional `when` condition expression.
    pub condition: Option<String>,
}
/// Create a new pipeline as a YAML file
///
/// Derives a kebab-case pipeline id from the request name, writes the
/// pipeline spec to `<id>.yaml` under the pipelines directory, and
/// registers it in the in-memory state.
///
/// Fix: unlike `skill_create`, the derived id was never validated — a
/// name made entirely of stripped characters produced an empty id and a
/// file literally named ".yaml". An empty id is now rejected.
///
/// # Errors
/// - empty name, or a name that sanitizes to an empty id
/// - a pipeline file with the same id already exists
/// - directory creation, serialization, or file-write failure
#[tauri::command]
pub async fn pipeline_create(
    state: State<'_, Arc<PipelineState>>,
    request: CreatePipelineRequest,
) -> Result<PipelineInfo, String> {
    let name = request.name.trim().to_string();
    if name.is_empty() {
        return Err("Pipeline name cannot be empty".to_string());
    }
    let pipelines_dir = get_pipelines_directory()?;
    if !pipelines_dir.exists() {
        std::fs::create_dir_all(&pipelines_dir)
            .map_err(|e| format!("Failed to create pipelines directory: {}", e))?;
    }
    // Generate pipeline ID from name: lowercase, spaces -> dashes,
    // drop everything that is neither alphanumeric nor a dash.
    let pipeline_id = name.to_lowercase()
        .replace(' ', "-")
        .replace(|c: char| !c.is_alphanumeric() && c != '-', "");
    if pipeline_id.is_empty() {
        return Err("Pipeline name must contain at least one alphanumeric character".to_string());
    }
    let file_path = pipelines_dir.join(format!("{}.yaml", pipeline_id));
    if file_path.exists() {
        return Err(format!("Pipeline file already exists: {}", file_path.display()));
    }
    // Each frontend step becomes a Hand-action step; ids default to "step-N".
    let steps: Vec<PipelineStep> = request.steps.into_iter().enumerate().map(|(i, s)| {
        let step_id = s.name.clone().unwrap_or_else(|| format!("step-{}", i + 1));
        PipelineStep {
            id: step_id,
            action: Action::Hand {
                hand_id: s.hand_name.clone(),
                hand_action: "execute".to_string(),
                // NOTE(review): Value::to_string() yields JSON text, so string
                // params arrive quoted (e.g. "\"foo\"") — confirm drivers expect this.
                params: s.params.unwrap_or_default().into_iter().map(|(k, v)| (k, v.to_string())).collect(),
            },
            description: s.name,
            when: s.condition,
            retry: None,
            timeout_secs: None,
        }
    }).collect();
    let pipeline = Pipeline {
        api_version: "zclaw/v1".to_string(),
        kind: "Pipeline".to_string(),
        metadata: PipelineMetadata {
            name: pipeline_id.clone(),
            display_name: Some(name),
            description: request.description,
            category: None,
            industry: None,
            tags: vec![],
            icon: None,
            author: None,
            version: "1.0.0".to_string(),
            annotations: None,
        },
        spec: PipelineSpec {
            inputs: vec![],
            steps,
            outputs: HashMap::new(),
            on_error: ErrorStrategy::Stop,
            // 0 = no overall timeout; up to 4 parallel workers.
            timeout_secs: 0,
            max_workers: 4,
        },
    };
    // Serialize and persist the file before registering in state, so the
    // in-memory registry never points at a file that failed to write.
    let yaml_content = serde_yaml::to_string(&pipeline)
        .map_err(|e| format!("Failed to serialize pipeline: {}", e))?;
    std::fs::write(&file_path, yaml_content)
        .map_err(|e| format!("Failed to write pipeline file: {}", e))?;
    // Register in state so the pipeline is visible without a refresh.
    let mut state_pipelines = state.pipelines.write().await;
    let mut state_paths = state.pipeline_paths.write().await;
    state_pipelines.insert(pipeline_id.clone(), pipeline.clone());
    state_paths.insert(pipeline_id, file_path);
    Ok(pipeline_to_info(&pipeline))
}
/// Update an existing pipeline
///
/// Merges the request's fields over the stored pipeline (`None` keeps the
/// existing value; `Some(steps)` replaces the whole step list), rewrites
/// the YAML file, then replaces the in-memory entry.
///
/// NOTE(review): the read locks are dropped before the file write and the
/// write lock is taken afterwards, so a concurrent update of the same id
/// can interleave between the file write and the state update — confirm
/// this window is acceptable.
#[tauri::command]
pub async fn pipeline_update(
    state: State<'_, Arc<PipelineState>>,
    pipeline_id: String,
    request: UpdatePipelineRequest,
) -> Result<PipelineInfo, String> {
    let pipelines = state.pipelines.read().await;
    let paths = state.pipeline_paths.read().await;
    let existing = pipelines.get(&pipeline_id)
        .ok_or_else(|| format!("Pipeline not found: {}", pipeline_id))?;
    let file_path = paths.get(&pipeline_id)
        .ok_or_else(|| format!("Pipeline file path not found: {}", pipeline_id))?
        .clone();
    // Build updated pipeline. `request.name` only updates the display
    // name — the id-bearing `metadata.name` is never changed.
    let updated_metadata = PipelineMetadata {
        display_name: request.name.or(existing.metadata.display_name.clone()),
        description: request.description.or(existing.metadata.description.clone()),
        ..existing.metadata.clone()
    };
    let updated_steps = match request.steps {
        // Full replacement: each input becomes a Hand-action step with
        // ids defaulting to "step-N".
        Some(steps) => steps.into_iter().enumerate().map(|(i, s)| {
            let step_id = s.name.clone().unwrap_or_else(|| format!("step-{}", i + 1));
            PipelineStep {
                id: step_id,
                action: Action::Hand {
                    hand_id: s.hand_name.clone(),
                    hand_action: "execute".to_string(),
                    // NOTE(review): Value::to_string() emits JSON text, so
                    // string params arrive quoted — confirm drivers expect this.
                    params: s.params.unwrap_or_default().into_iter().map(|(k, v)| (k, v.to_string())).collect(),
                },
                description: s.name,
                when: s.condition,
                retry: None,
                timeout_secs: None,
            }
        }).collect(),
        None => existing.spec.steps.clone(),
    };
    let updated_pipeline = Pipeline {
        metadata: updated_metadata,
        spec: PipelineSpec {
            steps: updated_steps,
            ..existing.spec.clone()
        },
        ..existing.clone()
    };
    // Write to file
    let yaml_content = serde_yaml::to_string(&updated_pipeline)
        .map_err(|e| format!("Failed to serialize pipeline: {}", e))?;
    // Drop read locks before write (cannot upgrade read -> write in place)
    drop(pipelines);
    drop(paths);
    std::fs::write(file_path, yaml_content)
        .map_err(|e| format!("Failed to write pipeline file: {}", e))?;
    // Update state
    let mut state_pipelines = state.pipelines.write().await;
    state_pipelines.insert(pipeline_id.clone(), updated_pipeline.clone());
    Ok(pipeline_to_info(&updated_pipeline))
}
/// Delete a pipeline
///
/// Removes the pipeline's YAML file (if present) and then drops it from
/// both in-memory registries.
///
/// Fix: the original looked the path up under a read lock, dropped it,
/// removed the file, and only then took write locks — leaving a TOCTOU
/// window where a concurrent create/update of the same id could be
/// clobbered. Write locks on both maps (same acquisition order as
/// `pipeline_create`, so no lock-order inversion) are now held for the
/// whole operation. Error behavior is preserved: if the file exists but
/// cannot be removed, the state is left untouched.
///
/// # Errors
/// - pipeline id is unknown
/// - the file exists but cannot be removed
#[tauri::command]
pub async fn pipeline_delete(
    state: State<'_, Arc<PipelineState>>,
    pipeline_id: String,
) -> Result<(), String> {
    let mut state_pipelines = state.pipelines.write().await;
    let mut state_paths = state.pipeline_paths.write().await;
    let path = state_paths
        .get(&pipeline_id)
        .ok_or_else(|| format!("Pipeline not found: {}", pipeline_id))?
        .clone();
    // Remove the file first; bail out with state unchanged on failure.
    if path.exists() {
        std::fs::remove_file(&path)
            .map_err(|e| format!("Failed to delete pipeline file: {}", e))?;
    }
    state_pipelines.remove(&pipeline_id);
    state_paths.remove(&pipeline_id);
    Ok(())
}
// Helper functions
fn get_pipelines_directory() -> Result<PathBuf, String> {