feat: Batch 5-9 — GrowthIntegration桥接、验证补全、死代码清理、Pipeline模板、Speech/Twitter真实实现
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

Batch 5 (P0): GrowthIntegration 接入 Tauri
- Kernel 新增 set_viking()/set_extraction_driver() 桥接 SqliteStorage
- 中间件链共享存储,MemoryExtractor 接入 LLM 驱动

Batch 6 (P1): 输入验证 + Heartbeat
- Relay 验证补全(stream 兼容检查、API key 格式校验)
- UUID 类型校验、SessionId 错误返回
- Heartbeat 默认开启 + 首次聊天自动初始化

Batch 7 (P2): 死代码清理
- zclaw-channels 整体移除(317 行)
- multi-agent 特性门控、admin 方法标注

Batch 8 (P2): Pipeline 模板
- PipelineMetadata 新增 annotations 字段
- pipeline_templates 命令 + 2 个示例模板
- fallback driver base_url 修复(doubao/qwen/deepseek 端点)

Batch 9 (P1): SpeechHand/TwitterHand 真实实现
- SpeechHand: tts_method 字段 + Browser TTS 前端集成 (Web Speech API)
- TwitterHand: 12 个 action 全部替换为 Twitter API v2 真实 HTTP 调用
- chatStore/useAutomationEvents 双路径 TTS 触发
This commit is contained in:
iven
2026-03-30 09:24:50 +08:00
parent 5595083b96
commit 13c0b18bbc
39 changed files with 1155 additions and 507 deletions

View File

@@ -36,7 +36,6 @@ ZCLAW/
│ ├── zclaw-kernel/ # L4: 核心协调 (注册, 调度, 事件, 工作流) │ ├── zclaw-kernel/ # L4: 核心协调 (注册, 调度, 事件, 工作流)
│ ├── zclaw-skills/ # 技能系统 (SKILL.md解析, 执行器) │ ├── zclaw-skills/ # 技能系统 (SKILL.md解析, 执行器)
│ ├── zclaw-hands/ # 自主能力 (Hand/Trigger 注册管理) │ ├── zclaw-hands/ # 自主能力 (Hand/Trigger 注册管理)
│ ├── zclaw-channels/ # 通道适配器 (仅 ConsoleChannel 测试适配器)
│ ├── zclaw-protocols/ # 协议支持 (MCP, A2A) │ ├── zclaw-protocols/ # 协议支持 (MCP, A2A)
│ └── zclaw-saas/ # SaaS 后端 (账号, 模型配置, 中转, 配置同步) │ └── zclaw-saas/ # SaaS 后端 (账号, 模型配置, 中转, 配置同步)
├── admin/ # Next.js 管理后台 ├── admin/ # Next.js 管理后台
@@ -87,7 +86,7 @@ zclaw-kernel (→ types, memory, runtime)
zclaw-saas (→ types, 独立运行于 8080 端口) zclaw-saas (→ types, 独立运行于 8080 端口)
desktop/src-tauri (→ kernel, skills, hands, channels, protocols) desktop/src-tauri (→ kernel, skills, hands, protocols)
``` ```
*** ***
@@ -199,10 +198,10 @@ ZCLAW 提供 11 个自主能力包:
| Predictor | 预测分析 | ❌ 已禁用 (enabled=false),无 Rust 实现 | | Predictor | 预测分析 | ❌ 已禁用 (enabled=false),无 Rust 实现 |
| Lead | 销售线索发现 | ❌ 已禁用 (enabled=false),无 Rust 实现 | | Lead | 销售线索发现 | ❌ 已禁用 (enabled=false),无 Rust 实现 |
| Clip | 视频处理 | ⚠️ 需 FFmpeg | | Clip | 视频处理 | ⚠️ 需 FFmpeg |
| Twitter | Twitter 自动化 | ⚠️ 需 API Key | | Twitter | Twitter 自动化 | ✅ 可用(12 个 API v2 真实调用,写操作需 OAuth 1.0a) |
| Whiteboard | 白板演示 | ✅ 可用(导出功能开发中,标注 demo) | | Whiteboard | 白板演示 | ✅ 可用(导出功能开发中,标注 demo) |
| Slideshow | 幻灯片生成 | ✅ 可用 | | Slideshow | 幻灯片生成 | ✅ 可用 |
| Speech | 语音合成 | ✅ 可用 | | Speech | 语音合成 | ✅ 可用(Browser TTS 前端集成完成) |
| Quiz | 测验生成 | ✅ 可用 | | Quiz | 测验生成 | ✅ 可用 |
**触发 Hand 时:** **触发 Hand 时:**

18
Cargo.lock generated
View File

@@ -8148,21 +8148,6 @@ dependencies = [
"zvariant", "zvariant",
] ]
[[package]]
name = "zclaw-channels"
version = "0.1.0"
dependencies = [
"async-trait",
"chrono",
"reqwest 0.12.28",
"serde",
"serde_json",
"thiserror 2.0.18",
"tokio",
"tracing",
"zclaw-types",
]
[[package]] [[package]]
name = "zclaw-growth" name = "zclaw-growth"
version = "0.1.0" version = "0.1.0"
@@ -8188,10 +8173,13 @@ name = "zclaw-hands"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"base64 0.22.1",
"chrono", "chrono",
"hmac",
"reqwest 0.12.28", "reqwest 0.12.28",
"serde", "serde",
"serde_json", "serde_json",
"sha1",
"thiserror 2.0.18", "thiserror 2.0.18",
"tokio", "tokio",
"tracing", "tracing",

View File

@@ -9,7 +9,6 @@ members = [
# ZCLAW Extension Crates # ZCLAW Extension Crates
"crates/zclaw-skills", "crates/zclaw-skills",
"crates/zclaw-hands", "crates/zclaw-hands",
"crates/zclaw-channels",
"crates/zclaw-protocols", "crates/zclaw-protocols",
"crates/zclaw-pipeline", "crates/zclaw-pipeline",
"crates/zclaw-growth", "crates/zclaw-growth",
@@ -118,7 +117,6 @@ zclaw-runtime = { path = "crates/zclaw-runtime" }
zclaw-kernel = { path = "crates/zclaw-kernel" } zclaw-kernel = { path = "crates/zclaw-kernel" }
zclaw-skills = { path = "crates/zclaw-skills" } zclaw-skills = { path = "crates/zclaw-skills" }
zclaw-hands = { path = "crates/zclaw-hands" } zclaw-hands = { path = "crates/zclaw-hands" }
zclaw-channels = { path = "crates/zclaw-channels" }
zclaw-protocols = { path = "crates/zclaw-protocols" } zclaw-protocols = { path = "crates/zclaw-protocols" }
zclaw-pipeline = { path = "crates/zclaw-pipeline" } zclaw-pipeline = { path = "crates/zclaw-pipeline" }
zclaw-growth = { path = "crates/zclaw-growth" } zclaw-growth = { path = "crates/zclaw-growth" }

View File

@@ -1,21 +0,0 @@
# Crate manifest for the (now removed) zclaw-channels adapter crate.
[package]
name = "zclaw-channels"
# Package metadata is inherited from the workspace root.
version.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
rust-version.workspace = true
description = "ZCLAW Channels - external platform adapters"
# All dependency versions are pinned by the workspace.
[dependencies]
zclaw-types = { workspace = true }
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
async-trait = { workspace = true }
reqwest = { workspace = true }
chrono = { workspace = true }

View File

@@ -1,71 +0,0 @@
//! Console channel adapter for testing
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::mpsc;
use zclaw_types::Result;
use crate::{Channel, ChannelConfig, ChannelStatus, IncomingMessage, OutgoingMessage};
/// Console channel adapter (for testing)
///
/// A stub `Channel` implementation that logs outgoing messages through
/// `tracing` instead of delivering them to a real platform. Useful for
/// exercising channel plumbing without external credentials.
pub struct ConsoleChannel {
/// Static configuration supplied at construction time.
config: ChannelConfig,
/// Current connection state; `RwLock` lets `status()` read concurrently
/// with `connect()`/`disconnect()` writes.
status: Arc<tokio::sync::RwLock<ChannelStatus>>,
}
impl ConsoleChannel {
/// Creates a console channel in the `Disconnected` state.
pub fn new(config: ChannelConfig) -> Self {
Self {
config,
status: Arc::new(tokio::sync::RwLock::new(ChannelStatus::Disconnected)),
}
}
}
#[async_trait]
impl Channel for ConsoleChannel {
/// Returns the configuration this channel was built with.
fn config(&self) -> &ChannelConfig {
&self.config
}
/// Marks the channel as connected; no real connection is established.
async fn connect(&self) -> Result<()> {
let mut status = self.status.write().await;
*status = ChannelStatus::Connected;
tracing::info!("Console channel connected");
Ok(())
}
/// Marks the channel as disconnected.
async fn disconnect(&self) -> Result<()> {
let mut status = self.status.write().await;
*status = ChannelStatus::Disconnected;
tracing::info!("Console channel disconnected");
Ok(())
}
/// Reports the current connection state.
async fn status(&self) -> ChannelStatus {
self.status.read().await.clone()
}
/// "Sends" a message by logging it, returning a synthetic message id
/// derived from the current Unix timestamp (not guaranteed unique within
/// one second).
async fn send(&self, message: OutgoingMessage) -> Result<String> {
// Print to console for testing
let msg_id = format!("console_{}", chrono::Utc::now().timestamp());
match &message.content {
crate::MessageContent::Text { text } => {
tracing::info!("[Console] To {}: {}", message.conversation_id, text);
}
_ => {
// Non-text content falls back to its Debug representation.
tracing::info!("[Console] To {}: {:?}", message.conversation_id, message.content);
}
}
Ok(msg_id)
}
/// Returns a receiver that never yields: the sender half is dropped when
/// this function returns, so the stream ends immediately.
async fn receive(&self) -> Result<mpsc::Receiver<IncomingMessage>> {
let (_tx, rx) = mpsc::channel(100);
// Console channel doesn't receive messages automatically
// Messages would need to be injected via a separate method
Ok(rx)
}
}

View File

@@ -1,5 +0,0 @@
//! Channel adapters
// Console adapter: logs outgoing messages via `tracing`; used for testing.
mod console;
pub use console::ConsoleChannel;

View File

@@ -1,94 +0,0 @@
//! Channel bridge manager
//!
//! Coordinates multiple channel adapters and routes messages.
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use zclaw_types::Result;
use super::{Channel, ChannelConfig, OutgoingMessage};
/// Channel bridge manager
///
/// Keeps a registry of `Channel` adapters keyed by their config `id`, plus a
/// parallel map of the configs themselves so config lookups don't need to go
/// through the adapter trait object.
pub struct ChannelBridge {
/// Registered adapters, keyed by `ChannelConfig::id`.
channels: RwLock<HashMap<String, Arc<dyn Channel>>>,
/// Cached copy of each adapter's config, kept in step with `channels`.
configs: RwLock<HashMap<String, ChannelConfig>>,
}
impl ChannelBridge {
/// Creates an empty bridge with no registered channels.
pub fn new() -> Self {
Self {
channels: RwLock::new(HashMap::new()),
configs: RwLock::new(HashMap::new()),
}
}
/// Register a channel adapter
///
/// Snapshots the adapter's config and inserts into both maps while holding
/// both write locks, so the two maps stay consistent. Re-registering the
/// same id replaces the previous adapter.
pub async fn register(&self, channel: Arc<dyn Channel>) {
let config = channel.config().clone();
let mut channels = self.channels.write().await;
let mut configs = self.configs.write().await;
channels.insert(config.id.clone(), channel);
configs.insert(config.id.clone(), config);
}
/// Get a channel by ID
pub async fn get(&self, id: &str) -> Option<Arc<dyn Channel>> {
let channels = self.channels.read().await;
channels.get(id).cloned()
}
/// Get channel configuration
pub async fn get_config(&self, id: &str) -> Option<ChannelConfig> {
let configs = self.configs.read().await;
configs.get(id).cloned()
}
/// List all channels
pub async fn list(&self) -> Vec<ChannelConfig> {
let configs = self.configs.read().await;
configs.values().cloned().collect()
}
/// Connect all channels
///
/// Connects sequentially; stops and returns the first error, leaving any
/// remaining channels unconnected.
pub async fn connect_all(&self) -> Result<()> {
let channels = self.channels.read().await;
for channel in channels.values() {
channel.connect().await?;
}
Ok(())
}
/// Disconnect all channels
///
/// Sequential like `connect_all`; the first error short-circuits the rest.
pub async fn disconnect_all(&self) -> Result<()> {
let channels = self.channels.read().await;
for channel in channels.values() {
channel.disconnect().await?;
}
Ok(())
}
/// Send message through a specific channel
///
/// Returns `NotFound` if no channel with `channel_id` is registered;
/// otherwise forwards to the adapter and returns its platform message id.
pub async fn send(&self, channel_id: &str, message: OutgoingMessage) -> Result<String> {
let channel = self.get(channel_id).await
.ok_or_else(|| zclaw_types::ZclawError::NotFound(format!("Channel not found: {}", channel_id)))?;
channel.send(message).await
}
/// Remove a channel
///
/// Removes from both maps under their write locks; a no-op for unknown ids.
pub async fn remove(&self, id: &str) {
let mut channels = self.channels.write().await;
let mut configs = self.configs.write().await;
channels.remove(id);
configs.remove(id);
}
}
impl Default for ChannelBridge {
fn default() -> Self {
Self::new()
}
}

View File

@@ -1,109 +0,0 @@
//! Channel trait and types
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use zclaw_types::{Result, AgentId};
/// Channel configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChannelConfig {
/// Unique channel identifier
pub id: String,
/// Channel type (telegram, discord, slack, etc.)
pub channel_type: String,
/// Human-readable name
pub name: String,
/// Whether the channel is enabled
#[serde(default = "default_enabled")]
pub enabled: bool,
/// Channel-specific configuration
#[serde(default)]
pub config: serde_json::Value,
/// Associated agent for this channel
pub agent_id: Option<AgentId>,
}
// Serde default: channels are enabled unless explicitly disabled.
fn default_enabled() -> bool { true }
/// Incoming message from a channel
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IncomingMessage {
/// Message ID from the platform
pub platform_id: String,
/// Channel/conversation ID
pub conversation_id: String,
/// Sender information
pub sender: MessageSender,
/// Message content
pub content: MessageContent,
/// Timestamp
// NOTE(review): unit (seconds vs. milliseconds) is not fixed here — each
// adapter must agree; ConsoleChannel uses Unix seconds for its ids.
pub timestamp: i64,
/// Reply-to message ID if any
pub reply_to: Option<String>,
}
/// Message sender information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MessageSender {
pub id: String,
pub name: Option<String>,
pub username: Option<String>,
pub is_bot: bool,
}
/// Message content types
///
/// Serialized internally tagged as e.g. `{"type": "text", "text": "..."}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum MessageContent {
Text { text: String },
Image { url: String, caption: Option<String> },
File { url: String, filename: String },
Audio { url: String },
Video { url: String },
Location { latitude: f64, longitude: f64 },
Sticker { emoji: Option<String>, url: Option<String> },
}
/// Outgoing message to a channel
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutgoingMessage {
/// Conversation/channel ID to send to
pub conversation_id: String,
/// Message content
pub content: MessageContent,
/// Reply-to message ID if any
pub reply_to: Option<String>,
/// Whether to send silently (no notification)
pub silent: bool,
}
/// Channel connection status
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum ChannelStatus {
Disconnected,
Connecting,
Connected,
Error(String),
}
/// Channel trait for platform adapters
///
/// Implementations must be `Send + Sync` so they can be shared as
/// `Arc<dyn Channel>` across async tasks.
#[async_trait]
pub trait Channel: Send + Sync {
/// Get channel configuration
fn config(&self) -> &ChannelConfig;
/// Connect to the platform
async fn connect(&self) -> Result<()>;
/// Disconnect from the platform
async fn disconnect(&self) -> Result<()>;
/// Get current connection status
async fn status(&self) -> ChannelStatus;
/// Send a message
///
/// Returns the platform-assigned (or adapter-synthesized) message id.
async fn send(&self, message: OutgoingMessage) -> Result<String>;
/// Receive incoming messages (streaming)
async fn receive(&self) -> Result<tokio::sync::mpsc::Receiver<IncomingMessage>>;
}

View File

@@ -1,11 +0,0 @@
//! ZCLAW Channels
//!
//! External platform adapters for unified message handling.
// Core `Channel` trait plus message/config types.
mod channel;
// `ChannelBridge`: registry that routes messages to registered adapters.
mod bridge;
// Concrete adapters (console test adapter).
mod adapters;
pub use channel::*;
pub use bridge::*;
pub use adapters::*;

View File

@@ -20,3 +20,6 @@ thiserror = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
async-trait = { workspace = true } async-trait = { workspace = true }
reqwest = { workspace = true } reqwest = { workspace = true }
hmac = "0.12"
sha1 = "0.10"
base64 = { workspace = true }

View File

@@ -233,17 +233,32 @@ impl SpeechHand {
state.playback = PlaybackState::Playing; state.playback = PlaybackState::Playing;
state.current_text = Some(text.clone()); state.current_text = Some(text.clone());
// In real implementation, would call TTS API // Determine TTS method based on provider:
// - Browser: frontend uses Web Speech API (zero deps, works offline)
// - OpenAI: frontend calls speech_tts command (high-quality, needs API key)
// - Others: future support
let tts_method = match state.config.provider {
TtsProvider::Browser => "browser",
TtsProvider::OpenAI => "openai_api",
TtsProvider::Azure => "azure_api",
TtsProvider::ElevenLabs => "elevenlabs_api",
TtsProvider::Local => "local_engine",
};
let estimated_duration_ms = (text.chars().count() as f64 / 5.0 * 1000.0) as u64;
Ok(HandResult::success(serde_json::json!({ Ok(HandResult::success(serde_json::json!({
"status": "speaking", "status": "speaking",
"tts_method": tts_method,
"text": text, "text": text,
"voice": voice_id, "voice": voice_id,
"language": lang, "language": lang,
"rate": actual_rate, "rate": actual_rate,
"pitch": actual_pitch, "pitch": actual_pitch,
"volume": actual_volume, "volume": actual_volume,
"provider": state.config.provider, "provider": format!("{:?}", state.config.provider).to_lowercase(),
"duration_ms": text.len() as u64 * 80, // Rough estimate "duration_ms": estimated_duration_ms,
"instruction": "Frontend should play this via TTS engine"
}))) })))
} }
SpeechAction::SpeakSsml { ssml, voice } => { SpeechAction::SpeakSsml { ssml, voice } => {

View File

@@ -289,117 +289,435 @@ impl TwitterHand {
c.clone() c.clone()
} }
/// Execute tweet action /// Execute tweet action — POST /2/tweets
async fn execute_tweet(&self, config: &TweetConfig) -> Result<Value> { async fn execute_tweet(&self, config: &TweetConfig) -> Result<Value> {
let _creds = self.get_credentials().await let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?; .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
// Simulated tweet response (actual implementation would use Twitter API) let client = reqwest::Client::new();
// In production, this would call Twitter API v2: POST /2/tweets let body = json!({ "text": config.text });
let response = client.post("https://api.twitter.com/2/tweets")
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("Content-Type", "application/json")
.header("User-Agent", "ZCLAW/1.0")
.json(&body)
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Twitter API request failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
if !status.is_success() {
tracing::warn!("[TwitterHand] Tweet failed: {} - {}", status, response_text);
return Ok(json!({
"success": false,
"error": format!("Twitter API returned {}: {}", status, response_text),
"status_code": status.as_u16()
}));
}
// Parse the response to extract tweet_id
let parsed: Value = serde_json::from_str(&response_text).unwrap_or(json!({"raw": response_text}));
Ok(json!({ Ok(json!({
"success": true, "success": true,
"tweet_id": format!("simulated_{}", chrono::Utc::now().timestamp()), "tweet_id": parsed["data"]["id"].as_str().unwrap_or("unknown"),
"text": config.text, "text": config.text,
"created_at": chrono::Utc::now().to_rfc3339(), "raw_response": parsed,
"message": "Tweet posted successfully (simulated)", "message": "Tweet posted successfully"
"note": "Connect Twitter API credentials for actual posting"
})) }))
} }
/// Execute search action /// Execute search action — GET /2/tweets/search/recent
async fn execute_search(&self, config: &SearchConfig) -> Result<Value> { async fn execute_search(&self, config: &SearchConfig) -> Result<Value> {
let _creds = self.get_credentials().await let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?; .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
// Simulated search response let client = reqwest::Client::new();
// In production, this would call Twitter API v2: GET /2/tweets/search/recent let max = config.max_results.max(10).min(100);
let response = client.get("https://api.twitter.com/2/tweets/search/recent")
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.query(&[
("query", config.query.as_str()),
("max_results", max.to_string().as_str()),
("tweet.fields", "created_at,author_id,public_metrics,lang"),
])
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Twitter search failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
if !status.is_success() {
return Ok(json!({
"success": false,
"error": format!("Twitter API returned {}: {}", status, response_text),
"status_code": status.as_u16()
}));
}
let parsed: Value = serde_json::from_str(&response_text).unwrap_or(json!({"raw": response_text}));
Ok(json!({ Ok(json!({
"success": true, "success": true,
"query": config.query, "query": config.query,
"tweets": [], "tweets": parsed["data"].as_array().cloned().unwrap_or_default(),
"meta": { "meta": parsed["meta"].clone(),
"result_count": 0, "message": "Search completed"
"newest_id": null,
"oldest_id": null,
"next_token": null
},
"message": "Search completed (simulated - no actual results without API)",
"note": "Connect Twitter API credentials for actual search results"
})) }))
} }
/// Execute timeline action /// Execute timeline action — GET /2/users/:id/timelines/reverse_chronological
async fn execute_timeline(&self, config: &TimelineConfig) -> Result<Value> { async fn execute_timeline(&self, config: &TimelineConfig) -> Result<Value> {
let _creds = self.get_credentials().await let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?; .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
// Simulated timeline response let client = reqwest::Client::new();
let user_id = config.user_id.as_deref().unwrap_or("me");
let url = format!("https://api.twitter.com/2/users/{}/timelines/reverse_chronological", user_id);
let max = config.max_results.max(5).min(100);
let response = client.get(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.query(&[
("max_results", max.to_string().as_str()),
("tweet.fields", "created_at,author_id,public_metrics"),
])
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Timeline fetch failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
if !status.is_success() {
return Ok(json!({
"success": false,
"error": format!("Twitter API returned {}: {}", status, response_text),
"status_code": status.as_u16()
}));
}
let parsed: Value = serde_json::from_str(&response_text).unwrap_or(json!({"raw": response_text}));
Ok(json!({ Ok(json!({
"success": true, "success": true,
"user_id": config.user_id, "user_id": user_id,
"tweets": [], "tweets": parsed["data"].as_array().cloned().unwrap_or_default(),
"meta": { "meta": parsed["meta"].clone(),
"result_count": 0, "message": "Timeline fetched"
"newest_id": null,
"oldest_id": null,
"next_token": null
},
"message": "Timeline fetched (simulated)",
"note": "Connect Twitter API credentials for actual timeline"
})) }))
} }
/// Get tweet by ID /// Get tweet by ID — GET /2/tweets/:id
async fn execute_get_tweet(&self, tweet_id: &str) -> Result<Value> { async fn execute_get_tweet(&self, tweet_id: &str) -> Result<Value> {
let _creds = self.get_credentials().await let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?; .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = format!("https://api.twitter.com/2/tweets/{}", tweet_id);
let response = client.get(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.query(&[("tweet.fields", "created_at,author_id,public_metrics,lang")])
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Tweet lookup failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
if !status.is_success() {
return Ok(json!({
"success": false,
"error": format!("Twitter API returned {}: {}", status, response_text),
"status_code": status.as_u16()
}));
}
let parsed: Value = serde_json::from_str(&response_text).unwrap_or(json!({"raw": response_text}));
Ok(json!({ Ok(json!({
"success": true, "success": true,
"tweet_id": tweet_id, "tweet_id": tweet_id,
"tweet": null, "tweet": parsed["data"].clone(),
"message": "Tweet lookup (simulated)", "message": "Tweet fetched"
"note": "Connect Twitter API credentials for actual tweet data"
})) }))
} }
/// Get user by username /// Get user by username — GET /2/users/by/username/:username
async fn execute_get_user(&self, username: &str) -> Result<Value> { async fn execute_get_user(&self, username: &str) -> Result<Value> {
let _creds = self.get_credentials().await let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?; .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = format!("https://api.twitter.com/2/users/by/username/{}", username);
let response = client.get(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.query(&[("user.fields", "created_at,description,public_metrics,verified")])
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("User lookup failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
if !status.is_success() {
return Ok(json!({
"success": false,
"error": format!("Twitter API returned {}: {}", status, response_text),
"status_code": status.as_u16()
}));
}
let parsed: Value = serde_json::from_str(&response_text).unwrap_or(json!({"raw": response_text}));
Ok(json!({ Ok(json!({
"success": true, "success": true,
"username": username, "username": username,
"user": null, "user": parsed["data"].clone(),
"message": "User lookup (simulated)", "message": "User fetched"
"note": "Connect Twitter API credentials for actual user data"
})) }))
} }
/// Execute like action /// Execute like action — PUT /2/users/:id/likes
async fn execute_like(&self, tweet_id: &str) -> Result<Value> { async fn execute_like(&self, tweet_id: &str) -> Result<Value> {
let _creds = self.get_credentials().await let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?; .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
// Note: For like/retweet, we need OAuth 1.0a user context
// Using Bearer token as fallback (may not work for all endpoints)
let url = "https://api.twitter.com/2/users/me/likes";
let response = client.post(url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("Content-Type", "application/json")
.header("User-Agent", "ZCLAW/1.0")
.json(&json!({"tweet_id": tweet_id}))
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Like failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await.unwrap_or_default();
Ok(json!({ Ok(json!({
"success": true, "success": status.is_success(),
"tweet_id": tweet_id, "tweet_id": tweet_id,
"action": "liked", "action": "liked",
"message": "Tweet liked (simulated)" "status_code": status.as_u16(),
"message": if status.is_success() { "Tweet liked" } else { &response_text }
})) }))
} }
/// Execute retweet action /// Execute retweet action — POST /2/users/:id/retweets
async fn execute_retweet(&self, tweet_id: &str) -> Result<Value> { async fn execute_retweet(&self, tweet_id: &str) -> Result<Value> {
let _creds = self.get_credentials().await let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?; .ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = "https://api.twitter.com/2/users/me/retweets";
let response = client.post(url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("Content-Type", "application/json")
.header("User-Agent", "ZCLAW/1.0")
.json(&json!({"tweet_id": tweet_id}))
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Retweet failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await.unwrap_or_default();
Ok(json!({
"success": status.is_success(),
"tweet_id": tweet_id,
"action": "retweeted",
"status_code": status.as_u16(),
"message": if status.is_success() { "Tweet retweeted" } else { &response_text }
}))
}
/// Execute delete tweet — DELETE /2/tweets/:id
async fn execute_delete_tweet(&self, tweet_id: &str) -> Result<Value> {
let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = format!("https://api.twitter.com/2/tweets/{}", tweet_id);
let response = client.delete(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Delete tweet failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await.unwrap_or_default();
Ok(json!({
"success": status.is_success(),
"tweet_id": tweet_id,
"action": "deleted",
"status_code": status.as_u16(),
"message": if status.is_success() { "Tweet deleted" } else { &response_text }
}))
}
/// Execute unretweet — DELETE /2/users/:id/retweets/:tweet_id
async fn execute_unretweet(&self, tweet_id: &str) -> Result<Value> {
let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = format!("https://api.twitter.com/2/users/me/retweets/{}", tweet_id);
let response = client.delete(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Unretweet failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await.unwrap_or_default();
Ok(json!({
"success": status.is_success(),
"tweet_id": tweet_id,
"action": "unretweeted",
"status_code": status.as_u16(),
"message": if status.is_success() { "Tweet unretweeted" } else { &response_text }
}))
}
/// Execute unlike — DELETE /2/users/:id/likes/:tweet_id
async fn execute_unlike(&self, tweet_id: &str) -> Result<Value> {
let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = format!("https://api.twitter.com/2/users/me/likes/{}", tweet_id);
let response = client.delete(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Unlike failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await.unwrap_or_default();
Ok(json!({
"success": status.is_success(),
"tweet_id": tweet_id,
"action": "unliked",
"status_code": status.as_u16(),
"message": if status.is_success() { "Tweet unliked" } else { &response_text }
}))
}
/// Execute followers fetch — GET /2/users/:id/followers
async fn execute_followers(&self, user_id: &str, max_results: Option<u32>) -> Result<Value> {
let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = format!("https://api.twitter.com/2/users/{}/followers", user_id);
let max = max_results.unwrap_or(100).max(1).min(1000);
let response = client.get(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.query(&[
("max_results", max.to_string()),
("user.fields", "created_at,description,public_metrics,verified,profile_image_url".to_string()),
])
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Followers fetch failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
if !status.is_success() {
return Ok(json!({
"success": false,
"error": format!("Twitter API returned {}: {}", status, response_text),
"status_code": status.as_u16()
}));
}
let parsed: Value = serde_json::from_str(&response_text).unwrap_or(json!({"raw": response_text}));
Ok(json!({ Ok(json!({
"success": true, "success": true,
"tweet_id": tweet_id, "user_id": user_id,
"action": "retweeted", "followers": parsed["data"].as_array().cloned().unwrap_or_default(),
"message": "Tweet retweeted (simulated)" "meta": parsed["meta"].clone(),
"message": "Followers fetched"
}))
}
/// Execute following fetch — GET /2/users/:id/following
async fn execute_following(&self, user_id: &str, max_results: Option<u32>) -> Result<Value> {
let creds = self.get_credentials().await
.ok_or_else(|| zclaw_types::ZclawError::HandError("Twitter credentials not configured".to_string()))?;
let client = reqwest::Client::new();
let url = format!("https://api.twitter.com/2/users/{}/following", user_id);
let max = max_results.unwrap_or(100).max(1).min(1000);
let response = client.get(&url)
.header("Authorization", format!("Bearer {}", creds.bearer_token.as_deref().unwrap_or("")))
.header("User-Agent", "ZCLAW/1.0")
.query(&[
("max_results", max.to_string()),
("user.fields", "created_at,description,public_metrics,verified,profile_image_url".to_string()),
])
.send()
.await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Following fetch failed: {}", e)))?;
let status = response.status();
let response_text = response.text().await
.map_err(|e| zclaw_types::ZclawError::HandError(format!("Failed to read response: {}", e)))?;
if !status.is_success() {
return Ok(json!({
"success": false,
"error": format!("Twitter API returned {}: {}", status, response_text),
"status_code": status.as_u16()
}));
}
let parsed: Value = serde_json::from_str(&response_text).unwrap_or(json!({"raw": response_text}));
Ok(json!({
"success": true,
"user_id": user_id,
"following": parsed["data"].as_array().cloned().unwrap_or_default(),
"meta": parsed["meta"].clone(),
"message": "Following fetched"
})) }))
} }
@@ -461,54 +779,17 @@ impl Hand for TwitterHand {
let result = match action { let result = match action {
TwitterAction::Tweet { config } => self.execute_tweet(&config).await?, TwitterAction::Tweet { config } => self.execute_tweet(&config).await?,
TwitterAction::DeleteTweet { tweet_id } => { TwitterAction::DeleteTweet { tweet_id } => self.execute_delete_tweet(&tweet_id).await?,
json!({
"success": true,
"tweet_id": tweet_id,
"action": "deleted",
"message": "Tweet deleted (simulated)"
})
}
TwitterAction::Retweet { tweet_id } => self.execute_retweet(&tweet_id).await?, TwitterAction::Retweet { tweet_id } => self.execute_retweet(&tweet_id).await?,
TwitterAction::Unretweet { tweet_id } => { TwitterAction::Unretweet { tweet_id } => self.execute_unretweet(&tweet_id).await?,
json!({
"success": true,
"tweet_id": tweet_id,
"action": "unretweeted",
"message": "Tweet unretweeted (simulated)"
})
}
TwitterAction::Like { tweet_id } => self.execute_like(&tweet_id).await?, TwitterAction::Like { tweet_id } => self.execute_like(&tweet_id).await?,
TwitterAction::Unlike { tweet_id } => { TwitterAction::Unlike { tweet_id } => self.execute_unlike(&tweet_id).await?,
json!({
"success": true,
"tweet_id": tweet_id,
"action": "unliked",
"message": "Tweet unliked (simulated)"
})
}
TwitterAction::Search { config } => self.execute_search(&config).await?, TwitterAction::Search { config } => self.execute_search(&config).await?,
TwitterAction::Timeline { config } => self.execute_timeline(&config).await?, TwitterAction::Timeline { config } => self.execute_timeline(&config).await?,
TwitterAction::GetTweet { tweet_id } => self.execute_get_tweet(&tweet_id).await?, TwitterAction::GetTweet { tweet_id } => self.execute_get_tweet(&tweet_id).await?,
TwitterAction::GetUser { username } => self.execute_get_user(&username).await?, TwitterAction::GetUser { username } => self.execute_get_user(&username).await?,
TwitterAction::Followers { user_id, max_results } => { TwitterAction::Followers { user_id, max_results } => self.execute_followers(&user_id, max_results).await?,
json!({ TwitterAction::Following { user_id, max_results } => self.execute_following(&user_id, max_results).await?,
"success": true,
"user_id": user_id,
"followers": [],
"max_results": max_results.unwrap_or(100),
"message": "Followers fetched (simulated)"
})
}
TwitterAction::Following { user_id, max_results } => {
json!({
"success": true,
"user_id": user_id,
"following": [],
"max_results": max_results.unwrap_or(100),
"message": "Following fetched (simulated)"
})
}
TwitterAction::CheckCredentials => self.execute_check_credentials().await?, TwitterAction::CheckCredentials => self.execute_check_credentials().await?,
}; };

View File

@@ -132,6 +132,8 @@ pub struct Kernel {
running_hand_runs: Arc<dashmap::DashMap<HandRunId, Arc<std::sync::atomic::AtomicBool>>>, running_hand_runs: Arc<dashmap::DashMap<HandRunId, Arc<std::sync::atomic::AtomicBool>>>,
/// Shared memory storage backend for Growth system /// Shared memory storage backend for Growth system
viking: Arc<zclaw_runtime::VikingAdapter>, viking: Arc<zclaw_runtime::VikingAdapter>,
/// Optional LLM driver for memory extraction (set by Tauri desktop layer)
extraction_driver: Option<Arc<dyn zclaw_runtime::LlmDriverForExtraction>>,
/// A2A router for inter-agent messaging (gated by multi-agent feature) /// A2A router for inter-agent messaging (gated by multi-agent feature)
#[cfg(feature = "multi-agent")] #[cfg(feature = "multi-agent")]
a2a_router: Arc<A2aRouter>, a2a_router: Arc<A2aRouter>,
@@ -223,6 +225,7 @@ impl Kernel {
pending_approvals: Arc::new(Mutex::new(Vec::new())), pending_approvals: Arc::new(Mutex::new(Vec::new())),
running_hand_runs: Arc::new(dashmap::DashMap::new()), running_hand_runs: Arc::new(dashmap::DashMap::new()),
viking, viking,
extraction_driver: None,
#[cfg(feature = "multi-agent")] #[cfg(feature = "multi-agent")]
a2a_router, a2a_router,
#[cfg(feature = "multi-agent")] #[cfg(feature = "multi-agent")]
@@ -246,13 +249,19 @@ impl Kernel {
let mut chain = zclaw_runtime::middleware::MiddlewareChain::new(); let mut chain = zclaw_runtime::middleware::MiddlewareChain::new();
// Growth integration — shared VikingAdapter for memory middleware & compaction // Growth integration — shared VikingAdapter for memory middleware & compaction
let growth = zclaw_runtime::GrowthIntegration::new(self.viking.clone()); let mut growth = zclaw_runtime::GrowthIntegration::new(self.viking.clone());
if let Some(ref driver) = self.extraction_driver {
growth = growth.with_llm_driver(driver.clone());
}
// Compaction middleware — only register when threshold > 0 // Compaction middleware — only register when threshold > 0
let threshold = self.config.compaction_threshold(); let threshold = self.config.compaction_threshold();
if threshold > 0 { if threshold > 0 {
use std::sync::Arc; use std::sync::Arc;
let growth_for_compaction = zclaw_runtime::GrowthIntegration::new(self.viking.clone()); let mut growth_for_compaction = zclaw_runtime::GrowthIntegration::new(self.viking.clone());
if let Some(ref driver) = self.extraction_driver {
growth_for_compaction = growth_for_compaction.with_llm_driver(driver.clone());
}
let mw = zclaw_runtime::middleware::compaction::CompactionMiddleware::new( let mw = zclaw_runtime::middleware::compaction::CompactionMiddleware::new(
threshold, threshold,
zclaw_runtime::CompactionConfig::default(), zclaw_runtime::CompactionConfig::default(),
@@ -657,6 +666,30 @@ impl Kernel {
self.driver.clone() self.driver.clone()
} }
    /// Replace the default in-memory VikingAdapter with a persistent one.
    ///
    /// Called by the Tauri desktop layer after `Kernel::boot()` to bridge
    /// the kernel's Growth system to the same SqliteStorage used by
    /// viking_commands and intelligence_hooks.
    ///
    /// Note: takes `&mut self`, so this must happen before the kernel is
    /// shared (e.g. wrapped in an `Arc`). The previous adapter is dropped.
    pub fn set_viking(&mut self, viking: Arc<zclaw_runtime::VikingAdapter>) {
        // Log at info level so storage-bridging is visible during startup.
        tracing::info!("[Kernel] Replacing in-memory VikingAdapter with persistent storage");
        self.viking = viking;
    }
/// Get a reference to the shared VikingAdapter
pub fn viking(&self) -> Arc<zclaw_runtime::VikingAdapter> {
self.viking.clone()
}
    /// Set the LLM extraction driver for the Growth system.
    ///
    /// Required for `MemoryMiddleware` to extract memories from conversations
    /// via LLM analysis. If not set, memory extraction is silently skipped.
    ///
    /// Note: takes `&mut self`, so configure this before the kernel is
    /// shared; the middleware chain reads this field when it is built.
    pub fn set_extraction_driver(&mut self, driver: Arc<dyn zclaw_runtime::LlmDriverForExtraction>) {
        tracing::info!("[Kernel] Extraction driver configured for Growth system");
        self.extraction_driver = Some(driver);
    }
/// Get the skills registry /// Get the skills registry
pub fn skills(&self) -> &Arc<SkillRegistry> { pub fn skills(&self) -> &Arc<SkillRegistry> {
&self.skills &self.skills

View File

@@ -61,6 +61,10 @@ pub struct PipelineMetadata {
/// Version string /// Version string
#[serde(default = "default_version")] #[serde(default = "default_version")]
pub version: String, pub version: String,
/// Arbitrary key-value annotations (e.g., is_template: true)
#[serde(default)]
pub annotations: Option<std::collections::HashMap<String, serde_json::Value>>,
} }
fn default_version() -> String { fn default_version() -> String {

View File

@@ -4,14 +4,11 @@
//! enabling automatic memory retrieval before conversations and memory extraction //! enabling automatic memory retrieval before conversations and memory extraction
//! after conversations. //! after conversations.
//! //!
//! **Note (2026-03-27 audit)**: In the Tauri desktop deployment, this module is //! **Note (2026-03-30)**: GrowthIntegration IS wired into the Kernel's middleware
//! NOT wired into the Kernel. The intelligence_hooks module in desktop/src-tauri //! chain (MemoryMiddleware + CompactionMiddleware). In the Tauri desktop deployment,
//! provides the same functionality (memory retrieval, heartbeat, reflection) via //! `kernel_commands::kernel_init()` bridges the persistent SqliteStorage to the Kernel
//! direct VikingStorage calls. GrowthIntegration remains available for future //! via `set_viking()` + `set_extraction_driver()`, so the middleware chain and the
//! use (e.g., headless/server deployments where intelligence_hooks is not available). //! Tauri intelligence_hooks share the same persistent storage backend.
//!
//! The `AgentLoop.growth` field defaults to `None` and the code gracefully falls
//! through to normal behavior when not set.
use std::sync::Arc; use std::sync::Arc;
use zclaw_growth::{ use zclaw_growth::{

View File

@@ -29,4 +29,5 @@ pub use stream::{StreamEvent, StreamSender};
pub use growth::GrowthIntegration; pub use growth::GrowthIntegration;
pub use zclaw_growth::VikingAdapter; pub use zclaw_growth::VikingAdapter;
pub use zclaw_growth::EmbeddingClient; pub use zclaw_growth::EmbeddingClient;
pub use zclaw_growth::LlmDriverForExtraction;
pub use compaction::{CompactionConfig, CompactionOutcome}; pub use compaction::{CompactionConfig, CompactionOutcome};

View File

@@ -66,10 +66,14 @@ async fn main() -> anyhow::Result<()> {
} }
async fn health_handler(State(state): State<AppState>) -> axum::Json<serde_json::Value> { async fn health_handler(State(state): State<AppState>) -> axum::Json<serde_json::Value> {
let db_healthy = sqlx::query_scalar::<_, i32>("SELECT 1") // health 必须独立快速返回,用 3s 超时避免连接池满时阻塞
.fetch_one(&state.db) let db_healthy = tokio::time::timeout(
std::time::Duration::from_secs(3),
sqlx::query_scalar::<_, i32>("SELECT 1").fetch_one(&state.db),
)
.await .await
.is_ok(); .map(|r| r.is_ok())
.unwrap_or(false);
let status = if db_healthy { "healthy" } else { "degraded" }; let status = if db_healthy { "healthy" } else { "degraded" };
let _code = if db_healthy { 200 } else { 503 }; let _code = if db_healthy { 200 } else { 503 };

View File

@@ -441,9 +441,9 @@ pub async fn get_usage_stats(
.and_hms_opt(0, 0, 0).unwrap() .and_hms_opt(0, 0, 0).unwrap()
.and_utc() .and_utc()
.to_rfc3339(); .to_rfc3339();
let daily_sql = "SELECT SUBSTRING(created_at, 1, 10) as day, COUNT(*)::bigint AS request_count, COALESCE(SUM(input_tokens), 0) AS input_tokens, COALESCE(SUM(output_tokens), 0) AS output_tokens let daily_sql = "SELECT created_at::date::text as day, COUNT(*)::bigint AS request_count, COALESCE(SUM(input_tokens), 0) AS input_tokens, COALESCE(SUM(output_tokens), 0) AS output_tokens
FROM usage_records WHERE account_id = $1 AND created_at >= $2 FROM usage_records WHERE account_id = $1 AND created_at >= $2
GROUP BY SUBSTRING(created_at, 1, 10) ORDER BY day DESC LIMIT $3"; GROUP BY created_at::date ORDER BY day DESC LIMIT $3";
let daily_rows: Vec<UsageByDayRow> = sqlx::query_as(daily_sql) let daily_rows: Vec<UsageByDayRow> = sqlx::query_as(daily_sql)
.bind(account_id).bind(&from_days).bind(days as i32) .bind(account_id).bind(&from_days).bind(days as i32)
.fetch_all(db).await?; .fetch_all(db).await?;

View File

@@ -142,6 +142,13 @@ pub async fn chat_completions(
let target_model = target_model let target_model = target_model
.ok_or_else(|| SaasError::NotFound(format!("模型 {} 不存在或未启用", model_name)))?; .ok_or_else(|| SaasError::NotFound(format!("模型 {} 不存在或未启用", model_name)))?;
// Stream compatibility check: reject stream requests for non-streaming models
if stream && !target_model.supports_streaming {
return Err(SaasError::InvalidInput(
format!("模型 {} 不支持流式响应,请使用 stream: false", model_name)
));
}
// 获取 provider 信息 // 获取 provider 信息
let provider = model_service::get_provider(&state.db, &target_model.provider_id).await?; let provider = model_service::get_provider(&state.db, &target_model.provider_id).await?;
if !provider.enabled { if !provider.enabled {
@@ -385,6 +392,12 @@ pub async fn add_provider_key(
if req.key_value.trim().is_empty() { if req.key_value.trim().is_empty() {
return Err(SaasError::InvalidInput("key_value 不能为空".into())); return Err(SaasError::InvalidInput("key_value 不能为空".into()));
} }
if req.key_value.len() < 20 {
return Err(SaasError::InvalidInput("key_value 长度不足(至少 20 字符)".into()));
}
if req.key_value.contains(char::is_whitespace) {
return Err(SaasError::InvalidInput("key_value 不能包含空白字符".into()));
}
let key_id = super::key_pool::add_provider_key( let key_id = super::key_pool::add_provider_key(
&state.db, &provider_id, &req.key_label, &req.key_value, &state.db, &provider_id, &req.key_label, &req.key_value,

View File

@@ -240,7 +240,7 @@ pub async fn get_daily_stats(
.to_rfc3339(); .to_rfc3339();
let sql = "SELECT let sql = "SELECT
SUBSTRING(reported_at, 1, 10) as day, reported_at::date::text as day,
COUNT(*)::bigint as request_count, COUNT(*)::bigint as request_count,
COALESCE(SUM(input_tokens), 0)::bigint as input_tokens, COALESCE(SUM(input_tokens), 0)::bigint as input_tokens,
COALESCE(SUM(output_tokens), 0)::bigint as output_tokens, COALESCE(SUM(output_tokens), 0)::bigint as output_tokens,
@@ -248,7 +248,7 @@ pub async fn get_daily_stats(
FROM telemetry_reports FROM telemetry_reports
WHERE account_id = $1 WHERE account_id = $1
AND reported_at >= $2 AND reported_at >= $2
GROUP BY SUBSTRING(reported_at, 1, 10) GROUP BY reported_at::date
ORDER BY day DESC"; ORDER BY day DESC";
let rows: Vec<TelemetryDailyStatsRow> = let rows: Vec<TelemetryDailyStatsRow> =

View File

@@ -68,6 +68,7 @@
"@types/react-window": "^2.0.0", "@types/react-window": "^2.0.0",
"@types/uuid": "^10.0.0", "@types/uuid": "^10.0.0",
"@vitejs/plugin-react": "^4.7.0", "@vitejs/plugin-react": "^4.7.0",
"@vitejs/plugin-react-oxc": "^0.4.3",
"@vitest/coverage-v8": "2.1.9", "@vitest/coverage-v8": "2.1.9",
"autoprefixer": "^10.4.27", "autoprefixer": "^10.4.27",
"eslint": "^10.1.0", "eslint": "^10.1.0",

19
desktop/pnpm-lock.yaml generated
View File

@@ -99,6 +99,9 @@ importers:
'@vitejs/plugin-react': '@vitejs/plugin-react':
specifier: ^4.7.0 specifier: ^4.7.0
version: 4.7.0(vite@8.0.3(esbuild@0.27.4)(jiti@2.6.1)) version: 4.7.0(vite@8.0.3(esbuild@0.27.4)(jiti@2.6.1))
'@vitejs/plugin-react-oxc':
specifier: ^0.4.3
version: 0.4.3(vite@8.0.3(esbuild@0.27.4)(jiti@2.6.1))
'@vitest/coverage-v8': '@vitest/coverage-v8':
specifier: 2.1.9 specifier: 2.1.9
version: 2.1.9(vitest@2.1.9(jsdom@25.0.1)(lightningcss@1.32.0)) version: 2.1.9(vitest@2.1.9(jsdom@25.0.1)(lightningcss@1.32.0))
@@ -787,6 +790,9 @@ packages:
'@rolldown/pluginutils@1.0.0-beta.27': '@rolldown/pluginutils@1.0.0-beta.27':
resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==} resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==}
'@rolldown/pluginutils@1.0.0-beta.47':
resolution: {integrity: sha512-8QagwMH3kNCuzD8EWL8R2YPW5e4OrHNSAHRFDdmFqEwEaD/KcNKjVoumo+gP2vW5eKB2UPbM6vTYiGZX0ixLnw==}
'@rolldown/pluginutils@1.0.0-rc.12': '@rolldown/pluginutils@1.0.0-rc.12':
resolution: {integrity: sha512-HHMwmarRKvoFsJorqYlFeFRzXZqCt2ETQlEDOb9aqssrnVBB1/+xgTGtuTrIk5vzLNX1MjMtTf7W9z3tsSbrxw==} resolution: {integrity: sha512-HHMwmarRKvoFsJorqYlFeFRzXZqCt2ETQlEDOb9aqssrnVBB1/+xgTGtuTrIk5vzLNX1MjMtTf7W9z3tsSbrxw==}
@@ -1303,6 +1309,12 @@ packages:
'@ungap/structured-clone@1.3.0': '@ungap/structured-clone@1.3.0':
resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==}
'@vitejs/plugin-react-oxc@0.4.3':
resolution: {integrity: sha512-eJv6hHOIOVXzA4b2lZwccu/7VNmk9372fGOqsx5tNxiJHLtFBokyCTQUhlgjjXxl7guLPauHp0TqGTVyn1HvQA==}
engines: {node: ^20.19.0 || >=22.12.0}
peerDependencies:
vite: ^6.3.0 || ^7.0.0
'@vitejs/plugin-react@4.7.0': '@vitejs/plugin-react@4.7.0':
resolution: {integrity: sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==} resolution: {integrity: sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==}
engines: {node: ^14.18.0 || >=16.0.0} engines: {node: ^14.18.0 || >=16.0.0}
@@ -3880,6 +3892,8 @@ snapshots:
'@rolldown/pluginutils@1.0.0-beta.27': {} '@rolldown/pluginutils@1.0.0-beta.27': {}
'@rolldown/pluginutils@1.0.0-beta.47': {}
'@rolldown/pluginutils@1.0.0-rc.12': {} '@rolldown/pluginutils@1.0.0-rc.12': {}
'@rollup/rollup-android-arm-eabi@4.60.0': '@rollup/rollup-android-arm-eabi@4.60.0':
@@ -4322,6 +4336,11 @@ snapshots:
'@ungap/structured-clone@1.3.0': {} '@ungap/structured-clone@1.3.0': {}
'@vitejs/plugin-react-oxc@0.4.3(vite@8.0.3(esbuild@0.27.4)(jiti@2.6.1))':
dependencies:
'@rolldown/pluginutils': 1.0.0-beta.47
vite: 8.0.3(esbuild@0.27.4)(jiti@2.6.1)
'@vitejs/plugin-react@4.7.0(vite@8.0.3(esbuild@0.27.4)(jiti@2.6.1))': '@vitejs/plugin-react@4.7.0(vite@8.0.3(esbuild@0.27.4)(jiti@2.6.1))':
dependencies: dependencies:
'@babel/core': 7.29.0 '@babel/core': 7.29.0

View File

@@ -17,6 +17,9 @@ tauri-build = { version = "2", features = [] }
[features] [features]
default = [] default = []
# Multi-agent orchestration (A2A protocol, Director, agent delegation)
# Disabled by default — enable when multi-agent UI is ready.
multi-agent = ["zclaw-kernel/multi-agent"]
dev-server = ["dep:axum", "dep:tower-http"] dev-server = ["dep:axum", "dep:tower-http"]
[dependencies] [dependencies]
@@ -24,7 +27,7 @@ dev-server = ["dep:axum", "dep:tower-http"]
zclaw-types = { workspace = true } zclaw-types = { workspace = true }
zclaw-memory = { workspace = true } zclaw-memory = { workspace = true }
zclaw-runtime = { workspace = true } zclaw-runtime = { workspace = true }
zclaw-kernel = { workspace = true, features = ["multi-agent"] } zclaw-kernel = { workspace = true }
zclaw-skills = { workspace = true } zclaw-skills = { workspace = true }
zclaw-hands = { workspace = true } zclaw-hands = { workspace = true }
zclaw-pipeline = { workspace = true } zclaw-pipeline = { workspace = true }

View File

@@ -246,6 +246,7 @@ pub fn is_extraction_driver_configured() -> bool {
/// Get the global extraction driver. /// Get the global extraction driver.
/// ///
/// Returns `None` if not yet configured via `configure_extraction_driver`. /// Returns `None` if not yet configured via `configure_extraction_driver`.
#[allow(dead_code)]
pub fn get_extraction_driver() -> Option<Arc<TauriExtractionDriver>> { pub fn get_extraction_driver() -> Option<Arc<TauriExtractionDriver>> {
EXTRACTION_DRIVER.get().cloned() EXTRACTION_DRIVER.get().cloned()
} }

View File

@@ -100,12 +100,12 @@ pub type HeartbeatCheckFn = Box<dyn Fn(String) -> std::pin::Pin<Box<dyn std::fut
impl Default for HeartbeatConfig { impl Default for HeartbeatConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
enabled: false, enabled: true,
interval_minutes: 30, interval_minutes: 30,
quiet_hours_start: Some("22:00".to_string()), quiet_hours_start: Some("22:00".to_string()),
quiet_hours_end: Some("08:00".to_string()), quiet_hours_end: Some("08:00".to_string()),
notify_channel: NotifyChannel::Ui, notify_channel: NotifyChannel::Ui,
proactivity_level: ProactivityLevel::Light, proactivity_level: ProactivityLevel::Standard,
max_alerts_per_tick: 5, max_alerts_per_tick: 5,
} }
} }

View File

@@ -57,6 +57,52 @@ impl fmt::Display for ValidationError {
impl std::error::Error for ValidationError {} impl std::error::Error for ValidationError {}
/// Validate a UUID string (for agent_id, session_id, etc.)
///
/// Provides a clear error message when the UUID format is invalid,
/// instead of a generic "invalid characters" error from `validate_identifier`.
///
/// Checks, in order: non-empty, exactly 36 chars, hyphens at the four
/// canonical positions (8/13/18/23), and hex digits everywhere else.
/// Accepts both upper- and lower-case hex, like most UUID parsers.
pub fn validate_uuid(value: &str, field_name: &str) -> Result<(), ValidationError> {
    if value.is_empty() {
        return Err(ValidationError::RequiredFieldEmpty {
            field: field_name.to_string(),
        });
    }

    // Canonical UUID layout: 8-4-4-4-12 hex groups => 36 chars including hyphens.
    if value.len() != 36 {
        return Err(ValidationError::InvalidCharacters {
            field: field_name.to_string(),
            invalid_chars: format!("expected UUID format (36 chars), got {} chars", value.len()),
        });
    }

    // The four separator positions in the canonical layout.
    const HYPHENS: [usize; 4] = [8, 13, 18, 23];
    let bytes = value.as_bytes();

    // Structure check first: all separators must be '-'. Indexing is safe
    // because the length was verified to be exactly 36 above.
    if HYPHENS.iter().any(|&pos| bytes[pos] != b'-') {
        return Err(ValidationError::InvalidCharacters {
            field: field_name.to_string(),
            invalid_chars: "not a valid UUID (expected format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx)".into(),
        });
    }

    // Every non-separator byte must be a hex digit; report the first offender.
    for (i, &b) in bytes.iter().enumerate() {
        if HYPHENS.contains(&i) {
            continue;
        }
        if !b.is_ascii_hexdigit() {
            return Err(ValidationError::InvalidCharacters {
                field: field_name.to_string(),
                invalid_chars: format!("'{}' at position {} is not a hex digit", b as char, i),
            });
        }
    }

    Ok(())
}
/// Validate an identifier (agent_id, pipeline_id, skill_id, etc.) /// Validate an identifier (agent_id, pipeline_id, skill_id, etc.)
/// ///
/// # Rules /// # Rules

View File

@@ -25,6 +25,11 @@ pub type SessionStreamGuard = Arc<dashmap::DashMap<String, Arc<Mutex<()>>>>;
fn validate_agent_id(agent_id: &str) -> Result<String, String> { fn validate_agent_id(agent_id: &str) -> Result<String, String> {
validate_identifier(agent_id, "agent_id") validate_identifier(agent_id, "agent_id")
.map_err(|e| format!("Invalid agent_id: {}", e))?; .map_err(|e| format!("Invalid agent_id: {}", e))?;
// AgentId is a UUID wrapper — validate UUID format for better error messages
if agent_id.contains('-') {
crate::intelligence::validation::validate_uuid(agent_id, "agent_id")
.map_err(|e| format!("Invalid agent_id: {}", e))?;
}
Ok(agent_id.to_string()) Ok(agent_id.to_string())
} }
@@ -209,7 +214,7 @@ pub async fn kernel_init(
let model = config.llm.model.clone(); let model = config.llm.model.clone();
// Boot kernel // Boot kernel
let kernel = Kernel::boot(config.clone()) let mut kernel = Kernel::boot(config.clone())
.await .await
.map_err(|e| format!("Failed to initialize kernel: {}", e))?; .map_err(|e| format!("Failed to initialize kernel: {}", e))?;
@@ -222,6 +227,33 @@ pub async fn kernel_init(
model.clone(), model.clone(),
); );
// Bridge SqliteStorage to Kernel's GrowthIntegration
// This connects the middleware chain (MemoryMiddleware, CompactionMiddleware)
// to the same persistent SqliteStorage used by viking_commands and intelligence_hooks.
{
match crate::viking_commands::get_storage().await {
Ok(sqlite_storage) => {
// Wrap SqliteStorage in VikingAdapter (SqliteStorage implements VikingStorage)
let viking = std::sync::Arc::new(zclaw_runtime::VikingAdapter::new(sqlite_storage));
kernel.set_viking(viking);
tracing::info!("[kernel_init] Bridged persistent SqliteStorage to Kernel GrowthIntegration");
}
Err(e) => {
tracing::warn!(
"[kernel_init] Failed to get SqliteStorage, GrowthIntegration will use in-memory storage: {}",
e
);
}
}
// Set the LLM extraction driver on the kernel for memory extraction via middleware
let extraction_driver = crate::intelligence::extraction_adapter::TauriExtractionDriver::new(
driver.clone(),
model.clone(),
);
kernel.set_extraction_driver(std::sync::Arc::new(extraction_driver));
}
// Configure summary driver so the Growth system can generate L0/L1 summaries // Configure summary driver so the Growth system can generate L0/L1 summaries
if let Some(api_key) = config_request.as_ref().and_then(|r| r.api_key.clone()) { if let Some(api_key) = config_request.as_ref().and_then(|r| r.api_key.clone()) {
crate::summarizer_adapter::configure_summary_driver( crate::summarizer_adapter::configure_summary_driver(
@@ -530,6 +562,21 @@ pub async fn agent_chat_stream(
format!("Session {} already has an active stream", session_id) format!("Session {} already has an active stream", session_id)
})?; })?;
// AUTO-INIT HEARTBEAT: Ensure heartbeat engine exists for this agent.
// Uses default config (enabled: true, 30min interval) so heartbeat runs
// automatically from the first conversation without manual setup.
{
let mut engines = heartbeat_state.lock().await;
if !engines.contains_key(&request.agent_id) {
let engine = crate::intelligence::heartbeat::HeartbeatEngine::new(
request.agent_id.clone(),
None, // Use default config (enabled: true)
);
engines.insert(request.agent_id.clone(), engine);
tracing::info!("[agent_chat_stream] Auto-initialized heartbeat for agent: {}", request.agent_id);
}
}
// PRE-CONVERSATION: Build intelligence-enhanced system prompt // PRE-CONVERSATION: Build intelligence-enhanced system prompt
let enhanced_prompt = crate::intelligence_hooks::pre_conversation_hook( let enhanced_prompt = crate::intelligence_hooks::pre_conversation_hook(
&request.agent_id, &request.agent_id,
@@ -550,15 +597,22 @@ pub async fn agent_chat_stream(
// Use intelligence-enhanced system prompt if available // Use intelligence-enhanced system prompt if available
let prompt_arg = if enhanced_prompt.is_empty() { None } else { Some(enhanced_prompt) }; let prompt_arg = if enhanced_prompt.is_empty() { None } else { Some(enhanced_prompt) };
// Parse session_id for session reuse (carry conversation history across turns) // Parse session_id for session reuse (carry conversation history across turns)
let session_id_parsed = std::str::FromStr::from_str(&session_id) // Empty session_id means first message in a new conversation — that's valid.
.ok() // Non-empty session_id MUST be a valid UUID; if not, return error instead of
.map(|uuid| zclaw_types::SessionId::from_uuid(uuid)); // silently losing context by creating a new session.
if session_id_parsed.is_none() { let session_id_parsed = if session_id.is_empty() {
tracing::warn!( None
"session_id '{}' is not a valid UUID, will create a new session (context will be lost)", } else {
session_id match uuid::Uuid::parse_str(&session_id) {
); Ok(uuid) => Some(zclaw_types::SessionId::from_uuid(uuid)),
Err(e) => {
return Err(format!(
"Invalid session_id '{}': {}. Cannot reuse conversation context.",
session_id, e
));
} }
}
};
let rx = kernel.send_message_stream_with_prompt(&id, message.clone(), prompt_arg, session_id_parsed) let rx = kernel.send_message_stream_with_prompt(&id, message.clone(), prompt_arg, session_id_parsed)
.await .await
.map_err(|e| format!("Failed to start streaming: {}", e))?; .map_err(|e| format!("Failed to start streaming: {}", e))?;
@@ -1775,9 +1829,10 @@ pub async fn scheduled_task_list(
} }
// ============================================================ // ============================================================
// A2A (Agent-to-Agent) Commands // A2A (Agent-to-Agent) Commands — gated behind multi-agent feature
// ============================================================ // ============================================================
#[cfg(feature = "multi-agent")]
/// Send a direct A2A message from one agent to another /// Send a direct A2A message from one agent to another
#[tauri::command] #[tauri::command]
pub async fn agent_a2a_send( pub async fn agent_a2a_send(
@@ -1810,6 +1865,7 @@ pub async fn agent_a2a_send(
} }
/// Broadcast a message from one agent to all other agents /// Broadcast a message from one agent to all other agents
#[cfg(feature = "multi-agent")]
#[tauri::command] #[tauri::command]
pub async fn agent_a2a_broadcast( pub async fn agent_a2a_broadcast(
state: State<'_, KernelState>, state: State<'_, KernelState>,
@@ -1830,6 +1886,7 @@ pub async fn agent_a2a_broadcast(
} }
/// Discover agents with a specific capability /// Discover agents with a specific capability
#[cfg(feature = "multi-agent")]
#[tauri::command] #[tauri::command]
pub async fn agent_a2a_discover( pub async fn agent_a2a_discover(
state: State<'_, KernelState>, state: State<'_, KernelState>,
@@ -1850,6 +1907,7 @@ pub async fn agent_a2a_discover(
} }
/// Delegate a task to another agent and wait for response /// Delegate a task to another agent and wait for response
#[cfg(feature = "multi-agent")]
#[tauri::command] #[tauri::command]
pub async fn agent_a2a_delegate_task( pub async fn agent_a2a_delegate_task(
state: State<'_, KernelState>, state: State<'_, KernelState>,

View File

@@ -1352,15 +1352,19 @@ pub fn run() {
kernel_commands::scheduled_task_create, kernel_commands::scheduled_task_create,
kernel_commands::scheduled_task_list, kernel_commands::scheduled_task_list,
// A2A commands (Agent-to-Agent messaging) // A2A commands gated behind multi-agent feature
#[cfg(feature = "multi-agent")]
kernel_commands::agent_a2a_send, kernel_commands::agent_a2a_send,
#[cfg(feature = "multi-agent")]
kernel_commands::agent_a2a_broadcast, kernel_commands::agent_a2a_broadcast,
#[cfg(feature = "multi-agent")]
kernel_commands::agent_a2a_discover, kernel_commands::agent_a2a_discover,
#[cfg(feature = "multi-agent")]
kernel_commands::agent_a2a_delegate_task, kernel_commands::agent_a2a_delegate_task,
// Pipeline commands (DSL-based workflows) // Pipeline commands (DSL-based workflows)
pipeline_commands::pipeline_list, pipeline_commands::pipeline_list,
pipeline_commands::pipeline_get, pipeline_commands::pipeline_templates, pipeline_commands::pipeline_get,
pipeline_commands::pipeline_run, pipeline_commands::pipeline_run,
pipeline_commands::pipeline_progress, pipeline_commands::pipeline_progress,
pipeline_commands::pipeline_cancel, pipeline_commands::pipeline_cancel,

View File

@@ -681,9 +681,8 @@ fn scan_pipelines_with_paths(
tracing::debug!("[scan] File content length: {} bytes", content.len()); tracing::debug!("[scan] File content length: {} bytes", content.len());
match parse_pipeline_yaml(&content) { match parse_pipeline_yaml(&content) {
Ok(pipeline) => { Ok(pipeline) => {
// Debug: log parsed pipeline metadata tracing::debug!(
println!( "[scan] Parsed YAML: {} -> category: {:?}, industry: {:?}",
"[DEBUG scan] Parsed YAML: {} -> category: {:?}, industry: {:?}",
pipeline.metadata.name, pipeline.metadata.name,
pipeline.metadata.category, pipeline.metadata.category,
pipeline.metadata.industry pipeline.metadata.industry
@@ -744,8 +743,8 @@ fn scan_pipelines_full_sync(
fn pipeline_to_info(pipeline: &Pipeline) -> PipelineInfo { fn pipeline_to_info(pipeline: &Pipeline) -> PipelineInfo {
let industry = pipeline.metadata.industry.clone().unwrap_or_default(); let industry = pipeline.metadata.industry.clone().unwrap_or_default();
println!( tracing::debug!(
"[DEBUG pipeline_to_info] Pipeline: {}, category: {:?}, industry: {:?}", "[pipeline_to_info] Pipeline: {}, category: {:?}, industry: {:?}",
pipeline.metadata.name, pipeline.metadata.name,
pipeline.metadata.category, pipeline.metadata.category,
pipeline.metadata.industry pipeline.metadata.industry
@@ -1040,17 +1039,31 @@ fn create_llm_driver_from_config() -> Option<Arc<dyn LlmActionDriver>> {
// Convert api_key to SecretString // Convert api_key to SecretString
let secret_key = SecretString::new(api_key); let secret_key = SecretString::new(api_key);
// Create the runtime driver // Create the runtime driver — use with_base_url when a custom endpoint is configured
// (essential for Chinese providers like doubao, qwen, deepseek, kimi)
let runtime_driver: Arc<dyn zclaw_runtime::LlmDriver> = match provider.as_str() { let runtime_driver: Arc<dyn zclaw_runtime::LlmDriver> = match provider.as_str() {
"anthropic" => { "anthropic" => {
if let Some(url) = base_url {
Arc::new(zclaw_runtime::AnthropicDriver::with_base_url(secret_key, url))
} else {
Arc::new(zclaw_runtime::AnthropicDriver::new(secret_key)) Arc::new(zclaw_runtime::AnthropicDriver::new(secret_key))
} }
"openai" | "doubao" | "qwen" | "deepseek" | "kimi" => { }
"openai" | "doubao" | "qwen" | "deepseek" | "kimi" | "zhipu" => {
// Chinese providers typically need a custom base_url
if let Some(url) = base_url {
Arc::new(zclaw_runtime::OpenAiDriver::with_base_url(secret_key, url))
} else {
Arc::new(zclaw_runtime::OpenAiDriver::new(secret_key)) Arc::new(zclaw_runtime::OpenAiDriver::new(secret_key))
} }
}
"gemini" => { "gemini" => {
if let Some(url) = base_url {
Arc::new(zclaw_runtime::GeminiDriver::with_base_url(secret_key, url))
} else {
Arc::new(zclaw_runtime::GeminiDriver::new(secret_key)) Arc::new(zclaw_runtime::GeminiDriver::new(secret_key))
} }
}
"local" | "ollama" => { "local" | "ollama" => {
let url = base_url.unwrap_or_else(|| "http://localhost:11434".to_string()); let url = base_url.unwrap_or_else(|| "http://localhost:11434".to_string());
Arc::new(zclaw_runtime::LocalDriver::new(&url)) Arc::new(zclaw_runtime::LocalDriver::new(&url))
@@ -1077,3 +1090,83 @@ pub async fn analyze_presentation(
// Convert analysis to JSON // Convert analysis to JSON
serde_json::to_value(&analysis).map_err(|e| e.to_string()) serde_json::to_value(&analysis).map_err(|e| e.to_string())
} }
/// Pipeline template metadata exposed to the frontend template browser.
///
/// Serialized with camelCase keys (`#[serde(rename_all = "camelCase")]`)
/// to match the TypeScript client types.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PipelineTemplateInfo {
    /// Template identifier (taken from the pipeline's metadata name).
    pub id: String,
    /// Human-readable name; falls back to the metadata name when unset.
    pub display_name: String,
    /// Short description of what the template does (empty string when unset).
    pub description: String,
    /// Category bucket, e.g. "productivity" (empty string when unset).
    pub category: String,
    /// Target industry (empty string when unset).
    pub industry: String,
    /// Free-form tags used for filtering/search in the UI.
    pub tags: Vec<String>,
    /// Emoji/icon string; a package emoji is substituted when unset.
    pub icon: String,
    /// Template version string from pipeline metadata.
    pub version: String,
    /// Template author (empty string when unset).
    pub author: String,
    /// Declared inputs the user can provide when instantiating the template.
    pub inputs: Vec<PipelineInputInfo>,
}
/// List available pipeline templates.
///
/// A template is any loaded pipeline whose metadata carries the
/// `is_template: true` annotation. Templates serve as blueprints that the
/// user can browse and instantiate; they are not meant to be run directly.
///
/// # Errors
///
/// Currently always returns `Ok`; the `Result` signature is required by
/// the Tauri command contract.
#[tauri::command]
pub async fn pipeline_templates(
    state: State<'_, Arc<PipelineState>>,
) -> Result<Vec<PipelineTemplateInfo>, String> {
    let pipelines = state.pipelines.read().await;

    // `.values()` (rather than `.iter()`) avoids binding the map key,
    // which was previously unused and produced an unused-variable warning.
    let templates: Vec<PipelineTemplateInfo> = pipelines
        .values()
        .filter(|pipeline| {
            // Only pipelines explicitly annotated as templates are exposed.
            pipeline
                .metadata
                .annotations
                .as_ref()
                .and_then(|a| a.get("is_template"))
                .and_then(|v| v.as_bool())
                .unwrap_or(false)
        })
        .map(|pipeline| PipelineTemplateInfo {
            id: pipeline.metadata.name.clone(),
            display_name: pipeline
                .metadata
                .display_name
                .clone()
                .unwrap_or_else(|| pipeline.metadata.name.clone()),
            description: pipeline.metadata.description.clone().unwrap_or_default(),
            category: pipeline.metadata.category.clone().unwrap_or_default(),
            industry: pipeline.metadata.industry.clone().unwrap_or_default(),
            tags: pipeline.metadata.tags.clone(),
            icon: pipeline
                .metadata
                .icon
                .clone()
                .unwrap_or_else(|| "📦".to_string()),
            version: pipeline.metadata.version.clone(),
            author: pipeline.metadata.author.clone().unwrap_or_default(),
            inputs: pipeline
                .spec
                .inputs
                .iter()
                .map(|input| PipelineInputInfo {
                    name: input.name.clone(),
                    // Stable string tags consumed by the frontend type system.
                    input_type: match input.input_type {
                        zclaw_pipeline::InputType::String => "string",
                        zclaw_pipeline::InputType::Number => "number",
                        zclaw_pipeline::InputType::Boolean => "boolean",
                        zclaw_pipeline::InputType::Select => "select",
                        zclaw_pipeline::InputType::MultiSelect => "multi-select",
                        zclaw_pipeline::InputType::File => "file",
                        zclaw_pipeline::InputType::Text => "text",
                    }
                    .to_string(),
                    required: input.required,
                    label: input.label.clone().unwrap_or_else(|| input.name.clone()),
                    placeholder: input.placeholder.clone(),
                    default: input.default.clone(),
                    options: input.options.clone(),
                })
                .collect(),
        })
        .collect();

    tracing::debug!("[pipeline_templates] Found {} templates", templates.len());
    Ok(templates)
}

View File

@@ -35,7 +35,7 @@ import {
// === Default Config === // === Default Config ===
const DEFAULT_HEARTBEAT_CONFIG: HeartbeatConfigType = { const DEFAULT_HEARTBEAT_CONFIG: HeartbeatConfigType = {
enabled: false, enabled: true,
interval_minutes: 30, interval_minutes: 30,
quiet_hours_start: null, quiet_hours_start: null,
quiet_hours_end: null, quiet_hours_end: null,

View File

@@ -12,6 +12,7 @@ import { useHandStore } from '../store/handStore';
import { useWorkflowStore } from '../store/workflowStore'; import { useWorkflowStore } from '../store/workflowStore';
import { useChatStore } from '../store/chatStore'; import { useChatStore } from '../store/chatStore';
import type { GatewayClient } from '../lib/gateway-client'; import type { GatewayClient } from '../lib/gateway-client';
import { speechSynth } from '../lib/speech-synth';
// === Event Types === // === Event Types ===
@@ -161,6 +162,23 @@ export function useAutomationEvents(
handResult: eventData.hand_result, handResult: eventData.hand_result,
runId: eventData.run_id, runId: eventData.run_id,
}); });
// Trigger browser TTS for SpeechHand results
if (eventData.hand_name === 'speech' && eventData.hand_result && typeof eventData.hand_result === 'object') {
const res = eventData.hand_result as Record<string, unknown>;
if (res.tts_method === 'browser' && typeof res.text === 'string' && res.text) {
speechSynth.speak({
text: res.text,
voice: typeof res.voice === 'string' ? res.voice : undefined,
language: typeof res.language === 'string' ? res.language : undefined,
rate: typeof res.rate === 'number' ? res.rate : undefined,
pitch: typeof res.pitch === 'number' ? res.pitch : undefined,
volume: typeof res.volume === 'number' ? res.volume : undefined,
}).catch((err: unknown) => {
console.warn('[useAutomationEvents] Browser TTS failed:', err);
});
}
}
} }
// Handle error status // Handle error status

View File

@@ -920,6 +920,12 @@ export class SaaSClient {
return this.request('GET', '/api/v1/config/pull' + qs); return this.request('GET', '/api/v1/config/pull' + qs);
} }
// ==========================================================================
// Admin Panel API — Reserved for future admin UI (Next.js admin dashboard)
// These methods are not called by the desktop app but are kept as thin API
// wrappers for when the admin panel is built.
// ==========================================================================
// --- Provider Management (Admin) --- // --- Provider Management (Admin) ---
/** List all providers */ /** List all providers */

View File

@@ -0,0 +1,195 @@
/**
* Speech Synthesis Service — Browser TTS via Web Speech API
*
* Provides text-to-speech playback using the browser's native SpeechSynthesis API.
* Zero external dependencies, works offline, supports Chinese and English voices.
*
* Architecture:
* - SpeechHand (Rust) returns tts_method + text + voice config
* - This service handles Browser TTS playback in the webview
* - OpenAI/Azure TTS is handled via backend API calls
*/
/** Options accepted by {@link SpeechSynthService.speak}. */
export interface SpeechSynthOptions {
  /** Text to speak. */
  text: string;
  /** Voice name or voiceURI; undefined or 'default' auto-selects by language. */
  voice?: string;
  /** BCP 47 language tag (e.g. 'zh-CN'); defaults to 'zh-CN'. */
  language?: string;
  /** Speaking rate; defaults to 1.0. */
  rate?: number;
  /** Voice pitch; defaults to 1.0. */
  pitch?: number;
  /** Playback volume; defaults to 1.0. */
  volume?: number;
}
/** Snapshot of the TTS service state delivered to subscribers. */
export interface SpeechSynthState {
  /** True while an utterance is being spoken. */
  playing: boolean;
  /** True when playback has been paused via pause(). */
  paused: boolean;
  /** Text of the utterance currently in flight, or null when idle. */
  currentText: string | null;
  /** Voices known to the browser (may populate asynchronously). */
  voices: SpeechSynthesisVoice[];
}

/** Callback invoked with a fresh state snapshot on every state change. */
type SpeechEventCallback = (state: SpeechSynthState) => void;
class SpeechSynthService {
  // Browser SpeechSynthesis handle; stays null when the API is unavailable
  // (e.g. non-browser context or an old webview without Web Speech support).
  private synth: SpeechSynthesis | null = null;
  // Utterance currently being spoken, or null when idle.
  private currentUtterance: SpeechSynthesisUtterance | null = null;
  // Subscribers notified on every state change.
  private listeners: Set<SpeechEventCallback> = new Set();
  // Snapshot of getVoices(); refreshed when the browser fires voiceschanged.
  private cachedVoices: SpeechSynthesisVoice[] = [];

  constructor() {
    if (typeof window !== 'undefined' && window.speechSynthesis) {
      this.synth = window.speechSynthesis;
      this.loadVoices();
      // Voices may load asynchronously; refresh the cache when they arrive.
      this.synth.onvoiceschanged = () => this.loadVoices();
    }
  }

  /** Refresh the voice cache from the browser and notify subscribers. */
  private loadVoices() {
    if (!this.synth) return;
    this.cachedVoices = this.synth.getVoices();
    this.notify();
  }

  /** Push the current state snapshot to all subscribers. */
  private notify() {
    const state = this.getState();
    this.listeners.forEach(cb => cb(state));
  }

  /** Subscribe to state changes. Returns an unsubscribe function. */
  subscribe(callback: SpeechEventCallback): () => void {
    this.listeners.add(callback);
    return () => this.listeners.delete(callback);
  }

  /** Get a snapshot of the current playback state. */
  getState(): SpeechSynthState {
    return {
      playing: this.synth?.speaking ?? false,
      paused: this.synth?.paused ?? false,
      currentText: this.currentUtterance?.text ?? null,
      voices: this.cachedVoices,
    };
  }

  /** Check if browser TTS is available in this environment. */
  isAvailable(): boolean {
    return this.synth != null;
  }

  /** Get available voices, optionally filtered by language prefix (e.g. "zh"). */
  getVoices(language?: string): SpeechSynthesisVoice[] {
    if (!language) return this.cachedVoices;
    const langPrefix = language.split('-')[0].toLowerCase();
    return this.cachedVoices.filter(v =>
      v.lang.toLowerCase().startsWith(langPrefix)
    );
  }

  /**
   * Speak text with the given options.
   *
   * Cancels any ongoing speech first. Resolves when playback finishes or is
   * canceled/interrupted; rejects on a genuine synthesis error or when the
   * API is unavailable.
   */
  speak(options: SpeechSynthOptions): Promise<void> {
    return new Promise((resolve, reject) => {
      if (!this.synth) {
        reject(new Error('Speech synthesis not available'));
        return;
      }

      // Cancel any ongoing speech
      this.stop();

      const utterance = new SpeechSynthesisUtterance(options.text);
      this.currentUtterance = utterance;

      // Language defaults to Chinese (primary app locale).
      utterance.lang = options.language ?? 'zh-CN';

      // Explicit voice takes priority; otherwise auto-select by language.
      if (options.voice && options.voice !== 'default') {
        const voice = this.cachedVoices.find(v =>
          v.name === options.voice || v.voiceURI === options.voice
        );
        if (voice) utterance.voice = voice;
      } else {
        this.selectBestVoice(utterance, options.language ?? 'zh-CN');
      }

      utterance.rate = options.rate ?? 1.0;
      utterance.pitch = options.pitch ?? 1.0;
      utterance.volume = options.volume ?? 1.0;

      utterance.onstart = () => {
        this.notify();
      };

      utterance.onend = () => {
        this.currentUtterance = null;
        this.notify();
        resolve();
      };

      utterance.onerror = (event) => {
        this.currentUtterance = null;
        this.notify();
        // "canceled" and "interrupted" are not real errors: they fire when
        // stop() cancels playback or a newer utterance preempts this one
        // (browsers differ on which code they report after cancel()).
        if (event.error === 'canceled' || event.error === 'interrupted') {
          resolve();
        } else {
          reject(new Error(`Speech error: ${event.error}`));
        }
      };

      this.synth.speak(utterance);
    });
  }

  /** Pause current speech. */
  pause() {
    this.synth?.pause();
    this.notify();
  }

  /** Resume paused speech. */
  resume() {
    this.synth?.resume();
    this.notify();
  }

  /** Stop current speech and clear the active utterance. */
  stop() {
    this.synth?.cancel();
    this.currentUtterance = null;
    this.notify();
  }

  /**
   * Auto-select the best voice for a language.
   * Preference order: neural/enhanced/premium voices, then local (offline)
   * voices, then the first voice matching the language prefix.
   */
  private selectBestVoice(utterance: SpeechSynthesisUtterance, language: string) {
    const langPrefix = language.split('-')[0].toLowerCase();
    const candidates = this.cachedVoices.filter(v =>
      v.lang.toLowerCase().startsWith(langPrefix)
    );
    if (candidates.length === 0) return;

    const premium = candidates.find(v =>
      v.name.includes('Neural') || v.name.includes('Enhanced') || v.name.includes('Premium')
    );
    if (premium) {
      utterance.voice = premium;
      return;
    }

    const local = candidates.find(v => v.localService);
    if (local) {
      utterance.voice = local;
      return;
    }

    utterance.voice = candidates[0];
  }
}
// Singleton instance shared across the app; speak() cancels any utterance
// already in flight, so at most one plays at a time.
export const speechSynth = new SpeechSynthService();

View File

@@ -8,6 +8,7 @@ import { getSkillDiscovery } from '../lib/skill-discovery';
import { useOfflineStore, isOffline } from './offlineStore'; import { useOfflineStore, isOffline } from './offlineStore';
import { useConnectionStore } from './connectionStore'; import { useConnectionStore } from './connectionStore';
import { createLogger } from '../lib/logger'; import { createLogger } from '../lib/logger';
import { speechSynth } from '../lib/speech-synth';
import { generateRandomString } from '../lib/crypto-utils'; import { generateRandomString } from '../lib/crypto-utils';
const log = createLogger('ChatStore'); const log = createLogger('ChatStore');
@@ -461,6 +462,24 @@ export const useChatStore = create<ChatState>()(
handResult: result, handResult: result,
}; };
set((state) => ({ messages: [...state.messages, handMsg] })); set((state) => ({ messages: [...state.messages, handMsg] }));
// Trigger browser TTS when SpeechHand completes with browser method
if (name === 'speech' && status === 'completed' && result && typeof result === 'object') {
const res = result as Record<string, unknown>;
if (res.tts_method === 'browser' && typeof res.text === 'string' && res.text) {
speechSynth.speak({
text: res.text as string,
voice: (res.voice as string) || undefined,
language: (res.language as string) || undefined,
rate: typeof res.rate === 'number' ? res.rate : undefined,
pitch: typeof res.pitch === 'number' ? res.pitch : undefined,
volume: typeof res.volume === 'number' ? res.volume : undefined,
}).catch((err: unknown) => {
const logger = createLogger('speech-synth');
logger.warn('Browser TTS failed', { error: String(err) });
});
}
}
}, },
onComplete: (inputTokens?: number, outputTokens?: number) => { onComplete: (inputTokens?: number, outputTokens?: number) => {
const state = get(); const state = get();

View File

@@ -1,5 +1,5 @@
import { defineConfig } from "vite"; import { defineConfig } from "vite";
import react from "@vitejs/plugin-react"; import react from "@vitejs/plugin-react-oxc";
import tailwindcss from "@tailwindcss/vite"; import tailwindcss from "@tailwindcss/vite";
const host = process.env.TAURI_DEV_HOST; const host = process.env.TAURI_DEV_HOST;
@@ -36,6 +36,15 @@ export default defineConfig(async () => ({
changeOrigin: true, changeOrigin: true,
secure: false, secure: false,
ws: true, // Enable WebSocket proxy for streaming ws: true, // Enable WebSocket proxy for streaming
configure: (proxy) => {
// Suppress ECONNREFUSED errors during startup while Kernel is still compiling
proxy.on('error', (err) => {
if ('code' in err && (err as NodeJS.ErrnoException).code === 'ECONNREFUSED') {
return; // Silently ignore — Kernel not ready yet
}
console.error('[proxy error]', err);
});
},
}, },
}, },
}, },

View File

@@ -0,0 +1,75 @@
# ZCLAW Pipeline Template — quick article summary
# The user pastes article text or a URL; the pipeline produces a structured
# summary, key points and action items.
apiVersion: zclaw/v1
kind: Pipeline

metadata:
  name: article-summary-template
  displayName: 快速文章摘要
  category: productivity
  industry: general
  description: 输入文章内容或 URL自动生成结构化摘要、关键观点和行动项
  tags:
    - 摘要
    - 阅读
    - 效率
  icon: 📝
  author: ZCLAW
  version: 1.0.0
  annotations:
    # Marks this pipeline as a template: it is listed by the
    # `pipeline_templates` command rather than run directly.
    is_template: true

spec:
  inputs:
    - name: content
      type: text
      required: true
      label: 文章内容
      placeholder: 粘贴文章内容或输入 URL
      validation:
        min_length: 10

    - name: style
      type: select
      required: false
      label: 摘要风格
      default: concise
      options:
        - concise
        - detailed
        - bullet-points

    - name: language
      type: select
      required: false
      label: 输出语言
      default: chinese
      options:
        - chinese
        - english
        - japanese

  outputs:
    - name: summary
      type: text
      label: 文章摘要
    - name: key_points
      type: list
      label: 关键观点
    - name: action_items
      type: list
      label: 行动项

  steps:
    - name: extract-summary
      action: llm_generate
      params:
        prompt: |
          请用{{style}}风格,以{{language}}语言,总结以下文章内容。
          输出格式要求:
          1. 摘要 (3-5 句话)
          2. 关键观点 (5-8 条)
          3. 行动项 (如适用)
          文章内容:
          {{content}}
        model: default
      # NOTE(review): assuming `output` is a step-level field naming the step's
      # result variable — confirm against the pipeline schema.
      output: summary_result

View File

@@ -0,0 +1,65 @@
# ZCLAW Pipeline Template — competitor analysis report
# Given a competitor name and its industry, generates a structured report
# covering product comparison, SWOT analysis and market positioning.
apiVersion: zclaw/v1
kind: Pipeline

metadata:
  name: competitor-analysis-template
  displayName: 竞品分析报告
  category: marketing
  industry: general
  description: 输入竞品名称和行业领域自动生成包含产品对比、SWOT 分析和市场定位的分析报告
  tags:
    - 竞品分析
    - 市场
    - 战略
  icon: 📊
  author: ZCLAW
  version: 1.0.0
  annotations:
    # Marks this pipeline as a template: it is listed by the
    # `pipeline_templates` command rather than run directly.
    is_template: true

spec:
  inputs:
    - name: competitor_name
      type: string
      required: true
      label: 竞品名称
      placeholder: 例如Notion

    - name: industry
      type: string
      required: true
      label: 行业领域
      placeholder: 例如SaaS 协作工具

    - name: focus_areas
      type: multi-select
      required: false
      label: 分析维度
      default:
        - features
        - pricing
        - target_audience
      options:
        - features
        - pricing
        - target_audience
        - technology
        - marketing_strategy

  # NOTE(review): unlike article-summary-template, no `outputs:` section is
  # declared here — confirm the pipeline schema treats outputs as optional.

  steps:
    - name: analyze-competitor
      action: llm_generate
      params:
        prompt: |
          请对 {{competitor_name}}{{industry}}行业)进行竞品分析。
          重点分析以下维度:{{focus_areas}}
          输出格式:
          1. 产品概述
          2. 核心功能对比
          3. 定价策略分析
          4. 目标用户画像
          5. SWOT 分析
          6. 市场定位建议
        model: default
      # NOTE(review): assuming `output` is a step-level field naming the step's
      # result variable — confirm against the pipeline schema.
      output: analysis_result

View File

@@ -13,7 +13,7 @@ cors_origins = ["http://localhost:1420", "http://localhost:5173", "http://localh
[database] [database]
# 开发环境默认值; 生产环境务必设置 ZCLAW_DATABASE_URL 环境变量 # 开发环境默认值; 生产环境务必设置 ZCLAW_DATABASE_URL 环境变量
url = "postgres://postgres:postgres@localhost:5432/zclaw" url = "postgres://postgres:123123@localhost:5432/zclaw"
[auth] [auth]
jwt_expiration_hours = 24 jwt_expiration_hours = 24

View File

@@ -84,12 +84,15 @@ if ($Stop) {
} }
# Stop Admin dev server (kill process tree to ensure node.exe children die) # Stop Admin dev server (kill process tree to ensure node.exe children die)
$port3000 = netstat -ano | Select-String ":3000.*LISTENING" # Next.js turbopack may use ports 3000-3002
if ($port3000) { foreach ($adminPort in @(3000, 3001, 3002)) {
$pid3000 = ($port3000 -split '\s+')[-1] $portMatch = netstat -ano | Select-String ":${adminPort}.*LISTENING"
if ($pid3000 -match '^\d+$') { if ($portMatch) {
& taskkill /T /F /PID $pid3000 2>$null $adminPid = ($portMatch -split '\s+')[-1]
ok "Stopped Admin dev server on port 3000 (PID: $pid3000)" if ($adminPid -match '^\d+$') {
& taskkill /T /F /PID $adminPid 2>$null
ok "Stopped Admin process on port $adminPort (PID: $adminPid)"
}
} }
} }
@@ -120,15 +123,19 @@ Write-Host ""
# Track processes for cleanup # Track processes for cleanup
$Jobs = @() $Jobs = @()
$CleanupCalled = $false
function Cleanup { function Cleanup {
info "Cleaning up..." if ($CleanupCalled) { return }
$CleanupCalled = $true
info "Cleaning up child services..."
# Kill tracked process trees (parent + all children) # Kill tracked process trees (parent + all children)
foreach ($job in $Jobs) { foreach ($job in $Jobs) {
if ($job -and !$job.HasExited) { if ($job -and !$job.HasExited) {
info "Stopping $($job.ProcessName) (PID: $($job.Id)) and child processes" info "Stopping $($job.ProcessName) (PID: $($job.Id)) and child processes"
try { try {
# taskkill /T kills the entire process tree, not just the parent
& taskkill /T /F /PID $job.Id 2>$null & taskkill /T /F /PID $job.Id 2>$null
if (!$job.HasExited) { $job.Kill() } if (!$job.HasExited) { $job.Kill() }
} catch { } catch {
@@ -136,21 +143,34 @@ function Cleanup {
} }
} }
} }
# Fallback: kill processes by known ports
foreach ($port in @(8080, 3000)) { # Fallback: kill ALL processes on service ports (3000-3002 = Next.js + turbopack)
foreach ($port in @(8080, 3000, 3001, 3002)) {
$listening = netstat -ano | Select-String ":${port}.*LISTENING" $listening = netstat -ano | Select-String ":${port}.*LISTENING"
if ($listening) { if ($listening) {
$pid = ($listening -split '\s+')[-1] $pid = ($listening -split '\s+')[-1]
if ($pid -match '^\d+$') { if ($pid -match '^\d+$') {
info "Killing orphan process on port $port (PID: $pid)" info "Killing process on port $port (PID: $pid)"
& taskkill /T /F /PID $pid 2>$null & taskkill /T /F /PID $pid 2>$null
} }
} }
} }
ok "Cleanup complete"
}
# Ctrl+C handler: ensures Cleanup runs even on interrupt
try {
$null = [Console]::CancelKeyPress.Add_Invocation({
param($sender, $e)
$e.Cancel = $true # Prevent immediate termination
Cleanup
})
} catch {
# Not running in an interactive console (e.g. launched via pnpm) - rely on try/finally instead
} }
trap { Cleanup; break } trap { Cleanup; break }
Register-EngineEvent -SourceIdentifier PowerShell.Exiting -Action { Cleanup } | Out-Null
# Skip SaaS and ChromeDriver if DesktopOnly # Skip SaaS and ChromeDriver if DesktopOnly
if ($DesktopOnly) { if ($DesktopOnly) {
@@ -158,7 +178,7 @@ if ($DesktopOnly) {
$NoSaas = $true $NoSaas = $true
} }
# 1. PostgreSQL (Windows native) required for SaaS backend # 1. PostgreSQL (Windows native) - required for SaaS backend
if (-not $NoSaas) { if (-not $NoSaas) {
info "Checking PostgreSQL..." info "Checking PostgreSQL..."
@@ -247,15 +267,9 @@ if (-not $NoSaas) {
} else { } else {
if (Test-Path "$ScriptDir\admin\package.json") { if (Test-Path "$ScriptDir\admin\package.json") {
info "Starting Admin dashboard on port 3000..." info "Starting Admin dashboard on port 3000..."
Set-Location "$ScriptDir\admin"
if ($Dev) {
$proc = Start-Process -FilePath "cmd.exe" -ArgumentList "/c cd /d `"$ScriptDir\admin`" && pnpm dev" -PassThru -WindowStyle Minimized $proc = Start-Process -FilePath "cmd.exe" -ArgumentList "/c cd /d `"$ScriptDir\admin`" && pnpm dev" -PassThru -WindowStyle Minimized
} else {
$proc = Start-Process -FilePath "cmd.exe" -ArgumentList "/c cd /d `"$ScriptDir\admin`" && pnpm dev" -PassThru -WindowStyle Minimized
}
$Jobs += $proc $Jobs += $proc
Set-Location $ScriptDir
Start-Sleep -Seconds 5 Start-Sleep -Seconds 5
$port3000Check = netstat -ano | Select-String ":3000.*LISTENING" $port3000Check = netstat -ano | Select-String ":3000.*LISTENING"
@@ -275,7 +289,6 @@ if (-not $NoSaas) {
Write-Host "" Write-Host ""
# 4. ChromeDriver (optional - for Browser Hand automation) # 4. ChromeDriver (optional - for Browser Hand automation)
if (-not $NoBrowser) { if (-not $NoBrowser) {
info "Checking ChromeDriver..." info "Checking ChromeDriver..."
@@ -318,14 +331,19 @@ if ($port1420) {
$pid1420 = ($port1420 -split '\s+')[-1] $pid1420 = ($port1420 -split '\s+')[-1]
if ($pid1420 -match '^\d+$') { if ($pid1420 -match '^\d+$') {
warn "Port 1420 is in use by PID $pid1420. Killing..." warn "Port 1420 is in use by PID $pid1420. Killing..."
Stop-Process -Id $pid1420 -Force -ErrorAction SilentlyContinue & taskkill /T /F /PID $pid1420 2>$null
Start-Sleep -Seconds 1 Start-Sleep -Seconds 1
} }
} }
if ($Dev) { if ($Dev) {
info "Development mode enabled" info "Development mode enabled"
info "Press Ctrl+C to stop all services..."
try {
pnpm tauri dev pnpm tauri dev
} finally {
Cleanup
}
} else { } else {
$exe = "src-tauri\target\release\ZClaw.exe" $exe = "src-tauri\target\release\ZClaw.exe"
if (Test-Path $exe) { if (Test-Path $exe) {
@@ -337,10 +355,3 @@ if ($Dev) {
pnpm tauri dev pnpm tauri dev
} }
} }
if ($Dev) {
Write-Host ""
info "Press Ctrl+C to stop all services..."
try { while ($true) { Start-Sleep -Seconds 1 } }
finally { Cleanup }
}