fix(安全): 修复HTML导出中的XSS漏洞并清理调试日志
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

refactor(日志): 替换console.log为tracing日志系统
style(代码): 移除未使用的代码和依赖项

feat(测试): 添加端到端测试文档和CI工作流
docs(变更日志): 更新CHANGELOG.md记录0.1.0版本变更

perf(构建): 更新依赖版本并优化CI流程
This commit is contained in:
iven
2026-03-26 19:49:03 +08:00
parent b8d565a9eb
commit 978dc5cdd8
79 changed files with 3953 additions and 5724 deletions

View File

@@ -1,57 +0,0 @@
//! Discord channel adapter
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::mpsc;
use zclaw_types::Result;
use crate::{Channel, ChannelConfig, ChannelStatus, IncomingMessage, OutgoingMessage};
/// Discord channel adapter
///
/// Holds the channel configuration and a shared, lock-protected
/// connection status. No real Discord I/O is implemented yet.
pub struct DiscordChannel {
    config: ChannelConfig,
    status: Arc<tokio::sync::RwLock<ChannelStatus>>,
}

impl DiscordChannel {
    /// Build a Discord adapter from `config`; the adapter starts out
    /// in the `Disconnected` state.
    pub fn new(config: ChannelConfig) -> Self {
        let status = Arc::new(tokio::sync::RwLock::new(ChannelStatus::Disconnected));
        Self { config, status }
    }
}
#[async_trait]
impl Channel for DiscordChannel {
    /// Borrow the channel configuration this adapter was built with.
    fn config(&self) -> &ChannelConfig {
        &self.config
    }

    /// Flip the shared status to `Connected` (no real handshake yet).
    async fn connect(&self) -> Result<()> {
        *self.status.write().await = ChannelStatus::Connected;
        Ok(())
    }

    /// Flip the shared status back to `Disconnected`.
    async fn disconnect(&self) -> Result<()> {
        *self.status.write().await = ChannelStatus::Disconnected;
        Ok(())
    }

    /// Snapshot the current connection status.
    async fn status(&self) -> ChannelStatus {
        self.status.read().await.clone()
    }

    /// Stubbed send: returns a placeholder message id.
    async fn send(&self, _message: OutgoingMessage) -> Result<String> {
        // TODO: Implement Discord API send
        Ok("discord_msg_id".to_string())
    }

    /// Stubbed receive: hands back an empty channel; the sender is
    /// dropped immediately, so the receiver yields nothing.
    async fn receive(&self) -> Result<mpsc::Receiver<IncomingMessage>> {
        // TODO: Implement Discord gateway
        let (_tx, rx) = mpsc::channel(100);
        Ok(rx)
    }
}

View File

@@ -1,11 +1,5 @@
//! Channel adapters
mod telegram;
mod discord;
mod slack;
mod console;
pub use telegram::TelegramChannel;
pub use discord::DiscordChannel;
pub use slack::SlackChannel;
pub use console::ConsoleChannel;

View File

@@ -1,57 +0,0 @@
//! Slack channel adapter
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::mpsc;
use zclaw_types::Result;
use crate::{Channel, ChannelConfig, ChannelStatus, IncomingMessage, OutgoingMessage};
/// Slack channel adapter
///
/// Holds the channel configuration and a shared, lock-protected
/// connection status. No real Slack I/O is implemented yet.
pub struct SlackChannel {
    config: ChannelConfig,
    status: Arc<tokio::sync::RwLock<ChannelStatus>>,
}

impl SlackChannel {
    /// Build a Slack adapter from `config`; the adapter starts out
    /// in the `Disconnected` state.
    pub fn new(config: ChannelConfig) -> Self {
        let status = Arc::new(tokio::sync::RwLock::new(ChannelStatus::Disconnected));
        Self { config, status }
    }
}
#[async_trait]
impl Channel for SlackChannel {
    /// Borrow the channel configuration this adapter was built with.
    fn config(&self) -> &ChannelConfig {
        &self.config
    }

    /// Flip the shared status to `Connected` (no real handshake yet).
    async fn connect(&self) -> Result<()> {
        *self.status.write().await = ChannelStatus::Connected;
        Ok(())
    }

    /// Flip the shared status back to `Disconnected`.
    async fn disconnect(&self) -> Result<()> {
        *self.status.write().await = ChannelStatus::Disconnected;
        Ok(())
    }

    /// Snapshot the current connection status.
    async fn status(&self) -> ChannelStatus {
        self.status.read().await.clone()
    }

    /// Stubbed send: returns a placeholder message timestamp.
    async fn send(&self, _message: OutgoingMessage) -> Result<String> {
        // TODO: Implement Slack API send
        Ok("slack_msg_ts".to_string())
    }

    /// Stubbed receive: hands back an empty channel; the sender is
    /// dropped immediately, so the receiver yields nothing.
    async fn receive(&self) -> Result<mpsc::Receiver<IncomingMessage>> {
        // TODO: Implement Slack RTM/events API
        let (_tx, rx) = mpsc::channel(100);
        Ok(rx)
    }
}

View File

@@ -1,60 +0,0 @@
//! Telegram channel adapter
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::mpsc;
use zclaw_types::Result;
use crate::{Channel, ChannelConfig, ChannelStatus, IncomingMessage, OutgoingMessage};
/// Telegram channel adapter
///
/// Holds the channel configuration, an (as yet unused) optional HTTP
/// client, and a shared, lock-protected connection status.
pub struct TelegramChannel {
    config: ChannelConfig,
    // Reserved for the future Telegram HTTP client; always None today.
    #[allow(dead_code)] // TODO: Implement Telegram API client
    client: Option<reqwest::Client>,
    status: Arc<tokio::sync::RwLock<ChannelStatus>>,
}

impl TelegramChannel {
    /// Build a Telegram adapter from `config`; starts `Disconnected`
    /// and with no HTTP client allocated.
    pub fn new(config: ChannelConfig) -> Self {
        let status = Arc::new(tokio::sync::RwLock::new(ChannelStatus::Disconnected));
        Self { config, client: None, status }
    }
}
#[async_trait]
impl Channel for TelegramChannel {
    /// Borrow the channel configuration this adapter was built with.
    fn config(&self) -> &ChannelConfig {
        &self.config
    }

    /// Flip the shared status to `Connected` (no real handshake yet).
    async fn connect(&self) -> Result<()> {
        *self.status.write().await = ChannelStatus::Connected;
        Ok(())
    }

    /// Flip the shared status back to `Disconnected`.
    async fn disconnect(&self) -> Result<()> {
        *self.status.write().await = ChannelStatus::Disconnected;
        Ok(())
    }

    /// Snapshot the current connection status.
    async fn status(&self) -> ChannelStatus {
        self.status.read().await.clone()
    }

    /// Stubbed send: returns a placeholder message id.
    async fn send(&self, _message: OutgoingMessage) -> Result<String> {
        // TODO: Implement Telegram API send
        Ok("telegram_msg_id".to_string())
    }

    /// Stubbed receive: hands back an empty channel; the sender is
    /// dropped immediately, so the receiver yields nothing.
    async fn receive(&self) -> Result<mpsc::Receiver<IncomingMessage>> {
        // TODO: Implement Telegram webhook/polling
        let (_tx, rx) = mpsc::channel(100);
        Ok(rx)
    }
}

View File

@@ -3,11 +3,35 @@
//! Provides TF-IDF based semantic similarity computation for memory retrieval.
//! This is a lightweight, dependency-free implementation suitable for
//! medium-scale memory systems.
//!
//! Supports optional embedding API integration for improved semantic search.
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use crate::types::MemoryEntry;
/// Semantic similarity scorer using TF-IDF
/// Embedding client trait for API integration
///
/// Implementations wrap an external embedding backend: `embed` turns a
/// piece of text into a dense vector, reporting failures as a `String`.
/// `is_available` lets callers decide whether to attempt embedding at all.
#[async_trait::async_trait]
pub trait EmbeddingClient: Send + Sync {
/// Compute an embedding vector for `text`; errors are surfaced as `String`.
async fn embed(&self, text: &str) -> Result<Vec<f32>, String>;
/// Whether this client can actually serve embedding requests.
fn is_available(&self) -> bool;
}
/// No-op embedding client (uses TF-IDF only)
pub struct NoOpEmbeddingClient;
#[async_trait::async_trait]
impl EmbeddingClient for NoOpEmbeddingClient {
// Always fails — embeddings are not configured for this client.
async fn embed(&self, _text: &str) -> Result<Vec<f32>, String> {
Err("Embedding not configured".to_string())
}
// Always false, so scorers fall back to pure TF-IDF similarity.
fn is_available(&self) -> bool {
false
}
}
/// Semantic similarity scorer using TF-IDF with optional embedding support
pub struct SemanticScorer {
/// Document frequency for IDF computation
document_frequencies: HashMap<String, usize>,
@@ -15,8 +39,14 @@ pub struct SemanticScorer {
total_documents: usize,
/// Precomputed TF-IDF vectors for entries
entry_vectors: HashMap<String, HashMap<String, f32>>,
/// Precomputed embedding vectors for entries
entry_embeddings: HashMap<String, Vec<f32>>,
/// Stop words to ignore
stop_words: HashSet<String>,
/// Optional embedding client
embedding_client: Arc<dyn EmbeddingClient>,
/// Whether to use embedding for similarity
use_embedding: bool,
}
impl SemanticScorer {
@@ -26,10 +56,41 @@ impl SemanticScorer {
document_frequencies: HashMap::new(),
total_documents: 0,
entry_vectors: HashMap::new(),
entry_embeddings: HashMap::new(),
stop_words: Self::default_stop_words(),
embedding_client: Arc::new(NoOpEmbeddingClient),
use_embedding: false,
}
}
/// Create a new semantic scorer with embedding client
///
/// NOTE(review): `use_embedding` is set to `true` unconditionally here,
/// without checking `client.is_available()` (unlike `set_use_embedding`,
/// which gates on availability) — confirm this asymmetry is intended.
pub fn with_embedding(client: Arc<dyn EmbeddingClient>) -> Self {
Self {
document_frequencies: HashMap::new(),
total_documents: 0,
entry_vectors: HashMap::new(),
entry_embeddings: HashMap::new(),
stop_words: Self::default_stop_words(),
embedding_client: client,
use_embedding: true,
}
}
/// Set whether to use embedding for similarity
///
/// The flag is forced off when the configured client reports unavailable.
pub fn set_use_embedding(&mut self, use_embedding: bool) {
self.use_embedding = use_embedding && self.embedding_client.is_available();
}
/// Check if embedding is available
pub fn is_embedding_available(&self) -> bool {
self.embedding_client.is_available()
}
/// Get the embedding client
///
/// Returns a cheap `Arc` clone of the shared client handle.
pub fn get_embedding_client(&self) -> Arc<dyn EmbeddingClient> {
self.embedding_client.clone()
}
/// Get default stop words
fn default_stop_words() -> HashSet<String> {
[
@@ -132,9 +193,34 @@ impl SemanticScorer {
self.entry_vectors.insert(entry.uri.clone(), tfidf);
}
/// Index an entry with embedding (async)
///
/// Always performs TF-IDF indexing; additionally caches an embedding
/// vector for the entry when embedding use is enabled and the client
/// reports itself available. Embedding failures are logged and ignored,
/// leaving the entry indexed by TF-IDF only.
pub async fn index_entry_with_embedding(&mut self, entry: &MemoryEntry) {
    // TF-IDF indexing happens unconditionally.
    self.index_entry(entry);

    // Embedding is strictly additive — skip it when disabled/unavailable.
    if !(self.use_embedding && self.embedding_client.is_available()) {
        return;
    }

    // Embed content plus keywords (when present) for better recall.
    let text_to_embed = if entry.keywords.is_empty() {
        entry.content.clone()
    } else {
        format!("{} {}", entry.content, entry.keywords.join(" "))
    };

    match self.embedding_client.embed(&text_to_embed).await {
        Ok(vector) => {
            self.entry_embeddings.insert(entry.uri.clone(), vector);
        }
        Err(e) => {
            tracing::warn!("[SemanticScorer] Failed to compute embedding for {}: {}", entry.uri, e);
        }
    }
}
/// Remove an entry from the index
///
/// Drops both the TF-IDF vector and any cached embedding for `uri`.
/// Document frequencies and the total document count are NOT adjusted
/// here, so IDF statistics still reflect the removed entry until a
/// full clear/reindex.
pub fn remove_entry(&mut self, uri: &str) {
self.entry_vectors.remove(uri);
self.entry_embeddings.remove(uri);
}
/// Compute cosine similarity between two vectors
@@ -167,6 +253,57 @@ impl SemanticScorer {
}
}
/// Compute cosine similarity between two embedding vectors
///
/// Returns 0.0 when either vector is empty, when lengths differ, or
/// when either vector is all-zero; otherwise the cosine is clamped to
/// [0.0, 1.0] so it composes cleanly with TF-IDF scores.
fn cosine_similarity_embedding(v1: &[f32], v2: &[f32]) -> f32 {
    // Reject degenerate inputs up front.
    if v1.is_empty() || v2.is_empty() || v1.len() != v2.len() {
        return 0.0;
    }

    // Accumulate dot product and squared norms in a single pass.
    let (mut dot, mut sq1, mut sq2) = (0.0f32, 0.0f32, 0.0f32);
    for (a, b) in v1.iter().zip(v2.iter()) {
        dot += a * b;
        sq1 += a * a;
        sq2 += b * b;
    }

    let denom = (sq1 * sq2).sqrt();
    if denom == 0.0 {
        0.0
    } else {
        (dot / denom).clamp(0.0, 1.0)
    }
}
/// Score similarity between query and entry using embedding (async)
///
/// Hybrid scoring: when embedding use is enabled, the client is available,
/// AND this entry has a precomputed embedding, the query is embedded and
/// the result is a weighted mix of embedding cosine similarity (70%) and
/// the TF-IDF score (30%). On any miss or failure, falls back to pure
/// TF-IDF via `score_similarity`.
///
/// NOTE(review): the query is re-embedded on every call, i.e. once per
/// entry scored — consider caching the query embedding at the call site
/// if many entries are ranked per query. TODO confirm call pattern.
pub async fn score_similarity_with_embedding(&self, query: &str, entry: &MemoryEntry) -> f32 {
// If we have precomputed embedding for this entry and embedding is enabled
if self.use_embedding && self.embedding_client.is_available() {
if let Some(entry_embedding) = self.entry_embeddings.get(&entry.uri) {
// Compute query embedding
match self.embedding_client.embed(query).await {
Ok(query_embedding) => {
let embedding_score = Self::cosine_similarity_embedding(&query_embedding, entry_embedding);
// Also compute TF-IDF score for hybrid approach
let tfidf_score = self.score_similarity(query, entry);
// Weighted combination: 70% embedding, 30% TF-IDF
return embedding_score * 0.7 + tfidf_score * 0.3;
}
Err(e) => {
// Embed failure is non-fatal: log at debug and use TF-IDF below.
tracing::debug!("[SemanticScorer] Failed to embed query: {}", e);
}
}
}
}
// Fall back to TF-IDF
self.score_similarity(query, entry)
}
/// Score similarity between query and entry
pub fn score_similarity(&self, query: &str, entry: &MemoryEntry) -> f32 {
// Tokenize query
@@ -246,6 +383,7 @@ impl SemanticScorer {
self.document_frequencies.clear();
self.total_documents = 0;
self.entry_vectors.clear();
self.entry_embeddings.clear();
}
/// Get statistics about the index
@@ -254,6 +392,8 @@ impl SemanticScorer {
total_documents: self.total_documents,
unique_terms: self.document_frequencies.len(),
indexed_entries: self.entry_vectors.len(),
embedding_entries: self.entry_embeddings.len(),
use_embedding: self.use_embedding && self.embedding_client.is_available(),
}
}
}
@@ -270,6 +410,8 @@ pub struct IndexStats {
pub total_documents: usize,
pub unique_terms: usize,
pub indexed_entries: usize,
pub embedding_entries: usize,
pub use_embedding: bool,
}
#[cfg(test)]

View File

@@ -168,7 +168,7 @@ fn default_skills_dir() -> Option<std::path::PathBuf> {
// 1. Check environment variable override
if let Ok(dir) = std::env::var("ZCLAW_SKILLS_DIR") {
let path = std::path::PathBuf::from(&dir);
eprintln!("[default_skills_dir] ZCLAW_SKILLS_DIR env: {} (exists: {})", path.display(), path.exists());
tracing::debug!(target: "kernel_config", "ZCLAW_SKILLS_DIR env: {} (exists: {})", path.display(), path.exists());
if path.exists() {
return Some(path);
}
@@ -180,12 +180,12 @@ fn default_skills_dir() -> Option<std::path::PathBuf> {
// CARGO_MANIFEST_DIR is the crate directory (crates/zclaw-kernel)
// We need to go up to find the workspace root
let manifest_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
eprintln!("[default_skills_dir] CARGO_MANIFEST_DIR: {}", manifest_dir.display());
tracing::debug!(target: "kernel_config", "CARGO_MANIFEST_DIR: {}", manifest_dir.display());
// Go up from crates/zclaw-kernel to workspace root
if let Some(workspace_root) = manifest_dir.parent().and_then(|p| p.parent()) {
let workspace_skills = workspace_root.join("skills");
eprintln!("[default_skills_dir] Workspace skills: {} (exists: {})", workspace_skills.display(), workspace_skills.exists());
tracing::debug!(target: "kernel_config", "Workspace skills: {} (exists: {})", workspace_skills.display(), workspace_skills.exists());
if workspace_skills.exists() {
return Some(workspace_skills);
}
@@ -194,7 +194,7 @@ fn default_skills_dir() -> Option<std::path::PathBuf> {
// 3. Try current working directory first (for development)
if let Ok(cwd) = std::env::current_dir() {
let cwd_skills = cwd.join("skills");
eprintln!("[default_skills_dir] Checking cwd: {} (exists: {})", cwd_skills.display(), cwd_skills.exists());
tracing::debug!(target: "kernel_config", "Checking cwd: {} (exists: {})", cwd_skills.display(), cwd_skills.exists());
if cwd_skills.exists() {
return Some(cwd_skills);
}
@@ -204,7 +204,7 @@ fn default_skills_dir() -> Option<std::path::PathBuf> {
for i in 0..6 {
if let Some(parent) = current.parent() {
let parent_skills = parent.join("skills");
eprintln!("[default_skills_dir] CWD Level {}: {} (exists: {})", i, parent_skills.display(), parent_skills.exists());
tracing::debug!(target: "kernel_config", "CWD Level {}: {} (exists: {})", i, parent_skills.display(), parent_skills.exists());
if parent_skills.exists() {
return Some(parent_skills);
}
@@ -217,11 +217,11 @@ fn default_skills_dir() -> Option<std::path::PathBuf> {
// 4. Try executable's directory and multiple levels up
if let Ok(exe) = std::env::current_exe() {
eprintln!("[default_skills_dir] Current exe: {}", exe.display());
tracing::debug!(target: "kernel_config", "Current exe: {}", exe.display());
if let Some(exe_dir) = exe.parent().map(|p| p.to_path_buf()) {
// Same directory as exe
let exe_skills = exe_dir.join("skills");
eprintln!("[default_skills_dir] Checking exe dir: {} (exists: {})", exe_skills.display(), exe_skills.exists());
tracing::debug!(target: "kernel_config", "Checking exe dir: {} (exists: {})", exe_skills.display(), exe_skills.exists());
if exe_skills.exists() {
return Some(exe_skills);
}
@@ -231,7 +231,7 @@ fn default_skills_dir() -> Option<std::path::PathBuf> {
for i in 0..6 {
if let Some(parent) = current.parent() {
let parent_skills = parent.join("skills");
eprintln!("[default_skills_dir] EXE Level {}: {} (exists: {})", i, parent_skills.display(), parent_skills.exists());
tracing::debug!(target: "kernel_config", "EXE Level {}: {} (exists: {})", i, parent_skills.display(), parent_skills.exists());
if parent_skills.exists() {
return Some(parent_skills);
}
@@ -247,7 +247,7 @@ fn default_skills_dir() -> Option<std::path::PathBuf> {
let fallback = std::env::current_dir()
.ok()
.map(|cwd| cwd.join("skills"));
eprintln!("[default_skills_dir] Fallback to: {:?}", fallback);
tracing::debug!(target: "kernel_config", "Fallback to: {:?}", fallback);
fallback
}

View File

@@ -129,9 +129,9 @@ impl Kernel {
}
/// Build a system prompt with skill information injected
fn build_system_prompt_with_skills(&self, base_prompt: Option<&String>) -> String {
// Get skill list synchronously (we're in sync context)
let skills = futures::executor::block_on(self.skills.list());
async fn build_system_prompt_with_skills(&self, base_prompt: Option<&String>) -> String {
// Get skill list asynchronously
let skills = self.skills.list().await;
let mut prompt = base_prompt
.map(|p| p.clone())
@@ -309,7 +309,7 @@ impl Kernel {
.with_temperature(agent_config.temperature.unwrap_or_else(|| self.config.temperature()));
// Build system prompt with skill information injected
let system_prompt = self.build_system_prompt_with_skills(agent_config.system_prompt.as_ref());
let system_prompt = self.build_system_prompt_with_skills(agent_config.system_prompt.as_ref()).await;
let loop_runner = loop_runner.with_system_prompt(&system_prompt);
// Run the loop
@@ -352,7 +352,7 @@ impl Kernel {
.with_temperature(agent_config.temperature.unwrap_or_else(|| self.config.temperature()));
// Build system prompt with skill information injected
let system_prompt = self.build_system_prompt_with_skills(agent_config.system_prompt.as_ref());
let system_prompt = self.build_system_prompt_with_skills(agent_config.system_prompt.as_ref()).await;
let loop_runner = loop_runner.with_system_prompt(&system_prompt);
// Run with streaming

View File

@@ -26,7 +26,10 @@ impl MemoryStore {
// Parse SQLite URL to extract file path
// Format: sqlite:/path/to/db or sqlite://path/to/db
if database_url.starts_with("sqlite:") {
let path_part = database_url.strip_prefix("sqlite:").unwrap();
let path_part = database_url.strip_prefix("sqlite:")
.ok_or_else(|| ZclawError::StorageError(
format!("Invalid database URL format: {}", database_url)
))?;
// Skip in-memory databases
if path_part == ":memory:" {
@@ -34,7 +37,10 @@ impl MemoryStore {
}
// Remove query parameters (e.g., ?mode=rwc)
let path_without_query = path_part.split('?').next().unwrap();
let path_without_query = path_part.split('?').next()
.ok_or_else(|| ZclawError::StorageError(
format!("Invalid database URL path: {}", path_part)
))?;
// Handle both absolute and relative paths
let path = std::path::Path::new(path_without_query);

View File

@@ -103,6 +103,22 @@ fn render_markdown(data: &Value) -> String {
md
}
/// Escape HTML special characters to prevent XSS
///
/// Replaces `& < > " '` with their HTML entities; all other characters
/// pass through unchanged. The output buffer is pre-sized to at least
/// the input length.
fn escape_html(s: &str) -> String {
    s.chars().fold(String::with_capacity(s.len()), |mut out, ch| {
        // Map each sensitive character to its entity; None means "keep as-is".
        let entity = match ch {
            '&' => Some("&amp;"),
            '<' => Some("&lt;"),
            '>' => Some("&gt;"),
            '"' => Some("&quot;"),
            '\'' => Some("&#39;"),
            _ => None,
        };
        match entity {
            Some(e) => out.push_str(e),
            None => out.push(ch),
        }
        out
    })
}
/// Render data to HTML
fn render_html(data: &Value) -> String {
let mut html = String::from(r#"<!DOCTYPE html>
@@ -123,11 +139,11 @@ fn render_html(data: &Value) -> String {
"#);
if let Some(title) = data.get("title").and_then(|v| v.as_str()) {
html.push_str(&format!("<h1>{}</h1>", title));
html.push_str(&format!("<h1>{}</h1>", escape_html(title)));
}
if let Some(description) = data.get("description").and_then(|v| v.as_str()) {
html.push_str(&format!("<p>{}</p>", description));
html.push_str(&format!("<p>{}</p>", escape_html(description)));
}
if let Some(outline) = data.get("outline") {
@@ -135,7 +151,7 @@ fn render_html(data: &Value) -> String {
if let Some(items) = outline.get("items").and_then(|v| v.as_array()) {
for item in items {
if let Some(text) = item.get("title").and_then(|v| v.as_str()) {
html.push_str(&format!("<li>{}</li>", text));
html.push_str(&format!("<li>{}</li>", escape_html(text)));
}
}
}
@@ -147,10 +163,10 @@ fn render_html(data: &Value) -> String {
for scene in scenes {
html.push_str("<div class=\"scene\">");
if let Some(title) = scene.get("title").and_then(|v| v.as_str()) {
html.push_str(&format!("<h3>{}</h3>", title));
html.push_str(&format!("<h3>{}</h3>", escape_html(title)));
}
if let Some(content) = scene.get("content").and_then(|v| v.as_str()) {
html.push_str(&format!("<p>{}</p>", content));
html.push_str(&format!("<p>{}</p>", escape_html(content)));
}
html.push_str("</div>");
}

View File

@@ -134,10 +134,10 @@ impl ActionRegistry {
max_tokens: Option<u32>,
json_mode: bool,
) -> Result<Value, ActionError> {
println!("[DEBUG execute_llm] Called with template length: {}", template.len());
println!("[DEBUG execute_llm] Input HashMap contents:");
tracing::debug!(target: "pipeline_actions", "execute_llm: Called with template length: {}", template.len());
tracing::debug!(target: "pipeline_actions", "execute_llm: Input HashMap contents:");
for (k, v) in &input {
println!(" {} => {:?}", k, v);
tracing::debug!(target: "pipeline_actions", " {} => {:?}", k, v);
}
if let Some(driver) = &self.llm_driver {
@@ -148,7 +148,7 @@ impl ActionRegistry {
template.to_string()
};
println!("[DEBUG execute_llm] Calling driver.generate with prompt length: {}", prompt.len());
tracing::debug!(target: "pipeline_actions", "execute_llm: Calling driver.generate with prompt length: {}", prompt.len());
driver.generate(prompt, input, model, temperature, max_tokens, json_mode)
.await
@@ -346,14 +346,14 @@ impl ActionRegistry {
let mut html = String::from("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>Export</title></head><body>");
if let Some(title) = data.get("title").and_then(|v| v.as_str()) {
html.push_str(&format!("<h1>{}</h1>", title));
html.push_str(&format!("<h1>{}</h1>", escape_html(title)));
}
if let Some(items) = data.get("items").and_then(|v| v.as_array()) {
html.push_str("<ul>");
for item in items {
if let Some(text) = item.as_str() {
html.push_str(&format!("<li>{}</li>", text));
html.push_str(&format!("<li>{}</li>", escape_html(text)));
}
}
html.push_str("</ul>");
@@ -364,6 +364,22 @@ impl ActionRegistry {
}
}
/// Escape HTML special characters to prevent XSS
///
/// Substitutes the five HTML-sensitive characters (`& < > " '`) with
/// their entity forms; every other character is appended verbatim.
fn escape_html(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for ch in s.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            '"' => escaped.push_str("&quot;"),
            '\'' => escaped.push_str("&#39;"),
            // Anything else is safe to emit unmodified.
            other => escaped.push(other),
        }
    }
    escaped
}
impl ExportFormat {
fn extension(&self) -> &'static str {
match self {

View File

@@ -185,22 +185,22 @@ impl PipelineExecutor {
async move {
match action {
Action::LlmGenerate { template, input, model, temperature, max_tokens, json_mode } => {
println!("[DEBUG executor] LlmGenerate action called");
println!("[DEBUG executor] Raw input map:");
tracing::debug!(target: "pipeline_executor", "LlmGenerate action called");
tracing::debug!(target: "pipeline_executor", "Raw input map:");
for (k, v) in input {
println!(" {} => {}", k, v);
tracing::debug!(target: "pipeline_executor", " {} => {}", k, v);
}
// First resolve the template itself (handles ${inputs.xxx}, ${item.xxx}, etc.)
let resolved_template = context.resolve(template)?;
let resolved_template_str = resolved_template.as_str().unwrap_or(template).to_string();
println!("[DEBUG executor] Resolved template (first 300 chars): {}",
tracing::debug!(target: "pipeline_executor", "Resolved template (first 300 chars): {}",
&resolved_template_str[..resolved_template_str.len().min(300)]);
let resolved_input = context.resolve_map(input)?;
println!("[DEBUG executor] Resolved input map:");
tracing::debug!(target: "pipeline_executor", "Resolved input map:");
for (k, v) in &resolved_input {
println!(" {} => {:?}", k, v);
tracing::debug!(target: "pipeline_executor", " {} => {:?}", k, v);
}
self.action_registry.execute_llm(
&resolved_template_str,

View File

@@ -65,8 +65,8 @@ impl LlmDriver for OpenAiDriver {
// Debug: log the request details
let url = format!("{}/chat/completions", self.base_url);
let request_body = serde_json::to_string(&api_request).unwrap_or_default();
eprintln!("[OpenAiDriver] Sending request to: {}", url);
eprintln!("[OpenAiDriver] Request body: {}", request_body);
tracing::debug!(target: "openai_driver", "Sending request to: {}", url);
tracing::trace!(target: "openai_driver", "Request body: {}", request_body);
let response = self.client
.post(&url)
@@ -80,11 +80,11 @@ impl LlmDriver for OpenAiDriver {
if !response.status().is_success() {
let status = response.status();
let body = response.text().await.unwrap_or_default();
eprintln!("[OpenAiDriver] API error {}: {}", status, body);
tracing::warn!(target: "openai_driver", "API error {}: {}", status, body);
return Err(ZclawError::LlmError(format!("API error {}: {}", status, body)));
}
eprintln!("[OpenAiDriver] Response status: {}", response.status());
tracing::debug!(target: "openai_driver", "Response status: {}", response.status());
let api_response: OpenAiResponse = response
.json()
@@ -107,11 +107,11 @@ impl LlmDriver for OpenAiDriver {
self.base_url.contains("aliyuncs") ||
self.base_url.contains("bigmodel.cn");
eprintln!("[OpenAiDriver:stream] base_url={}, has_tools={}, needs_non_streaming={}",
tracing::debug!(target: "openai_driver", "stream config: base_url={}, has_tools={}, needs_non_streaming={}",
self.base_url, has_tools, needs_non_streaming);
if has_tools && needs_non_streaming {
eprintln!("[OpenAiDriver:stream] Provider detected that may not support streaming with tools, using non-streaming mode. URL: {}", self.base_url);
tracing::info!(target: "openai_driver", "Provider detected that may not support streaming with tools, using non-streaming mode. URL: {}", self.base_url);
// Use non-streaming mode and convert to stream
return self.stream_from_complete(request);
}
@@ -458,11 +458,11 @@ impl OpenAiDriver {
let api_key = self.api_key.expose_secret().to_string();
let model = request.model.clone();
eprintln!("[OpenAiDriver:stream_from_complete] Starting non-streaming request to: {}/chat/completions", base_url);
tracing::debug!(target: "openai_driver", "stream_from_complete: Starting non-streaming request to: {}/chat/completions", base_url);
Box::pin(stream! {
let url = format!("{}/chat/completions", base_url);
eprintln!("[OpenAiDriver:stream_from_complete] Sending non-streaming request to: {}", url);
tracing::debug!(target: "openai_driver", "stream_from_complete: Sending non-streaming request to: {}", url);
let response = match self.client
.post(&url)
@@ -490,15 +490,15 @@ impl OpenAiDriver {
let api_response: OpenAiResponse = match response.json().await {
Ok(r) => r,
Err(e) => {
eprintln!("[OpenAiDriver:stream_from_complete] Failed to parse response: {}", e);
tracing::warn!(target: "openai_driver", "stream_from_complete: Failed to parse response: {}", e);
yield Err(ZclawError::LlmError(format!("Failed to parse response: {}", e)));
return;
}
};
eprintln!("[OpenAiDriver:stream_from_complete] Got response with {} choices", api_response.choices.len());
tracing::debug!(target: "openai_driver", "stream_from_complete: Got response with {} choices", api_response.choices.len());
if let Some(choice) = api_response.choices.first() {
eprintln!("[OpenAiDriver:stream_from_complete] First choice: content={:?}, tool_calls={:?}, finish_reason={:?}",
tracing::debug!(target: "openai_driver", "stream_from_complete: First choice: content={:?}, tool_calls={:?}, finish_reason={:?}",
choice.message.content.as_ref().map(|c| {
if c.len() > 100 {
// 使用 floor_char_boundary 确保不在多字节字符中间截断
@@ -514,15 +514,15 @@ impl OpenAiDriver {
// Convert response to stream chunks
let completion = self.convert_response(api_response, model.clone());
eprintln!("[OpenAiDriver:stream_from_complete] Converted to {} content blocks, stop_reason: {:?}", completion.content.len(), completion.stop_reason);
tracing::debug!(target: "openai_driver", "stream_from_complete: Converted to {} content blocks, stop_reason: {:?}", completion.content.len(), completion.stop_reason);
// Emit content blocks as stream chunks
for block in &completion.content {
eprintln!("[OpenAiDriver:stream_from_complete] Emitting block: {:?}", block);
tracing::debug!(target: "openai_driver", "stream_from_complete: Emitting block: {:?}", block);
match block {
ContentBlock::Text { text } => {
if !text.is_empty() {
eprintln!("[OpenAiDriver:stream_from_complete] Emitting TextDelta: {} chars", text.len());
tracing::debug!(target: "openai_driver", "stream_from_complete: Emitting TextDelta: {} chars", text.len());
yield Ok(StreamChunk::TextDelta { delta: text.clone() });
}
}
@@ -530,7 +530,7 @@ impl OpenAiDriver {
yield Ok(StreamChunk::ThinkingDelta { delta: thinking.clone() });
}
ContentBlock::ToolUse { id, name, input } => {
eprintln!("[OpenAiDriver:stream_from_complete] Emitting ToolUse: id={}, name={}", id, name);
tracing::debug!(target: "openai_driver", "stream_from_complete: Emitting ToolUse: id={}, name={}", id, name);
// Emit tool use start
yield Ok(StreamChunk::ToolUseStart {
id: id.clone(),

View File

@@ -4,7 +4,7 @@
/// Default User-Agent header sent with all outgoing HTTP requests.
/// Some LLM providers (e.g. Moonshot, Qwen, DashScope Coding Plan) reject requests without one.
pub const USER_AGENT: &str = "ZCLAW/0.2.0";
pub const USER_AGENT: &str = "ZCLAW/0.1.0";
pub mod driver;
pub mod tool;

View File

@@ -18,3 +18,4 @@ tracing = { workspace = true }
async-trait = { workspace = true }
regex = { workspace = true }
uuid = { workspace = true }
shlex = { workspace = true }

View File

@@ -360,8 +360,9 @@ mod tests {
#[test]
fn test_extract_types() {
let registry: &'static SkillRegistry = Box::leak(Box::new(SkillRegistry::new()));
let composer = AutoComposer {
registry: unsafe { &*(&SkillRegistry::new() as *const _) },
registry,
};
let schema = serde_json::json!({

View File

@@ -118,7 +118,12 @@ impl Skill for ShellSkill {
let mut cmd = self.command.clone();
if let Value::String(s) = input {
cmd = cmd.replace("{{input}}", &s);
// Shell-quote the input to prevent command injection
let quoted = shlex::try_quote(&s)
.map_err(|_| zclaw_types::ZclawError::ToolError(
"Input contains null bytes and cannot be safely quoted".to_string()
))?;
cmd = cmd.replace("{{input}}", &quoted);
}
#[cfg(target_os = "windows")]