release(v0.2.0): streaming, MCP protocol, Browser Hand, security enhancements

## Major Features

### Streaming Response System
- Implement LlmDriver trait with `stream()` method returning async Stream
- Add SSE parsing for Anthropic and OpenAI API streaming
- Integrate Tauri event system for frontend streaming (`stream:chunk` events)
- Add StreamChunk types: Delta, ToolStart, ToolEnd, Complete, Error

### MCP Protocol Implementation
- Add MCP JSON-RPC 2.0 types (mcp_types.rs)
- Implement stdio-based MCP transport (mcp_transport.rs)
- Support tool discovery, execution, and resource operations

### Browser Hand Implementation
- Complete browser automation with Playwright-style actions
- Support Navigate, Click, Type, Scrape, Screenshot, Wait actions
- Add educational Hands: Whiteboard, Slideshow, Speech, Quiz

### Security Enhancements
- Implement command whitelist/blacklist for shell_exec tool
- Add SSRF protection with private IP blocking
- Create security.toml configuration file

## Test Improvements
- Fix test import paths (security-utils, setup)
- Fix vi.mock hoisting issues with vi.hoisted()
- Update test expectations for validateUrl and sanitizeFilename
- Add getUnsupportedLocalGatewayStatus mock

## Documentation Updates
- Update architecture documentation
- Improve configuration reference
- Add quick-start guide updates

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-03-24 03:24:24 +08:00
parent e49ba4460b
commit 3ff08faa56
78 changed files with 29575 additions and 1682 deletions

View File

@@ -1,4 +1,9 @@
//! Kernel configuration
//!
//! Design principles:
//! - Model ID is passed directly to the API without any transformation
//! - No provider prefix or alias mapping
//! - Simple, unified configuration structure
use std::sync::Arc;
use serde::{Deserialize, Serialize};
@@ -6,6 +11,104 @@ use secrecy::SecretString;
use zclaw_types::{Result, ZclawError};
use zclaw_runtime::{LlmDriver, AnthropicDriver, OpenAiDriver, GeminiDriver, LocalDriver};
/// API protocol type
///
/// Selects the wire protocol used by `LlmConfig::create_driver`:
/// either an OpenAI-compatible chat-completions API or the Anthropic
/// messages API. Serialized in lowercase ("openai" / "anthropic").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ApiProtocol {
    /// OpenAI-compatible API (the default; covers most third-party providers).
    #[default]
    OpenAI,
    /// Anthropic messages API.
    Anthropic,
}
/// LLM configuration - unified config for all providers
///
/// This is the single source of truth for LLM configuration.
/// Model ID is passed directly to the API without any transformation
/// (no provider prefix or alias mapping).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
/// API base URL (e.g., "https://api.openai.com/v1").
/// NOTE: `create_driver` treats an empty string specially for the
/// Anthropic protocol (falls back to the driver's default endpoint).
pub base_url: String,
/// API key. `skip_serializing` keeps the secret out of any serialized
/// config dump; the field is still read on deserialization.
#[serde(skip_serializing)]
pub api_key: String,
/// Model identifier - passed directly to the API
/// Examples: "gpt-4o", "glm-4-flash", "glm-4-plus", "claude-3-opus-20240229"
pub model: String,
/// API protocol (OpenAI-compatible or Anthropic); defaults to OpenAI
/// when absent from the config source.
#[serde(default)]
pub api_protocol: ApiProtocol,
/// Maximum tokens per response (defaults via `default_max_tokens`).
#[serde(default = "default_max_tokens")]
pub max_tokens: u32,
/// Sampling temperature (defaults via `default_temperature`).
#[serde(default = "default_temperature")]
pub temperature: f32,
}
impl LlmConfig {
    /// Create a new LLM config with the OpenAI-compatible protocol and the
    /// default sampling parameters (`default_max_tokens` / `default_temperature`).
    pub fn new(base_url: impl Into<String>, api_key: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            base_url: base_url.into(),
            api_key: api_key.into(),
            model: model.into(),
            api_protocol: ApiProtocol::OpenAI,
            max_tokens: default_max_tokens(),
            temperature: default_temperature(),
        }
    }

    /// Set API protocol (builder style).
    pub fn with_protocol(mut self, protocol: ApiProtocol) -> Self {
        self.api_protocol = protocol;
        self
    }

    /// Set max tokens (builder style).
    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = max_tokens;
        self
    }

    /// Set temperature (builder style).
    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = temperature;
        self
    }

    /// Create driver from this config.
    ///
    /// An empty `base_url` now falls back to the driver's built-in default
    /// endpoint for BOTH protocols. Previously only the Anthropic branch
    /// special-cased an empty URL; the OpenAI branch would construct a
    /// driver pointed at an empty base URL.
    pub fn create_driver(&self) -> Result<Arc<dyn LlmDriver>> {
        let key = SecretString::new(self.api_key.clone());
        let driver: Arc<dyn LlmDriver> = match self.api_protocol {
            ApiProtocol::Anthropic => {
                if self.base_url.is_empty() {
                    Arc::new(AnthropicDriver::new(key))
                } else {
                    Arc::new(AnthropicDriver::with_base_url(key, self.base_url.clone()))
                }
            }
            ApiProtocol::OpenAI => {
                if self.base_url.is_empty() {
                    // Fall back to the driver's default endpoint instead of
                    // sending requests to an empty base URL.
                    Arc::new(OpenAiDriver::new(key))
                } else {
                    Arc::new(OpenAiDriver::with_base_url(key, self.base_url.clone()))
                }
            }
        };
        Ok(driver)
    }
}
/// Kernel configuration
///
/// NOTE(review): the span below is rendered diff output — it interleaves the
/// fields REMOVED by this commit (default_provider, default_model, the
/// per-provider API keys, local_base_url, max_tokens, temperature) with the
/// ADDED replacement field `llm: LlmConfig`, and the hunk header line is
/// diff markup, not Rust. Resolve against the committed file before editing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KernelConfig {
@@ -13,33 +116,9 @@ pub struct KernelConfig {
#[serde(default = "default_database_url")]
pub database_url: String,
/// Default LLM provider (removed in this commit)
#[serde(default = "default_provider")]
pub default_provider: String,
/// Default model (removed in this commit)
#[serde(default = "default_model")]
pub default_model: String,
/// API keys (loaded from environment; removed in this commit)
#[serde(skip)]
pub anthropic_api_key: Option<String>,
#[serde(skip)]
pub openai_api_key: Option<String>,
#[serde(skip)]
pub gemini_api_key: Option<String>,
/// Local LLM base URL (removed in this commit)
#[serde(default)]
pub local_base_url: Option<String>,
/// Maximum tokens per response (removed in this commit)
#[serde(default = "default_max_tokens")]
pub max_tokens: u32,
/// Default temperature (removed in this commit)
#[serde(default = "default_temperature")]
pub temperature: f32,
/// LLM configuration (added in this commit; `flatten` merges its fields
/// into the same serialized level as `database_url`)
#[serde(flatten)]
pub llm: LlmConfig,
}
fn default_database_url() -> String {
@@ -48,14 +127,6 @@ fn default_database_url() -> String {
format!("sqlite:{}/data.db?mode=rwc", dir.display())
}
/// Serde default for the legacy `default_provider` field.
fn default_provider() -> String {
    String::from("anthropic")
}
/// Serde default for the legacy `default_model` field.
fn default_model() -> String {
    String::from("claude-sonnet-4-20250514")
}
/// Serde default for `max_tokens` when the config source omits it.
fn default_max_tokens() -> u32 {
4096
}
@@ -68,14 +139,14 @@ impl Default for KernelConfig {
fn default() -> Self {
Self {
database_url: default_database_url(),
default_provider: default_provider(),
default_model: default_model(),
anthropic_api_key: std::env::var("ANTHROPIC_API_KEY").ok(),
openai_api_key: std::env::var("OPENAI_API_KEY").ok(),
gemini_api_key: std::env::var("GEMINI_API_KEY").ok(),
local_base_url: None,
max_tokens: default_max_tokens(),
temperature: default_temperature(),
llm: LlmConfig {
base_url: "https://api.openai.com/v1".to_string(),
api_key: String::new(),
model: "gpt-4o-mini".to_string(),
api_protocol: ApiProtocol::OpenAI,
max_tokens: default_max_tokens(),
temperature: default_temperature(),
},
}
}
}
@@ -87,35 +158,183 @@ impl KernelConfig {
Ok(Self::default())
}
/// Create the LLM driver by delegating to the unified `LlmConfig`.
///
/// NOTE(review): the body below is rendered diff output. The provider-string
/// `match` was REMOVED by this commit and the single delegation line
/// `self.llm.create_driver()` was ADDED; as shown here both bodies appear
/// merged and this would not compile. Resolve against the committed file.
pub fn create_driver(&self) -> Result<Arc<dyn LlmDriver>> {
// --- removed in this commit: per-provider driver construction ---
let driver: Arc<dyn LlmDriver> = match self.default_provider.as_str() {
"anthropic" => {
let key = self.anthropic_api_key.clone()
.ok_or_else(|| ZclawError::ConfigError("ANTHROPIC_API_KEY not set".into()))?;
Arc::new(AnthropicDriver::new(SecretString::new(key)))
}
"openai" => {
let key = self.openai_api_key.clone()
.ok_or_else(|| ZclawError::ConfigError("OPENAI_API_KEY not set".into()))?;
Arc::new(OpenAiDriver::new(SecretString::new(key)))
}
"gemini" => {
let key = self.gemini_api_key.clone()
.ok_or_else(|| ZclawError::ConfigError("GEMINI_API_KEY not set".into()))?;
Arc::new(GeminiDriver::new(SecretString::new(key)))
}
"local" | "ollama" => {
let base_url = self.local_base_url.clone()
.unwrap_or_else(|| "http://localhost:11434/v1".to_string());
Arc::new(LocalDriver::new(base_url))
}
_ => {
return Err(ZclawError::ConfigError(
format!("Unknown provider: {}", self.default_provider)
));
}
};
Ok(driver)
// --- added in this commit: delegate to the unified LLM config ---
self.llm.create_driver()
}
/// Model identifier, forwarded verbatim to the API (no alias mapping).
pub fn model(&self) -> &str {
    self.llm.model.as_str()
}
/// Maximum tokens per response, taken from the unified LLM config.
pub fn max_tokens(&self) -> u32 {
self.llm.max_tokens
}
/// Sampling temperature, taken from the unified LLM config.
pub fn temperature(&self) -> f32 {
self.llm.temperature
}
}
// === Preset configurations for common providers ===
//
// Each preset fills in the provider's public endpoint; callers can still
// override any field with the `with_*` builders.
impl LlmConfig {
/// OpenAI: official endpoint, model "gpt-4o", OpenAI protocol.
pub fn openai(api_key: impl Into<String>) -> Self {
Self::new("https://api.openai.com/v1", api_key, "gpt-4o")
}
/// Anthropic Claude: official endpoint, Anthropic messages protocol.
pub fn anthropic(api_key: impl Into<String>) -> Self {
Self::new("https://api.anthropic.com", api_key, "claude-sonnet-4-20250514")
.with_protocol(ApiProtocol::Anthropic)
}
/// Zhipu GLM (bigmodel.cn), OpenAI-compatible endpoint.
pub fn zhipu(api_key: impl Into<String>, model: impl Into<String>) -> Self {
Self::new("https://open.bigmodel.cn/api/paas/v4", api_key, model)
}
/// Zhipu GLM Coding Plan endpoint.
pub fn zhipu_coding(api_key: impl Into<String>, model: impl Into<String>) -> Self {
Self::new("https://open.bigmodel.cn/api/coding/paas/v4", api_key, model)
}
/// Kimi (Moonshot), OpenAI-compatible endpoint.
pub fn kimi(api_key: impl Into<String>, model: impl Into<String>) -> Self {
Self::new("https://api.moonshot.cn/v1", api_key, model)
}
/// Kimi Coding Plan endpoint.
pub fn kimi_coding(api_key: impl Into<String>, model: impl Into<String>) -> Self {
Self::new("https://api.kimi.com/coding/v1", api_key, model)
}
/// Alibaba Cloud Bailian (Qwen), OpenAI-compatible mode.
pub fn qwen(api_key: impl Into<String>, model: impl Into<String>) -> Self {
Self::new("https://dashscope.aliyuncs.com/compatible-mode/v1", api_key, model)
}
/// Alibaba Cloud Bailian Coding Plan endpoint.
pub fn qwen_coding(api_key: impl Into<String>, model: impl Into<String>) -> Self {
Self::new("https://coding.dashscope.aliyuncs.com/v1", api_key, model)
}
/// DeepSeek, OpenAI-compatible endpoint.
pub fn deepseek(api_key: impl Into<String>, model: impl Into<String>) -> Self {
Self::new("https://api.deepseek.com/v1", api_key, model)
}
/// Ollama / Local server: caller supplies the base URL; API key is
/// intentionally empty (local servers typically require none).
pub fn local(base_url: impl Into<String>, model: impl Into<String>) -> Self {
Self::new(base_url, "", model)
}
}
// === Backward compatibility ===
/// Provider type for backward compatibility.
///
/// NOTE(review): not referenced by any code visible in this file
/// (`KernelConfig::from_provider` takes a `&str`, not this enum);
/// presumably kept for external callers — confirm before removing.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Provider {
OpenAI,
Anthropic,
Gemini,
Zhipu,
Kimi,
Qwen,
DeepSeek,
Local,
Custom,
}
impl KernelConfig {
/// Create config from provider type (for backward compatibility with Tauri commands)
pub fn from_provider(
provider: &str,
api_key: &str,
model: &str,
base_url: Option<&str>,
api_protocol: &str,
) -> Self {
let llm = match provider {
"anthropic" => LlmConfig::anthropic(api_key).with_model(model),
"openai" => {
if let Some(url) = base_url.filter(|u| !u.is_empty()) {
LlmConfig::new(url, api_key, model)
} else {
LlmConfig::openai(api_key).with_model(model)
}
}
"gemini" => LlmConfig::new(
base_url.unwrap_or("https://generativelanguage.googleapis.com/v1beta"),
api_key,
model,
),
"zhipu" => {
let url = base_url.unwrap_or("https://open.bigmodel.cn/api/paas/v4");
LlmConfig::zhipu(api_key, model).with_base_url(url)
}
"zhipu-coding" => {
let url = base_url.unwrap_or("https://open.bigmodel.cn/api/coding/paas/v4");
LlmConfig::zhipu_coding(api_key, model).with_base_url(url)
}
"kimi" => {
let url = base_url.unwrap_or("https://api.moonshot.cn/v1");
LlmConfig::kimi(api_key, model).with_base_url(url)
}
"kimi-coding" => {
let url = base_url.unwrap_or("https://api.kimi.com/coding/v1");
LlmConfig::kimi_coding(api_key, model).with_base_url(url)
}
"qwen" => {
let url = base_url.unwrap_or("https://dashscope.aliyuncs.com/compatible-mode/v1");
LlmConfig::qwen(api_key, model).with_base_url(url)
}
"qwen-coding" => {
let url = base_url.unwrap_or("https://coding.dashscope.aliyuncs.com/v1");
LlmConfig::qwen_coding(api_key, model).with_base_url(url)
}
"deepseek" => LlmConfig::deepseek(api_key, model),
"local" | "ollama" => {
let url = base_url.unwrap_or("http://localhost:11434/v1");
LlmConfig::local(url, model)
}
_ => {
// Custom provider
let protocol = if api_protocol == "anthropic" {
ApiProtocol::Anthropic
} else {
ApiProtocol::OpenAI
};
LlmConfig::new(
base_url.unwrap_or("https://api.openai.com/v1"),
api_key,
model,
)
.with_protocol(protocol)
}
};
Self {
database_url: default_database_url(),
llm,
}
}
}
impl LlmConfig {
    /// Replace the model identifier, returning the updated config.
    pub fn with_model(self, model: impl Into<String>) -> Self {
        Self {
            model: model.into(),
            ..self
        }
    }

    /// Replace the API base URL, returning the updated config.
    pub fn with_base_url(self, base_url: impl Into<String>) -> Self {
        Self {
            base_url: base_url.into(),
            ..self
        }
    }
}