feat(kernel): add internal ZCLAW kernel integration with Tauri

Phase 1-3 of independence architecture:
- zclaw-types: Add ToolDefinition, ToolResult, KernelConfig, ModelConfig
- zclaw-kernel: Fix AgentInfo provider field, export config module
- desktop: Add kernel_commands for internal kernel access
- Add AgentId FromStr implementation for parsing

New Tauri commands:
- kernel_init, kernel_status, kernel_shutdown
- agent_create, agent_list, agent_get, agent_delete
- agent_chat

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-03-22 08:37:20 +08:00
parent 185763868a
commit 7abfca9d5c
6 changed files with 618 additions and 0 deletions

View File

@@ -0,0 +1,129 @@
//! Configuration types
use serde::{Deserialize, Serialize};
/// Kernel configuration
///
/// Top-level settings for the ZCLAW kernel. On deserialization,
/// `max_tokens`, `temperature` and `debug` may be omitted and fall back
/// to 4096, 0.7 and false respectively (via the `serde(default)` attributes).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KernelConfig {
/// Database URL (SQLite or PostgreSQL)
pub database_url: String,
/// Default LLM provider (e.g. "anthropic")
pub default_provider: String,
/// Default model identifier for the provider above
pub default_model: String,
/// Maximum tokens per response (defaults to 4096 when absent)
#[serde(default = "default_max_tokens")]
pub max_tokens: u32,
/// Default sampling temperature (defaults to 0.7 when absent)
#[serde(default = "default_temperature")]
pub temperature: f32,
/// Enable debug logging (defaults to false when absent)
#[serde(default)]
pub debug: bool,
}
/// Serde fallback for `KernelConfig::max_tokens` (4096 tokens).
fn default_max_tokens() -> u32 {
    4_096
}
/// Serde fallback for `KernelConfig::temperature` (0.7).
fn default_temperature() -> f32 {
    0.7
}
impl Default for KernelConfig {
fn default() -> Self {
Self {
database_url: "sqlite::memory:".to_string(),
default_provider: "anthropic".to_string(),
default_model: "claude-sonnet-4-20250514".to_string(),
max_tokens: default_max_tokens(),
temperature: default_temperature(),
debug: false,
}
}
}
/// Model configuration for an agent
///
/// Identifies which LLM backend an agent talks to and how it is reached.
/// Derives `Eq`, so two configs can be compared exactly; `same_driver`
/// (in the inherent impl) offers a looser, transport-level comparison.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ModelConfig {
/// Provider name (anthropic, openai, gemini, ollama, etc.)
pub provider: String,
/// Model identifier
pub model: String,
/// API key environment variable name (None for keyless providers such as local Ollama)
#[serde(default)]
pub api_key_env: Option<String>,
/// Custom base URL (for OpenAI-compatible providers; None means the provider default)
#[serde(default)]
pub base_url: Option<String>,
}
impl Default for ModelConfig {
    /// Anthropic Claude Sonnet, keyed via the `ANTHROPIC_API_KEY`
    /// environment variable — identical to `ModelConfig::anthropic`
    /// with the default model name.
    fn default() -> Self {
        Self::anthropic("claude-sonnet-4-20250514")
    }
}
impl ModelConfig {
    /// Shared internal constructor for the provider presets below.
    fn preset(
        provider: &str,
        model: impl Into<String>,
        api_key_env: Option<&str>,
        base_url: Option<String>,
    ) -> Self {
        Self {
            provider: provider.to_string(),
            model: model.into(),
            api_key_env: api_key_env.map(str::to_string),
            base_url,
        }
    }

    /// Anthropic-hosted model; key read from `ANTHROPIC_API_KEY`.
    pub fn anthropic(model: impl Into<String>) -> Self {
        Self::preset("anthropic", model, Some("ANTHROPIC_API_KEY"), None)
    }

    /// OpenAI-hosted model; key read from `OPENAI_API_KEY`.
    pub fn openai(model: impl Into<String>) -> Self {
        Self::preset("openai", model, Some("OPENAI_API_KEY"), None)
    }

    /// Google Gemini model; key read from `GEMINI_API_KEY`.
    pub fn gemini(model: impl Into<String>) -> Self {
        Self::preset("gemini", model, Some("GEMINI_API_KEY"), None)
    }

    /// Local Ollama model — no API key, default local endpoint.
    pub fn ollama(model: impl Into<String>) -> Self {
        Self::preset("ollama", model, None, Some("http://localhost:11434".to_string()))
    }

    /// Any OpenAI-compatible endpoint at a custom base URL, no key env var.
    pub fn openai_compatible(model: impl Into<String>, base_url: impl Into<String>) -> Self {
        Self::preset("openai", model, None, Some(base_url.into()))
    }

    /// Check if this uses the same driver as another config: identical
    /// provider, key environment variable and base URL (model may differ).
    pub fn same_driver(&self, other: &ModelConfig) -> bool {
        (&self.provider, &self.api_key_env, &self.base_url)
            == (&other.provider, &other.api_key_env, &other.base_url)
    }
}

View File

@@ -0,0 +1,90 @@
//! Tool definition types
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// Tool definition for LLM function calling
///
/// A named tool together with a JSON Schema describing its input
/// parameters; serializable so it can be handed to a provider as-is.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
/// Tool name (unique identifier)
pub name: String,
/// Human-readable description shown to the model
pub description: String,
/// JSON Schema for input parameters
pub input_schema: Value,
}
impl ToolDefinition {
    /// Build a tool from a name, description and a pre-built JSON schema.
    pub fn new(name: impl Into<String>, description: impl Into<String>, schema: Value) -> Self {
        Self {
            name: name.into(),
            description: description.into(),
            input_schema: schema,
        }
    }

    /// Create a simple tool whose parameters are all required strings.
    pub fn simple(name: impl Into<String>, description: impl Into<String>, params: &[&str]) -> Self {
        // One `{"type": "string"}` schema entry per parameter name.
        let mut properties = serde_json::Map::new();
        for param in params {
            properties.insert((*param).to_string(), serde_json::json!({"type": "string"}));
        }
        let schema = serde_json::json!({
            "type": "object",
            "properties": properties,
            "required": params
        });
        Self::new(name, description, schema)
    }
}
/// Tool execution result
///
/// Outcome of running a tool: on success `output` carries the data and
/// `error` is None; on failure `error` carries the message (see the
/// `success`/`error` constructors).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolResult {
/// Whether execution succeeded
pub success: bool,
/// Output data (JSON null on failure)
pub output: Value,
/// Error message if failed
pub error: Option<String>,
}
impl ToolResult {
pub fn success(output: Value) -> Self {
Self {
success: true,
output,
error: None,
}
}
pub fn error(message: impl Into<String>) -> Self {
Self {
success: false,
output: Value::Null,
error: Some(message.into()),
}
}
}
/// Built-in tool names
///
/// Canonical string identifiers for the kernel's built-in tools; use these
/// constants instead of repeating the string literals at call sites.
pub mod builtin_tools {
// File-system tools
pub const FILE_READ: &str = "file_read";
pub const FILE_WRITE: &str = "file_write";
pub const FILE_LIST: &str = "file_list";
// Shell execution
pub const SHELL_EXEC: &str = "shell_exec";
// Web tools
pub const WEB_FETCH: &str = "web_fetch";
pub const WEB_SEARCH: &str = "web_search";
// Agent memory tools
pub const MEMORY_STORE: &str = "memory_store";
pub const MEMORY_RECALL: &str = "memory_recall";
pub const MEMORY_SEARCH: &str = "memory_search";
}