feat: add internal ZCLAW kernel crates to git tracking

This commit is contained in:
iven
2026-03-22 09:26:36 +08:00
parent d72c0f7161
commit 58cd24f85b
36 changed files with 10298 additions and 0 deletions

View File

@@ -0,0 +1,226 @@
//! Anthropic Claude driver implementation
use async_trait::async_trait;
use secrecy::{ExposeSecret, SecretString};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use zclaw_types::{Result, ZclawError};
use super::{CompletionRequest, CompletionResponse, ContentBlock, LlmDriver, StopReason};
/// Anthropic API driver
///
/// Talks to the Anthropic Messages API over HTTPS using a reusable
/// `reqwest::Client`.
pub struct AnthropicDriver {
    client: Client,        // shared connection pool, reused across requests
    api_key: SecretString, // kept secret; only exposed when building headers
    base_url: String,      // API root, overridable for proxies/test servers
}
impl AnthropicDriver {
    /// Create a driver targeting the public Anthropic API.
    pub fn new(api_key: SecretString) -> Self {
        // Delegate to `with_base_url` so construction logic lives in one place.
        Self::with_base_url(api_key, "https://api.anthropic.com".to_string())
    }

    /// Create a driver with a custom base URL (e.g. a proxy or mock server).
    pub fn with_base_url(api_key: SecretString, base_url: String) -> Self {
        Self {
            client: Client::new(),
            api_key,
            base_url,
        }
    }
}
#[async_trait]
impl LlmDriver for AnthropicDriver {
    /// Provider identifier used for driver selection and logging.
    fn provider(&self) -> &str {
        "anthropic"
    }

    /// Configured whenever a non-empty API key is present.
    fn is_configured(&self) -> bool {
        !self.api_key.expose_secret().is_empty()
    }

    /// Send a completion request to the Anthropic Messages API.
    ///
    /// Transport failures, non-2xx statuses, and JSON decode failures are all
    /// mapped to `ZclawError::LlmError`.
    async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse> {
        let api_request = self.build_api_request(&request);
        let response = self.client
            .post(format!("{}/v1/messages", self.base_url))
            .header("x-api-key", self.api_key.expose_secret())
            // Pinned API version required by the Messages endpoint.
            .header("anthropic-version", "2023-06-01")
            .header("content-type", "application/json")
            .json(&api_request)
            .send()
            .await
            .map_err(|e| ZclawError::LlmError(format!("HTTP request failed: {}", e)))?;
        if !response.status().is_success() {
            // Surface the raw error body to aid debugging; it may be empty.
            let status = response.status();
            let body = response.text().await.unwrap_or_default();
            return Err(ZclawError::LlmError(format!("API error {}: {}", status, body)));
        }
        let api_response: AnthropicResponse = response
            .json()
            .await
            .map_err(|e| ZclawError::LlmError(format!("Failed to parse response: {}", e)))?;
        Ok(self.convert_response(api_response))
    }
}
impl AnthropicDriver {
fn build_api_request(&self, request: &CompletionRequest) -> AnthropicRequest {
let messages: Vec<AnthropicMessage> = request.messages
.iter()
.filter_map(|msg| match msg {
zclaw_types::Message::User { content } => Some(AnthropicMessage {
role: "user".to_string(),
content: vec!(ContentBlock::Text { text: content.clone() }),
}),
zclaw_types::Message::Assistant { content, thinking } => {
let mut blocks = Vec::new();
if let Some(think) = thinking {
blocks.push(ContentBlock::Thinking { thinking: think.clone() });
}
blocks.push(ContentBlock::Text { text: content.clone() });
Some(AnthropicMessage {
role: "assistant".to_string(),
content: blocks,
})
}
zclaw_types::Message::ToolUse { id, tool, input } => Some(AnthropicMessage {
role: "assistant".to_string(),
content: vec![ContentBlock::ToolUse {
id: id.clone(),
name: tool.to_string(),
input: input.clone(),
}],
}),
zclaw_types::Message::ToolResult { tool_call_id: _, tool: _, output, is_error } => {
let content = if *is_error {
format!("Error: {}", output)
} else {
output.to_string()
};
Some(AnthropicMessage {
role: "user".to_string(),
content: vec![ContentBlock::Text { text: content }],
})
}
_ => None,
})
.collect();
let tools: Vec<AnthropicTool> = request.tools
.iter()
.map(|t| AnthropicTool {
name: t.name.clone(),
description: t.description.clone(),
input_schema: t.input_schema.clone(),
})
.collect();
AnthropicRequest {
model: request.model.clone(),
max_tokens: request.max_tokens.unwrap_or(4096),
system: request.system.clone(),
messages,
tools: if tools.is_empty() { None } else { Some(tools) },
temperature: request.temperature,
stop_sequences: if request.stop.is_empty() { None } else { Some(request.stop.clone()) },
stream: request.stream,
}
}
fn convert_response(&self, api_response: AnthropicResponse) -> CompletionResponse {
let content: Vec<ContentBlock> = api_response.content
.into_iter()
.map(|block| match block.block_type.as_str() {
"text" => ContentBlock::Text { text: block.text.unwrap_or_default() },
"thinking" => ContentBlock::Thinking { thinking: block.thinking.unwrap_or_default() },
"tool_use" => ContentBlock::ToolUse {
id: block.id.unwrap_or_default(),
name: block.name.unwrap_or_default(),
input: block.input.unwrap_or(serde_json::Value::Null),
},
_ => ContentBlock::Text { text: String::new() },
})
.collect();
let stop_reason = match api_response.stop_reason.as_deref() {
Some("end_turn") => StopReason::EndTurn,
Some("max_tokens") => StopReason::MaxTokens,
Some("stop_sequence") => StopReason::StopSequence,
Some("tool_use") => StopReason::ToolUse,
_ => StopReason::EndTurn,
};
CompletionResponse {
content,
model: api_response.model,
input_tokens: api_response.usage.input_tokens,
output_tokens: api_response.usage.output_tokens,
stop_reason,
}
}
}
// Anthropic API types
//
// Request-side types derive only `Serialize`; response-side types only
// `Deserialize`, matching the direction each travels on the wire.

#[derive(Serialize)]
struct AnthropicRequest {
    model: String,
    /// Required by the Messages API (no server-side default).
    max_tokens: u32,
    #[serde(skip_serializing_if = "Option::is_none")]
    system: Option<String>,
    messages: Vec<AnthropicMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tools: Option<Vec<AnthropicTool>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    stop_sequences: Option<Vec<String>>,
    // NOTE: a former `#[serde(default)]` was removed here — that attribute
    // only affects Deserialize, and this type is Serialize-only, so it was
    // a misleading no-op.
    stream: bool,
}

#[derive(Serialize)]
struct AnthropicMessage {
    role: String,
    content: Vec<ContentBlock>,
}

#[derive(Serialize)]
struct AnthropicTool {
    name: String,
    description: String,
    input_schema: serde_json::Value,
}

#[derive(Deserialize)]
struct AnthropicResponse {
    content: Vec<AnthropicContentBlock>,
    model: String,
    /// May be absent in some payloads; mapped to `EndTurn` downstream.
    stop_reason: Option<String>,
    usage: AnthropicUsage,
}

#[derive(Deserialize)]
struct AnthropicContentBlock {
    /// Discriminator: "text", "thinking", or "tool_use".
    #[serde(rename = "type")]
    block_type: String,
    text: Option<String>,
    thinking: Option<String>,
    id: Option<String>,
    name: Option<String>,
    input: Option<serde_json::Value>,
}

#[derive(Deserialize)]
struct AnthropicUsage {
    input_tokens: u32,
    output_tokens: u32,
}

View File

@@ -0,0 +1,49 @@
//! Google Gemini driver implementation
use async_trait::async_trait;
use secrecy::{ExposeSecret, SecretString};
use reqwest::Client;
use zclaw_types::Result;
use super::{CompletionRequest, CompletionResponse, ContentBlock, LlmDriver, StopReason};
/// Google Gemini driver
///
/// Currently a stub: `complete` returns placeholder content rather than
/// calling the real API.
pub struct GeminiDriver {
    client: Client,        // created up front for the eventual real implementation
    api_key: SecretString, // Gemini API key, only checked for non-emptiness so far
    base_url: String,      // Gemini REST endpoint root
}
impl GeminiDriver {
pub fn new(api_key: SecretString) -> Self {
Self {
client: Client::new(),
api_key,
base_url: "https://generativelanguage.googleapis.com/v1beta".to_string(),
}
}
}
#[async_trait]
impl LlmDriver for GeminiDriver {
    /// Provider identifier used for driver selection and logging.
    fn provider(&self) -> &str {
        "gemini"
    }

    /// Configured whenever a non-empty API key is present.
    fn is_configured(&self) -> bool {
        !self.api_key.expose_secret().is_empty()
    }

    /// Stub implementation: always succeeds with placeholder text and zero
    /// token counts. Callers cannot currently distinguish this from a real
    /// completion except by the content string.
    async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse> {
        // TODO: Implement actual API call
        Ok(CompletionResponse {
            content: vec![ContentBlock::Text {
                text: "Gemini driver not yet implemented".to_string(),
            }],
            model: request.model,
            input_tokens: 0,
            output_tokens: 0,
            stop_reason: StopReason::EndTurn,
        })
    }
}

View File

@@ -0,0 +1,59 @@
//! Local LLM driver (Ollama, LM Studio, vLLM, etc.)
use async_trait::async_trait;
use reqwest::Client;
use zclaw_types::Result;
use super::{CompletionRequest, CompletionResponse, ContentBlock, LlmDriver, StopReason};
/// Local LLM driver for Ollama, LM Studio, vLLM, etc.
///
/// Targets OpenAI-compatible local endpoints; no API key is required.
pub struct LocalDriver {
    client: Client,   // reusable HTTP client
    base_url: String, // local endpoint root, e.g. http://localhost:11434/v1
}
impl LocalDriver {
pub fn new(base_url: impl Into<String>) -> Self {
Self {
client: Client::new(),
base_url: base_url.into(),
}
}
pub fn ollama() -> Self {
Self::new("http://localhost:11434/v1")
}
pub fn lm_studio() -> Self {
Self::new("http://localhost:1234/v1")
}
pub fn vllm() -> Self {
Self::new("http://localhost:8000/v1")
}
}
#[async_trait]
impl LlmDriver for LocalDriver {
    /// Provider identifier used for driver selection and logging.
    fn provider(&self) -> &str {
        "local"
    }

    fn is_configured(&self) -> bool {
        // Local drivers don't require API keys
        true
    }

    /// Stub implementation: always succeeds with placeholder text and zero
    /// token counts until the OpenAI-compatible call is implemented.
    async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse> {
        // TODO: Implement actual API call (OpenAI-compatible)
        Ok(CompletionResponse {
            content: vec![ContentBlock::Text {
                text: "Local driver not yet implemented".to_string(),
            }],
            model: request.model,
            input_tokens: 0,
            output_tokens: 0,
            stop_reason: StopReason::EndTurn,
        })
    }
}

View File

@@ -0,0 +1,169 @@
//! LLM Driver trait and implementations
//!
//! This module provides a unified interface for multiple LLM providers.
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use secrecy::SecretString;
use zclaw_types::Result;
mod anthropic;
mod openai;
mod gemini;
mod local;
pub use anthropic::AnthropicDriver;
pub use openai::OpenAiDriver;
pub use gemini::GeminiDriver;
pub use local::LocalDriver;
/// LLM Driver trait - unified interface for all providers
///
/// Implementations translate the provider-agnostic request/response types to
/// each vendor's wire format. Must be `Send + Sync` so drivers can be shared
/// behind `Arc<dyn LlmDriver>`.
#[async_trait]
pub trait LlmDriver: Send + Sync {
    /// Get the provider name (e.g. "anthropic", "openai", "local").
    fn provider(&self) -> &str;
    /// Send a completion request and await the full response.
    async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse>;
    /// Check if the driver is properly configured (e.g. has an API key).
    fn is_configured(&self) -> bool;
}
/// Completion request
///
/// Provider-agnostic request accepted by every [`LlmDriver`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletionRequest {
    /// Model identifier
    pub model: String,
    /// System prompt (sent out-of-band for providers that support it)
    pub system: Option<String>,
    /// Conversation messages
    pub messages: Vec<zclaw_types::Message>,
    /// Available tools
    pub tools: Vec<ToolDefinition>,
    /// Maximum tokens to generate
    pub max_tokens: Option<u32>,
    /// Temperature (0.0 - 1.0)
    pub temperature: Option<f32>,
    /// Stop sequences
    pub stop: Vec<String>,
    /// Enable streaming
    pub stream: bool,
}
impl Default for CompletionRequest {
fn default() -> Self {
Self {
model: String::new(),
system: None,
messages: Vec::new(),
tools: Vec::new(),
max_tokens: Some(4096),
temperature: Some(0.7),
stop: Vec::new(),
stream: false,
}
}
}
/// Tool definition for LLM
///
/// Name, human-readable description, and a JSON Schema describing the
/// expected input object.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
    pub name: String,
    pub description: String,
    // JSON Schema for the tool's input parameters.
    pub input_schema: serde_json::Value,
}
impl ToolDefinition {
pub fn new(name: impl Into<String>, description: impl Into<String>, schema: serde_json::Value) -> Self {
Self {
name: name.into(),
description: description.into(),
input_schema: schema,
}
}
}
/// Completion response
///
/// Provider-agnostic result returned by every [`LlmDriver`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletionResponse {
    /// Generated content blocks
    pub content: Vec<ContentBlock>,
    /// Model used
    pub model: String,
    /// Input tokens
    pub input_tokens: u32,
    /// Output tokens
    pub output_tokens: u32,
    /// Stop reason
    pub stop_reason: StopReason,
}
/// Content block in response
///
/// Serialized with a `"type"` tag in snake_case ("text", "thinking",
/// "tool_use"), mirroring the Anthropic content-block shape.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlock {
    Text { text: String },
    Thinking { thinking: String },
    ToolUse { id: String, name: String, input: serde_json::Value },
}
/// Stop reason
///
/// Normalized across providers; drivers map unknown provider reasons to
/// `EndTurn`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum StopReason {
    EndTurn,
    MaxTokens,
    StopSequence,
    ToolUse,
    Error,
}
/// Driver configuration
///
/// One variant per supported provider; holds the credentials/endpoint needed
/// to construct the corresponding driver.
#[derive(Debug, Clone)]
pub enum DriverConfig {
    Anthropic { api_key: SecretString },
    OpenAi { api_key: SecretString, base_url: Option<String> },
    Gemini { api_key: SecretString },
    Local { base_url: String },
}
impl DriverConfig {
pub fn anthropic(api_key: impl Into<String>) -> Self {
Self::Anthropic {
api_key: SecretString::new(api_key.into()),
}
}
pub fn openai(api_key: impl Into<String>) -> Self {
Self::OpenAi {
api_key: SecretString::new(api_key.into()),
base_url: None,
}
}
pub fn openai_with_base(api_key: impl Into<String>, base_url: impl Into<String>) -> Self {
Self::OpenAi {
api_key: SecretString::new(api_key.into()),
base_url: Some(base_url.into()),
}
}
pub fn gemini(api_key: impl Into<String>) -> Self {
Self::Gemini {
api_key: SecretString::new(api_key.into()),
}
}
pub fn ollama() -> Self {
Self::Local {
base_url: "http://localhost:11434".to_string(),
}
}
pub fn local(base_url: impl Into<String>) -> Self {
Self::Local {
base_url: base_url.into(),
}
}
}

View File

@@ -0,0 +1,293 @@
//! OpenAI-compatible driver implementation
use async_trait::async_trait;
use secrecy::{ExposeSecret, SecretString};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use zclaw_types::{Result, ZclawError};
use super::{CompletionRequest, CompletionResponse, ContentBlock, LlmDriver, StopReason, ToolDefinition};
/// OpenAI-compatible driver
///
/// Talks to the chat-completions endpoint; works against any server that
/// accepts the OpenAI wire format via `with_base_url`.
pub struct OpenAiDriver {
    client: Client,        // shared connection pool, reused across requests
    api_key: SecretString, // bearer token; only exposed when building headers
    base_url: String,      // API root, overridable for compatible servers
}
impl OpenAiDriver {
    /// Create a driver targeting the public OpenAI API.
    pub fn new(api_key: SecretString) -> Self {
        // Delegate to `with_base_url` so construction logic lives in one place.
        Self::with_base_url(api_key, "https://api.openai.com/v1".to_string())
    }

    /// Create a driver with a custom OpenAI-compatible base URL.
    pub fn with_base_url(api_key: SecretString, base_url: String) -> Self {
        Self {
            client: Client::new(),
            api_key,
            base_url,
        }
    }
}
#[async_trait]
impl LlmDriver for OpenAiDriver {
    /// Provider identifier used for driver selection and logging.
    fn provider(&self) -> &str {
        "openai"
    }

    /// Configured whenever a non-empty API key is present.
    fn is_configured(&self) -> bool {
        !self.api_key.expose_secret().is_empty()
    }

    /// Send a completion request to the chat-completions endpoint.
    ///
    /// Transport failures, non-2xx statuses, and JSON decode failures are all
    /// mapped to `ZclawError::LlmError`.
    async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse> {
        let api_request = self.build_api_request(&request);
        let response = self.client
            .post(format!("{}/chat/completions", self.base_url))
            .header("Authorization", format!("Bearer {}", self.api_key.expose_secret()))
            .header("Content-Type", "application/json")
            .json(&api_request)
            .send()
            .await
            .map_err(|e| ZclawError::LlmError(format!("HTTP request failed: {}", e)))?;
        if !response.status().is_success() {
            // Surface the raw error body to aid debugging; it may be empty.
            let status = response.status();
            let body = response.text().await.unwrap_or_default();
            return Err(ZclawError::LlmError(format!("API error {}: {}", status, body)));
        }
        let api_response: OpenAiResponse = response
            .json()
            .await
            .map_err(|e| ZclawError::LlmError(format!("Failed to parse response: {}", e)))?;
        // The request's model is echoed back because the response model field
        // is not used here.
        Ok(self.convert_response(api_response, request.model))
    }
}
impl OpenAiDriver {
    /// Translate a provider-agnostic `CompletionRequest` into the OpenAI
    /// chat-completions wire format.
    fn build_api_request(&self, request: &CompletionRequest) -> OpenAiRequest {
        // Every message variant maps to exactly one OpenAI message, so a plain
        // `map` is used (the previous `filter_map` never returned `None`).
        let mut messages: Vec<OpenAiMessage> = request.messages
            .iter()
            .map(|msg| match msg {
                zclaw_types::Message::User { content } => OpenAiMessage {
                    role: "user".to_string(),
                    content: Some(content.clone()),
                    tool_calls: None,
                },
                // Thinking text has no OpenAI equivalent and is dropped.
                zclaw_types::Message::Assistant { content, thinking: _ } => OpenAiMessage {
                    role: "assistant".to_string(),
                    content: Some(content.clone()),
                    tool_calls: None,
                },
                zclaw_types::Message::System { content } => OpenAiMessage {
                    role: "system".to_string(),
                    content: Some(content.clone()),
                    tool_calls: None,
                },
                zclaw_types::Message::ToolUse { id, tool, input } => OpenAiMessage {
                    role: "assistant".to_string(),
                    content: None,
                    tool_calls: Some(vec![OpenAiToolCall {
                        id: id.clone(),
                        r#type: "function".to_string(),
                        function: FunctionCall {
                            name: tool.to_string(),
                            arguments: serde_json::to_string(input).unwrap_or_default(),
                        },
                    }]),
                },
                // NOTE(review): OpenAI expects a `tool_call_id` field on
                // role="tool" messages; `OpenAiMessage` has no such field, so
                // the id is dropped here — verify against the API before
                // relying on multi-tool conversations.
                zclaw_types::Message::ToolResult { tool_call_id: _, output, is_error, .. } => OpenAiMessage {
                    role: "tool".to_string(),
                    content: Some(if *is_error {
                        format!("Error: {}", output)
                    } else {
                        output.to_string()
                    }),
                    tool_calls: None,
                },
            })
            .collect();
        // The system prompt, when present, must be the first message.
        if let Some(system) = &request.system {
            messages.insert(0, OpenAiMessage {
                role: "system".to_string(),
                content: Some(system.clone()),
                tool_calls: None,
            });
        }
        let tools: Vec<OpenAiTool> = request.tools
            .iter()
            .map(|t| OpenAiTool {
                r#type: "function".to_string(),
                function: FunctionDef {
                    name: t.name.clone(),
                    description: t.description.clone(),
                    parameters: t.input_schema.clone(),
                },
            })
            .collect();
        OpenAiRequest {
            model: request.model.clone(),
            messages,
            max_tokens: request.max_tokens,
            temperature: request.temperature,
            stop: if request.stop.is_empty() { None } else { Some(request.stop.clone()) },
            stream: request.stream,
            tools: if tools.is_empty() { None } else { Some(tools) },
        }
    }

    /// Convert a raw OpenAI response into the provider-agnostic form.
    ///
    /// Only the first choice is used; a missing choice yields empty text.
    fn convert_response(&self, api_response: OpenAiResponse, model: String) -> CompletionResponse {
        let choice = api_response.choices.first();
        let (content, stop_reason) = match choice {
            Some(c) => {
                // Text content takes precedence; otherwise map tool calls;
                // otherwise fall back to empty text.
                let blocks = if let Some(text) = &c.message.content {
                    vec![ContentBlock::Text { text: text.clone() }]
                } else if let Some(tool_calls) = &c.message.tool_calls {
                    tool_calls.iter().map(|tc| ContentBlock::ToolUse {
                        id: tc.id.clone(),
                        name: tc.function.name.clone(),
                        // Arguments arrive as a JSON string; unparseable input
                        // degrades to Null rather than erroring.
                        input: serde_json::from_str(&tc.function.arguments).unwrap_or(serde_json::Value::Null),
                    }).collect()
                } else {
                    vec![ContentBlock::Text { text: String::new() }]
                };
                let stop = match c.finish_reason.as_deref() {
                    Some("stop") => StopReason::EndTurn,
                    Some("length") => StopReason::MaxTokens,
                    Some("tool_calls") => StopReason::ToolUse,
                    _ => StopReason::EndTurn,
                };
                (blocks, stop)
            }
            None => (vec![ContentBlock::Text { text: String::new() }], StopReason::EndTurn),
        };
        let (input_tokens, output_tokens) = api_response.usage
            .map(|u| (u.prompt_tokens, u.completion_tokens))
            .unwrap_or((0, 0));
        CompletionResponse {
            content,
            model,
            input_tokens,
            output_tokens,
            stop_reason,
        }
    }
}
// OpenAI API types
//
// Request-side types derive only `Serialize`, response-side types only
// `Deserialize`, matching the direction each travels on the wire.

#[derive(Serialize)]
struct OpenAiRequest {
    model: String,
    messages: Vec<OpenAiMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    stop: Option<Vec<String>>,
    // NOTE: a former `#[serde(default)]` was removed here — that attribute
    // only affects Deserialize, and this type is Serialize-only, so it was
    // a misleading no-op.
    stream: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    tools: Option<Vec<OpenAiTool>>,
}

#[derive(Serialize)]
struct OpenAiMessage {
    role: String,
    /// Absent for pure tool-call messages.
    #[serde(skip_serializing_if = "Option::is_none")]
    content: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_calls: Option<Vec<OpenAiToolCall>>,
}

#[derive(Serialize)]
struct OpenAiToolCall {
    id: String,
    /// Always "function" for current OpenAI tool calls.
    r#type: String,
    function: FunctionCall,
}

impl Default for OpenAiToolCall {
    /// Empty function call with the standard "function" type tag.
    // NOTE(review): no caller is visible in this file; confirm this impl is
    // actually used before keeping it long-term.
    fn default() -> Self {
        Self {
            id: String::new(),
            r#type: "function".to_string(),
            function: FunctionCall {
                name: String::new(),
                arguments: String::new(),
            },
        }
    }
}

#[derive(Serialize)]
struct FunctionCall {
    name: String,
    /// JSON-encoded argument object (OpenAI sends arguments as a string).
    arguments: String,
}

#[derive(Serialize)]
struct OpenAiTool {
    r#type: String,
    function: FunctionDef,
}

#[derive(Serialize)]
struct FunctionDef {
    name: String,
    description: String,
    /// JSON Schema for the function parameters.
    parameters: serde_json::Value,
}
// Response-side types: only the fields this driver reads are modeled;
// serde ignores any extra fields in the payload.

#[derive(Deserialize)]
struct OpenAiResponse {
    choices: Vec<OpenAiChoice>,
    /// Absent on some compatible servers; treated as zero tokens.
    usage: Option<OpenAiUsage>,
}

#[derive(Deserialize)]
struct OpenAiChoice {
    message: OpenAiResponseMessage,
    /// "stop", "length", or "tool_calls"; anything else maps to EndTurn.
    finish_reason: Option<String>,
}

#[derive(Deserialize)]
struct OpenAiResponseMessage {
    content: Option<String>,
    tool_calls: Option<Vec<OpenAiToolCallResponse>>,
}

#[derive(Deserialize)]
struct OpenAiToolCallResponse {
    id: String,
    function: FunctionCallResponse,
}

#[derive(Deserialize)]
struct FunctionCallResponse {
    name: String,
    /// JSON-encoded argument object, parsed downstream.
    arguments: String,
}

#[derive(Deserialize)]
struct OpenAiUsage {
    prompt_tokens: u32,
    completion_tokens: u32,
}

View File

@@ -0,0 +1,19 @@
//! ZCLAW Runtime
//!
//! LLM drivers, tool system, and agent loop implementation.
pub mod driver;
pub mod tool;
pub mod loop_runner;
pub mod loop_guard;
pub mod stream;
// Re-export main types
pub use driver::{
LlmDriver, CompletionRequest, CompletionResponse, ContentBlock, StopReason,
ToolDefinition, DriverConfig, AnthropicDriver, OpenAiDriver, GeminiDriver, LocalDriver,
};
pub use tool::{Tool, ToolRegistry, ToolContext};
pub use loop_runner::{AgentLoop, AgentLoopResult, LoopEvent};
pub use loop_guard::{LoopGuard, LoopGuardConfig, LoopGuardResult};
pub use stream::{StreamEvent, StreamSender};

View File

@@ -0,0 +1,103 @@
//! Loop guard to prevent infinite tool loops
use sha2::{Sha256, Digest};
use std::collections::HashMap;
/// Configuration for loop guard
///
/// Thresholds are compared with strict `>`, so e.g. `warn_threshold: 3`
/// means the warning fires on the 4th identical call.
#[derive(Debug, Clone)]
pub struct LoopGuardConfig {
    /// Warn after this many repetitions
    pub warn_threshold: u32,
    /// Block tool call after this many repetitions
    pub block_threshold: u32,
    /// Terminate loop after this many total repetitions
    pub circuit_breaker: u32,
}
impl Default for LoopGuardConfig {
fn default() -> Self {
Self {
warn_threshold: 3,
block_threshold: 5,
circuit_breaker: 30,
}
}
}
/// Loop guard state
///
/// Tracks per-(tool, params) repetition counts plus a session-wide total to
/// detect and break infinite tool loops.
#[derive(Debug)]
pub struct LoopGuard {
    config: LoopGuardConfig,
    /// Hash of (tool_name, params) -> count
    call_counts: HashMap<String, u32>,
    /// Total calls in this session
    total_calls: u32,
}
impl LoopGuard {
pub fn new(config: LoopGuardConfig) -> Self {
Self {
config,
call_counts: HashMap::new(),
total_calls: 0,
}
}
/// Check if a tool call should be allowed
pub fn check(&mut self, tool_name: &str, params: &serde_json::Value) -> LoopGuardResult {
let hash = self.hash_call(tool_name, params);
let count = self.call_counts.entry(hash).or_insert(0);
self.total_calls += 1;
*count += 1;
// Check circuit breaker first
if self.total_calls > self.config.circuit_breaker {
return LoopGuardResult::CircuitBreaker;
}
// Check block threshold
if *count > self.config.block_threshold {
return LoopGuardResult::Blocked;
}
// Check warn threshold
if *count > self.config.warn_threshold {
return LoopGuardResult::Warn;
}
LoopGuardResult::Allowed
}
/// Reset the guard state
pub fn reset(&mut self) {
self.call_counts.clear();
self.total_calls = 0;
}
fn hash_call(&self, tool_name: &str, params: &serde_json::Value) -> String {
let mut hasher = Sha256::new();
hasher.update(tool_name.as_bytes());
hasher.update(params.to_string().as_bytes());
format!("{:x}", hasher.finalize())
}
}
impl Default for LoopGuard {
    /// Guard with the default thresholds (warn 3 / block 5 / breaker 30).
    fn default() -> Self {
        Self::new(LoopGuardConfig::default())
    }
}
/// Result of loop guard check
///
/// Ordered from least to most severe; `check` returns the most severe
/// applicable variant.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LoopGuardResult {
    /// Call is allowed
    Allowed,
    /// Call is allowed but should warn
    Warn,
    /// Call should be blocked
    Blocked,
    /// Loop should be terminated
    CircuitBreaker,
}

View File

@@ -0,0 +1,106 @@
//! Agent loop implementation
use std::sync::Arc;
use tokio::sync::mpsc;
use zclaw_types::{AgentId, SessionId, Message, Result};
use crate::driver::{LlmDriver, CompletionRequest};
use crate::tool::ToolRegistry;
use crate::loop_guard::LoopGuard;
use zclaw_memory::MemoryStore;
/// Agent loop runner
///
/// Owns everything one agent needs for a turn: the LLM driver, tool
/// registry, session memory, and a loop guard against runaway tool loops.
pub struct AgentLoop {
    agent_id: AgentId,         // identity of the agent this loop runs for
    driver: Arc<dyn LlmDriver>, // shared, provider-agnostic LLM backend
    tools: ToolRegistry,        // tools exposed to the model
    memory: Arc<MemoryStore>,   // persistent session message store
    loop_guard: LoopGuard,      // repetition/circuit-breaker guard (not yet wired in)
}
impl AgentLoop {
    /// Build a loop runner for one agent.
    pub fn new(
        agent_id: AgentId,
        driver: Arc<dyn LlmDriver>,
        tools: ToolRegistry,
        memory: Arc<MemoryStore>,
    ) -> Self {
        Self {
            agent_id,
            driver,
            tools,
            memory,
            loop_guard: LoopGuard::default(),
        }
    }

    /// Run one turn of the agent loop: persist the user message, send the
    /// full session history to the LLM, and return the result.
    ///
    /// Currently a partial implementation: a single completion is performed,
    /// the tool-execution loop is not yet run, and the returned `response`
    /// text is a placeholder.
    pub async fn run(&self, session_id: SessionId, input: String) -> Result<AgentLoopResult> {
        // Persist the incoming user message so it becomes part of the context.
        let user_message = Message::user(input);
        self.memory.append_message(&session_id, &user_message).await?;
        // The full conversation history forms the LLM context.
        let messages = self.memory.get_messages(&session_id).await?;
        let request = CompletionRequest {
            model: "claude-sonnet-4-20250514".to_string(), // TODO: Get from agent config
            system: None, // TODO: Get from agent config
            messages,
            tools: self.tools.definitions(),
            max_tokens: Some(4096),
            temperature: Some(0.7),
            stop: Vec::new(),
            stream: false,
        };
        // Call LLM
        let response = self.driver.complete(request).await?;
        // TODO: Implement the full loop with tool execution (bounded
        // iterations, loop_guard checks, assistant-text extraction). The
        // previous unused `mut iterations` / `max_iterations` locals were
        // removed; iterations is 0 until the loop exists.
        Ok(AgentLoopResult {
            response: "Response placeholder".to_string(),
            input_tokens: response.input_tokens,
            output_tokens: response.output_tokens,
            iterations: 0,
        })
    }

    /// Run the agent loop with streaming.
    ///
    /// Not implemented yet: the sender is dropped immediately, so the
    /// returned receiver yields no events and closes at once.
    pub async fn run_streaming(
        &self,
        session_id: SessionId,
        input: String,
    ) -> Result<mpsc::Receiver<LoopEvent>> {
        let (_tx, rx) = mpsc::channel(100);
        // TODO: Implement streaming
        Ok(rx)
    }
}
/// Result of an agent loop execution
#[derive(Debug, Clone)]
pub struct AgentLoopResult {
    /// Final assistant text for the turn
    pub response: String,
    /// Prompt tokens consumed
    pub input_tokens: u32,
    /// Completion tokens generated
    pub output_tokens: u32,
    /// Number of LLM/tool round-trips performed
    pub iterations: usize,
}
/// Events emitted during streaming
#[derive(Debug, Clone)]
pub enum LoopEvent {
    /// Incremental assistant text
    Delta(String),
    /// A tool invocation began
    ToolStart { name: String, input: serde_json::Value },
    /// A tool invocation finished
    ToolEnd { name: String, output: serde_json::Value },
    /// The turn finished; carries the final result
    Complete(AgentLoopResult),
    /// The turn failed with an error message
    Error(String),
}

View File

@@ -0,0 +1,54 @@
//! Streaming utilities
use tokio::sync::mpsc;
use zclaw_types::Result;
/// Stream event for LLM responses
#[derive(Debug, Clone)]
pub enum StreamEvent {
    /// Text delta received
    TextDelta(String),
    /// Thinking delta received
    ThinkingDelta(String),
    /// Tool use started
    ToolUseStart { id: String, name: String },
    /// Tool use input chunk (partial JSON arriving incrementally)
    ToolUseInput { id: String, chunk: String },
    /// Tool use completed, with the fully-assembled input
    ToolUseEnd { id: String, input: serde_json::Value },
    /// Response completed
    Complete { input_tokens: u32, output_tokens: u32 },
    /// Error occurred
    Error(String),
}
/// Stream sender wrapper
///
/// Thin convenience layer over an mpsc sender of [`StreamEvent`]s.
pub struct StreamSender {
    tx: mpsc::Sender<StreamEvent>,
}
impl StreamSender {
pub fn new(tx: mpsc::Sender<StreamEvent>) -> Self {
Self { tx }
}
pub async fn send_text(&self, delta: impl Into<String>) -> Result<()> {
self.tx.send(StreamEvent::TextDelta(delta.into())).await.ok();
Ok(())
}
pub async fn send_thinking(&self, delta: impl Into<String>) -> Result<()> {
self.tx.send(StreamEvent::ThinkingDelta(delta.into())).await.ok();
Ok(())
}
pub async fn send_complete(&self, input_tokens: u32, output_tokens: u32) -> Result<()> {
self.tx.send(StreamEvent::Complete { input_tokens, output_tokens }).await.ok();
Ok(())
}
pub async fn send_error(&self, error: impl Into<String>) -> Result<()> {
self.tx.send(StreamEvent::Error(error.into())).await.ok();
Ok(())
}
}

View File

@@ -0,0 +1,72 @@
//! Tool system for agent capabilities
use async_trait::async_trait;
use serde_json::Value;
use zclaw_types::{AgentId, Result};
use crate::driver::ToolDefinition;
/// Tool trait for implementing agent tools
///
/// `Send + Sync` so tools can live in a shared registry behind trait objects.
#[async_trait]
pub trait Tool: Send + Sync {
    /// Get the tool name (unique key used for lookup and LLM tool calls)
    fn name(&self) -> &str;
    /// Get the tool description shown to the LLM
    fn description(&self) -> &str;
    /// Get the JSON schema for input parameters
    fn input_schema(&self) -> Value;
    /// Execute the tool with a JSON input matching the schema
    async fn execute(&self, input: Value, context: &ToolContext) -> Result<Value>;
}
/// Context provided to tool execution
#[derive(Debug, Clone)]
pub struct ToolContext {
    /// Agent on whose behalf the tool runs
    pub agent_id: AgentId,
    /// Optional working directory for filesystem/shell tools
    pub working_directory: Option<String>,
}
/// Tool registry for managing available tools
///
/// A simple ordered list; lookups by name are linear scans.
pub struct ToolRegistry {
    tools: Vec<Box<dyn Tool>>,
}
impl ToolRegistry {
pub fn new() -> Self {
Self { tools: Vec::new() }
}
pub fn register(&mut self, tool: Box<dyn Tool>) {
self.tools.push(tool);
}
pub fn get(&self, name: &str) -> Option<&dyn Tool> {
self.tools.iter().find(|t| t.name() == name).map(|t| t.as_ref())
}
pub fn list(&self) -> Vec<&dyn Tool> {
self.tools.iter().map(|t| t.as_ref()).collect()
}
pub fn definitions(&self) -> Vec<ToolDefinition> {
self.tools.iter().map(|t| {
ToolDefinition::new(
t.name(),
t.description(),
t.input_schema(),
)
}).collect()
}
}
impl Default for ToolRegistry {
    /// Empty registry, equivalent to `ToolRegistry::new()`.
    fn default() -> Self {
        Self::new()
    }
}
// Built-in tools module
pub mod builtin;

View File

@@ -0,0 +1,21 @@
//! Built-in tools
mod file_read;
mod file_write;
mod shell_exec;
mod web_fetch;
pub use file_read::FileReadTool;
pub use file_write::FileWriteTool;
pub use shell_exec::ShellExecTool;
pub use web_fetch::WebFetchTool;
use crate::tool::{ToolRegistry, Tool};
/// Register all built-in tools
///
/// Adds the four stock tools (file read/write, shell exec, web fetch) to the
/// given registry. Safe to call on a registry that already has other tools.
pub fn register_builtin_tools(registry: &mut ToolRegistry) {
    registry.register(Box::new(FileReadTool::new()));
    registry.register(Box::new(FileWriteTool::new()));
    registry.register(Box::new(ShellExecTool::new()));
    registry.register(Box::new(WebFetchTool::new()));
}

View File

@@ -0,0 +1,55 @@
//! File read tool
use async_trait::async_trait;
use serde_json::{json, Value};
use zclaw_types::{Result, ZclawError};
use crate::tool::{Tool, ToolContext};
/// Unit tool that reads a file's contents (execution currently stubbed).
pub struct FileReadTool;

impl FileReadTool {
    /// Create the tool; it carries no state.
    pub fn new() -> Self {
        Self
    }
}
#[async_trait]
impl Tool for FileReadTool {
fn name(&self) -> &str {
"file_read"
}
fn description(&self) -> &str {
"Read the contents of a file from the filesystem"
}
fn input_schema(&self) -> Value {
json!({
"type": "object",
"properties": {
"path": {
"type": "string",
"description": "The path to the file to read"
}
},
"required": ["path"]
})
}
async fn execute(&self, input: Value, _context: &ToolContext) -> Result<Value> {
let path = input["path"].as_str()
.ok_or_else(|| ZclawError::InvalidInput("Missing 'path' parameter".into()))?;
// TODO: Implement actual file reading with path validation
Ok(json!({
"content": format!("File content placeholder for: {}", path)
}))
}
}
impl Default for FileReadTool {
    /// Equivalent to `FileReadTool::new()`.
    fn default() -> Self {
        Self::new()
    }
}

View File

@@ -0,0 +1,62 @@
//! File write tool
use async_trait::async_trait;
use serde_json::{json, Value};
use zclaw_types::{Result, ZclawError};
use crate::tool::{Tool, ToolContext};
/// Unit tool that writes content to a file (execution currently stubbed).
pub struct FileWriteTool;

impl FileWriteTool {
    /// Create the tool; it carries no state.
    pub fn new() -> Self {
        Self
    }
}
#[async_trait]
impl Tool for FileWriteTool {
fn name(&self) -> &str {
"file_write"
}
fn description(&self) -> &str {
"Write content to a file on the filesystem"
}
fn input_schema(&self) -> Value {
json!({
"type": "object",
"properties": {
"path": {
"type": "string",
"description": "The path to the file to write"
},
"content": {
"type": "string",
"description": "The content to write to the file"
}
},
"required": ["path", "content"]
})
}
async fn execute(&self, input: Value, _context: &ToolContext) -> Result<Value> {
let path = input["path"].as_str()
.ok_or_else(|| ZclawError::InvalidInput("Missing 'path' parameter".into()))?;
let content = input["content"].as_str()
.ok_or_else(|| ZclawError::InvalidInput("Missing 'content' parameter".into()))?;
// TODO: Implement actual file writing with path validation
Ok(json!({
"success": true,
"bytes_written": content.len()
}))
}
}
impl Default for FileWriteTool {
    /// Equivalent to `FileWriteTool::new()`.
    fn default() -> Self {
        Self::new()
    }
}

View File

@@ -0,0 +1,61 @@
//! Shell execution tool
use async_trait::async_trait;
use serde_json::{json, Value};
use zclaw_types::{Result, ZclawError};
use crate::tool::{Tool, ToolContext};
/// Unit tool that runs a shell command (execution currently stubbed).
pub struct ShellExecTool;

impl ShellExecTool {
    /// Create the tool; it carries no state.
    pub fn new() -> Self {
        Self
    }
}
#[async_trait]
impl Tool for ShellExecTool {
fn name(&self) -> &str {
"shell_exec"
}
fn description(&self) -> &str {
"Execute a shell command and return the output"
}
fn input_schema(&self) -> Value {
json!({
"type": "object",
"properties": {
"command": {
"type": "string",
"description": "The command to execute"
},
"timeout": {
"type": "integer",
"description": "Timeout in seconds (default: 30)"
}
},
"required": ["command"]
})
}
async fn execute(&self, input: Value, _context: &ToolContext) -> Result<Value> {
let command = input["command"].as_str()
.ok_or_else(|| ZclawError::InvalidInput("Missing 'command' parameter".into()))?;
// TODO: Implement actual shell execution with security constraints
Ok(json!({
"stdout": format!("Command output placeholder for: {}", command),
"stderr": "",
"exit_code": 0
}))
}
}
impl Default for ShellExecTool {
    /// Equivalent to `ShellExecTool::new()`.
    fn default() -> Self {
        Self::new()
    }
}

View File

@@ -0,0 +1,61 @@
//! Web fetch tool
use async_trait::async_trait;
use serde_json::{json, Value};
use zclaw_types::{Result, ZclawError};
use crate::tool::{Tool, ToolContext};
/// Unit tool that fetches a URL (execution currently stubbed).
pub struct WebFetchTool;

impl WebFetchTool {
    /// Create the tool; it carries no state.
    pub fn new() -> Self {
        Self
    }
}
#[async_trait]
impl Tool for WebFetchTool {
fn name(&self) -> &str {
"web_fetch"
}
fn description(&self) -> &str {
"Fetch content from a URL"
}
fn input_schema(&self) -> Value {
json!({
"type": "object",
"properties": {
"url": {
"type": "string",
"description": "The URL to fetch"
},
"method": {
"type": "string",
"enum": ["GET", "POST"],
"description": "HTTP method (default: GET)"
}
},
"required": ["url"]
})
}
async fn execute(&self, input: Value, _context: &ToolContext) -> Result<Value> {
let url = input["url"].as_str()
.ok_or_else(|| ZclawError::InvalidInput("Missing 'url' parameter".into()))?;
// TODO: Implement actual web fetching with SSRF protection
Ok(json!({
"status": 200,
"content": format!("Fetched content placeholder for: {}", url)
}))
}
}
impl Default for WebFetchTool {
    /// Equivalent to `WebFetchTool::new()`.
    fn default() -> Self {
        Self::new()
    }
}