feat: add internal ZCLAW kernel crates to git tracking

This commit is contained in:
iven
2026-03-22 09:26:36 +08:00
parent d72c0f7161
commit 58cd24f85b
36 changed files with 10298 additions and 0 deletions

View File

@@ -0,0 +1,169 @@
//! LLM Driver trait and implementations
//!
//! This module provides a unified interface for multiple LLM providers.
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use secrecy::SecretString;
use zclaw_types::Result;
mod anthropic;
mod openai;
mod gemini;
mod local;
pub use anthropic::AnthropicDriver;
pub use openai::OpenAiDriver;
pub use gemini::GeminiDriver;
pub use local::LocalDriver;
/// LLM Driver trait - unified interface for all providers
///
/// Implemented by each provider backend (Anthropic, OpenAI, Gemini, local).
/// Implementations must be thread-safe (`Send + Sync`) so a single driver
/// instance can be shared across async tasks.
#[async_trait]
pub trait LlmDriver: Send + Sync {
    /// Get the provider name
    ///
    /// A short static identifier (e.g. the provider's canonical name) used
    /// for logging and driver selection.
    fn provider(&self) -> &str;

    /// Send a completion request
    ///
    /// Performs one round-trip to the provider and returns the full
    /// [`CompletionResponse`], or an error from `zclaw_types::Result`.
    async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse>;

    /// Check if the driver is properly configured
    ///
    /// Returns `true` when the driver has everything it needs (e.g. an API
    /// key or reachable base URL) to attempt a request.
    fn is_configured(&self) -> bool;
}
/// Completion request
///
/// Provider-agnostic request payload; each driver translates this into its
/// provider's wire format. Construct via [`Default`] and override fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletionRequest {
    /// Model identifier
    pub model: String,
    /// System prompt
    pub system: Option<String>,
    /// Conversation messages
    pub messages: Vec<zclaw_types::Message>,
    /// Available tools
    pub tools: Vec<ToolDefinition>,
    /// Maximum tokens to generate
    pub max_tokens: Option<u32>,
    /// Temperature (0.0 - 1.0)
    pub temperature: Option<f32>,
    /// Stop sequences
    pub stop: Vec<String>,
    /// Enable streaming
    pub stream: bool,
}
impl Default for CompletionRequest {
fn default() -> Self {
Self {
model: String::new(),
system: None,
messages: Vec::new(),
tools: Vec::new(),
max_tokens: Some(4096),
temperature: Some(0.7),
stop: Vec::new(),
stream: false,
}
}
}
/// Tool definition for LLM
///
/// Describes one callable tool advertised to the model.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
    /// Tool name the model uses to invoke it.
    pub name: String,
    /// Human-readable description shown to the model.
    pub description: String,
    /// JSON Schema for the tool's input arguments.
    pub input_schema: serde_json::Value,
}
impl ToolDefinition {
    /// Build a tool definition from a name, a description for the model,
    /// and a JSON Schema describing the expected input arguments.
    pub fn new(name: impl Into<String>, description: impl Into<String>, schema: serde_json::Value) -> Self {
        let name = name.into();
        let description = description.into();
        let input_schema = schema;
        Self { name, description, input_schema }
    }
}
/// Completion response
///
/// Provider-agnostic result of a completion call, including token usage
/// as reported by the provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletionResponse {
    /// Generated content blocks
    pub content: Vec<ContentBlock>,
    /// Model used
    pub model: String,
    /// Input tokens
    pub input_tokens: u32,
    /// Output tokens
    pub output_tokens: u32,
    /// Stop reason
    pub stop_reason: StopReason,
}
/// Content block in response
///
/// Serialized with an internal `"type"` tag in snake_case, e.g.
/// `{"type": "tool_use", ...}` — matching the Anthropic-style block format.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlock {
    /// Plain generated text.
    Text { text: String },
    /// Model reasoning/thinking content.
    Thinking { thinking: String },
    /// A tool invocation request: call `name` with JSON `input`; `id`
    /// correlates the eventual tool result back to this request.
    ToolUse { id: String, name: String, input: serde_json::Value },
}
/// Stop reason
///
/// Why generation ended. Serialized in snake_case (e.g. `"end_turn"`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum StopReason {
    /// The model finished its turn naturally.
    EndTurn,
    /// Output hit the `max_tokens` limit.
    MaxTokens,
    /// A configured stop sequence was produced.
    StopSequence,
    /// The model stopped to request a tool call.
    ToolUse,
    /// The provider reported an error condition.
    Error,
}
/// Driver configuration
///
/// One variant per supported provider. API keys are held in
/// [`SecretString`] so they are redacted from `Debug` output and zeroized
/// on drop. Not serializable by design — secrets stay out of config dumps.
#[derive(Debug, Clone)]
pub enum DriverConfig {
    /// Anthropic API, keyed access.
    Anthropic { api_key: SecretString },
    /// OpenAI API (or compatible); `base_url` overrides the default endpoint.
    OpenAi { api_key: SecretString, base_url: Option<String> },
    /// Google Gemini API, keyed access.
    Gemini { api_key: SecretString },
    /// Local OpenAI-compatible server (e.g. Ollama) — no API key.
    Local { base_url: String },
}
impl DriverConfig {
    /// Anthropic config from an API key.
    pub fn anthropic(api_key: impl Into<String>) -> Self {
        let api_key = SecretString::new(api_key.into());
        Self::Anthropic { api_key }
    }

    /// OpenAI config from an API key, using the default endpoint.
    pub fn openai(api_key: impl Into<String>) -> Self {
        let api_key = SecretString::new(api_key.into());
        Self::OpenAi { api_key, base_url: None }
    }

    /// OpenAI-compatible config with an explicit base URL (e.g. a proxy
    /// or alternative provider exposing the OpenAI API).
    pub fn openai_with_base(api_key: impl Into<String>, base_url: impl Into<String>) -> Self {
        let api_key = SecretString::new(api_key.into());
        let base_url = Some(base_url.into());
        Self::OpenAi { api_key, base_url }
    }

    /// Gemini config from an API key.
    pub fn gemini(api_key: impl Into<String>) -> Self {
        let api_key = SecretString::new(api_key.into());
        Self::Gemini { api_key }
    }

    /// Local config pointed at Ollama's default endpoint.
    pub fn ollama() -> Self {
        Self::local("http://localhost:11434")
    }

    /// Local config with a caller-supplied base URL.
    pub fn local(base_url: impl Into<String>) -> Self {
        Self::Local { base_url: base_url.into() }
    }
}