feat: add internal ZCLAW kernel crates to git tracking

This commit is contained in:
Author: iven
Date: 2026-03-22 09:26:36 +08:00
Parent: d72c0f7161
Commit: 58cd24f85b
36 changed files with 10298 additions and 0 deletions

View File

@@ -0,0 +1,293 @@
//! OpenAI-compatible driver implementation
use async_trait::async_trait;
use secrecy::{ExposeSecret, SecretString};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use zclaw_types::{Result, ZclawError};
use super::{CompletionRequest, CompletionResponse, ContentBlock, LlmDriver, StopReason, ToolDefinition};
/// OpenAI-compatible driver.
///
/// Holds a reusable HTTP client, the bearer token used for authentication,
/// and the base URL of the chat-completions endpoint (overridable via
/// `with_base_url` for OpenAI-compatible third-party providers).
pub struct OpenAiDriver {
/// Shared across requests; reqwest clients pool connections internally.
client: Client,
/// Wrapped in `SecretString` so the key is not leaked via `Debug`/logs.
api_key: SecretString,
/// Endpoint root without a trailing slash; `/chat/completions` is appended.
base_url: String,
}
impl OpenAiDriver {
    /// Create a driver targeting the official OpenAI endpoint.
    pub fn new(api_key: SecretString) -> Self {
        // Delegate so there is a single construction path.
        Self::with_base_url(api_key, "https://api.openai.com/v1".to_string())
    }

    /// Create a driver targeting a custom OpenAI-compatible endpoint.
    ///
    /// `base_url` should be the API root (no trailing slash), e.g.
    /// a self-hosted or proxy endpoint speaking the same wire format.
    pub fn with_base_url(api_key: SecretString, base_url: String) -> Self {
        Self {
            client: Client::new(),
            api_key,
            base_url,
        }
    }
}
#[async_trait]
impl LlmDriver for OpenAiDriver {
fn provider(&self) -> &str {
"openai"
}
fn is_configured(&self) -> bool {
!self.api_key.expose_secret().is_empty()
}
async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse> {
let api_request = self.build_api_request(&request);
let response = self.client
.post(format!("{}/chat/completions", self.base_url))
.header("Authorization", format!("Bearer {}", self.api_key.expose_secret()))
.header("Content-Type", "application/json")
.json(&api_request)
.send()
.await
.map_err(|e| ZclawError::LlmError(format!("HTTP request failed: {}", e)))?;
if !response.status().is_success() {
let status = response.status();
let body = response.text().await.unwrap_or_default();
return Err(ZclawError::LlmError(format!("API error {}: {}", status, body)));
}
let api_response: OpenAiResponse = response
.json()
.await
.map_err(|e| ZclawError::LlmError(format!("Failed to parse response: {}", e)))?;
Ok(self.convert_response(api_response, request.model))
}
}
impl OpenAiDriver {
fn build_api_request(&self, request: &CompletionRequest) -> OpenAiRequest {
let messages: Vec<OpenAiMessage> = request.messages
.iter()
.filter_map(|msg| match msg {
zclaw_types::Message::User { content } => Some(OpenAiMessage {
role: "user".to_string(),
content: Some(content.clone()),
tool_calls: None,
}),
zclaw_types::Message::Assistant { content, thinking: _ } => Some(OpenAiMessage {
role: "assistant".to_string(),
content: Some(content.clone()),
tool_calls: None,
}),
zclaw_types::Message::System { content } => Some(OpenAiMessage {
role: "system".to_string(),
content: Some(content.clone()),
tool_calls: None,
}),
zclaw_types::Message::ToolUse { id, tool, input } => Some(OpenAiMessage {
role: "assistant".to_string(),
content: None,
tool_calls: Some(vec![OpenAiToolCall {
id: id.clone(),
r#type: "function".to_string(),
function: FunctionCall {
name: tool.to_string(),
arguments: serde_json::to_string(input).unwrap_or_default(),
},
}]),
}),
zclaw_types::Message::ToolResult { tool_call_id, output, is_error, .. } => Some(OpenAiMessage {
role: "tool".to_string(),
content: Some(if *is_error {
format!("Error: {}", output)
} else {
output.to_string()
}),
tool_calls: None,
}),
})
.collect();
// Add system prompt if provided
let mut messages = messages;
if let Some(system) = &request.system {
messages.insert(0, OpenAiMessage {
role: "system".to_string(),
content: Some(system.clone()),
tool_calls: None,
});
}
let tools: Vec<OpenAiTool> = request.tools
.iter()
.map(|t| OpenAiTool {
r#type: "function".to_string(),
function: FunctionDef {
name: t.name.clone(),
description: t.description.clone(),
parameters: t.input_schema.clone(),
},
})
.collect();
OpenAiRequest {
model: request.model.clone(),
messages,
max_tokens: request.max_tokens,
temperature: request.temperature,
stop: if request.stop.is_empty() { None } else { Some(request.stop.clone()) },
stream: request.stream,
tools: if tools.is_empty() { None } else { Some(tools) },
}
}
fn convert_response(&self, api_response: OpenAiResponse, model: String) -> CompletionResponse {
let choice = api_response.choices.first();
let (content, stop_reason) = match choice {
Some(c) => {
let blocks = if let Some(text) = &c.message.content {
vec![ContentBlock::Text { text: text.clone() }]
} else if let Some(tool_calls) = &c.message.tool_calls {
tool_calls.iter().map(|tc| ContentBlock::ToolUse {
id: tc.id.clone(),
name: tc.function.name.clone(),
input: serde_json::from_str(&tc.function.arguments).unwrap_or(serde_json::Value::Null),
}).collect()
} else {
vec![ContentBlock::Text { text: String::new() }]
};
let stop = match c.finish_reason.as_deref() {
Some("stop") => StopReason::EndTurn,
Some("length") => StopReason::MaxTokens,
Some("tool_calls") => StopReason::ToolUse,
_ => StopReason::EndTurn,
};
(blocks, stop)
}
None => (vec![ContentBlock::Text { text: String::new() }], StopReason::EndTurn),
};
let (input_tokens, output_tokens) = api_response.usage
.map(|u| (u.prompt_tokens, u.completion_tokens))
.unwrap_or((0, 0));
CompletionResponse {
content,
model,
input_tokens,
output_tokens,
stop_reason,
}
}
}
// OpenAI API types
/// Outgoing chat-completions request body (serialize-only).
///
/// Optional fields are omitted from the JSON entirely when unset so that
/// the provider applies its own defaults.
#[derive(Serialize)]
struct OpenAiRequest {
    model: String,
    messages: Vec<OpenAiMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    stop: Option<Vec<String>>,
    // `#[serde(default)]` was removed: it only affects Deserialize, and this
    // struct derives Serialize only, so the attribute was dead/misleading.
    stream: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    tools: Option<Vec<OpenAiTool>>,
}
/// One chat message in OpenAI wire format (serialize-only).
///
/// NOTE(review): the OpenAI API expects role "tool" messages to carry a
/// `tool_call_id` field echoing the originating call; this struct cannot
/// express that — verify against the API reference.
#[derive(Serialize)]
struct OpenAiMessage {
/// One of "system", "user", "assistant", or "tool".
role: String,
/// Text body; None for assistant messages that only carry tool calls.
#[serde(skip_serializing_if = "Option::is_none")]
content: Option<String>,
/// Tool invocations attached to an assistant message, if any.
#[serde(skip_serializing_if = "Option::is_none")]
tool_calls: Option<Vec<OpenAiToolCall>>,
}
/// Outgoing tool-call entry inside an assistant message (serialize-only).
#[derive(Serialize)]
struct OpenAiToolCall {
/// Provider-assigned call id, echoed back with the tool result.
id: String,
/// Always "function" in the current API (`type` is a Rust keyword, hence `r#`).
r#type: String,
function: FunctionCall,
}
impl Default for OpenAiToolCall {
    /// An empty tool call: blank id, name, and arguments, with the only
    /// valid call type, "function".
    fn default() -> Self {
        let function = FunctionCall {
            name: String::new(),
            arguments: String::new(),
        };
        Self {
            id: String::new(),
            r#type: String::from("function"),
            function,
        }
    }
}
/// Function name plus JSON-encoded arguments for an outgoing tool call.
#[derive(Serialize)]
struct FunctionCall {
name: String,
/// Arguments as a JSON string, per the OpenAI wire format.
arguments: String,
}
/// Tool made available to the model in a request (serialize-only).
#[derive(Serialize)]
struct OpenAiTool {
/// Always "function" in the current API.
r#type: String,
function: FunctionDef,
}
/// Declaration of a callable function: name, description, and parameter schema.
#[derive(Serialize)]
struct FunctionDef {
name: String,
description: String,
/// JSON Schema describing the accepted arguments.
parameters: serde_json::Value,
}
/// Top-level chat-completions response; only the fields we consume.
#[derive(Deserialize)]
struct OpenAiResponse {
/// Candidate completions; only the first is used by `convert_response`.
choices: Vec<OpenAiChoice>,
/// Token accounting; optional so a missing field does not fail parsing.
usage: Option<OpenAiUsage>,
}
/// One candidate completion returned by the API.
#[derive(Deserialize)]
struct OpenAiChoice {
message: OpenAiResponseMessage,
/// e.g. "stop", "length", "tool_calls"; mapped to `StopReason`.
finish_reason: Option<String>,
}
/// Assistant message inside a choice; text and/or tool calls.
#[derive(Deserialize)]
struct OpenAiResponseMessage {
content: Option<String>,
tool_calls: Option<Vec<OpenAiToolCallResponse>>,
}
/// Incoming tool call; the id must be echoed back with the tool's result.
#[derive(Deserialize)]
struct OpenAiToolCallResponse {
id: String,
function: FunctionCallResponse,
}
/// Function invocation details from a response tool call.
#[derive(Deserialize)]
struct FunctionCallResponse {
name: String,
/// JSON-encoded argument string; parsed in `convert_response`.
arguments: String,
}
/// Token usage reported by the API for one request.
#[derive(Deserialize)]
struct OpenAiUsage {
prompt_tokens: u32,
completion_tokens: u32,
}