feat(runtime): add streaming support to LlmDriver trait
- Add StreamChunk and StreamEvent types for Tauri event emission
- Add stream() method to LlmDriver trait with async-stream
- Implement Anthropic streaming with SSE parsing
- Implement OpenAI streaming with SSE parsing
- Add placeholder stream() for Gemini and Local drivers

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -3,10 +3,14 @@
|
||||
//! This module provides a unified interface for multiple LLM providers.
|
||||
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use futures::Stream;
|
||||
use secrecy::SecretString;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::pin::Pin;
|
||||
use zclaw_types::Result;
|
||||
|
||||
use crate::stream::StreamChunk;
|
||||
|
||||
mod anthropic;
|
||||
mod openai;
|
||||
mod gemini;
|
||||
@@ -26,6 +30,13 @@ pub trait LlmDriver: Send + Sync {
|
||||
/// Send a completion request
|
||||
async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse>;
|
||||
|
||||
/// Send a streaming completion request
|
||||
/// Returns a stream of chunks
|
||||
fn stream(
|
||||
&self,
|
||||
request: CompletionRequest,
|
||||
) -> Pin<Box<dyn Stream<Item = Result<StreamChunk>> + Send + '_>>;
|
||||
|
||||
/// Check if the driver is properly configured
|
||||
fn is_configured(&self) -> bool;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user