feat(runtime): add streaming support to LlmDriver trait
- Add StreamChunk and StreamEvent types for Tauri event emission
- Add stream() method to LlmDriver trait with async-stream
- Implement Anthropic streaming with SSE parsing
- Implement OpenAI streaming with SSE parsing
- Add placeholder stream() for Gemini and Local drivers

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,10 +1,13 @@
|
||||
//! Local LLM driver (Ollama, LM Studio, vLLM, etc.)
|
||||
|
||||
use async_trait::async_trait;
|
||||
use futures::{Stream, StreamExt};
|
||||
use reqwest::Client;
|
||||
use zclaw_types::Result;
|
||||
use std::pin::Pin;
|
||||
use zclaw_types::{Result, ZclawError};
|
||||
|
||||
use super::{CompletionRequest, CompletionResponse, ContentBlock, LlmDriver, StopReason};
|
||||
use crate::stream::StreamChunk;
|
||||
|
||||
/// Local LLM driver for Ollama, LM Studio, vLLM, etc.
|
||||
pub struct LocalDriver {
|
||||
@@ -56,4 +59,14 @@ impl LlmDriver for LocalDriver {
|
||||
stop_reason: StopReason::EndTurn,
|
||||
})
|
||||
}
|
||||
|
||||
fn stream(
|
||||
&self,
|
||||
_request: CompletionRequest,
|
||||
) -> Pin<Box<dyn Stream<Item = Result<StreamChunk>> + Send + '_>> {
|
||||
// Placeholder - return error stream
|
||||
Box::pin(futures::stream::once(async {
|
||||
Err(ZclawError::LlmError("Local driver streaming not yet implemented".to_string()))
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user