feat(runtime): add streaming support to LlmDriver trait

- Add StreamChunk and StreamEvent types for Tauri event emission
- Add stream() method to LlmDriver trait with async-stream
- Implement Anthropic streaming with SSE parsing
- Implement OpenAI streaming with SSE parsing
- Add placeholder stream() for Gemini and Local drivers

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-03-24 01:44:40 +08:00
parent 4ba0a531aa
commit 820e3a1ffe
8 changed files with 409 additions and 13 deletions

View File

@@ -1,10 +1,13 @@
//! Local LLM driver (Ollama, LM Studio, vLLM, etc.)
use async_trait::async_trait;
use futures::{Stream, StreamExt};
use reqwest::Client;
use zclaw_types::Result;
use std::pin::Pin;
use zclaw_types::{Result, ZclawError};
use super::{CompletionRequest, CompletionResponse, ContentBlock, LlmDriver, StopReason};
use crate::stream::StreamChunk;
/// Local LLM driver for Ollama, LM Studio, vLLM, etc.
pub struct LocalDriver {
@@ -56,4 +59,14 @@ impl LlmDriver for LocalDriver {
stop_reason: StopReason::EndTurn,
})
}
/// Streaming completion for local backends.
///
/// Not yet implemented: rather than panicking, this yields a stream whose
/// single item is an `Err`, so callers that drive the stream observe a
/// normal error path instead of a crash.
fn stream(
    &self,
    _request: CompletionRequest,
) -> Pin<Box<dyn Stream<Item = Result<StreamChunk>> + Send + '_>> {
    // Build the error up front and move it into the one-shot future;
    // `futures::stream::once` turns that future into a single-item stream.
    let unimplemented =
        ZclawError::LlmError("Local driver streaming not yet implemented".to_string());
    Box::pin(futures::stream::once(async move { Err(unimplemented) }))
}
}