fix(desktop): resolve 2 release-blocking P1 defects

P1-04: GenerationPipeline hardcoded model="default" causing classroom
generation 404. Added model field to GenerationPipeline struct, passed
from kernel config via with_driver(driver, model). Static scene
generation now receives model parameter.

P1-03: LLM API concurrent 500 DATABASE_ERROR. Added transient DB error
retry (PoolTimedOut/Io) in create_relay_task with 200ms backoff.
Recommend setting ZCLAW_DB_MIN_CONNECTIONS=10 for burst resilience.
This commit is contained in:
iven
2026-04-05 19:18:41 +08:00
parent a458e3f7d8
commit 90855dc83e
3 changed files with 35 additions and 10 deletions

View File

@@ -248,6 +248,7 @@ pub struct GenerationPipeline {
scenes: Arc<RwLock<Vec<GeneratedScene>>>,
agents_store: Arc<RwLock<Vec<AgentProfile>>>,
driver: Option<Arc<dyn LlmDriver>>,
+model: String,
}
impl GenerationPipeline {
@@ -265,12 +266,14 @@ impl GenerationPipeline {
scenes: Arc::new(RwLock::new(Vec::new())),
agents_store: Arc::new(RwLock::new(Vec::new())),
driver: None,
+model: "default".to_string(),
}
}
-pub fn with_driver(driver: Arc<dyn LlmDriver>) -> Self {
+pub fn with_driver(driver: Arc<dyn LlmDriver>, model: String) -> Self {
Self {
driver: Some(driver),
+model,
..Self::new()
}
}
@@ -353,7 +356,7 @@ impl GenerationPipeline {
let item = item.clone();
async move {
if let Some(d) = driver {
-Self::generate_scene_with_llm_static(d.as_ref(), &item, i).await
+Self::generate_scene_with_llm_static(d.as_ref(), &self.model, &item, i).await
} else {
Self::generate_scene_for_item_static(&item, i)
}
@@ -413,7 +416,7 @@ impl GenerationPipeline {
request: &GenerationRequest,
) -> Result<Vec<OutlineItem>> {
let llm_request = CompletionRequest {
-model: "default".to_string(),
+model: self.model.clone(),
system: Some(self.get_outline_system_prompt()),
messages: vec![zclaw_types::Message::User {
content: prompt.to_string(),
@@ -469,6 +472,7 @@ Use Chinese if the topic is in Chinese. Include vivid metaphors and analogies."#
async fn generate_scene_with_llm_static(
driver: &dyn LlmDriver,
+model: &str,
item: &OutlineItem,
order: usize,
) -> Result<GeneratedScene> {
@@ -488,7 +492,7 @@ Use Chinese if the topic is in Chinese. Include vivid metaphors and analogies."#
);
let llm_request = CompletionRequest {
-model: "default".to_string(),
+model: model.to_string(),
system: Some(Self::get_scene_system_prompt_static()),
messages: vec![zclaw_types::Message::User {
content: prompt,