fix(kernel): add map_err context to classroom LLM generation calls
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

Stage 1 (outline) and Stage 2 (scene) LLM calls now provide descriptive
error messages instead of propagating opaque driver errors.

Closes M11-02
This commit is contained in:
iven
2026-04-04 21:25:50 +08:00
parent 1fec8cfbc1
commit 1680f931e9

View File

@@ -428,7 +428,10 @@ impl GenerationPipeline {
             plan_mode: false,
         };
-        let response = driver.complete(llm_request).await?;
+        let response = driver.complete(llm_request).await
+            .map_err(|e| zclaw_types::ZclawError::LlmError(
+                format!("Outline generation failed: {}", e)
+            ))?;
         let text = Self::extract_text_from_response_static(&response);
         self.parse_outline_from_text(&text, request)
     }
@@ -500,7 +503,10 @@ Use Chinese if the topic is in Chinese. Include vivid metaphors and analogies."#
             plan_mode: false,
         };
-        let response = driver.complete(llm_request).await?;
+        let response = driver.complete(llm_request).await
+            .map_err(|e| zclaw_types::ZclawError::LlmError(
+                format!("Scene '{}' generation failed: {}", item.title, e)
+            ))?;
         let text = Self::extract_text_from_response_static(&response);
         Self::parse_scene_from_text_static(&text, item, order)
     }