zclaw_openfang/desktop/src-tauri/src/classroom_commands/chat.rs
feat(classroom): add SQLite persistence + security hardening
M11-03: Classroom data persistence
- New persist.rs: SQLite-backed ClassroomPersistence with open/load_all/save
- Schema: classrooms (JSON blob) + classroom_chats tables
- generate.rs: auto-persist classroom after generation
- chat.rs: auto-persist chat messages after each exchange
- mod.rs: init_persistence() for app setup integration
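
  persist.rs itself is not shown on this page. Purely as an illustration of the shape that chat.rs relies on below, a save_chat could look roughly like this; sqlx, the pool handle, and the exact column layout are assumptions, not the project's actual code:

  ```rust
  use sqlx::SqlitePool;
  use zclaw_kernel::generation::ClassroomChatMessage;

  /// Illustrative sketch only (the real ClassroomPersistence lives in persist.rs).
  /// Assumes a table: classroom_chats(classroom_id TEXT PRIMARY KEY, messages TEXT).
  pub async fn save_chat(
      pool: &SqlitePool,
      classroom_id: &str,
      messages: &[ClassroomChatMessage],
  ) -> Result<(), String> {
      // Store the whole message list as one JSON blob, mirroring the
      // "JSON blob" approach described for the classrooms table above.
      let json = serde_json::to_string(messages).map_err(|e| e.to_string())?;
      sqlx::query(
          "INSERT INTO classroom_chats (classroom_id, messages) VALUES (?1, ?2)
           ON CONFLICT(classroom_id) DO UPDATE SET messages = excluded.messages",
      )
      .bind(classroom_id)
      .bind(json)
      .execute(pool)
      .await
      .map_err(|e| e.to_string())?;
      Ok(())
  }
  ```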

M1-01: Gemini API key now uses x-goog-api-key header
- No longer leaks API key in URL query params or debug logs
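
  For illustration only (the Gemini driver is elsewhere in the repo and not shown here), a header-based request via reqwest looks roughly like this; the endpoint and model name are placeholders:

  ```rust
  use reqwest::Client; // reqwest with the "json" feature
  use serde_json::json;

  // Illustrative sketch: the key travels in the x-goog-api-key header, so it
  // never appears in the request URL and is not captured when the URL is logged.
  async fn call_gemini(client: &Client, api_key: &str, prompt: &str) -> Result<String, reqwest::Error> {
      let url = "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent";
      let body = json!({ "contents": [{ "parts": [{ "text": prompt }] }] });
      let resp = client
          .post(url)
          .header("x-goog-api-key", api_key)
          .json(&body)
          .send()
          .await?;
      resp.text().await
  }
  ```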

M1-03/04: Mutex unwrap() replaced with unwrap_or_else(|e| e.into_inner())
- MemoryMiddleware and LoopGuardMiddleware recover from poison
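
  This is the standard-library poison-recovery idiom; a minimal standalone sketch:

  ```rust
  use std::sync::Mutex;

  fn bump(counter: &Mutex<u64>) {
      // A poisoned lock only means another thread panicked while holding it;
      // the data is still usable, so take the guard out of the PoisonError
      // instead of panicking here as well.
      let mut guard = counter.lock().unwrap_or_else(|poisoned| poisoned.into_inner());
      *guard += 1;
  }
  ```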

M2-08: Agent creation input validation
- Reject empty names, temperatures outside the 0-2 range, and zero max_tokens; see the sketch below
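
  The agent-creation command is not part of this file; a rough sketch of the checks described above, with hypothetical request fields:

  ```rust
  // Hypothetical request shape for illustration; the real command lives elsewhere.
  struct CreateAgentRequest {
      name: String,
      temperature: f32,
      max_tokens: u32,
  }

  fn validate(req: &CreateAgentRequest) -> Result<(), String> {
      if req.name.trim().is_empty() {
          return Err("Agent name cannot be empty".to_string());
      }
      if !(0.0..=2.0).contains(&req.temperature) {
          return Err("Temperature must be between 0 and 2".to_string());
      }
      if req.max_tokens == 0 {
          return Err("max_tokens must be greater than zero".to_string());
      }
      Ok(())
  }
  ```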

M11-06: Classroom chat message ID uses crypto.randomUUID()
2026-04-04 19:26:59 +08:00

//! Classroom multi-agent chat commands
//!
//! - `classroom_chat` — send a message and receive multi-agent responses
//! - `classroom_chat_history` — retrieve chat history for a classroom
use std::sync::Arc;
use tokio::sync::Mutex;
use serde::{Deserialize, Serialize};
use tauri::State;
use zclaw_kernel::generation::{
    AgentProfile, AgentRole,
    ClassroomChatMessage, ClassroomChatState,
    ClassroomChatRequest,
    build_chat_prompt, parse_chat_responses,
};
use zclaw_runtime::CompletionRequest;

use super::ClassroomStore;
use crate::kernel_commands::KernelState;

// ---------------------------------------------------------------------------
// State
// ---------------------------------------------------------------------------
/// Chat state store: classroom_id → chat state
pub type ChatStore = Arc<Mutex<std::collections::HashMap<String, ClassroomChatState>>>;

pub fn create_chat_state() -> ChatStore {
    Arc::new(Mutex::new(std::collections::HashMap::new()))
}

// ---------------------------------------------------------------------------
// Request / Response
// ---------------------------------------------------------------------------
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ClassroomChatCmdRequest {
    pub classroom_id: String,
    pub user_message: String,
    pub scene_context: Option<String>,
}

// ---------------------------------------------------------------------------
// Commands
// ---------------------------------------------------------------------------
/// Send a message in the classroom chat and get multi-agent responses.
// @connected
#[tauri::command]
pub async fn classroom_chat(
    store: State<'_, ClassroomStore>,
    chat_store: State<'_, ChatStore>,
    kernel_state: State<'_, KernelState>,
    persistence: State<'_, crate::classroom_commands::persist::ClassroomPersistence>,
    request: ClassroomChatCmdRequest,
) -> Result<Vec<ClassroomChatMessage>, String> {
    if request.user_message.trim().is_empty() {
        return Err("Message cannot be empty".to_string());
    }

    // Get classroom data
    let classroom = {
        let s = store.lock().await;
        s.get(&request.classroom_id)
            .cloned()
            .ok_or_else(|| format!("Classroom '{}' not found", request.classroom_id))?
    };

    // Create user message
    let user_msg = ClassroomChatMessage::user_message(&request.user_message);

    // Get chat history for context
    let history: Vec<ClassroomChatMessage> = {
        let cs = chat_store.lock().await;
        cs.get(&request.classroom_id)
            .map(|s| s.messages.clone())
            .unwrap_or_default()
    };

    // Try LLM-powered multi-agent responses, fallback to placeholder
    let agent_responses = match generate_llm_responses(
        &kernel_state,
        &classroom.agents,
        &request.user_message,
        request.scene_context.as_deref(),
        &history,
    )
    .await
    {
        Ok(responses) => responses,
        Err(e) => {
            tracing::warn!("LLM chat generation failed, using placeholders: {}", e);
            generate_placeholder_responses(
                &classroom.agents,
                &request.user_message,
                request.scene_context.as_deref(),
            )
        }
    };

    // Store in chat state
    {
        let mut cs = chat_store.lock().await;
        let state = cs.entry(request.classroom_id.clone())
            .or_insert_with(|| ClassroomChatState {
                messages: vec![],
                active: true,
            });
        state.messages.push(user_msg);
        state.messages.extend(agent_responses.clone());

        // Persist chat to SQLite
        if let Err(e) = persistence.save_chat(&request.classroom_id, &state.messages).await {
            tracing::warn!(
                "[ClassroomChat] Failed to persist chat for {}: {}",
                request.classroom_id,
                e
            );
        }
    }

    Ok(agent_responses)
}

/// Retrieve chat history for a classroom
// @connected
#[tauri::command]
pub async fn classroom_chat_history(
    chat_store: State<'_, ChatStore>,
    classroom_id: String,
) -> Result<Vec<ClassroomChatMessage>, String> {
    let cs = chat_store.lock().await;
    Ok(cs.get(&classroom_id)
        .map(|s| s.messages.clone())
        .unwrap_or_default())
}

// ---------------------------------------------------------------------------
// Placeholder response generation
// ---------------------------------------------------------------------------
fn generate_placeholder_responses(
    agents: &[AgentProfile],
    user_message: &str,
    scene_context: Option<&str>,
) -> Vec<ClassroomChatMessage> {
    let mut responses = Vec::new();

    // Teacher always responds
    if let Some(teacher) = agents.iter().find(|a| a.role == AgentRole::Teacher) {
        // Hint reads "Regarding 「<scene>」, " when a scene context is set.
        let context_hint = scene_context
            .map(|ctx| format!("关于「{}」,", ctx))
            .unwrap_or_default();
        // "That's a great question! Let me explain the core concepts of 「<message>」 in detail..."
        responses.push(ClassroomChatMessage::agent_message(
            teacher,
            &format!("{}这是一个很好的问题!让我来详细解释一下「{}」的核心概念...", context_hint, user_message),
        ));
    }

    // Assistant chimes in
    if let Some(assistant) = agents.iter().find(|a| a.role == AgentRole::Assistant) {
        // "Let me add the key points 📌"
        responses.push(ClassroomChatMessage::agent_message(
            assistant,
            "我来补充一下要点 📌",
        ));
    }

    // One student responds
    if let Some(student) = agents.iter().find(|a| a.role == AgentRole::Student) {
        // "Thank you, teacher! I roughly understand <message>"
        responses.push(ClassroomChatMessage::agent_message(
            student,
            &format!("谢谢老师!我大概理解了{}", user_message),
        ));
    }

    responses
}

// ---------------------------------------------------------------------------
// LLM-powered response generation
// ---------------------------------------------------------------------------
async fn generate_llm_responses(
    kernel_state: &State<'_, KernelState>,
    agents: &[AgentProfile],
    user_message: &str,
    scene_context: Option<&str>,
    history: &[ClassroomChatMessage],
) -> Result<Vec<ClassroomChatMessage>, String> {
    let driver = {
        let ks = kernel_state.lock().await;
        ks.as_ref()
            .map(|k| k.driver())
            .ok_or_else(|| "Kernel not initialized".to_string())?
    };
    if !driver.is_configured() {
        return Err("LLM driver not configured".to_string());
    }

    // Build the chat request for prompt generation (include history)
    let chat_request = ClassroomChatRequest {
        classroom_id: String::new(),
        user_message: user_message.to_string(),
        agents: agents.to_vec(),
        scene_context: scene_context.map(|s| s.to_string()),
        history: history.to_vec(),
    };
    let prompt = build_chat_prompt(&chat_request);

    let request = CompletionRequest {
        model: "default".to_string(),
        // System prompt: "You are the coordinator of a multi-agent classroom discussion."
        system: Some("你是一个课堂多智能体讨论的协调器。".to_string()),
        messages: vec![zclaw_types::Message::User {
            content: prompt,
        }],
        ..Default::default()
    };
    let response = driver.complete(request).await
        .map_err(|e| format!("LLM call failed: {}", e))?;

    // Extract text from response
    let text = response.content.iter()
        .filter_map(|block| match block {
            zclaw_runtime::ContentBlock::Text { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("");

    let responses = parse_chat_responses(&text, agents);
    if responses.is_empty() {
        return Err("LLM returned no parseable agent responses".to_string());
    }
    Ok(responses)
}
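
For context, a rough sketch of how the chat state, persistence, and the two commands above could be registered during Tauri app setup. The init_persistence() call is mentioned in the commit message but its signature is assumed here; this is not the project's actual mod.rs or main.rs:

```rust
use tauri::Manager;

fn main() {
    tauri::Builder::default()
        // ClassroomStore and KernelState would be managed the same way (omitted).
        .manage(crate::classroom_commands::chat::create_chat_state())
        .setup(|app| {
            // Hypothetical signature: open the SQLite store and hand it to Tauri
            // so commands can take State<'_, ClassroomPersistence>.
            let persistence = crate::classroom_commands::init_persistence()?;
            app.manage(persistence);
            Ok(())
        })
        .invoke_handler(tauri::generate_handler![
            crate::classroom_commands::chat::classroom_chat,
            crate::classroom_commands::chat::classroom_chat_history
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
```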