fix(desktop): DeerFlow UI — ChatArea refactor + ai-elements + dead CSS cleanup

ChatArea retry button uses setInput instead of direct sendToGateway,
fix bootstrap spinner stuck for non-logged-in users,
remove dead CSS (aurora-title/sidebar-open/quick-action-chips),
add ai components (ReasoningBlock/StreamingText/ChatMode/ModelSelector/TaskProgress),
add ClassroomPlayer + ResizableChatLayout + artifact panel

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-04-02 19:24:44 +08:00
parent d40c4605b2
commit 28299807b6
70 changed files with 4938 additions and 618 deletions

View File

@@ -0,0 +1,223 @@
//! Classroom multi-agent chat commands
//!
//! - `classroom_chat` — send a message and receive multi-agent responses
//! - `classroom_chat_history` — retrieve chat history for a classroom
use std::sync::Arc;
use tokio::sync::Mutex;
use serde::{Deserialize, Serialize};
use tauri::State;
use zclaw_kernel::generation::{
AgentProfile, AgentRole,
ClassroomChatMessage, ClassroomChatState,
ClassroomChatRequest,
build_chat_prompt, parse_chat_responses,
};
use zclaw_runtime::CompletionRequest;
use super::ClassroomStore;
use crate::kernel_commands::KernelState;
// ---------------------------------------------------------------------------
// State
// ---------------------------------------------------------------------------
/// Chat state store: classroom_id → chat state
pub type ChatStore = Arc<Mutex<std::collections::HashMap<String, ClassroomChatState>>>;
pub fn create_chat_state() -> ChatStore {
Arc::new(Mutex::new(std::collections::HashMap::new()))
}
// ---------------------------------------------------------------------------
// Request / Response
// ---------------------------------------------------------------------------
/// Payload for the `classroom_chat` command (camelCase over the wire).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ClassroomChatCmdRequest {
    /// Target classroom; must already exist in the classroom store.
    pub classroom_id: String,
    /// The user's chat message; rejected if empty after trimming.
    pub user_message: String,
    /// Optional current-scene context woven into the prompt/placeholder replies.
    pub scene_context: Option<String>,
}
// ---------------------------------------------------------------------------
// Commands
// ---------------------------------------------------------------------------
/// Send a message in the classroom chat and get multi-agent responses.
///
/// Flow: validate input → load classroom → gather prior history →
/// generate agent replies (LLM first, canned fallback) → persist the
/// user message plus replies, returning only the replies.
#[tauri::command]
pub async fn classroom_chat(
    store: State<'_, ClassroomStore>,
    chat_store: State<'_, ChatStore>,
    kernel_state: State<'_, KernelState>,
    request: ClassroomChatCmdRequest,
) -> Result<Vec<ClassroomChatMessage>, String> {
    if request.user_message.trim().is_empty() {
        return Err("Message cannot be empty".to_string());
    }

    // Clone the classroom out so the store lock is released immediately.
    let classroom = store
        .lock()
        .await
        .get(&request.classroom_id)
        .cloned()
        .ok_or_else(|| format!("Classroom '{}' not found", request.classroom_id))?;

    // The user's message, recorded alongside the agent replies below.
    let user_msg = ClassroomChatMessage::user_message(&request.user_message);

    // Prior conversation, passed as context to the LLM prompt.
    let history: Vec<ClassroomChatMessage> = chat_store
        .lock()
        .await
        .get(&request.classroom_id)
        .map(|state| state.messages.clone())
        .unwrap_or_default();

    // Prefer real LLM output; degrade gracefully to placeholder replies.
    let llm_result = generate_llm_responses(
        &kernel_state,
        &classroom.agents,
        &request.user_message,
        request.scene_context.as_deref(),
        &history,
    )
    .await;
    let agent_responses = match llm_result {
        Ok(responses) => responses,
        Err(e) => {
            tracing::warn!("LLM chat generation failed, using placeholders: {}", e);
            generate_placeholder_responses(
                &classroom.agents,
                &request.user_message,
                request.scene_context.as_deref(),
            )
        }
    };

    // Append the user message and all replies to this classroom's chat state.
    {
        let mut chats = chat_store.lock().await;
        let state = chats
            .entry(request.classroom_id.clone())
            .or_insert_with(|| ClassroomChatState {
                messages: vec![],
                active: true,
            });
        state.messages.push(user_msg);
        state.messages.extend(agent_responses.clone());
    }

    Ok(agent_responses)
}
/// Retrieve chat history for a classroom (empty vec if none exists yet).
#[tauri::command]
pub async fn classroom_chat_history(
    chat_store: State<'_, ChatStore>,
    classroom_id: String,
) -> Result<Vec<ClassroomChatMessage>, String> {
    let chats = chat_store.lock().await;
    let messages = match chats.get(&classroom_id) {
        Some(state) => state.messages.clone(),
        None => Vec::new(),
    };
    Ok(messages)
}
// ---------------------------------------------------------------------------
// Placeholder response generation
// ---------------------------------------------------------------------------
fn generate_placeholder_responses(
agents: &[AgentProfile],
user_message: &str,
scene_context: Option<&str>,
) -> Vec<ClassroomChatMessage> {
let mut responses = Vec::new();
// Teacher always responds
if let Some(teacher) = agents.iter().find(|a| a.role == AgentRole::Teacher) {
let context_hint = scene_context
.map(|ctx| format!("关于「{}」,", ctx))
.unwrap_or_default();
responses.push(ClassroomChatMessage::agent_message(
teacher,
&format!("{}这是一个很好的问题!让我来详细解释一下「{}」的核心概念...", context_hint, user_message),
));
}
// Assistant chimes in
if let Some(assistant) = agents.iter().find(|a| a.role == AgentRole::Assistant) {
responses.push(ClassroomChatMessage::agent_message(
assistant,
"我来补充一下要点 📌",
));
}
// One student responds
if let Some(student) = agents.iter().find(|a| a.role == AgentRole::Student) {
responses.push(ClassroomChatMessage::agent_message(
student,
&format!("谢谢老师!我大概理解了{}", user_message),
));
}
responses
}
// ---------------------------------------------------------------------------
// LLM-powered response generation
// ---------------------------------------------------------------------------
/// Generate agent replies via the kernel's LLM driver.
///
/// Builds one coordination prompt covering all agents plus the prior history,
/// sends a single completion request, and parses the response text back into
/// per-agent messages. Any failure (no kernel, unconfigured driver, LLM error,
/// unparseable output) is returned as `Err` so the caller can fall back to
/// placeholder responses.
async fn generate_llm_responses(
    kernel_state: &State<'_, KernelState>,
    agents: &[AgentProfile],
    user_message: &str,
    scene_context: Option<&str>,
    history: &[ClassroomChatMessage],
) -> Result<Vec<ClassroomChatMessage>, String> {
    // Take a driver handle and drop the kernel lock before any awaiting below.
    let driver = {
        let ks = kernel_state.lock().await;
        ks.as_ref()
            .map(|k| k.driver())
            .ok_or_else(|| "Kernel not initialized".to_string())?
    };
    if !driver.is_configured() {
        return Err("LLM driver not configured".to_string());
    }
    // Build the chat request for prompt generation (include history).
    // classroom_id is left empty here — presumably the prompt builder does
    // not use it; confirm against build_chat_prompt if that changes.
    let chat_request = ClassroomChatRequest {
        classroom_id: String::new(),
        user_message: user_message.to_string(),
        agents: agents.to_vec(),
        scene_context: scene_context.map(|s| s.to_string()),
        history: history.to_vec(),
    };
    let prompt = build_chat_prompt(&chat_request);
    // One completion request; "default" selects the driver's default model.
    let request = CompletionRequest {
        model: "default".to_string(),
        system: Some("你是一个课堂多智能体讨论的协调器。".to_string()),
        messages: vec![zclaw_types::Message::User {
            content: prompt,
        }],
        ..Default::default()
    };
    let response = driver.complete(request).await
        .map_err(|e| format!("LLM call failed: {}", e))?;
    // Concatenate every text block of the response; non-text blocks are ignored.
    let text = response.content.iter()
        .filter_map(|block| match block {
            zclaw_runtime::ContentBlock::Text { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("");
    // Split the combined text back into per-agent messages.
    let responses = parse_chat_responses(&text, agents);
    if responses.is_empty() {
        return Err("LLM returned no parseable agent responses".to_string());
    }
    Ok(responses)
}

View File

@@ -0,0 +1,152 @@
//! Classroom export commands
//!
//! - `classroom_export` — export classroom as HTML, Markdown, or JSON
use serde::{Deserialize, Serialize};
use tauri::State;
use zclaw_kernel::generation::Classroom;
use super::ClassroomStore;
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/// Payload for the `classroom_export` command.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ClassroomExportRequest {
    /// Classroom to export; must exist in the store.
    pub classroom_id: String,
    /// Output format key: "html" | "markdown" (or "md") | "json".
    pub format: String, // "html" | "markdown" | "json"
}
/// Result of an export: rendered content plus download metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ClassroomExportResponse {
    /// The rendered document (HTML, Markdown, or pretty-printed JSON).
    pub content: String,
    /// Suggested filename, built from the sanitized classroom title.
    pub filename: String,
    /// MIME type matching the chosen format.
    pub mime_type: String,
}
// ---------------------------------------------------------------------------
// Command
// ---------------------------------------------------------------------------
/// Export a classroom in the requested format ("html", "markdown"/"md", "json").
#[tauri::command]
pub async fn classroom_export(
    store: State<'_, ClassroomStore>,
    request: ClassroomExportRequest,
) -> Result<ClassroomExportResponse, String> {
    // Clone the classroom out so the store lock is held only briefly.
    let classroom = store
        .lock()
        .await
        .get(&request.classroom_id)
        .cloned()
        .ok_or_else(|| format!("Classroom '{}' not found", request.classroom_id))?;

    // Dispatch on the requested format key.
    match request.format.as_str() {
        "json" => export_json(&classroom),
        "html" => export_html(&classroom),
        "markdown" | "md" => export_markdown(&classroom),
        other => Err(format!("Unsupported export format: '{}'. Use html, markdown, or json.", other)),
    }
}
// ---------------------------------------------------------------------------
// Exporters
// ---------------------------------------------------------------------------
/// Serialize the classroom as pretty-printed JSON.
fn export_json(classroom: &Classroom) -> Result<ClassroomExportResponse, String> {
    match serde_json::to_string_pretty(classroom) {
        Ok(content) => Ok(ClassroomExportResponse {
            filename: format!("{}.json", sanitize_filename(&classroom.title)),
            content,
            mime_type: "application/json".to_string(),
        }),
        Err(e) => Err(format!("JSON serialization failed: {}", e)),
    }
}
/// Render the classroom as a single self-contained HTML page (inline CSS,
/// no external assets). Title, description, agent names, and scene titles
/// are HTML-escaped; see the note on agent color/avatar below.
fn export_html(classroom: &Classroom) -> Result<ClassroomExportResponse, String> {
    // Document head: charset + viewport + escaped title + inline stylesheet.
    let mut html = String::from(r#"<!DOCTYPE html><html lang="zh-CN"><head><meta charset="UTF-8"><meta name="viewport" content="width=device-width,initial-scale=1">"#);
    html.push_str(&format!("<title>{}</title>", html_escape(&classroom.title)));
    html.push_str(r#"<style>body{font-family:system-ui,sans-serif;max-width:800px;margin:0 auto;padding:2rem;color:#333}h1{color:#4F46E5}h2{color:#7C3AED;border-bottom:2px solid #E5E7EB;padding-bottom:0.5rem}.scene{margin:2rem 0;padding:1rem;border-left:4px solid #4F46E5;background:#F9FAFB}.quiz{border-left-color:#F59E0B}.discussion{border-left-color:#10B981}.agent{display:inline-flex;align-items:center;gap:0.5rem;margin:0.25rem;padding:0.25rem 0.75rem;border-radius:9999px;font-size:0.875rem;font-weight:500}</style></head><body>"#);
    html.push_str(&format!("<h1>{}</h1>", html_escape(&classroom.title)));
    html.push_str(&format!("<p>{}</p>", html_escape(&classroom.description)));
    // Agents
    // NOTE(review): agent.color and agent.avatar are interpolated WITHOUT
    // escaping (into a style attribute and element body). This assumes they
    // are trusted, generator-controlled values — confirm if they can ever
    // carry user-supplied content.
    html.push_str("<h2>课堂角色</h2><div>");
    for agent in &classroom.agents {
        html.push_str(&format!(
            r#"<span class="agent" style="background:{};color:white">{} {}</span>"#,
            agent.color, agent.avatar, html_escape(&agent.name)
        ));
    }
    html.push_str("</div>");
    // Scenes: quiz/discussion scenes get a colored accent via a CSS class.
    html.push_str("<h2>课程内容</h2>");
    for scene in &classroom.scenes {
        let type_class = match scene.content.scene_type {
            zclaw_kernel::generation::SceneType::Quiz => "quiz",
            zclaw_kernel::generation::SceneType::Discussion => "discussion",
            _ => "",
        };
        html.push_str(&format!(
            r#"<div class="scene {}"><h3>{}</h3><p>类型: {:?} | 时长: {}秒</p></div>"#,
            type_class,
            html_escape(&scene.content.title),
            scene.content.scene_type,
            scene.content.duration_seconds
        ));
    }
    html.push_str("</body></html>");
    Ok(ClassroomExportResponse {
        filename: format!("{}.html", sanitize_filename(&classroom.title)),
        content: html,
        mime_type: "text/html".to_string(),
    })
}
/// Render the classroom as a Markdown document: header, agent roster,
/// then a numbered scene list.
fn export_markdown(classroom: &Classroom) -> Result<ClassroomExportResponse, String> {
    let mut out = String::new();

    // Header: title + description.
    out.push_str(&format!("# {}\n\n", &classroom.title));
    out.push_str(&format!("{}\n\n", &classroom.description));

    // Agent roster.
    out.push_str("## 课堂角色\n\n");
    for agent in &classroom.agents {
        out.push_str(&format!("- {} **{}** ({:?})\n", agent.avatar, agent.name, agent.role));
    }
    out.push('\n');

    // Scene list, numbered from 1.
    out.push_str("## 课程内容\n\n");
    for (idx, scene) in classroom.scenes.iter().enumerate() {
        out.push_str(&format!("### {}. {}\n\n", idx + 1, scene.content.title));
        out.push_str(&format!("- 类型: `{:?}`\n", scene.content.scene_type));
        out.push_str(&format!("- 时长: {}\n\n", scene.content.duration_seconds));
    }

    Ok(ClassroomExportResponse {
        filename: format!("{}.md", sanitize_filename(&classroom.title)),
        content: out,
        mime_type: "text/markdown".to_string(),
    })
}
/// Replace every character that is not alphanumeric (Unicode), '-', or '_'
/// with '_', then strip leading/trailing underscores.
fn sanitize_filename(name: &str) -> String {
    let mut cleaned = String::with_capacity(name.len());
    for ch in name.chars() {
        let keep = ch.is_alphanumeric() || matches!(ch, '-' | '_');
        cleaned.push(if keep { ch } else { '_' });
    }
    cleaned.trim_matches('_').to_string()
}
/// Escape the five HTML-significant characters for safe embedding in element
/// content and quoted attribute values.
///
/// `&` is replaced first so later entities are not double-escaped. Single
/// quotes are escaped as `&#39;` (more portable than `&apos;`, which is not
/// defined in HTML 4) so the output is also safe inside single-quoted
/// attributes — the original version left `'` unescaped.
fn html_escape(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&#39;")
}

View File

@@ -0,0 +1,286 @@
//! Classroom generation commands
//!
//! - `classroom_generate` — start 4-stage pipeline, emit progress events
//! - `classroom_generation_progress` — query current progress
//! - `classroom_cancel_generation` — cancel active generation
//! - `classroom_get` — retrieve generated classroom data
//! - `classroom_list` — list all generated classrooms
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter, State};
use zclaw_kernel::generation::{
Classroom, GenerationPipeline, GenerationRequest as KernelGenRequest, GenerationStage,
TeachingStyle, DifficultyLevel,
};
use super::{ClassroomStore, GenerationTasks};
use crate::kernel_commands::KernelState;
// ---------------------------------------------------------------------------
// Request / Response types
// ---------------------------------------------------------------------------
/// Parameters for starting a classroom generation run.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ClassroomGenerateRequest {
    /// Topic to teach; the only required field (must be non-blank).
    pub topic: String,
    /// Optional source document to ground generation on.
    pub document: Option<String>,
    /// Teaching style key ("discussion" | "pbl" | "flipped" | "socratic");
    /// anything else (or None) falls back to lecture — see `parse_style`.
    pub style: Option<String>,
    /// Difficulty key ("beginner" | "advanced" | "expert");
    /// anything else (or None) falls back to intermediate — see `parse_level`.
    pub level: Option<String>,
    /// Target class length; defaults to 30 minutes when None.
    pub target_duration_minutes: Option<u32>,
    /// Requested number of scenes; the pipeline picks when None.
    pub scene_count: Option<usize>,
    /// Free-form extra instructions for the generator.
    pub custom_instructions: Option<String>,
    /// Output language; defaults to "zh-CN" when None.
    pub language: Option<String>,
}
/// Returned by `classroom_generate`: the ID of the stored classroom.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ClassroomGenerateResponse {
    /// UUID of the freshly generated classroom (usable with `classroom_get`).
    pub classroom_id: String,
}
/// Snapshot of generation progress for one topic.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ClassroomProgressResponse {
    /// Stage key ("agent_profiles" | "outline" | "scene" | "complete"),
    /// or "none" when no generation is tracked for the topic.
    pub stage: String,
    /// Percentage 0-100; 0 when no task exists.
    pub progress: u8,
    /// Human-readable activity description; empty when no task exists.
    pub activity: String,
    /// Optional (done, total) counter for item-granular stages.
    pub items_progress: Option<(usize, usize)>,
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Map a style key to `TeachingStyle`; unknown or missing keys fall back
/// to `Lecture`.
fn parse_style(s: Option<&str>) -> TeachingStyle {
    match s {
        Some("discussion") => TeachingStyle::Discussion,
        Some("pbl") => TeachingStyle::Pbl,
        Some("flipped") => TeachingStyle::Flipped,
        Some("socratic") => TeachingStyle::Socratic,
        _ => TeachingStyle::Lecture,
    }
}
/// Map a difficulty key to `DifficultyLevel`; unknown or missing keys fall
/// back to `Intermediate`.
fn parse_level(l: Option<&str>) -> DifficultyLevel {
    match l {
        Some("beginner") => DifficultyLevel::Beginner,
        Some("advanced") => DifficultyLevel::Advanced,
        Some("expert") => DifficultyLevel::Expert,
        _ => DifficultyLevel::Intermediate,
    }
}
fn stage_name(stage: &GenerationStage) -> &'static str {
match stage {
GenerationStage::AgentProfiles => "agent_profiles",
GenerationStage::Outline => "outline",
GenerationStage::Scene => "scene",
GenerationStage::Complete => "complete",
}
}
// ---------------------------------------------------------------------------
// Commands
// ---------------------------------------------------------------------------
/// Start classroom generation (4-stage pipeline).
/// Progress events are emitted via `classroom:progress`.
/// Supports cancellation between stages by removing the task from GenerationTasks.
#[tauri::command]
pub async fn classroom_generate(
    app: AppHandle,
    store: State<'_, ClassroomStore>,
    tasks: State<'_, GenerationTasks>,
    kernel_state: State<'_, KernelState>,
    request: ClassroomGenerateRequest,
) -> Result<ClassroomGenerateResponse, String> {
    if request.topic.trim().is_empty() {
        return Err("Topic is required".to_string());
    }
    let topic_clone = request.topic.clone();

    // Translate the wire request into the kernel's generation request.
    let kernel_request = KernelGenRequest {
        topic: request.topic.clone(),
        document: request.document.clone(),
        style: parse_style(request.style.as_deref()),
        level: parse_level(request.level.as_deref()),
        target_duration_minutes: request.target_duration_minutes.unwrap_or(30),
        scene_count: request.scene_count,
        custom_instructions: request.custom_instructions.clone(),
        language: request.language.clone().or_else(|| Some("zh-CN".to_string())),
    };

    // Register the generation task so cancellation / progress queries can see it.
    {
        use zclaw_kernel::generation::GenerationProgress;
        let mut t = tasks.lock().await;
        t.insert(topic_clone.clone(), GenerationProgress {
            stage: zclaw_kernel::generation::GenerationStage::AgentProfiles,
            progress: 0,
            activity: "Starting generation...".to_string(),
            items_progress: None,
            eta_seconds: None,
        });
    }

    // Use the kernel's LLM driver when available, otherwise placeholder mode.
    let pipeline = {
        let ks = kernel_state.lock().await;
        match ks.as_ref() {
            Some(kernel) => GenerationPipeline::with_driver(kernel.driver()),
            None => GenerationPipeline::new(),
        }
    };

    // BUGFIX: the cancellation check previously called `tasks.blocking_lock()`
    // inside a closure. `tokio::sync::Mutex::blocking_lock` panics when invoked
    // from within an async runtime context — which is where Tauri async commands
    // run — so every cancellation check would have crashed the command. The
    // check is now inlined at each stage boundary using the async `lock().await`
    // (closures cannot be async).

    // Helper: emit a progress event to the frontend (best-effort).
    let emit_progress = |stage: &str, progress: u8, activity: &str| {
        let _ = app.emit("classroom:progress", serde_json::json!({
            "topic": &topic_clone,
            "stage": stage,
            "progress": progress,
            "activity": activity
        }));
    };

    // ── Stage 0: Agent Profiles ──
    emit_progress("agent_profiles", 5, "生成课堂角色...");
    let agents = pipeline.generate_agent_profiles(&kernel_request).await;
    emit_progress("agent_profiles", 25, "角色生成完成");
    // Cancelled = the cancel command removed our task entry.
    if !tasks.lock().await.contains_key(&topic_clone) {
        return Err("Generation cancelled".to_string());
    }

    // ── Stage 1: Outline ──
    emit_progress("outline", 30, "分析主题,生成大纲...");
    let outline = match pipeline.generate_outline(&kernel_request).await {
        Ok(outline) => outline,
        Err(e) => {
            // BUGFIX: clear the task entry on failure so a stale "in progress"
            // record does not linger after the command has already returned.
            tasks.lock().await.remove(&topic_clone);
            return Err(format!("Outline generation failed: {}", e));
        }
    };
    emit_progress("outline", 50, &format!("大纲完成:{} 个场景", outline.len()));
    if !tasks.lock().await.contains_key(&topic_clone) {
        return Err("Generation cancelled".to_string());
    }

    // ── Stage 2: Scenes (parallel) ──
    emit_progress("scene", 55, &format!("并行生成 {} 个场景...", outline.len()));
    let scenes = match pipeline.generate_scenes(&outline).await {
        Ok(scenes) => scenes,
        Err(e) => {
            tasks.lock().await.remove(&topic_clone);
            return Err(format!("Scene generation failed: {}", e));
        }
    };
    if !tasks.lock().await.contains_key(&topic_clone) {
        return Err("Generation cancelled".to_string());
    }

    // ── Stage 3: Assemble ──
    emit_progress("complete", 90, "组装课堂...");
    // Build classroom directly (pipeline.build_classroom is private).
    let total_duration: u32 = scenes.iter().map(|s| s.content.duration_seconds).sum();
    // Objectives: derived from the first three outline items.
    let objectives = outline.iter()
        .take(3)
        .map(|item| format!("理解: {}", item.title))
        .collect::<Vec<_>>();
    let classroom_id = uuid::Uuid::new_v4().to_string();
    let classroom = Classroom {
        id: classroom_id.clone(),
        title: format!("课堂: {}", kernel_request.topic),
        description: format!("{:?} 风格课堂 — {}", kernel_request.style, kernel_request.topic),
        topic: kernel_request.topic.clone(),
        style: kernel_request.style,
        level: kernel_request.level,
        total_duration,
        objectives,
        scenes,
        agents,
        metadata: zclaw_kernel::generation::ClassroomMetadata {
            generated_at: std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap()
                .as_millis() as i64,
            source_document: kernel_request.document.map(|_| "user_document".to_string()),
            model: None,
            version: "2.0.0".to_string(),
            custom: serde_json::Map::new(),
        },
    };

    // Store the finished classroom, then clear the generation task.
    store.lock().await.insert(classroom_id.clone(), classroom);
    tasks.lock().await.remove(&topic_clone);

    // Emit completion.
    emit_progress("complete", 100, "课堂生成完成");
    Ok(ClassroomGenerateResponse {
        classroom_id,
    })
}
/// Get current generation progress for a topic; returns a "none"/zeroed
/// snapshot when no generation is tracked for it.
#[tauri::command]
pub async fn classroom_generation_progress(
    tasks: State<'_, GenerationTasks>,
    topic: String,
) -> Result<ClassroomProgressResponse, String> {
    let guard = tasks.lock().await;
    let response = match guard.get(&topic) {
        Some(p) => ClassroomProgressResponse {
            stage: stage_name(&p.stage).to_string(),
            progress: p.progress,
            activity: p.activity.clone(),
            items_progress: p.items_progress,
        },
        None => ClassroomProgressResponse {
            stage: "none".to_string(),
            progress: 0,
            activity: String::new(),
            items_progress: None,
        },
    };
    Ok(response)
}
/// Cancel an active generation by dropping its task entry; the generation
/// loop treats a missing entry as the cancellation signal at the next
/// stage boundary. Cancelling an unknown topic is a no-op.
#[tauri::command]
pub async fn classroom_cancel_generation(
    tasks: State<'_, GenerationTasks>,
    topic: String,
) -> Result<(), String> {
    tasks.lock().await.remove(&topic);
    Ok(())
}
/// Retrieve a generated classroom by ID.
#[tauri::command]
pub async fn classroom_get(
    store: State<'_, ClassroomStore>,
    classroom_id: String,
) -> Result<Classroom, String> {
    let guard = store.lock().await;
    match guard.get(&classroom_id) {
        Some(classroom) => Ok(classroom.clone()),
        None => Err(format!("Classroom '{}' not found", classroom_id)),
    }
}
/// List all generated classrooms as lightweight summaries
/// (id, title, topic, total duration, scene count).
#[tauri::command]
pub async fn classroom_list(
    store: State<'_, ClassroomStore>,
) -> Result<Vec<serde_json::Value>, String> {
    let guard = store.lock().await;
    let summaries = guard
        .values()
        .map(|c| serde_json::json!({
            "id": c.id,
            "title": c.title,
            "topic": c.topic,
            "totalDuration": c.total_duration,
            "sceneCount": c.scenes.len(),
        }))
        .collect();
    Ok(summaries)
}

View File

@@ -0,0 +1,41 @@
//! Classroom generation and interaction commands
//!
//! Tauri commands for the OpenMAIC-style interactive classroom:
//! - Generate classroom (4-stage pipeline with progress events)
//! - Multi-agent chat
//! - Export (HTML/Markdown/JSON)
use std::sync::Arc;
use tokio::sync::Mutex;
use zclaw_kernel::generation::Classroom;
pub mod chat;
pub mod export;
pub mod generate;
// ---------------------------------------------------------------------------
// Shared state types
// ---------------------------------------------------------------------------
/// In-memory classroom store: classroom_id → Classroom
pub type ClassroomStore = Arc<Mutex<std::collections::HashMap<String, Classroom>>>;
/// Active generation tasks: topic → progress
pub type GenerationTasks = Arc<Mutex<std::collections::HashMap<String, zclaw_kernel::generation::GenerationProgress>>>;
// Re-export chat state type — used by lib.rs to construct managed state
#[allow(unused_imports)]
pub use chat::ChatStore;
// ---------------------------------------------------------------------------
// State constructors
// ---------------------------------------------------------------------------
pub fn create_classroom_state() -> ClassroomStore {
Arc::new(Mutex::new(std::collections::HashMap::new()))
}
pub fn create_generation_tasks() -> GenerationTasks {
Arc::new(Mutex::new(std::collections::HashMap::new()))
}

View File

@@ -258,11 +258,18 @@ impl AgentIdentityManager {
if !identity.instructions.is_empty() {
sections.push(identity.instructions.clone());
}
if !identity.user_profile.is_empty()
&& identity.user_profile != default_user_profile()
{
sections.push(format!("## 用户画像\n{}", identity.user_profile));
}
// NOTE: user_profile injection is intentionally disabled.
// The reflection engine may accumulate overly specific details from past
// conversations (e.g., "广东光华", "汕头玩具产业") into user_profile.
// These details then leak into every new conversation's system prompt,
// causing the model to think about old topics instead of the current query.
// Memory injection should only happen via MemoryMiddleware with relevance
// filtering, not unconditionally via user_profile.
// if !identity.user_profile.is_empty()
// && identity.user_profile != default_user_profile()
// {
// sections.push(format!("## 用户画像\n{}", identity.user_profile));
// }
if let Some(ctx) = memory_context {
sections.push(ctx.to_string());
}

View File

@@ -34,6 +34,7 @@ pub struct ChatResponse {
#[serde(rename_all = "camelCase", tag = "type")]
pub enum StreamChatEvent {
Delta { delta: String },
ThinkingDelta { delta: String },
ToolStart { name: String, input: serde_json::Value },
ToolEnd { name: String, output: serde_json::Value },
IterationStart { iteration: usize, max_iterations: usize },
@@ -218,6 +219,10 @@ pub async fn agent_chat_stream(
tracing::trace!("[agent_chat_stream] Delta: {} bytes", delta.len());
StreamChatEvent::Delta { delta: delta.clone() }
}
LoopEvent::ThinkingDelta(delta) => {
tracing::trace!("[agent_chat_stream] ThinkingDelta: {} bytes", delta.len());
StreamChatEvent::ThinkingDelta { delta: delta.clone() }
}
LoopEvent::ToolStart { name, input } => {
tracing::debug!("[agent_chat_stream] ToolStart: {}", name);
if name.starts_with("hand_") {

View File

@@ -249,3 +249,130 @@ pub async fn kernel_shutdown(
Ok(())
}
/// Apply SaaS-synced configuration to the Kernel config file.
///
/// Writes relevant config values (agent, llm categories) to the TOML config file.
/// The changes take effect on the next Kernel restart.
///
/// Returns the number of config items actually applied. Only writes the file
/// when at least one item was applied.
#[tauri::command]
pub async fn kernel_apply_saas_config(
    configs: Vec<SaasConfigItem>,
) -> Result<u32, String> {
    let config_path = zclaw_kernel::config::KernelConfig::find_config_path()
        .ok_or_else(|| "No config file path found".to_string())?;

    // Read existing config or start from empty.
    let existing = if config_path.exists() {
        std::fs::read_to_string(&config_path).unwrap_or_default()
    } else {
        String::new()
    };
    let mut updated = existing;
    let mut applied: u32 = 0;

    for config in &configs {
        // Only process kernel-relevant categories.
        if !matches!(config.category.as_str(), "agent" | "llm") {
            continue;
        }
        // Write key = value to the [llm] or [agent] section.
        let section = &config.category;
        let key = config.key.replace('.', "_");
        let value = &config.value;

        // Simple TOML patching: find or create section, update key.
        let section_header = format!("[{}]", section);
        let line_to_set = format!("{} = {}", key, toml_quote_value(value));

        if let Some(section_start) = updated.find(&section_header) {
            // Section exists; its extent runs to the next section header or EOF.
            let after_header = section_start + section_header.len();
            let next_section = updated[after_header..].find("\n[")
                .map(|i| after_header + i)
                .unwrap_or(updated.len());
            let section_content = &updated[after_header..next_section];
            // Match "key =" or "key=" at the start of a line within the section.
            let key_prefix = format!("\n{} =", key);
            let key_prefix_alt = format!("\n{}=", key);
            if let Some(key_pos) = section_content.find(&key_prefix)
                .or_else(|| section_content.find(&key_prefix_alt))
            {
                // Key exists: swap exactly that line in place.
                //
                // BUGFIX: the previous implementation rebuilt the whole string
                // with format! (including a dead branch that chose between two
                // empty strings) and then patched a duplicated newline with a
                // GLOBAL `replace`, which could also mangle an identical line
                // elsewhere in the file. `replace_range` touches only the
                // targeted line.
                let line_start = after_header + key_pos + 1; // +1 skips the '\n' in the prefix
                let line_end = updated[line_start..].find('\n')
                    .map(|i| line_start + i)
                    .unwrap_or(updated.len());
                updated.replace_range(line_start..line_end, &line_to_set);
            } else {
                // Key doesn't exist: append it inside the section.
                updated.insert_str(next_section, &format!("\n{}", line_to_set));
            }
        } else {
            // Section doesn't exist: append it at the end of the file.
            updated = format!("{}\n{}\n{}\n", updated.trim_end(), section_header, line_to_set);
        }
        applied += 1;
    }

    if applied > 0 {
        // Ensure parent directory exists before writing.
        if let Some(parent) = config_path.parent() {
            std::fs::create_dir_all(parent).map_err(|e| format!("Failed to create config dir: {}", e))?;
        }
        // fs::write replaces File::create + write_all (same error surface).
        std::fs::write(&config_path, updated.as_bytes())
            .map_err(|e| format!("Failed to write config: {}", e))?;
        tracing::info!(
            "[kernel_apply_saas_config] Applied {} config items to {:?} (restart required)",
            applied,
            config_path
        );
    }
    Ok(applied)
}
/// Single config item from SaaS sync.
#[derive(Debug, Clone, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SaasConfigItem {
    /// Config category; only "agent" and "llm" are applied to the kernel file.
    pub category: String,
    /// Key name; dots are rewritten to underscores when written to TOML.
    pub key: String,
    /// Raw string value; boolean/numeric forms are written unquoted.
    pub value: String,
}
/// Quote a value for TOML format.
///
/// Booleans and integers pass through unquoted, as do *finite* floats.
/// Multi-line strings use TOML's triple-quoted basic-string form; everything
/// else becomes a quoted basic string with backslash and double-quote escaped.
fn toml_quote_value(value: &str) -> String {
    // Booleans pass through unquoted.
    if value == "true" || value == "false" {
        return value.to_string();
    }
    // Integers pass through unquoted.
    if let Ok(n) = value.parse::<i64>() {
        return n.to_string();
    }
    // Finite floats pass through unquoted.
    // BUGFIX: "nan"/"inf" parse as f64 but Rust's Display renders "NaN",
    // which is not valid TOML (TOML wants lowercase `nan`). Non-finite
    // values now fall through to plain string quoting.
    if let Ok(n) = value.parse::<f64>() {
        if n.is_finite() {
            return n.to_string();
        }
    }
    // Multi-line strings use TOML triple-quote syntax.
    // BUGFIX: the previous escape for an embedded `"""` produced shell-style
    // quoting (`'"'"'"`), which is meaningless inside TOML and yielded an
    // invalid file. Escaping the third quote (`""\"`) keeps the multi-line
    // basic string well-formed. Backslashes are doubled first so the escape
    // we insert is not itself re-escaped.
    if value.contains('\n') {
        let escaped = value.replace('\\', "\\\\").replace("\"\"\"", "\"\"\\\"");
        return format!("\"\"\"\n{}\"\"\"", escaped);
    }
    // Default: quote as a single-line basic string.
    format!("\"{}\"", value.replace('\\', "\\\\").replace('"', "\\\""))
}

View File

@@ -34,6 +34,9 @@ mod kernel_commands;
// Pipeline commands (DSL-based workflows)
mod pipeline_commands;
// Classroom generation and interaction commands
mod classroom_commands;
// Gateway sub-modules (runtime, config, io, commands)
mod gateway;
@@ -99,6 +102,11 @@ pub fn run() {
// Initialize Pipeline state (DSL-based workflows)
let pipeline_state = pipeline_commands::create_pipeline_state();
// Initialize Classroom state (generation + chat)
let classroom_state = classroom_commands::create_classroom_state();
let classroom_chat_state = classroom_commands::chat::create_chat_state();
let classroom_gen_tasks = classroom_commands::create_generation_tasks();
tauri::Builder::default()
.plugin(tauri_plugin_opener::init())
.manage(browser_state)
@@ -110,11 +118,15 @@ pub fn run() {
.manage(scheduler_state)
.manage(kernel_commands::SessionStreamGuard::default())
.manage(pipeline_state)
.manage(classroom_state)
.manage(classroom_chat_state)
.manage(classroom_gen_tasks)
.invoke_handler(tauri::generate_handler![
// Internal ZCLAW Kernel commands (preferred)
kernel_commands::lifecycle::kernel_init,
kernel_commands::lifecycle::kernel_status,
kernel_commands::lifecycle::kernel_shutdown,
kernel_commands::lifecycle::kernel_apply_saas_config,
kernel_commands::agent::agent_create,
kernel_commands::agent::agent_list,
kernel_commands::agent::agent_get,
@@ -300,7 +312,16 @@ pub fn run() {
intelligence::identity::identity_get_snapshots,
intelligence::identity::identity_restore_snapshot,
intelligence::identity::identity_list_agents,
intelligence::identity::identity_delete_agent
intelligence::identity::identity_delete_agent,
// Classroom generation and interaction commands
classroom_commands::generate::classroom_generate,
classroom_commands::generate::classroom_generation_progress,
classroom_commands::generate::classroom_cancel_generation,
classroom_commands::generate::classroom_get,
classroom_commands::generate::classroom_list,
classroom_commands::chat::classroom_chat,
classroom_commands::chat::classroom_chat_history,
classroom_commands::export::classroom_export
])
.run(tauri::generate_context!())
.expect("error while running tauri application");