fix(security): fix XSS vulnerability in HTML export and clean up debug logging

refactor(logging): replace console.log with the tracing logging system
style(code): remove unused code and dependencies

feat(testing): add end-to-end test documentation and CI workflow
docs(changelog): update CHANGELOG.md with the 0.1.0 release changes

perf(build): update dependency versions and streamline the CI pipeline
iven
2026-03-26 19:49:03 +08:00
parent b8d565a9eb
commit 978dc5cdd8
79 changed files with 3953 additions and 5724 deletions
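
For context, the logging refactor above swaps ad-hoc console/debug output for the tracing crate's leveled macros. A minimal sketch of the pattern, with an illustrative call site (the real ones appear in the diffs below):

fn init_data_dir() {
    // tracing::error!/info! replace println!-style debug output
    if let Err(e) = std::fs::create_dir_all("data") {
        tracing::error!("[Example] Failed to create data dir: {}", e);
        return;
    }
    tracing::info!("[Example] data dir ready");
}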


@@ -1327,7 +1327,13 @@ pub fn run() {
     }
     // Initialize Viking storage (async, in background)
-    let runtime = tokio::runtime::Runtime::new().expect("Failed to create tokio runtime");
+    let runtime = match tokio::runtime::Runtime::new() {
+        Ok(rt) => rt,
+        Err(e) => {
+            tracing::error!("[VikingCommands] Failed to create tokio runtime: {}", e);
+            return;
+        }
+    };
     runtime.block_on(async {
         if let Err(e) = crate::viking_commands::init_storage().await {
             tracing::error!("[VikingCommands] Failed to initialize storage: {}", e);
@@ -1433,6 +1439,8 @@ pub fn run() {
             memory::context_builder::estimate_content_tokens,
             // LLM commands (for extraction)
             llm::llm_complete,
+            llm::embedding_create,
+            llm::embedding_providers,
             // Browser automation commands (Fantoccini-based Browser Hand)
             browser::commands::browser_create_session,
             browser::commands::browser_close_session,


@@ -52,6 +52,47 @@ pub struct LlmUsage {
     pub total_tokens: u32,
 }
+
+// === Embedding Types ===
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct EmbeddingRequest {
+    pub input: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub model: Option<String>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct EmbeddingResponse {
+    pub embedding: Vec<f32>,
+    pub model: String,
+    pub usage: Option<EmbeddingUsage>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct EmbeddingUsage {
+    pub prompt_tokens: u32,
+    pub total_tokens: u32,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct EmbeddingConfig {
+    pub provider: String,
+    pub api_key: String,
+    pub endpoint: Option<String>,
+    pub model: Option<String>,
+}
+
+impl Default for EmbeddingConfig {
+    fn default() -> Self {
+        Self {
+            provider: "openai".to_string(),
+            api_key: String::new(),
+            endpoint: None,
+            model: Some("text-embedding-3-small".to_string()),
+        }
+    }
+}
+
 // === Provider Configuration ===
 #[derive(Debug, Clone)]
@@ -98,6 +139,82 @@ pub fn get_provider_configs() -> HashMap<String, ProviderConfig> {
     configs
 }
+
+// === Embedding Provider Configuration ===
+#[derive(Debug, Clone)]
+pub struct EmbeddingProviderConfig {
+    pub name: String,
+    pub endpoint: String,
+    pub default_model: String,
+    pub dimensions: usize,
+}
+
+pub fn get_embedding_provider_configs() -> HashMap<String, EmbeddingProviderConfig> {
+    let mut configs = HashMap::new();
+    configs.insert(
+        "openai".to_string(),
+        EmbeddingProviderConfig {
+            name: "OpenAI".to_string(),
+            endpoint: "https://api.openai.com/v1".to_string(),
+            default_model: "text-embedding-3-small".to_string(),
+            dimensions: 1536,
+        },
+    );
+    configs.insert(
+        "zhipu".to_string(),
+        EmbeddingProviderConfig {
+            name: "智谱 AI".to_string(),
+            endpoint: "https://open.bigmodel.cn/api/paas/v4".to_string(),
+            default_model: "embedding-3".to_string(),
+            dimensions: 1024,
+        },
+    );
+    configs.insert(
+        "doubao".to_string(),
+        EmbeddingProviderConfig {
+            name: "火山引擎 (Doubao)".to_string(),
+            endpoint: "https://ark.cn-beijing.volces.com/api/v3".to_string(),
+            default_model: "doubao-embedding".to_string(),
+            dimensions: 1024,
+        },
+    );
+    configs.insert(
+        "qwen".to_string(),
+        EmbeddingProviderConfig {
+            name: "百炼/通义千问".to_string(),
+            endpoint: "https://dashscope.aliyuncs.com/compatible-mode/v1".to_string(),
+            default_model: "text-embedding-v3".to_string(),
+            dimensions: 1024,
+        },
+    );
+    configs.insert(
+        "deepseek".to_string(),
+        EmbeddingProviderConfig {
+            name: "DeepSeek".to_string(),
+            endpoint: "https://api.deepseek.com/v1".to_string(),
+            default_model: "deepseek-embedding".to_string(),
+            dimensions: 1536,
+        },
+    );
+    configs.insert(
+        "local".to_string(),
+        EmbeddingProviderConfig {
+            name: "本地模型 (TF-IDF)".to_string(),
+            endpoint: String::new(),
+            default_model: "tfidf".to_string(),
+            dimensions: 0,
+        },
+    );
+    configs
+}
 
 // === LLM Client ===
 pub struct LlmClient {
@@ -221,6 +338,135 @@ pub async fn llm_complete(
     client.complete(messages).await
 }
+
+// === Embedding Client ===
+pub struct EmbeddingClient {
+    config: EmbeddingConfig,
+    provider_config: Option<EmbeddingProviderConfig>,
+}
+
+impl EmbeddingClient {
+    pub fn new(config: EmbeddingConfig) -> Self {
+        let provider_config = get_embedding_provider_configs()
+            .get(&config.provider)
+            .cloned();
+        Self {
+            config,
+            provider_config,
+        }
+    }
+
+    pub async fn embed(&self, text: &str) -> Result<EmbeddingResponse, String> {
+        if self.config.provider == "local" || self.config.api_key.is_empty() {
+            return Err("Local TF-IDF mode does not support API embedding".to_string());
+        }
+        let endpoint = self.config.endpoint.clone()
+            .or_else(|| {
+                self.provider_config
+                    .as_ref()
+                    .map(|c| c.endpoint.clone())
+            })
+            .unwrap_or_else(|| "https://api.openai.com/v1".to_string());
+        let model = self.config.model.clone()
+            .or_else(|| {
+                self.provider_config
+                    .as_ref()
+                    .map(|c| c.default_model.clone())
+            })
+            .unwrap_or_else(|| "text-embedding-3-small".to_string());
+        self.call_embedding_api(&endpoint, text, &model).await
+    }
+
+    async fn call_embedding_api(&self, endpoint: &str, text: &str, model: &str) -> Result<EmbeddingResponse, String> {
+        let client = reqwest::Client::new();
+        let request_body = serde_json::json!({
+            "input": text,
+            "model": model,
+        });
+        let response = client
+            .post(format!("{}/embeddings", endpoint))
+            .header("Authorization", format!("Bearer {}", self.config.api_key))
+            .header("Content-Type", "application/json")
+            .json(&request_body)
+            .send()
+            .await
+            .map_err(|e| format!("Embedding API request failed: {}", e))?;
+        if !response.status().is_success() {
+            let status = response.status();
+            let body = response.text().await.unwrap_or_default();
+            return Err(format!("Embedding API error {}: {}", status, body));
+        }
+        let json: serde_json::Value = response
+            .json()
+            .await
+            .map_err(|e| format!("Failed to parse embedding response: {}", e))?;
+        let embedding = json
+            .get("data")
+            .and_then(|d| d.get(0))
+            .and_then(|d| d.get("embedding"))
+            .and_then(|e| e.as_array())
+            .ok_or("Invalid embedding response format")?
+            .iter()
+            .filter_map(|v| v.as_f64().map(|f| f as f32))
+            .collect::<Vec<f32>>();
+        let usage = json.get("usage").map(|u| EmbeddingUsage {
+            prompt_tokens: u.get("prompt_tokens").and_then(|v| v.as_u64()).unwrap_or(0) as u32,
+            total_tokens: u.get("total_tokens").and_then(|v| v.as_u64()).unwrap_or(0) as u32,
+        });
+        Ok(EmbeddingResponse {
+            embedding,
+            model: model.to_string(),
+            usage,
+        })
+    }
+
+    pub fn get_dimensions(&self) -> usize {
+        self.provider_config
+            .as_ref()
+            .map(|c| c.dimensions)
+            .unwrap_or(1536)
+    }
+}
+
+#[tauri::command]
+pub async fn embedding_create(
+    provider: String,
+    api_key: String,
+    text: String,
+    model: Option<String>,
+    endpoint: Option<String>,
+) -> Result<EmbeddingResponse, String> {
+    let config = EmbeddingConfig {
+        provider,
+        api_key,
+        endpoint,
+        model,
+    };
+    let client = EmbeddingClient::new(config);
+    client.embed(&text).await
+}
+
+#[tauri::command]
+pub async fn embedding_providers() -> Result<Vec<(String, String, String, usize)>, String> {
+    let configs = get_embedding_provider_configs();
+    Ok(configs
+        .into_iter()
+        .map(|(id, c)| (id, c.name, c.default_model, c.dimensions))
+        .collect())
+}
 
 #[cfg(test)]
 mod tests {
     use super::*;
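
As a quick illustration (a sketch only, not part of this commit), the new provider table and the serde attributes on the request type could be exercised in that tests module along these lines:

#[test]
fn embedding_request_omits_missing_model() {
    // skip_serializing_if drops `model` from the JSON when it is None
    let req = EmbeddingRequest { input: "hello".to_string(), model: None };
    assert_eq!(serde_json::to_string(&req).unwrap(), r#"{"input":"hello"}"#);
}

#[test]
fn embedding_provider_table_lists_known_providers() {
    let configs = get_embedding_provider_configs();
    assert_eq!(configs["openai"].dimensions, 1536);
    assert_eq!(configs["zhipu"].default_model, "embedding-3");
    assert!(configs.contains_key("local"));
}

#[test]
fn embedding_client_defaults_to_openai_dimensions() {
    // EmbeddingConfig::default() selects "openai", whose table entry is 1536-dimensional
    let client = EmbeddingClient::new(EmbeddingConfig::default());
    assert_eq!(client.get_dimensions(), 1536);
}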


@@ -128,7 +128,7 @@ pub async fn viking_status() -> Result<VikingStatus, String> {
     Ok(VikingStatus {
         available: true,
-        version: Some("0.2.0-native".to_string()),
+        version: Some("0.1.0-native".to_string()),
         data_dir: get_data_dir_string(),
         error: None,
     })