fix(security): fix XSS vulnerability in HTML export and clean up debug logging
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled
refactor(logging): replace console.log with the tracing logging system
style(code): remove unused code and dependencies
feat(tests): add end-to-end test documentation and CI workflow
docs(changelog): update CHANGELOG.md with 0.1.0 release changes
perf(build): bump dependency versions and streamline the CI pipeline
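The HTML-export XSS fix named in the title is not part of the hunks below, which only cover the Rust logging cleanup. As a rough illustration of the kind of change such a fix usually involves, a minimal escaping helper might look like the following sketch; the function name and its use are assumptions, not code from this commit:

/// Minimal HTML-escaping helper (hypothetical; not taken from this commit).
/// Escaping user-controlled text before it is written into exported HTML
/// is the usual way to close an XSS hole of this kind.
fn escape_html(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    for c in input.chars() {
        match c {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            '\'' => out.push_str("&#x27;"),
            _ => out.push(c),
        }
    }
    out
}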
@@ -65,8 +65,8 @@ impl LlmDriver for OpenAiDriver {
         // Debug: log the request details
         let url = format!("{}/chat/completions", self.base_url);
         let request_body = serde_json::to_string(&api_request).unwrap_or_default();
-        eprintln!("[OpenAiDriver] Sending request to: {}", url);
-        eprintln!("[OpenAiDriver] Request body: {}", request_body);
+        tracing::debug!(target: "openai_driver", "Sending request to: {}", url);
+        tracing::trace!(target: "openai_driver", "Request body: {}", request_body);

         let response = self.client
             .post(&url)
@@ -80,11 +80,11 @@ impl LlmDriver for OpenAiDriver {
         if !response.status().is_success() {
             let status = response.status();
             let body = response.text().await.unwrap_or_default();
-            eprintln!("[OpenAiDriver] API error {}: {}", status, body);
+            tracing::warn!(target: "openai_driver", "API error {}: {}", status, body);
             return Err(ZclawError::LlmError(format!("API error {}: {}", status, body)));
         }

-        eprintln!("[OpenAiDriver] Response status: {}", response.status());
+        tracing::debug!(target: "openai_driver", "Response status: {}", response.status());

         let api_response: OpenAiResponse = response
             .json()
@@ -107,11 +107,11 @@ impl LlmDriver for OpenAiDriver {
             self.base_url.contains("aliyuncs") ||
             self.base_url.contains("bigmodel.cn");

-        eprintln!("[OpenAiDriver:stream] base_url={}, has_tools={}, needs_non_streaming={}",
+        tracing::debug!(target: "openai_driver", "stream config: base_url={}, has_tools={}, needs_non_streaming={}",
             self.base_url, has_tools, needs_non_streaming);

         if has_tools && needs_non_streaming {
-            eprintln!("[OpenAiDriver:stream] Provider detected that may not support streaming with tools, using non-streaming mode. URL: {}", self.base_url);
+            tracing::info!(target: "openai_driver", "Provider detected that may not support streaming with tools, using non-streaming mode. URL: {}", self.base_url);
             // Use non-streaming mode and convert to stream
             return self.stream_from_complete(request);
         }
@@ -458,11 +458,11 @@ impl OpenAiDriver {
         let api_key = self.api_key.expose_secret().to_string();
         let model = request.model.clone();

-        eprintln!("[OpenAiDriver:stream_from_complete] Starting non-streaming request to: {}/chat/completions", base_url);
+        tracing::debug!(target: "openai_driver", "stream_from_complete: Starting non-streaming request to: {}/chat/completions", base_url);

         Box::pin(stream! {
             let url = format!("{}/chat/completions", base_url);
-            eprintln!("[OpenAiDriver:stream_from_complete] Sending non-streaming request to: {}", url);
+            tracing::debug!(target: "openai_driver", "stream_from_complete: Sending non-streaming request to: {}", url);

             let response = match self.client
                 .post(&url)
@@ -490,15 +490,15 @@ impl OpenAiDriver {
             let api_response: OpenAiResponse = match response.json().await {
                 Ok(r) => r,
                 Err(e) => {
-                    eprintln!("[OpenAiDriver:stream_from_complete] Failed to parse response: {}", e);
+                    tracing::warn!(target: "openai_driver", "stream_from_complete: Failed to parse response: {}", e);
                     yield Err(ZclawError::LlmError(format!("Failed to parse response: {}", e)));
                     return;
                 }
             };

-            eprintln!("[OpenAiDriver:stream_from_complete] Got response with {} choices", api_response.choices.len());
+            tracing::debug!(target: "openai_driver", "stream_from_complete: Got response with {} choices", api_response.choices.len());
             if let Some(choice) = api_response.choices.first() {
-                eprintln!("[OpenAiDriver:stream_from_complete] First choice: content={:?}, tool_calls={:?}, finish_reason={:?}",
+                tracing::debug!(target: "openai_driver", "stream_from_complete: First choice: content={:?}, tool_calls={:?}, finish_reason={:?}",
                     choice.message.content.as_ref().map(|c| {
                         if c.len() > 100 {
                             // Use floor_char_boundary so we never cut in the middle of a multi-byte character
@@ -514,15 +514,15 @@ impl OpenAiDriver {

             // Convert response to stream chunks
             let completion = self.convert_response(api_response, model.clone());
-            eprintln!("[OpenAiDriver:stream_from_complete] Converted to {} content blocks, stop_reason: {:?}", completion.content.len(), completion.stop_reason);
+            tracing::debug!(target: "openai_driver", "stream_from_complete: Converted to {} content blocks, stop_reason: {:?}", completion.content.len(), completion.stop_reason);

             // Emit content blocks as stream chunks
             for block in &completion.content {
-                eprintln!("[OpenAiDriver:stream_from_complete] Emitting block: {:?}", block);
+                tracing::debug!(target: "openai_driver", "stream_from_complete: Emitting block: {:?}", block);
                 match block {
                     ContentBlock::Text { text } => {
                         if !text.is_empty() {
-                            eprintln!("[OpenAiDriver:stream_from_complete] Emitting TextDelta: {} chars", text.len());
+                            tracing::debug!(target: "openai_driver", "stream_from_complete: Emitting TextDelta: {} chars", text.len());
                             yield Ok(StreamChunk::TextDelta { delta: text.clone() });
                         }
                     }
@@ -530,7 +530,7 @@ impl OpenAiDriver {
                         yield Ok(StreamChunk::ThinkingDelta { delta: thinking.clone() });
                     }
                     ContentBlock::ToolUse { id, name, input } => {
-                        eprintln!("[OpenAiDriver:stream_from_complete] Emitting ToolUse: id={}, name={}", id, name);
+                        tracing::debug!(target: "openai_driver", "stream_from_complete: Emitting ToolUse: id={}, name={}", id, name);
                         // Emit tool use start
                         yield Ok(StreamChunk::ToolUseStart {
                             id: id.clone(),
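One practical note on this migration: unlike eprintln!, the tracing macros only emit output when a subscriber is installed, and the subscriber setup is not shown in this diff. Assuming the project uses the tracing-subscriber crate with its "env-filter" feature, initialization could look roughly like this sketch, after which RUST_LOG=openai_driver=trace would enable the new request-body logging:

// Hypothetical setup; where the application actually initializes tracing
// is not part of this diff. Requires tracing-subscriber with "env-filter".
fn init_logging() {
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();
}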