fix(runtime,hands): 搜索功能修复 — glm空参数回退+schema简化
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

根因:glm-5.1 不理解 oneOf+const 复杂 schema,发送 tool_calls 时
arguments 为空 {}。同时缺少从对话上下文提取用户意图的回退机制。

修复:
1. researcher input_schema 从 oneOf+const 改为扁平化属性 — glm 正确传参
2. loop_runner 增加 empty-input 回退 — 从最近用户消息注入 _fallback_query
3. researcher infer_action 增加 _fallback_query 分支处理
4. 调试日志降级 INFO→DEBUG (openai tool_calls delta, researcher input)
This commit is contained in:
iven
2026-04-22 16:06:47 +08:00
parent 3cb9709caf
commit 5816f56039
3 changed files with 64 additions and 35 deletions

View File

@@ -252,38 +252,32 @@ impl ResearcherHand {
dependencies: vec!["network".to_string()], dependencies: vec!["network".to_string()],
input_schema: Some(serde_json::json!({ input_schema: Some(serde_json::json!({
"type": "object", "type": "object",
"oneOf": [
{
"properties": { "properties": {
"action": { "const": "search" }, "action": {
"type": "string",
"enum": ["search", "fetch", "report", "summarize"],
"description": "Action to perform: search (web search), fetch (get URL content), report (deep research), summarize (multiple URLs)"
},
"query": { "query": {
"type": "object", "type": "string",
"properties": { "description": "Search query string for search/report actions"
"query": { "type": "string" },
"engine": { "type": "string", "enum": ["searxng", "google", "bing", "duckduckgo", "auto"] },
"depth": { "type": "string", "enum": ["quick", "standard", "deep"] },
"maxResults": { "type": "integer" }
}, },
"required": ["query"] "url": {
"type": "string",
"description": "URL to fetch content from"
},
"urls": {
"type": "array",
"items": { "type": "string" },
"description": "List of URLs to summarize"
},
"engine": {
"type": "string",
"enum": ["auto", "searxng", "google", "bing", "duckduckgo"],
"description": "Search engine preference"
} }
}, },
"required": ["action", "query"] "description": "Provide 'query' for search/report, or 'url' for fetch, or 'urls' for summarize"
},
{
"properties": {
"action": { "const": "fetch" },
"url": { "type": "string" }
},
"required": ["action", "url"]
},
{
"properties": {
"action": { "const": "report" },
"query": { "$ref": "#/properties/query" }
},
"required": ["action", "query"]
}
]
})), })),
tags: vec!["research".to_string(), "web".to_string(), "search".to_string()], tags: vec!["research".to_string(), "web".to_string(), "search".to_string()],
enabled: true, enabled: true,
@@ -310,7 +304,7 @@ impl ResearcherHand {
let keys: Vec<&str> = input.as_object() let keys: Vec<&str> = input.as_object()
.map(|obj| obj.keys().map(|k| k.as_str()).collect()) .map(|obj| obj.keys().map(|k| k.as_str()).collect())
.unwrap_or_default(); .unwrap_or_default();
tracing::warn!(target: "researcher", ?keys, %input, "infer_action examining input"); tracing::debug!(target: "researcher", ?keys, %input, "infer_action examining input");
// Check for action field with wrong value // Check for action field with wrong value
if let Some(action) = input.get("action").and_then(|v| v.as_str()) { if let Some(action) = input.get("action").and_then(|v| v.as_str()) {
@@ -364,12 +358,27 @@ impl ResearcherHand {
} }
} }
} }
// Check for injected fallback query from loop_runner (when LLM sends empty args)
if let Some(fallback) = input.get("_fallback_query").and_then(|v| v.as_str()) {
if !fallback.trim().is_empty() {
tracing::debug!(target: "researcher", query = %fallback, "Using fallback user message as search query");
return Ok(ResearcherAction::Search { query: ResearchQuery {
query: fallback.to_string(),
engine: SearchEngine::Auto,
depth: ResearchDepth::Standard,
max_results: 10,
include_related: false,
time_limit_secs: 60,
}});
}
}
// Last resort: if any string field looks like a search query // Last resort: if any string field looks like a search query
if let Some(obj) = input.as_object() { if let Some(obj) = input.as_object() {
for (key, val) in obj { for (key, val) in obj {
if let Some(s) = val.as_str() { if let Some(s) = val.as_str() {
if s.len() > 2 && !s.starts_with("http") && key != "action" && key != "engine" { if s.len() > 2 && !s.starts_with("http") && key != "action" && key != "engine" {
tracing::warn!(target: "researcher", key = %key, value = %s, "Using fallback field as query"); tracing::debug!(target: "researcher", key = %key, value = %s, "Using fallback field as query");
return Ok(ResearcherAction::Search { query: ResearchQuery { return Ok(ResearcherAction::Search { query: ResearchQuery {
query: s.to_string(), query: s.to_string(),
engine: SearchEngine::Auto, engine: SearchEngine::Auto,
@@ -1144,12 +1153,12 @@ impl Hand for ResearcherHand {
} }
async fn execute(&self, _context: &HandContext, input: Value) -> Result<HandResult> { async fn execute(&self, _context: &HandContext, input: Value) -> Result<HandResult> {
tracing::info!(target: "researcher", input = %input, "Researcher hand received input"); tracing::debug!(target: "researcher", input = %input, "Researcher hand received input");
// Try strict deserialization first, then fall back to inference // Try strict deserialization first, then fall back to inference
let action: ResearcherAction = match serde_json::from_value(input.clone()) { let action: ResearcherAction = match serde_json::from_value(input.clone()) {
Ok(a) => a, Ok(a) => a,
Err(e) => { Err(e) => {
tracing::warn!(target: "researcher", error = %e, input = %input, "Strict deserialization failed, trying inference"); tracing::debug!(target: "researcher", error = %e, input = %input, "Strict deserialization failed, trying inference");
Self::infer_action(&input)? Self::infer_action(&input)?
} }
}; };

View File

@@ -208,7 +208,7 @@ impl LlmDriver for OpenAiDriver {
tracing::debug!("[OpenAI:stream] SSE #{}: {}", sse_event_count, &data[..data.len().min(300)]); tracing::debug!("[OpenAI:stream] SSE #{}: {}", sse_event_count, &data[..data.len().min(300)]);
} }
if data == "[DONE]" { if data == "[DONE]" {
tracing::debug!("[OpenAI:stream] Received [DONE], total SSE events: {}, raw bytes: {}", sse_event_count, raw_bytes_total); tracing::debug!("[OpenAI:stream] Received [DONE], total SSE events: {}, raw bytes: {}, tool_calls: {:?}", sse_event_count, raw_bytes_total, accumulated_tool_calls);
// Emit ToolUseEnd for all accumulated tool calls (skip invalid ones with empty name) // Emit ToolUseEnd for all accumulated tool calls (skip invalid ones with empty name)
for (id, (name, args)) in &accumulated_tool_calls { for (id, (name, args)) in &accumulated_tool_calls {
@@ -264,7 +264,7 @@ impl LlmDriver for OpenAiDriver {
// Handle tool calls // Handle tool calls
if let Some(tool_calls) = &delta.tool_calls { if let Some(tool_calls) = &delta.tool_calls {
tracing::trace!("[OpenAI] Received tool_calls delta: {:?}", tool_calls); tracing::debug!("[OpenAI] Received tool_calls delta: {:?}", tool_calls);
for tc in tool_calls { for tc in tool_calls {
// Tool call start - has id and name // Tool call start - has id and name
if let Some(id) = &tc.id { if let Some(id) = &tc.id {

View File

@@ -380,6 +380,26 @@ impl AgentLoop {
if abort_result.is_some() { if abort_result.is_some() {
break; break;
} }
// GLM and other models sometimes send tool calls with empty arguments `{}`
// Inject the last user message as a fallback query so the tool can infer intent.
let input = if input.as_object().map_or(false, |obj| obj.is_empty()) {
if let Some(last_user_msg) = messages.iter().rev().find_map(|m| {
if let Message::User { content } = m {
Some(content.clone())
} else {
None
}
}) {
tracing::info!("[AgentLoop] Tool '{}' received empty input, injecting user message as fallback query", name);
serde_json::json!({ "_fallback_query": last_user_msg })
} else {
input
}
} else {
input
};
// Check tool call safety — via middleware chain // Check tool call safety — via middleware chain
{ {
let mw_ctx_ref = middleware::MiddlewareContext { let mw_ctx_ref = middleware::MiddlewareContext {