Files
zclaw_openfang/crates/zclaw-kernel/tests/smoke_hands.rs
iven 9060935401 perf(runtime): Hermes Phase 1-3 — prompt caching + parallel tools + smart retry
Phase 1: Anthropic prompt caching
- Add cache_control ephemeral on system prompt blocks
- Track cache_creation/cache_read tokens in CompletionResponse + StreamChunk

Phase 2A: Parallel tool execution
- Add ToolConcurrency enum (ReadOnly/Exclusive/Interactive)
- JoinSet + Semaphore(3) for bounded parallel tool calls
- 7 tools annotated with correct concurrency level
- AtomicU32 for lock-free failure tracking in ToolErrorMiddleware

Phase 2B: Tool output pruning
- prune_tool_outputs() trims old ToolResult > 2000 chars to 500 chars
- Integrated into CompactionMiddleware before token estimation

Phase 3: Error classification + smart retry
- LlmErrorKind + ClassifiedLlmError for structured error mapping
- RetryDriver decorator with jittered exponential backoff
- Kernel wraps all LLM calls with RetryDriver
- CONTEXT_OVERFLOW recovery triggers emergency compaction in loop_runner
2026-04-24 08:39:56 +08:00

98 lines
3.8 KiB
Rust

//! Hands smoke test — full lifecycle: trigger tool_call → hand execute → result
//!
//! Uses MockLlmDriver with stream chunks to simulate a real tool call flow.
use std::sync::Arc;
use zclaw_kernel::{Kernel, KernelConfig};
use zclaw_runtime::stream::StreamChunk;
use zclaw_runtime::test_util::MockLlmDriver;
use zclaw_runtime::{LoopEvent, LlmDriver};
use zclaw_types::AgentConfig;
#[tokio::test]
async fn smoke_hands_full_lifecycle() {
    // Simulate the full loop: LLM requests hand_quiz → quiz hand executes →
    // LLM summarizes the result in a second stream.
    //
    // First scripted stream: a text delta, then a tool_use for `hand_quiz`,
    // ending with stop_reason "tool_use" so the loop runs the tool.
    let mock = MockLlmDriver::new()
        .with_stream_chunks(vec![
            StreamChunk::TextDelta { delta: "正在生成测验...".to_string() },
            StreamChunk::ToolUseStart {
                id: "call_1".to_string(),
                name: "hand_quiz".to_string(),
            },
            StreamChunk::ToolUseEnd {
                id: "call_1".to_string(),
                input: serde_json::json!({ "topic": "历史", "count": 2 }),
            },
            StreamChunk::Complete {
                input_tokens: 15,
                output_tokens: 10,
                stop_reason: "tool_use".to_string(),
                cache_creation_input_tokens: None,
                cache_read_input_tokens: None,
            },
        ])
        // Second scripted stream: after hand_quiz returns, the LLM produces
        // the final response and ends the turn.
        .with_stream_chunks(vec![
            StreamChunk::TextDelta { delta: "测验已生成!".to_string() },
            StreamChunk::Complete {
                input_tokens: 20,
                output_tokens: 5,
                stop_reason: "end_turn".to_string(),
                cache_creation_input_tokens: None,
                cache_read_input_tokens: None,
            },
        ]);

    let config = KernelConfig::default();
    let kernel = Kernel::boot_with_driver(config, Arc::new(mock) as Arc<dyn LlmDriver>)
        .await
        .expect("kernel boot");

    let agent = AgentConfig::new("smoke-agent");
    let id = agent.id;
    kernel.spawn_agent(agent).await.expect("spawn agent");

    let mut rx = kernel
        .send_message_stream(&id, "生成一个历史测验".to_string())
        .await
        .expect("stream");

    // Lifecycle observations. `tool_finished` tracks whether we are past the
    // ToolEnd event (replaces a stringly-typed `phase: &str` flag with a bool).
    let mut saw_tool_start = false;
    let mut saw_tool_end = false;
    let mut saw_delta_before_tool = false;
    let mut saw_delta_after_tool = false;
    let mut tool_finished = false;
    let mut got_complete = false;

    while let Some(event) = rx.recv().await {
        match event {
            // Attribute each text delta to the pre- or post-tool phase.
            LoopEvent::Delta(_) => {
                if tool_finished {
                    saw_delta_after_tool = true;
                } else {
                    saw_delta_before_tool = true;
                }
            }
            LoopEvent::ToolStart { name, .. } => {
                assert_eq!(name, "hand_quiz", "should be hand_quiz");
                saw_tool_start = true;
            }
            LoopEvent::ToolEnd { name, output } => {
                assert!(name.starts_with("hand_"), "should be hand tool");
                assert!(output.is_object() || output.is_string(), "hand should produce output");
                saw_tool_end = true;
                tool_finished = true;
            }
            LoopEvent::Complete(result) => {
                assert!(result.output_tokens > 0, "should have output tokens");
                // Tool round-trip requires at least two LLM iterations.
                assert!(result.iterations >= 2, "should take at least 2 iterations");
                got_complete = true;
                break;
            }
            LoopEvent::Error(msg) => panic!("unexpected error: {}", msg),
            // LoopEvent is an external enum; ignore variants this smoke test
            // does not assert on.
            _ => {}
        }
    }

    assert!(saw_delta_before_tool, "should see delta before tool execution");
    assert!(saw_tool_start, "should see hand_quiz ToolStart");
    assert!(saw_tool_end, "should see hand_quiz ToolEnd");
    assert!(saw_delta_after_tool, "should see delta after tool execution");
    assert!(got_complete, "should receive complete event");
}