diff --git a/desktop/src/components/ChatArea.tsx b/desktop/src/components/ChatArea.tsx
index 0cf7cb2..b601a4a 100644
--- a/desktop/src/components/ChatArea.tsx
+++ b/desktop/src/components/ChatArea.tsx
@@ -34,7 +34,6 @@ import { ModelSelector } from './ai/ModelSelector';
import { isTauriRuntime } from '../lib/tauri-gateway';
import { SuggestionChips } from './ai/SuggestionChips';
import { PipelineResultPreview } from './pipeline/PipelineResultPreview';
-import { PresentationContainer } from './presentation/PresentationContainer';
// TokenMeter temporarily unused — using inline text counter instead
// Default heights for virtualized messages
@@ -637,12 +636,36 @@ export function ChatArea({ compact, onOpenDetail }: { compact?: boolean; onOpenD
);
}
+/**
+ * Strip LLM tool-usage narration from response content.
+ * When the LLM calls tools (search, fetch, etc.), it often narrates its reasoning
+ * in English ("Now let me execute...", "I need to provide...", "I keep getting errors...")
+ * and Chinese ("让我执行...", "让我尝试..."). These are internal thoughts, not user-facing content.
+ */
+function stripToolNarration(content: string): string {
+  // CJK terminators split anywhere; ASCII ones require trailing whitespace so
+  // decimals ("3.5") and URLs ("example.com") are never split apart.
+  const sentences = content.split(/(?<=[。!?])\s*|(?<=[.!?])\s+/);
+  const filtered = sentences.filter(s => {
+    const t = s.trim();
+    if (!t) return false;
+    // English narration patterns ('i' flag covers case variants)
+    if (/^(?:Now )?let me\s/i.test(t)) return false;
+    if (/^I\s+(?:need to|keep getting|should|will try|have to|can try|must)\s/i.test(t)) return false;
+    if (/^The hand_researcher\s/i.test(t)) return false;
+    // Chinese narration patterns (accept half- and full-width commas)
+    if (/^让我(?:执行|尝试|使用|进一步|调用|运行)/.test(t)) return false;
+    if (/^好的[,,]让我为您/.test(t)) return false;
+    return true;
+  });
+  // Fallback: if everything was stripped, show original
+  return filtered.join(' ').replace(/\s{2,}/g, ' ').trim() || content;
+}
+
function MessageBubble({ message, onRetry }: { message: Message; setInput?: (text: string) => void; onRetry?: () => void }) {
if (message.role === 'tool') {
return null;
}
- // Researcher hand results are internal — search results are already in the LLM reply
- if (message.role === 'hand' && message.handName === 'researcher') {
+ // Hand status/result messages are internal — search results are already in the LLM reply
+ if (message.role === 'hand') {
return null;
}
@@ -721,15 +744,15 @@ function MessageBubble({ message, onRetry }: { message: Message; setInput?: (tex
? (isUser
? message.content
:
{message.error}