fix(relay): send conversation history to SaaS relay (BUG-008)
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

The SaaS relay was sending only the current message without conversation
history, giving the LLM no context from previous turns. Root cause:
streamStore passed only the `content` string to chatStream(), and
saas-relay-client hard-coded a single-element messages array.

Fix:
- GatewayClient.chatStream() opts: add `history` field
- streamStore: extract last 20 messages as history before calling chatStream
- saas-relay-client: build messages array from history + current message
This commit is contained in:
iven
2026-04-09 22:41:56 +08:00
parent f8850ba95a
commit 9442471c98
3 changed files with 18 additions and 1 deletion

View File

@@ -481,6 +481,8 @@ export class GatewayClient {
reasoning_effort?: string; reasoning_effort?: string;
plan_mode?: boolean; plan_mode?: boolean;
subagent_enabled?: boolean; subagent_enabled?: boolean;
/** Conversation history for relay clients that need full context */
history?: Array<{ role: string; content: string }>;
} }
): Promise<{ runId: string }> { ): Promise<{ runId: string }> {
const agentId = opts?.agentId || this.defaultAgentId; const agentId = opts?.agentId || this.defaultAgentId;

View File

@@ -112,6 +112,7 @@ export function createSaaSRelayGatewayClient(
reasoning_effort?: string; reasoning_effort?: string;
plan_mode?: boolean; plan_mode?: boolean;
subagent_enabled?: boolean; subagent_enabled?: boolean;
history?: Array<{ role: string; content: string }>;
}, },
): Promise<{ runId: string }> { ): Promise<{ runId: string }> {
const runId = `run_${Date.now()}`; const runId = `run_${Date.now()}`;
@@ -120,9 +121,15 @@ export function createSaaSRelayGatewayClient(
const aborted = () => abortController.signal.aborted; const aborted = () => abortController.signal.aborted;
try { try {
// Build messages array: use history if available, fallback to current message only
const history = opts?.history || [];
const messages = history.length > 0
? [...history, { role: 'user' as const, content: message }]
: [{ role: 'user' as const, content: message }];
const body: Record<string, unknown> = { const body: Record<string, unknown> = {
model: getModel() || 'glm-4-flash-250414', model: getModel() || 'glm-4-flash-250414',
messages: [{ role: 'user', content: message }], messages,
stream: true, stream: true,
}; };

View File

@@ -296,6 +296,13 @@ export const useStreamStore = create<StreamState>()(
useConversationStore.setState({ sessionKey: effectiveSessionKey }); useConversationStore.setState({ sessionKey: effectiveSessionKey });
} }
// Build conversation history for relay clients (last 20 messages ≈ 10 turns)
const history = (_chat?.getMessages() || [])
.filter(m => m.role === 'user' || m.role === 'assistant')
.filter(m => !m.streaming && !m.optimistic)
.map(m => ({ role: m.role, content: m.content }))
.slice(-20);
const result = await client.chatStream( const result = await client.chatStream(
content, content,
{ {
@@ -528,6 +535,7 @@ export const useStreamStore = create<StreamState>()(
reasoning_effort: get().getChatModeConfig().reasoning_effort, reasoning_effort: get().getChatModeConfig().reasoning_effort,
plan_mode: get().getChatModeConfig().plan_mode, plan_mode: get().getChatModeConfig().plan_mode,
subagent_enabled: get().getChatModeConfig().subagent_enabled, subagent_enabled: get().getChatModeConfig().subagent_enabled,
history,
} }
); );