release(v0.2.0): streaming, MCP protocol, Browser Hand, security enhancements
## Major Features

### Streaming Response System
- Implement LlmDriver trait with `stream()` method returning async Stream
- Add SSE parsing for Anthropic and OpenAI API streaming
- Integrate Tauri event system for frontend streaming (`stream:chunk` events)
- Add StreamChunk types: Delta, ToolStart, ToolEnd, Complete, Error

### MCP Protocol Implementation
- Add MCP JSON-RPC 2.0 types (mcp_types.rs)
- Implement stdio-based MCP transport (mcp_transport.rs)
- Support tool discovery, execution, and resource operations

### Browser Hand Implementation
- Complete browser automation with Playwright-style actions
- Support Navigate, Click, Type, Scrape, Screenshot, Wait actions
- Add educational Hands: Whiteboard, Slideshow, Speech, Quiz

### Security Enhancements
- Implement command whitelist/blacklist for shell_exec tool
- Add SSRF protection with private IP blocking
- Create security.toml configuration file

## Test Improvements
- Fix test import paths (security-utils, setup)
- Fix vi.mock hoisting issues with vi.hoisted()
- Update test expectations for validateUrl and sanitizeFilename
- Add getUnsupportedLocalGatewayStatus mock

## Documentation Updates
- Update architecture documentation
- Improve configuration reference
- Add quick-start guide updates

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,6 +1,7 @@
|
||||
import { create } from 'zustand';
|
||||
import { persist } from 'zustand/middleware';
|
||||
import { getGatewayClient, AgentStreamDelta } from '../lib/gateway-client';
|
||||
import type { AgentStreamDelta } from '../lib/gateway-client';
|
||||
import { getClient } from './connectionStore';
|
||||
import { intelligenceClient } from '../lib/intelligence-client';
|
||||
import { getMemoryExtractor } from '../lib/memory-extractor';
|
||||
import { getAgentSwarm } from '../lib/agent-swarm';
|
||||
@@ -190,7 +191,7 @@ export const useChatStore = create<ChatState>()(
|
||||
currentAgent: DEFAULT_AGENT,
|
||||
isStreaming: false,
|
||||
isLoading: false,
|
||||
currentModel: 'glm-5',
|
||||
currentModel: 'glm-4-flash',
|
||||
sessionKey: null,
|
||||
|
||||
addMessage: (message) =>
|
||||
@@ -399,7 +400,8 @@ export const useChatStore = create<ChatState>()(
|
||||
set({ isStreaming: true });
|
||||
|
||||
try {
|
||||
const client = getGatewayClient();
|
||||
// Use the connected client from connectionStore (supports both GatewayClient and KernelClient)
|
||||
const client = getClient();
|
||||
|
||||
// Check connection state first
|
||||
const connectionState = useConnectionStore.getState().connectionState;
|
||||
@@ -409,11 +411,23 @@ export const useChatStore = create<ChatState>()(
|
||||
throw new Error(`Not connected (state: ${connectionState})`);
|
||||
}
|
||||
|
||||
// Declare runId before chatStream so callbacks can access it
|
||||
let runId = `run_${Date.now()}`;
|
||||
|
||||
// Try streaming first (OpenFang WebSocket)
|
||||
const { runId } = await client.chatStream(
|
||||
const result = await client.chatStream(
|
||||
enhancedContent,
|
||||
{
|
||||
onDelta: () => { /* Handled by initStreamListener to prevent duplication */ },
|
||||
onDelta: (delta: string) => {
|
||||
// Update message content directly (works for both KernelClient and GatewayClient)
|
||||
set((s) => ({
|
||||
messages: s.messages.map((m) =>
|
||||
m.id === assistantId
|
||||
? { ...m, content: m.content + delta }
|
||||
: m
|
||||
),
|
||||
}));
|
||||
},
|
||||
onTool: (tool: string, input: string, output: string) => {
|
||||
const toolMsg: Message = {
|
||||
id: `tool_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`,
|
||||
@@ -494,6 +508,11 @@ export const useChatStore = create<ChatState>()(
|
||||
}
|
||||
);
|
||||
|
||||
// Update runId from the result if available
|
||||
if (result?.runId) {
|
||||
runId = result.runId;
|
||||
}
|
||||
|
||||
if (!sessionKey) {
|
||||
set({ sessionKey: effectiveSessionKey });
|
||||
}
|
||||
@@ -530,9 +549,9 @@ export const useChatStore = create<ChatState>()(
|
||||
communicationStyle: style || 'parallel',
|
||||
});
|
||||
|
||||
// Set up executor that uses gateway client
|
||||
// Set up executor that uses the connected client
|
||||
swarm.setExecutor(async (agentId: string, prompt: string, context?: string) => {
|
||||
const client = getGatewayClient();
|
||||
const client = getClient();
|
||||
const fullPrompt = context ? `${context}\n\n${prompt}` : prompt;
|
||||
const result = await client.chat(fullPrompt, { agentId: agentId.startsWith('clone_') ? undefined : agentId });
|
||||
return result?.response || '(无响应)';
|
||||
@@ -566,7 +585,13 @@ export const useChatStore = create<ChatState>()(
|
||||
},
|
||||
|
||||
initStreamListener: () => {
|
||||
const client = getGatewayClient();
|
||||
const client = getClient();
|
||||
|
||||
// Check if client supports onAgentStream (GatewayClient does, KernelClient doesn't)
|
||||
if (!('onAgentStream' in client)) {
|
||||
// KernelClient handles streaming via chatStream callbacks, no separate listener needed
|
||||
return () => {};
|
||||
}
|
||||
|
||||
const unsubscribe = client.onAgentStream((delta: AgentStreamDelta) => {
|
||||
const state = get();
|
||||
|
||||
@@ -25,6 +25,7 @@ import { useSecurityStore } from './securityStore';
|
||||
import { useSessionStore } from './sessionStore';
|
||||
import { useChatStore } from './chatStore';
|
||||
import type { GatewayClient, ConnectionState } from '../lib/gateway-client';
|
||||
import type { KernelClient } from '../lib/kernel-client';
|
||||
import type { GatewayModelChoice } from '../lib/gateway-config';
|
||||
import type { LocalGatewayStatus } from '../lib/tauri-gateway';
|
||||
import type { Hand, HandRun, Trigger, Approval, ApprovalStatus } from './handStore';
|
||||
@@ -233,7 +234,7 @@ interface GatewayFacade {
|
||||
localGateway: LocalGatewayStatus;
|
||||
localGatewayBusy: boolean;
|
||||
isLoading: boolean;
|
||||
client: GatewayClient;
|
||||
client: GatewayClient | KernelClient;
|
||||
|
||||
// Data
|
||||
clones: Clone[];
|
||||
|
||||
@@ -207,9 +207,9 @@ export const useOfflineStore = create<OfflineStore>()(
|
||||
get().updateMessageStatus(msg.id, 'sending');
|
||||
|
||||
try {
|
||||
// Import gateway client dynamically to avoid circular dependency
|
||||
const { getGatewayClient } = await import('../lib/gateway-client');
|
||||
const client = getGatewayClient();
|
||||
// Use connected client from connectionStore (supports both GatewayClient and KernelClient)
|
||||
const { getClient } = await import('./connectionStore');
|
||||
const client = getClient();
|
||||
|
||||
await client.chat(msg.content, {
|
||||
sessionKey: msg.sessionKey,
|
||||
|
||||
Reference in New Issue
Block a user