Files
zclaw_openfang/desktop/tests/config-parser.test.ts
Commit 3ff08faa56 release(v0.2.0): streaming, MCP protocol, Browser Hand, security enhancements
## Major Features

### Streaming Response System
- Implement LlmDriver trait with `stream()` method returning async Stream
- Add SSE parsing for Anthropic and OpenAI API streaming
- Integrate Tauri event system for frontend streaming (`stream:chunk` events)
- Add StreamChunk types: Delta, ToolStart, ToolEnd, Complete, Error

### MCP Protocol Implementation
- Add MCP JSON-RPC 2.0 types (mcp_types.rs)
- Implement stdio-based MCP transport (mcp_transport.rs)
- Support tool discovery, execution, and resource operations

### Browser Hand Implementation
- Complete browser automation with Playwright-style actions
- Support Navigate, Click, Type, Scrape, Screenshot, Wait actions
- Add educational Hands: Whiteboard, Slideshow, Speech, Quiz

### Security Enhancements
- Implement command whitelist/blacklist for shell_exec tool
- Add SSRF protection with private IP blocking
- Create security.toml configuration file

## Test Improvements
- Fix test import paths (security-utils, setup)
- Fix vi.mock hoisting issues with vi.hoisted()
- Update test expectations for validateUrl and sanitizeFilename
- Add getUnsupportedLocalGatewayStatus mock

## Documentation Updates
- Update architecture documentation
- Improve configuration reference
- Add quick-start guide updates

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-24 03:24:24 +08:00

263 lines
6.3 KiB
TypeScript

/**
* Config Parser Tests
*
* Tests for configuration parsing and validation.
*/
import { describe, it, expect } from 'vitest';
import {
configParser,
ConfigParseError,
ConfigValidationFailedError,
} from '../src/lib/config-parser';
import type { OpenFangConfig } from '../src/types/config';
describe('configParser', () => {
  // Shared well-formed TOML fixture exercised by the parse-oriented suites.
  const validToml = `
# Valid OpenFang configuration
[server]
host = "127.0.0.1"
port = 4200
websocket_port = 4200
websocket_path = "/ws"
[agent.defaults]
workspace = "~/.openfang/workspace"
default_model = "gpt-4"
[llm]
default_provider = "openai"
default_model = "gpt-4"
`;

  /**
   * Builds a fully-populated, type-checked configuration object.
   * Used wherever a known-good `OpenFangConfig` value is required.
   */
  const buildFullConfig = (): OpenFangConfig => ({
    server: {
      host: '127.0.0.1',
      port: 4200,
    },
    agent: {
      defaults: {
        workspace: '~/.openfang/workspace',
        default_model: 'gpt-4',
      },
    },
    llm: {
      default_provider: 'openai',
      default_model: 'gpt-4',
    },
  });

  describe('parseConfig', () => {
    it('should parse valid TOML configuration', () => {
      const parsed = configParser.parseConfig(validToml);

      expect(parsed).toBeDefined();
      expect(parsed.server).toEqual({
        host: '127.0.0.1',
        port: 4200,
        websocket_port: 4200,
        websocket_path: '/ws',
      });
      expect(parsed.agent).toBeDefined();
      expect(parsed.agent.defaults).toEqual({
        workspace: '~/.openfang/workspace',
        default_model: 'gpt-4',
      });
      expect(parsed.llm).toEqual({
        default_provider: 'openai',
        default_model: 'gpt-4',
      });
    });
  });

  describe('validateConfig', () => {
    it('should validate correct configuration', () => {
      const report = configParser.validateConfig(buildFullConfig());

      expect(report.valid).toBe(true);
      expect(report.errors).toHaveLength(0);
    });

    it('should detect missing required fields', () => {
      // Deliberately untyped: the object omits server.port entirely.
      const incomplete = {
        server: {
          host: '127.0.0.1',
          // missing port
        },
      };

      const report = configParser.validateConfig(incomplete);

      expect(report.valid).toBe(false);
      expect(report.errors.length).toBeGreaterThan(0);
    });

    it('should validate port range', () => {
      // Untyped literal: port 99999 is out of the valid TCP range.
      const outOfRange = {
        server: {
          host: '127.0.0.1',
          port: 99999, // invalid port
        },
        agent: {
          defaults: {
            workspace: '~/.openfang/workspace',
            default_model: 'gpt-4',
          },
        },
        llm: {
          default_provider: 'openai',
          default_model: 'gpt-4',
        },
      };

      const report = configParser.validateConfig(outOfRange);

      expect(report.valid).toBe(false);
      const portError = report.errors.find(e => e.path === 'server.port');
      expect(portError).toBeDefined();
    });

    it('should detect empty required fields', () => {
      // Several required string fields are present but empty.
      const withBlanks = {
        server: {
          host: '',
          port: 4200,
        },
        agent: {
          defaults: {
            workspace: '~/.openfang/workspace',
            default_model: '',
          },
        },
        llm: {
          default_provider: '',
          default_model: 'gpt-4',
        },
      };

      const report = configParser.validateConfig(withBlanks);

      expect(report.valid).toBe(false);
    });
  });

  describe('parseAndValidate', () => {
    it('should parse and validate valid configuration', () => {
      const parsed = configParser.parseAndValidate(validToml);

      expect(parsed).toBeDefined();
      expect(parsed.server.host).toBe('127.0.0.1');
    });

    it('should throw on invalid configuration', () => {
      const invalidToml = `
[server]
host = "127.0.0.1"
# missing port
`;

      expect(() => configParser.parseAndValidate(invalidToml)).toThrow(ConfigValidationFailedError);
    });
  });

  describe('stringifyConfig', () => {
    it('should stringify configuration to TOML', () => {
      const rendered = configParser.stringifyConfig(buildFullConfig());

      expect(rendered).toContain('host = "127.0.0.1"');
      expect(rendered).toContain('port = 4200');
    });
  });

  describe('extractMetadata', () => {
    it('should extract metadata from TOML content', () => {
      // Contains one unresolved ${API_KEY} environment placeholder.
      const content = `
[server]
host = "127.0.0.1"
port = 4200
[llm.providers]
api_key = "\${API_KEY}"
`;

      const metadata = configParser.extractMetadata(content, '/path/to/config.toml');

      expect(metadata.path).toBe('/path/to/config.toml');
      expect(metadata.name).toBe('config.toml');
      expect(metadata.envVars).toContain('API_KEY');
      expect(metadata.hasUnresolvedEnvVars).toBe(true);
    });

    it('should detect no env vars when none present', () => {
      const content = `
[server]
host = "127.0.0.1"
port = 4200
`;

      const metadata = configParser.extractMetadata(content, '/path/to/config.toml');

      expect(metadata.envVars).toEqual([]);
      expect(metadata.hasUnresolvedEnvVars).toBe(false);
    });
  });

  describe('mergeWithDefaults', () => {
    it('should merge partial config with defaults', () => {
      // Only the port is supplied; the host must come from defaults.
      const overrides = {
        server: {
          port: 3000,
        },
      };

      const merged = configParser.mergeWithDefaults(overrides);

      expect(merged.server?.port).toBe(3000);
      expect(merged.server?.host).toBe('127.0.0.1'); // from defaults
    });
  });

  describe('isOpenFangConfig', () => {
    it('should return true for valid config', () => {
      expect(configParser.isOpenFangConfig(buildFullConfig())).toBe(true);
    });

    it('should return false for invalid config', () => {
      expect(configParser.isOpenFangConfig(null)).toBe(false);
      expect(configParser.isOpenFangConfig({})).toBe(false);
    });
  });
});