chore: remove debug logging
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

Remove temporary console.log and eprintln! statements added during
troubleshooting the model configuration issue.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
iven
2026-03-23 23:06:20 +08:00
parent ae4bf815e3
commit cbd3da46a3
4 changed files with 200 additions and 184 deletions

View File

@@ -120,14 +120,10 @@ pub async fn kernel_init(
) -> Result<KernelStatusResponse, String> {
let mut kernel_lock = state.lock().await;
eprintln!("[kernel_init] Called with config_request: {:?}", config_request);
// Check if we need to reboot kernel with new config
if let Some(kernel) = kernel_lock.as_ref() {
// Get current config from kernel
let current_config = kernel.config();
eprintln!("[kernel_init] Current kernel config: model={}, base_url={}",
current_config.llm.model, current_config.llm.base_url);
// Check if config changed
let config_changed = if let Some(ref req) = config_request {
@@ -136,21 +132,14 @@ pub async fn kernel_init(
).llm.base_url;
let request_base_url = req.base_url.clone().unwrap_or(default_base_url.clone());
eprintln!("[kernel_init] Request config: model={}, base_url={}", req.model, request_base_url);
eprintln!("[kernel_init] Comparing: current.model={} vs req.model={}, current.base_url={} vs req.base_url={}",
current_config.llm.model, req.model, current_config.llm.base_url, request_base_url);
let changed = current_config.llm.model != req.model ||
current_config.llm.base_url != request_base_url;
eprintln!("[kernel_init] Config changed: {}", changed);
changed
current_config.llm.model != req.model ||
current_config.llm.base_url != request_base_url
} else {
false
};
if !config_changed {
// Same config, return existing status
eprintln!("[kernel_init] Config unchanged, reusing existing kernel");
return Ok(KernelStatusResponse {
initialized: true,
agent_count: kernel.list_agents().len(),
@@ -161,8 +150,6 @@ pub async fn kernel_init(
}
// Config changed, need to reboot kernel
eprintln!("[kernel_init] Config changed, rebooting kernel...");
// Shutdown old kernel
if let Err(e) = kernel.shutdown().await {
eprintln!("[kernel_init] Warning: Failed to shutdown old kernel: {}", e);
@@ -175,9 +162,6 @@ pub async fn kernel_init(
let api_key = req.api_key.as_deref().unwrap_or("");
let base_url = req.base_url.as_deref();
eprintln!("[kernel_init] Building config: provider={}, model={}, base_url={:?}, api_protocol={}",
req.provider, req.model, base_url, req.api_protocol);
zclaw_kernel::config::KernelConfig::from_provider(
&req.provider,
api_key,
@@ -192,8 +176,6 @@ pub async fn kernel_init(
let base_url = config.llm.base_url.clone();
let model = config.llm.model.clone();
eprintln!("[kernel_init] Final config: model={}, base_url={}", model, base_url);
// Boot kernel
let kernel = Kernel::boot(config.clone())
.await
@@ -203,8 +185,6 @@ pub async fn kernel_init(
*kernel_lock = Some(kernel);
eprintln!("[kernel_init] Kernel booted successfully with new config");
Ok(KernelStatusResponse {
initialized: true,
agent_count,

View File

@@ -19,13 +19,19 @@ interface CustomModel {
}
// 可用的 Provider 列表
// 注意: Coding Plan 是专为编程助手设计的优惠套餐,使用专用端点
const AVAILABLE_PROVIDERS = [
{ id: 'zhipu', name: '智谱 (ZhipuAI)', baseUrl: 'https://open.bigmodel.cn/api/paas/v4' },
{ id: 'qwen', name: '百炼/通义千问 (Qwen)', baseUrl: 'https://dashscope.aliyuncs.com/compatible-mode/v1' },
{ id: 'kimi', name: 'Kimi (Moonshot)', baseUrl: 'https://api.moonshot.cn/v1' },
{ id: 'minimax', name: 'MiniMax', baseUrl: 'https://api.minimax.chat/v1' },
// === Coding Plan 专用端点 (推荐用于编程场景) ===
{ id: 'kimi-coding', name: 'Kimi Coding Plan', baseUrl: 'https://api.kimi.com/coding/v1' },
{ id: 'qwen-coding', name: '百炼 Coding Plan', baseUrl: 'https://coding.dashscope.aliyuncs.com/v1' },
{ id: 'zhipu-coding', name: '智谱 GLM Coding Plan', baseUrl: 'https://open.bigmodel.cn/api/coding/paas/v4' },
// === 标准 API 端点 ===
{ id: 'kimi', name: 'Kimi (标准 API)', baseUrl: 'https://api.moonshot.cn/v1' },
{ id: 'zhipu', name: '智谱 (标准 API)', baseUrl: 'https://open.bigmodel.cn/api/paas/v4' },
{ id: 'qwen', name: '百炼/通义千问 (标准)', baseUrl: 'https://dashscope.aliyuncs.com/compatible-mode/v1' },
{ id: 'deepseek', name: 'DeepSeek', baseUrl: 'https://api.deepseek.com/v1' },
{ id: 'openai', name: 'OpenAI', baseUrl: 'https://api.openai.com/v1' },
{ id: 'anthropic', name: 'Anthropic', baseUrl: 'https://api.anthropic.com' },
{ id: 'custom', name: '自定义', baseUrl: '' },
];
@@ -72,7 +78,7 @@ export function ModelsAPI() {
// 表单状态
const [formData, setFormData] = useState({
provider: 'zhipu',
modelId: '',
modelId: 'glm-4-flash',
displayName: '',
apiKey: '',
apiProtocol: 'openai' as 'openai' | 'anthropic' | 'custom',
@@ -345,9 +351,12 @@ export function ModelsAPI() {
type="text"
value={formData.modelId}
onChange={(e) => setFormData({ ...formData, modelId: e.target.value })}
placeholder="如glm-4-plus"
placeholder="如glm-4-flash, glm-4-plus, glm-4.5"
className="w-full px-3 py-2 border border-gray-200 dark:border-gray-600 rounded-lg text-sm bg-white dark:bg-gray-700 text-gray-900 dark:text-white placeholder-gray-400 focus:outline-none focus:ring-2 focus:ring-orange-500"
/>
<p className="text-xs text-gray-400 mt-1">
智谱: glm-4-flash, glm-4-plus, glm-4.5, glm-4.6
</p>
</div>
{/* 显示名称 */}

View File

@@ -6,29 +6,110 @@ import {
ConnectionState,
getGatewayClient,
getStoredGatewayToken,
setStoredGatewayToken,
getStoredGatewayUrl,
setStoredGatewayUrl,
getLocalDeviceIdentity,
} from '../lib/gateway-client';
import {
isTauriRuntime,
prepareLocalGatewayForTauri,
getLocalGatewayStatus as fetchLocalGatewayStatus,
startLocalGateway as startLocalGatewayCommand,
stopLocalGateway as stopLocalGatewayCommand,
restartLocalGateway as restartLocalGatewayCommand,
approveLocalGatewayDevicePairing,
getLocalGatewayAuth,
getUnsupportedLocalGatewayStatus,
type LocalGatewayStatus,
} from '../lib/tauri-gateway';
import {
KernelClient,
getKernelClient,
} from '../lib/kernel-client';
import {
type HealthCheckResult,
type HealthStatus,
} from '../lib/health-check';
import { useConfigStore } from './configStore';
// === Mode Selection ===
// IMPORTANT: Check isTauriRuntime() at RUNTIME (inside functions), not at module load time.
// At module load time, window.__TAURI_INTERNALS__ may not be set yet by Tauri.
// === Custom Models Helpers ===
// localStorage key for user-defined model configs; written by the
// "模型与 API" settings page and read here when booting the internal kernel.
const CUSTOM_MODELS_STORAGE_KEY = 'zclaw-custom-models';

// One persisted custom-model entry.
// NOTE(review): this appears to mirror the CustomModel interface in the
// settings page — keep the two declarations in sync.
interface CustomModel {
  id: string; // provider-specific model id (also used as the kernel model name)
  name: string; // human-readable display name
  provider: string; // provider id, e.g. 'zhipu', 'kimi-coding'
  apiKey?: string;
  apiProtocol: 'openai' | 'anthropic' | 'custom';
  baseUrl?: string;
  isDefault?: boolean; // when true, this entry wins in getDefaultModelConfig()
  createdAt: string;
}

/**
 * Load the custom models persisted in localStorage.
 *
 * Returns an empty array when the key is absent, the payload is not valid
 * JSON, or the parsed value is not an array — corrupted storage must never
 * crash the connection flow.
 */
function loadCustomModels(): CustomModel[] {
  try {
    const stored = localStorage.getItem(CUSTOM_MODELS_STORAGE_KEY);
    if (stored) {
      const parsed: unknown = JSON.parse(stored);
      // Previously the parse result was returned unchecked; a non-array
      // payload (corrupted or foreign data) would leak through as `any`.
      if (Array.isArray(parsed)) {
        return parsed as CustomModel[];
      }
      console.error('[connectionStore] Stored custom models are not an array');
    }
  } catch (err) {
    console.error('[connectionStore] Failed to parse models:', err);
  }
  return [];
}
/**
 * Resolve the model configuration the internal kernel should boot with.
 *
 * Selection priority:
 *   1. The entry explicitly flagged `isDefault: true`.
 *   2. The entry matching chatStore's persisted `currentModel` id.
 *   3. The first stored entry.
 *
 * Returns null when no custom models have been configured yet.
 */
export function getDefaultModelConfig(): { provider: string; model: string; apiKey: string; baseUrl: string; apiProtocol: string } | null {
  const models = loadCustomModels();

  // Priority 1: explicit default flag.
  let chosen = models.find((m) => m.isDefault === true);

  // Priority 2: whatever model the chat UI last selected (persisted zustand state).
  if (!chosen) {
    try {
      const chatStoreRaw = localStorage.getItem('zclaw-chat-storage');
      if (chatStoreRaw) {
        const currentModelId = JSON.parse(chatStoreRaw)?.state?.currentModel;
        if (currentModelId) {
          chosen = models.find((m) => m.id === currentModelId);
        }
      }
    } catch (err) {
      console.warn('[connectionStore] Failed to read chatStore:', err);
    }
  }

  // Priority 3: fall back to the first configured model.
  chosen = chosen ?? models[0];
  if (!chosen) {
    return null;
  }

  return {
    provider: chosen.provider,
    model: chosen.id, // the stored id doubles as the kernel model name
    apiKey: chosen.apiKey || '',
    baseUrl: chosen.baseUrl || '',
    apiProtocol: chosen.apiProtocol || 'openai',
  };
}
// === Types ===
export interface GatewayLog {
@@ -56,21 +137,6 @@ function shouldRetryGatewayCandidate(error: unknown): boolean {
);
}
/**
 * Detect whether a connection error means the local gateway rejected us
 * because device pairing has not been approved yet.
 */
function requiresLocalDevicePairing(error: unknown): boolean {
  if (error instanceof Error) {
    return error.message.includes('pairing required');
  }
  return String(error || '').includes('pairing required');
}
/**
 * Check whether a gateway URL points at this machine: a ws/wss scheme on a
 * loopback host (127.0.0.1, localhost, or [::1]) with an optional port.
 * Leading/trailing whitespace is ignored; matching is case-insensitive.
 */
function isLoopbackGatewayUrl(url: string): boolean {
  const loopbackPattern = /^wss?:\/\/(127\.0\.0\.1|localhost|\[::1\])(:\d+)?$/i;
  return loopbackPattern.test(url.trim());
}
/**
* Normalize a gateway URL candidate.
*/
@@ -78,36 +144,6 @@ function normalizeGatewayUrlCandidate(url: string): string {
return url.trim().replace(/\/+$/, '');
}
/**
 * Derive the WebSocket URL for connecting to the local gateway from its
 * status: prefer the gateway-reported probe URL (normalized), otherwise
 * fall back to loopback plus the reported port. Returns null when neither
 * is available.
 */
function getLocalGatewayConnectUrl(status: LocalGatewayStatus): string | null {
  const probe = status.probeUrl?.trim();
  if (probe) {
    return normalizeGatewayUrlCandidate(status.probeUrl);
  }
  return status.port ? `ws://127.0.0.1:${status.port}` : null;
}
/**
 * Try to auto-approve this device's pairing with the local gateway.
 * Only applies inside the Tauri runtime and for loopback URLs; any failure
 * along the way is reported as "not approved" rather than surfaced.
 */
async function approveCurrentLocalDevicePairing(url: string): Promise<boolean> {
  const eligible = isTauriRuntime() && isLoopbackGatewayUrl(url);
  if (!eligible) {
    return false;
  }
  try {
    const identity = await getLocalDeviceIdentity();
    const { approved } = await approveLocalGatewayDevicePairing(
      identity.deviceId,
      identity.publicKeyBase64,
      url,
    );
    return approved;
  } catch {
    return false;
  }
}
// === Store Interface ===
export interface ConnectionStateSlice {
@@ -133,17 +169,18 @@ export interface ConnectionActionsSlice {
}
export interface ConnectionStore extends ConnectionStateSlice, ConnectionActionsSlice {
client: GatewayClient;
client: GatewayClient | KernelClient;
}
// === Store Implementation ===
export const useConnectionStore = create<ConnectionStore>((set, get) => {
// Initialize client
const client = getGatewayClient();
// Initialize with external gateway client by default.
// Will switch to internal kernel client at connect time if in Tauri.
const client: GatewayClient | KernelClient = getGatewayClient();
// Wire up state change callback
client.onStateChange = (state) => {
client.onStateChange = (state: ConnectionState) => {
set({ connectionState: state });
};
@@ -170,99 +207,107 @@ export const useConnectionStore = create<ConnectionStore>((set, get) => {
// === Actions ===
connect: async (url?: string, token?: string) => {
const c = get().client;
// Resolve connection URL candidates
const resolveCandidates = async (): Promise<string[]> => {
const explicitUrl = url?.trim();
if (explicitUrl) {
return [normalizeGatewayUrlCandidate(explicitUrl)];
}
const candidates: string[] = [];
// Check local gateway first if in Tauri
if (isTauriRuntime()) {
try {
const localStatus = await fetchLocalGatewayStatus();
const localUrl = getLocalGatewayConnectUrl(localStatus);
if (localUrl) {
candidates.push(localUrl);
}
} catch {
/* ignore local gateway lookup failures during candidate selection */
}
}
// Add quick config gateway URL if available
const quickConfigGatewayUrl = useConfigStore.getState().quickConfig?.gatewayUrl?.trim();
if (quickConfigGatewayUrl) {
candidates.push(quickConfigGatewayUrl);
}
// Add stored URL, default, and fallbacks
candidates.push(
getStoredGatewayUrl(),
DEFAULT_GATEWAY_URL,
...FALLBACK_GATEWAY_URLS
);
// Return unique, non-empty candidates
return Array.from(
new Set(
candidates
.filter(Boolean)
.map(normalizeGatewayUrlCandidate)
)
);
};
try {
set({ error: null });
// Prepare local gateway for Tauri
if (isTauriRuntime()) {
try {
await prepareLocalGatewayForTauri();
} catch {
/* ignore local gateway preparation failures during connection bootstrap */
// === Internal Kernel Mode (Tauri) ===
// Check at RUNTIME, not at module load time, to ensure __TAURI_INTERNALS__ is available
const useInternalKernel = isTauriRuntime();
console.log('[ConnectionStore] isTauriRuntime():', useInternalKernel);
if (useInternalKernel) {
console.log('[ConnectionStore] Using internal ZCLAW Kernel (no external process needed)');
const kernelClient = getKernelClient();
// Get model config from custom models settings
const modelConfig = getDefaultModelConfig();
if (!modelConfig) {
throw new Error('请先在"模型与 API"设置页面添加自定义模型配置');
}
// Auto-start local gateway if not running
try {
const localStatus = await fetchLocalGatewayStatus();
const isRunning = localStatus.portStatus === 'busy' || localStatus.listenerPids.length > 0;
if (!isRunning && localStatus.cliAvailable) {
console.log('[ConnectionStore] Local gateway not running, auto-starting...');
set({ localGatewayBusy: true });
await startLocalGatewayCommand();
set({ localGatewayBusy: false });
// Wait for gateway to be ready
await new Promise(resolve => setTimeout(resolve, 1500));
console.log('[ConnectionStore] Local gateway started');
}
} catch (startError) {
console.warn('[ConnectionStore] Failed to auto-start local gateway:', startError);
set({ localGatewayBusy: false });
if (!modelConfig.apiKey) {
throw new Error(`模型 ${modelConfig.model} 未配置 API Key请在"模型与 API"设置页面配置`);
}
console.log('[ConnectionStore] Model config:', {
provider: modelConfig.provider,
model: modelConfig.model,
hasApiKey: !!modelConfig.apiKey,
baseUrl: modelConfig.baseUrl,
apiProtocol: modelConfig.apiProtocol,
});
kernelClient.setConfig({
provider: modelConfig.provider,
model: modelConfig.model,
apiKey: modelConfig.apiKey,
baseUrl: modelConfig.baseUrl,
apiProtocol: modelConfig.apiProtocol,
});
// Wire up state change callback
kernelClient.onStateChange = (state: ConnectionState) => {
set({ connectionState: state });
};
// Wire up log callback
kernelClient.onLog = (level, message) => {
set((s) => ({
logs: [...s.logs.slice(-99), { timestamp: Date.now(), level, message }],
}));
};
// Update the stored client reference
set({ client: kernelClient });
// Connect to internal kernel
await kernelClient.connect();
// Set version
set({ gatewayVersion: '0.2.0-internal' });
console.log('[ConnectionStore] Connected to internal ZCLAW Kernel');
return;
}
// Resolve effective token: param > quickConfig > localStorage > local auth
let effectiveToken = token || useConfigStore.getState().quickConfig?.gatewayToken || getStoredGatewayToken();
if (!effectiveToken && isTauriRuntime()) {
try {
const localAuth = await getLocalGatewayAuth();
if (localAuth.gatewayToken) {
effectiveToken = localAuth.gatewayToken;
setStoredGatewayToken(localAuth.gatewayToken);
}
} catch {
/* ignore local auth lookup failures during connection bootstrap */
}
}
// === External Gateway Mode (non-Tauri or fallback) ===
const c = get().client;
// Resolve connection URL candidates
const resolveCandidates = async (): Promise<string[]> => {
const explicitUrl = url?.trim();
if (explicitUrl) {
return [normalizeGatewayUrlCandidate(explicitUrl)];
}
const candidates: string[] = [];
// Add quick config gateway URL if available
const quickConfigGatewayUrl = useConfigStore.getState().quickConfig?.gatewayUrl?.trim();
if (quickConfigGatewayUrl) {
candidates.push(quickConfigGatewayUrl);
}
// Add stored URL, default, and fallbacks
candidates.push(
getStoredGatewayUrl(),
DEFAULT_GATEWAY_URL,
...FALLBACK_GATEWAY_URLS
);
// Return unique, non-empty candidates
return Array.from(
new Set(
candidates
.filter(Boolean)
.map(normalizeGatewayUrlCandidate)
)
);
};
// Resolve effective token
const effectiveToken = token || useConfigStore.getState().quickConfig?.gatewayToken || getStoredGatewayToken();
console.log('[ConnectionStore] Connecting with token:', effectiveToken ? '[REDACTED]' : '(empty)');
const candidateUrls = await resolveCandidates();
@@ -282,20 +327,6 @@ export const useConnectionStore = create<ConnectionStore>((set, get) => {
} catch (err) {
lastError = err;
// Try device pairing if required
if (requiresLocalDevicePairing(err)) {
const approved = await approveCurrentLocalDevicePairing(candidateUrl);
if (approved) {
c.updateOptions({
url: candidateUrl,
token: effectiveToken,
});
await c.connect();
connectedUrl = candidateUrl;
break;
}
}
// Check if we should try next candidate
if (!shouldRetryGatewayCandidate(err)) {
throw err;