fix: migrate glm-4-flash to glm-4-flash-250414 (model deprecated by Zhipu)
Some checks failed
CI / Lint & TypeCheck (push) Has been cancelled
CI / Unit Tests (push) Has been cancelled
CI / Build Frontend (push) Has been cancelled
CI / Rust Check (push) Has been cancelled
CI / Security Scan (push) Has been cancelled
CI / E2E Tests (push) Has been cancelled

Zhipu AI has deprecated glm-4-flash, causing 404 errors on all chat requests.
Updated all references:
- config: glm-4-flash → glm-4-flash-250414, added glm-z1-flash
- frontend: defaultModel, conversationStore, ChatArea fallback, ModelsAPI
This commit is contained in:
iven
2026-04-09 18:42:47 +08:00
parent 5f47e62a46
commit 1965fa5269
6 changed files with 18 additions and 11 deletions

View File

@@ -25,12 +25,19 @@ max_output_tokens = 4096
supports_streaming = true
[[llm.providers.models]]
id = "glm-4-flash"
alias = "GLM-4-Flash"
id = "glm-4-flash-250414"
alias = "GLM-4-Flash (免费)"
context_window = 128000
max_output_tokens = 4096
supports_streaming = true
[[llm.providers.models]]
id = "glm-z1-flash"
alias = "GLM-Z1-Flash (免费推理)"
context_window = 128000
max_output_tokens = 16384
supports_streaming = true
[[llm.providers.models]]
id = "glm-4v-plus"
alias = "GLM-4V-Plus (视觉)"

View File

@@ -129,7 +129,7 @@ retry_delay = "1s"
[llm.aliases]
# 智谱 GLM 模型 (使用正确的 API 模型 ID)
"glm-4-flash" = "zhipu/glm-4-flash"
"glm-4-flash" = "zhipu/glm-4-flash-250414"
"glm-4-plus" = "zhipu/glm-4-plus"
"glm-4.5" = "zhipu/glm-4.5"
# 其他模型

View File

@@ -85,7 +85,7 @@ export function ChatArea({ compact, onOpenDetail }: { compact?: boolean; onOpenD
}
// Fallback: provide common models when no backend is connected
return [
{ id: 'glm-4-flash', name: 'GLM-4 Flash', provider: 'zhipu' },
{ id: 'glm-4-flash-250414', name: 'GLM-4 Flash (免费)', provider: 'zhipu' },
{ id: 'gpt-4o', name: 'GPT-4o', provider: 'openai' },
{ id: 'gpt-4o-mini', name: 'GPT-4o Mini', provider: 'openai' },
{ id: 'deepseek-chat', name: 'DeepSeek V3', provider: 'deepseek' },
@@ -575,7 +575,7 @@ export function ChatArea({ compact, onOpenDetail }: { compact?: boolean; onOpenD
);
}
function MessageBubble({ message, setInput }: { message: Message; setInput: (text: string) => void }) {
function MessageBubble({ message, setInput, onRetry }: { message: Message; setInput: (text: string) => void; onRetry?: () => void }) {
if (message.role === 'tool') {
return null;
}

View File

@@ -188,7 +188,7 @@ export function ModelsAPI() {
// 表单状态
const [formData, setFormData] = useState({
provider: 'zhipu',
modelId: 'glm-4-flash',
modelId: 'glm-4-flash-250414',
displayName: '',
apiKey: '',
apiProtocol: 'openai' as 'openai' | 'anthropic' | 'custom',
@@ -678,11 +678,11 @@ export function ModelsAPI() {
type="text"
value={formData.modelId}
onChange={(e) => setFormData({ ...formData, modelId: e.target.value })}
placeholder="如glm-4-flash, glm-4-plus, glm-4.5"
placeholder="如glm-4-flash-250414, glm-4-plus, glm-4.7"
className="w-full px-3 py-2 border border-gray-200 dark:border-gray-600 rounded-lg text-sm bg-white dark:bg-gray-700 text-gray-900 dark:text-white placeholder-gray-400 focus:outline-none focus:ring-2 focus:ring-orange-500"
/>
<p className="text-xs text-gray-400 mt-1">
智谱: glm-4-flash(免费), glm-4-plus, glm-4.5, glm-4.6
智谱: glm-4-flash-250414(免费), glm-4-plus, glm-4.7, glm-z1-flash(免费推理)
</p>
</div>

View File

@@ -51,11 +51,11 @@ export const PROVIDER_DEFAULTS = {
},
zhipu: {
baseUrl: 'https://open.bigmodel.cn/api/paas/v4',
defaultModel: 'glm-4-flash',
defaultModel: 'glm-4-flash-250414',
},
zhipu_coding: {
baseUrl: 'https://open.bigmodel.cn/api/coding/paas/v4',
defaultModel: 'glm-4-flash',
defaultModel: 'glm-4-flash-250414',
},
kimi: {
baseUrl: 'https://api.moonshot.cn/v1',

View File

@@ -192,7 +192,7 @@ export const useConversationStore = create<ConversationState>()(
agents: [DEFAULT_AGENT],
currentAgent: DEFAULT_AGENT,
sessionKey: null,
currentModel: 'glm-4-flash',
currentModel: 'glm-4-flash-250414',
newConversation: (currentMessages: ChatMessage[]) => {
const state = get();