From b0c3abcc19f1be8b65f564a6bd8361ff138dc6f3 Mon Sep 17 00:00:00 2001
From: LovelyGuYiMeng <76251800+LovelyGuYiMeng@users.noreply.github.com>
Date: Fri, 27 Sep 2024 21:12:51 +0800
Subject: [PATCH] =?UTF-8?q?=F0=9F=92=84=20style:=20add=20zhipu=20glm-4-fla?=
 =?UTF-8?q?shx=20model=20(#4173)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Update zhipu.ts

* Update siliconcloud.ts

* Update siliconcloud.ts

* Update zhipu.ts

* Update siliconcloud.ts
---
 src/config/modelProviders/siliconcloud.ts | 42 +++++++++++------------
 src/config/modelProviders/zhipu.ts        | 15 +++++++-
 2 files changed, 35 insertions(+), 22 deletions(-)

diff --git a/src/config/modelProviders/siliconcloud.ts b/src/config/modelProviders/siliconcloud.ts
index 3e8709c64243..ade298e1b4ad 100644
--- a/src/config/modelProviders/siliconcloud.ts
+++ b/src/config/modelProviders/siliconcloud.ts
@@ -86,26 +86,26 @@ const SiliconCloud: ModelProviderCard = {
       tokens: 32_768,
     },
     {
-      description: 'Yi-1.5 9B 支持16K Tokens, 提供高效、流畅的语言生成能力。',
-      displayName: 'Yi-1.5 9B',
-      id: '01-ai/Yi-1.5-9B-Chat-16K',
+      description: 'InternLM2.5 提供多场景下的智能对话解决方案。',
+      displayName: 'Internlm 2.5 7B',
+      id: 'internlm/internlm2_5-7b-chat',
       pricing: {
         currency: 'CNY',
         input: 0,
         output: 0,
       },
-      tokens: 16_384,
+      tokens: 32_768,
     },
     {
-      description: 'Yi-1.5 34B, 以丰富的训练样本在行业应用中提供优越表现。',
-      displayName: 'Yi-1.5 34B',
-      id: '01-ai/Yi-1.5-34B-Chat-16K',
+      description: '创新的开源模型InternLM2.5,通过大规模的参数提高了对话智能。',
+      displayName: 'Internlm 2.5 20B',
+      id: 'internlm/internlm2_5-20b-chat',
       pricing: {
         currency: 'CNY',
-        input: 1.26,
-        output: 1.26,
+        input: 1,
+        output: 1,
       },
-      tokens: 16_384,
+      tokens: 32_768,
     },
     {
       description: 'GLM-4 9B 开放源码版本,为会话应用提供优化后的对话体验。',
@@ -119,26 +119,26 @@ const SiliconCloud: ModelProviderCard = {
       tokens: 32_768,
     },
     {
-      description: 'InternLM2.5 提供多场景下的智能对话解决方案。',
-      displayName: 'Internlm 2.5 7B',
-      id: 'internlm/internlm2_5-7b-chat',
+      description: 'Yi-1.5 9B 支持16K Tokens, 提供高效、流畅的语言生成能力。',
+      displayName: 'Yi-1.5 9B',
+      id: '01-ai/Yi-1.5-9B-Chat-16K',
       pricing: {
         currency: 'CNY',
         input: 0,
         output: 0,
       },
-      tokens: 32_768,
+      tokens: 16_384,
     },
     {
-      description: '创新的开源模型InternLM2.5,通过大规模的参数提高了对话智能。',
-      displayName: 'Internlm 2.5 20B',
-      id: 'internlm/internlm2_5-20b-chat',
+      description: 'Yi-1.5 34B, 以丰富的训练样本在行业应用中提供优越表现。',
+      displayName: 'Yi-1.5 34B',
+      id: '01-ai/Yi-1.5-34B-Chat-16K',
       pricing: {
         currency: 'CNY',
-        input: 1,
-        output: 1,
+        input: 1.26,
+        output: 1.26,
       },
-      tokens: 32_768,
+      tokens: 16_384,
     },
     {
       description: 'Gemma 2 是Google轻量化的开源文本模型系列。',
@@ -201,7 +201,7 @@ const SiliconCloud: ModelProviderCard = {
       tokens: 32_768,
     },
   ],
-  checkModel: 'Qwen/Qwen2-1.5B-Instruct',
+  checkModel: 'Qwen/Qwen2.5-7B-Instruct',
   description: 'SiliconCloud,基于优秀开源基础模型的高性价比 GenAI 云服务',
   id: 'siliconcloud',
   modelList: { showModelFetcher: true },
diff --git a/src/config/modelProviders/zhipu.ts b/src/config/modelProviders/zhipu.ts
index 1abe5539d700..887e10d9d8ed 100644
--- a/src/config/modelProviders/zhipu.ts
+++ b/src/config/modelProviders/zhipu.ts
@@ -97,7 +97,20 @@ const ZhiPu: ModelProviderCard = {
       tokens: 1_024_000,
     },
     {
-      description: 'GLM-4-Flash 是处理简单任务的理想选择,速度最快且价格最优惠。',
+      description: 'GLM-4-FlashX 是Flash的增强版本,超快推理速度。',
+      displayName: 'GLM-4-FlashX',
+      enabled: true,
+      functionCall: true,
+      id: 'glm-4-flashx',
+      pricing: {
+        currency: 'CNY',
+        input: 0.1,
+        output: 0.1,
+      },
+      tokens: 128_000,
+    },
+    {
+      description: 'GLM-4-Flash 是处理简单任务的理想选择,速度最快且免费。',
       displayName: 'GLM-4-Flash',
       enabled: true,
       functionCall: true,