Files
config-center/compute/providers/internal-testing.json
yi-ge 5276339aa5 feat(internal-testing): add MiniMax-M2.7-highspeed as provider default model
- 在内测专用 provider 新增模型 MiniMax-M2.7-highspeed (高速版)
- 插入到 models 数组最前面, 基于前端 fallback 逻辑 (app/utils/compute-helpers.ts:
  fallback.models.find(chat) ?? fallback.models[0]) 成为该 provider 的默认选项
- bump presetDataVersion 18 -> 19
2026-04-19 23:32:31 +08:00

333 lines
7.7 KiB
JSON
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
{
"id": "provider-internal-testing-001",
"provider": "internal-testing",
"label": "内测专用",
"baseUrl": "https://api.yizhioa.cn/v1",
"apiFormat": "anthropic-messages",
"apiKeyRef": "internal-testing",
"apiKeyVerified": false,
"enabled": false,
"status": "unconfigured",
"priceCurrency": "CNY",
"services": [
"chat"
],
"models": [
{
"modelName": "MiniMax-M2.7-highspeed",
"displayName": "MiniMax-M2.7 高速版",
"serviceType": [
"chat"
],
"description": "MiniMax M2.7 高速版,低延迟吞吐优化,200K 上下文",
"contextWindow": 200000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "glm-5.1",
"displayName": "GLM-5.1",
"serviceType": [
"chat"
],
"description": "智谱 GLM-5.1 新一代旗舰模型,编程与推理能力断档领先,204K 上下文",
"contextWindow": 204800,
"maxOutputTokens": 131072,
"capabilities": [
"chat",
"reasoning",
"code",
"deep_thinking",
"tool_use",
"agent"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.95,
"extra": {}
},
{
"modelName": "glm-5",
"displayName": "GLM-5",
"serviceType": [
"chat"
],
"description": "智谱 GLM-5 大语言模型",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "glm-5-turbo",
"displayName": "GLM-5-Turbo",
"serviceType": [
"chat"
],
"description": "智谱 GLM-5-Turbo 大语言模型,更快的推理速度",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "glm-4.7",
"displayName": "GLM-4.7",
"serviceType": [
"chat"
],
"description": "智谱 GLM-4.7 大语言模型",
"contextWindow": 200000,
"maxOutputTokens": 128000,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "MiniMax-M2.7",
"displayName": "MiniMax-M2.7",
"serviceType": [
"chat"
],
"description": "MiniMax M2.7 大语言模型,增强编码与推理能力,200K 上下文",
"contextWindow": 200000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "MiniMax-M2.5",
"displayName": "MiniMax-M2.5",
"serviceType": [
"chat"
],
"description": "MiniMax M2.5 大语言模型",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "kimi-k2.6-code-preview",
"displayName": "Kimi K2.6 Code Preview",
"serviceType": [
"chat"
],
"description": "月之暗面 Kimi K2.6 Code Preview,万亿参数 MoE 架构,32B 激活,256K 上下文,代码与智能体能力增强",
"contextWindow": 256000,
"maxOutputTokens": 16384,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "kimi-k2.5",
"displayName": "Kimi-2.5",
"serviceType": [
"chat"
],
"description": "月之暗面 Kimi-2.5 大语言模型",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "qwen3.6-plus",
"displayName": "Qwen3.6 Plus",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.6 Plus,原生 1M 上下文,Hybrid MoE 架构,内置 CoT 推理",
"contextWindow": 1000000,
"maxOutputTokens": 65536,
"capabilities": [
"chat",
"reasoning",
"code",
"vision",
"tool_use",
"agent",
"deep_thinking"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "qwen3.5-plus",
"displayName": "Qwen3.5 Plus",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.5 Plus",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "qwen3.5-35b-a3b",
"displayName": "Qwen3.5 35B-A3B",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.5 35B-A3B MoE 模型,35B 总参/3B 激活,262K 上下文",
"contextWindow": 262144,
"maxOutputTokens": 65536,
"capabilities": [
"chat",
"reasoning",
"code",
"vision",
"tool_use"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "qwen3.5-27b",
"displayName": "Qwen3.5 27B",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.5 27B Dense 模型,262K 上下文",
"contextWindow": 262144,
"maxOutputTokens": 65536,
"capabilities": [
"chat",
"reasoning",
"code",
"vision",
"tool_use"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "qwen3-max-2026-01-23",
"displayName": "Qwen3 Max",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3 Max (2026-01-23)",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "doubao-seed-2-0-code-preview-260215",
"displayName": "豆包 Seed 2.0 Code Preview",
"serviceType": [
"chat"
],
"description": "字节跳动豆包 Seed 2.0 Code Preview (260215)",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
}
]
}