Files
config-center/compute/providers/internal-testing.json
yi-ge ca99ea272f feat(internal-testing): restore MiniMax-M2.5
- 只移除 MiniMax-M2.7，保留 MiniMax-M2.5 作为备选
- MiniMax-M2.5 插入在 kimi-k2.5 之后 (与其他非-highspeed MiniMax 系列一致的位置)
- MiniMax-M2.7-highspeed 仍然在 models[0]，作为 provider 默认模型
- tombstones: ["MiniMax-M2.7", "MiniMax-M2.5"] -> ["MiniMax-M2.7"]
- bump presetDataVersion 23 -> 24
2026-04-20 11:10:37 +08:00

316 lines
7.3 KiB
JSON
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
{
"id": "provider-internal-testing-001",
"provider": "internal-testing",
"label": "内测专用",
"baseUrl": "https://api.yizhioa.cn/v1",
"apiFormat": "anthropic-messages",
"apiKeyRef": "internal-testing",
"apiKeyVerified": false,
"enabled": false,
"status": "unconfigured",
"priceCurrency": "CNY",
"services": [
"chat"
],
"tombstones": [
"MiniMax-M2.7"
],
"models": [
{
"modelName": "MiniMax-M2.7-highspeed",
"displayName": "MiniMax-M2.7 高速版",
"serviceType": [
"chat"
],
"description": "MiniMax M2.7 高速版，低延迟吞吐优化，200K 上下文",
"contextWindow": 200000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "glm-5.1",
"displayName": "GLM-5.1",
"serviceType": [
"chat"
],
"description": "智谱 GLM-5.1 新一代旗舰模型，编程与推理能力断档领先，204K 上下文",
"contextWindow": 204800,
"maxOutputTokens": 131072,
"capabilities": [
"chat",
"reasoning",
"code",
"deep_thinking",
"tool_use",
"agent"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.95,
"extra": {}
},
{
"modelName": "glm-5",
"displayName": "GLM-5",
"serviceType": [
"chat"
],
"description": "智谱 GLM-5 大语言模型",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "glm-5-turbo",
"displayName": "GLM-5-Turbo",
"serviceType": [
"chat"
],
"description": "智谱 GLM-5-Turbo 大语言模型,更快的推理速度",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "glm-4.7",
"displayName": "GLM-4.7",
"serviceType": [
"chat"
],
"description": "智谱 GLM-4.7 大语言模型",
"contextWindow": 200000,
"maxOutputTokens": 128000,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "kimi-k2.6-code-preview",
"displayName": "Kimi K2.6 Code Preview",
"serviceType": [
"chat"
],
"description": "月之暗面 Kimi K2.6 Code Preview，万亿参数 MoE 架构，32B 激活，256K 上下文,代码与智能体能力增强",
"contextWindow": 256000,
"maxOutputTokens": 16384,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "kimi-k2.5",
"displayName": "Kimi K2.5",
"serviceType": [
"chat"
],
"description": "月之暗面 Kimi K2.5 大语言模型",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "MiniMax-M2.5",
"displayName": "MiniMax-M2.5",
"serviceType": [
"chat"
],
"description": "MiniMax M2.5 大语言模型",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "qwen3.6-plus",
"displayName": "Qwen3.6 Plus",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.6 Plus，原生 1M 上下文，Hybrid MoE 架构,内置 CoT 推理",
"contextWindow": 1000000,
"maxOutputTokens": 65536,
"capabilities": [
"chat",
"reasoning",
"code",
"vision",
"tool_use",
"agent",
"deep_thinking"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "qwen3.5-plus",
"displayName": "Qwen3.5 Plus",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.5 Plus",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "qwen3.5-35b-a3b",
"displayName": "Qwen3.5 35B-A3B",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.5 35B-A3B MoE 模型，35B 总参/3B 激活，262K 上下文",
"contextWindow": 262144,
"maxOutputTokens": 65536,
"capabilities": [
"chat",
"reasoning",
"code",
"vision",
"tool_use"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "qwen3.5-27b",
"displayName": "Qwen3.5 27B",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3.5 27B Dense 模型，262K 上下文",
"contextWindow": 262144,
"maxOutputTokens": 65536,
"capabilities": [
"chat",
"reasoning",
"code",
"vision",
"tool_use"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 0.7,
"extra": {}
},
{
"modelName": "qwen3-max-2026-01-23",
"displayName": "Qwen3 Max",
"serviceType": [
"chat"
],
"description": "阿里通义千问 Qwen3 Max (2026-01-23)",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code",
"tool_use",
"agent",
"vision",
"long_context"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
},
{
"modelName": "doubao-seed-2-0-code-preview-260215",
"displayName": "豆包 Seed 2.0 Code Preview",
"serviceType": [
"chat"
],
"description": "字节跳动豆包 Seed 2.0 Code Preview (260215)",
"contextWindow": 128000,
"maxOutputTokens": 8192,
"capabilities": [
"chat",
"reasoning",
"code"
],
"inputPrice": 0,
"outputPrice": 0,
"defaultTemperature": 1,
"extra": {}
}
]
}