mirror of
https://git.openapi.site/https://github.com/desirecore/config-center.git
synced 2026-02-28 13:18:22 +08:00
- 删除所有 Provider 顶层的 defaultTemperature / defaultTopP
- 为所有对话类模型添加 model 级别的推荐温度参数(基于官方文档)
- volcengine:将 extra 内温度参数提升到模型顶层正式字段
- presetDataVersion 7→8

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
113 lines
2.9 KiB
JSON
{
  "id": "provider-minimax-001",
  "provider": "minimax",
  "label": "MiniMax",
  "baseUrl": "https://api.minimax.io/v1",
  "apiFormat": "openai-completions",
  "apiKeyRef": "minimax",
  "apiKeyVerified": false,
  "enabled": false,
  "status": "unconfigured",
  "priceCurrency": "CNY",
  "services": ["chat", "fast", "reasoning"],
  "models": [
    {
      "modelName": "MiniMax-M2.5",
      "displayName": "MiniMax M2.5",
      "serviceType": ["chat", "reasoning"],
      "description": "MiniMax 最新旗舰模型,230B参数(10B激活)MoE架构,百万级上下文,支持深度推理和工具调用",
      "contextWindow": 1000000,
      "maxOutputTokens": 131072,
      "capabilities": [
        "chat",
        "reasoning",
        "code",
        "tool_use",
        "long_context"
      ],
      "inputPrice": 2.1,
      "outputPrice": 8.4,
      "defaultTemperature": 1,
      "defaultTopP": 1,
      "extra": {}
    },
    {
      "modelName": "MiniMax-M2.5-highspeed",
      "displayName": "MiniMax M2.5 高速版",
      "serviceType": ["fast"],
      "description": "MiniMax M2.5 高速推理版本(100TPS),适合低延迟场景",
      "contextWindow": 200000,
      "maxOutputTokens": 131072,
      "capabilities": [
        "chat",
        "reasoning",
        "code",
        "tool_use",
        "fast"
      ],
      "inputPrice": 2.1,
      "outputPrice": 16.8,
      "defaultTemperature": 1,
      "defaultTopP": 1,
      "extra": {}
    },
    {
      "modelName": "MiniMax-M2.1",
      "displayName": "MiniMax M2.1",
      "serviceType": ["chat"],
      "description": "MiniMax 上一代主力模型,200K 上下文,性价比优秀",
      "contextWindow": 204800,
      "maxOutputTokens": 131072,
      "capabilities": [
        "chat",
        "code",
        "tool_use",
        "reasoning"
      ],
      "inputPrice": 2.1,
      "outputPrice": 8.4,
      "defaultTemperature": 1,
      "defaultTopP": 1,
      "extra": {}
    },
    {
      "modelName": "MiniMax-M2.1-highspeed",
      "displayName": "MiniMax M2.1 高速版",
      "serviceType": ["fast"],
      "description": "MiniMax M2.1 高速推理版本,适合低延迟场景",
      "contextWindow": 204800,
      "maxOutputTokens": 131072,
      "capabilities": [
        "chat",
        "code",
        "tool_use",
        "fast"
      ],
      "inputPrice": 2.1,
      "outputPrice": 16.8,
      "defaultTemperature": 1,
      "defaultTopP": 1,
      "extra": {}
    },
    {
      "modelName": "MiniMax-Text-01",
      "displayName": "MiniMax Text 01",
      "serviceType": ["chat"],
      "description": "MiniMax 百万级长上下文文本模型,擅长长文档处理",
      "contextWindow": 1000000,
      "maxOutputTokens": 131072,
      "capabilities": [
        "chat",
        "reasoning",
        "code",
        "long_context"
      ],
      "inputPrice": 1.4,
      "outputPrice": 7.7,
      "defaultTemperature": 1,
      "defaultTopP": 1,
      "extra": {}
    }
  ]
}