add gpt-5.1-codex-max support

This commit is contained in:
1eon
2025-12-05 21:11:53 +08:00
parent 93284c80ff
commit aa3bb3c65b
7 changed files with 23 additions and 10 deletions

View File

@@ -58,7 +58,7 @@ export function getModelReasoning(modelId) {
return null;
}
const reasoningLevel = model.reasoning.toLowerCase();
if (['low', 'medium', 'high', 'auto'].includes(reasoningLevel)) {
if (['low', 'medium', 'high', 'xhigh', 'auto'].includes(reasoningLevel)) {
return reasoningLevel;
}
return null;

View File

@@ -57,12 +57,19 @@
"provider": "openai"
},
{
"name": "GPT-5.1-Codex",
"name": "GPT-5.1 Codex",
"id": "gpt-5.1-codex",
"type": "openai",
"reasoning": "off",
"provider": "openai"
},
{
"name": "GPT-5.1 Codex Max",
"id": "gpt-5.1-codex-max",
"type": "openai",
"reasoning": "auto",
"provider": "openai"
},
{
"name": "GLM-4.6",
"id": "glm-4.6",

View File

@@ -312,7 +312,7 @@ async function handleDirectResponses(req, res) {
if (reasoningLevel === 'auto') {
// Auto mode: keep the original request's reasoning field unchanged
// If the original request has a reasoning field, keep it; otherwise don't add one
} else if (reasoningLevel && ['low', 'medium', 'high'].includes(reasoningLevel)) {
} else if (reasoningLevel && ['low', 'medium', 'high', 'xhigh'].includes(reasoningLevel)) {
modifiedRequest.reasoning = {
effort: reasoningLevel,
summary: 'auto'
@@ -463,11 +463,12 @@ async function handleDirectMessages(req, res) {
if (reasoningLevel === 'auto') {
// Auto mode: keep the original request's thinking field unchanged
// If the original request has a thinking field, keep it; otherwise don't add one
} else if (reasoningLevel && ['low', 'medium', 'high'].includes(reasoningLevel)) {
} else if (reasoningLevel && ['low', 'medium', 'high', 'xhigh'].includes(reasoningLevel)) {
const budgetTokens = {
'low': 4096,
'medium': 12288,
'high': 24576
'high': 24576,
'xhigh': 40960
};
modifiedRequest.thinking = {

View File

@@ -1,4 +1,8 @@
#!/bin/bash
echo "FACTORY_API_KEY 当前值是" $FACTORY_API_KEY
echo $FACTORY_API_KEY
echo "Reset FACTORY_API_KEY..."
export FACTORY_API_KEY=""
echo "Starting droid2api server..."
node server.js

View File

@@ -119,12 +119,13 @@ export function transformToAnthropic(openaiRequest) {
anthropicRequest.thinking = openaiRequest.thinking;
}
// If original request has no thinking field, don't add one
} else if (reasoningLevel && ['low', 'medium', 'high'].includes(reasoningLevel)) {
} else if (reasoningLevel && ['low', 'medium', 'high', 'xhigh'].includes(reasoningLevel)) {
// Specific level: override with model configuration
const budgetTokens = {
'low': 4096,
'medium': 12288,
'high': 24576
'high': 24576,
'xhigh': 40960
};
anthropicRequest.thinking = {
@@ -189,7 +190,7 @@ export function getAnthropicHeaders(authHeader, clientHeaders = {}, isStreaming
if (reasoningLevel === 'auto') {
// Auto mode: don't modify anthropic-beta header, preserve original
// betaValues remain unchanged from client headers
} else if (reasoningLevel && ['low', 'medium', 'high'].includes(reasoningLevel)) {
} else if (reasoningLevel && ['low', 'medium', 'high', 'xhigh'].includes(reasoningLevel)) {
// Add thinking beta if not already present
if (!betaValues.includes(thinkingBeta)) {
betaValues.push(thinkingBeta);

View File

@@ -44,7 +44,7 @@ export function transformToCommon(openaiRequest) {
if (reasoningLevel === 'auto') {
// Auto mode: preserve original request's reasoning_effort field exactly as-is
// If original request has reasoning_effort field, keep it; otherwise don't add one
} else if (reasoningLevel && ['low', 'medium', 'high'].includes(reasoningLevel)) {
} else if (reasoningLevel && ['low', 'medium', 'high', 'xhigh'].includes(reasoningLevel)) {
// Specific level: override with model configuration
commonRequest.reasoning_effort = reasoningLevel;
} else {

View File

@@ -100,7 +100,7 @@ export function transformToOpenAI(openaiRequest) {
targetRequest.reasoning = openaiRequest.reasoning;
}
// If original request has no reasoning field, don't add one
} else if (reasoningLevel && ['low', 'medium', 'high'].includes(reasoningLevel)) {
} else if (reasoningLevel && ['low', 'medium', 'high', 'xhigh'].includes(reasoningLevel)) {
// Specific level: override with model configuration
targetRequest.reasoning = {
effort: reasoningLevel,