diff --git a/config.js b/config.js
index eaa13a1..99da137 100644
--- a/config.js
+++ b/config.js
@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
 import { fileURLToPath } from 'url';
+import { logInfo } from './logger.js';
 
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
@@ -66,3 +67,14 @@ export function getUserAgent() {
   const cfg = getConfig();
   return cfg.user_agent || 'factory-cli/0.19.3';
 }
+
+export function getRedirectedModelId(modelId) {
+  const cfg = getConfig();
+  if (cfg.model_redirects && cfg.model_redirects[modelId]) {
+    const redirectedId = cfg.model_redirects[modelId];
+    console.log(`[REDIRECT] Model redirected: ${modelId} -> ${redirectedId}`);
+    logInfo(`Model redirected: ${modelId} -> ${redirectedId}`);
+    return redirectedId;
+  }
+  return modelId;
+}
diff --git a/config.json b/config.json
index b4bad11..2367abd 100644
--- a/config.json
+++ b/config.json
@@ -1,5 +1,8 @@
 {
   "port": 3000,
+  "model_redirects": {
+    "claude-3-5-haiku-20241022": "claude-sonnet-4-5-20250929"
+  },
   "endpoint": [
     {
       "name": "openai",
diff --git a/routes.js b/routes.js
index d5cbe46..bb88f7b 100644
--- a/routes.js
+++ b/routes.js
@@ -1,6 +1,6 @@
 import express from 'express';
 import fetch from 'node-fetch';
-import { getConfig, getModelById, getEndpointByType, getSystemPrompt, getModelReasoning } from './config.js';
+import { getConfig, getModelById, getEndpointByType, getSystemPrompt, getModelReasoning, getRedirectedModelId } from './config.js';
 import { logInfo, logDebug, logError, logRequest, logResponse } from './logger.js';
 import { transformToAnthropic, getAnthropicHeaders } from './transformers/request-anthropic.js';
 import { transformToOpenAI, getOpenAIHeaders } from './transformers/request-openai.js';
@@ -80,10 +80,10 @@ router.get('/v1/models', (req, res) => {
 // Standard OpenAI chat completions handler (with format conversion)
 async function handleChatCompletions(req, res) {
   logInfo('POST /v1/chat/completions');
-  
+
   try {
     const openaiRequest = req.body;
-    const modelId = openaiRequest.model;
+    const modelId = getRedirectedModelId(openaiRequest.model);
 
     if (!modelId) {
       return res.status(400).json({ error: 'model is required' });
@@ -125,15 +125,18 @@ async function handleChatCompletions(req, res) {
       'user-agent': clientHeaders['user-agent']
     });
 
+    // Update request body with redirected model ID before transformation
+    const requestWithRedirectedModel = { ...openaiRequest, model: modelId };
+
     if (model.type === 'anthropic') {
-      transformedRequest = transformToAnthropic(openaiRequest);
+      transformedRequest = transformToAnthropic(requestWithRedirectedModel);
       const isStreaming = openaiRequest.stream === true;
       headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
     } else if (model.type === 'openai') {
-      transformedRequest = transformToOpenAI(openaiRequest);
+      transformedRequest = transformToOpenAI(requestWithRedirectedModel);
       headers = getOpenAIHeaders(authHeader, clientHeaders);
     } else if (model.type === 'common') {
-      transformedRequest = transformToCommon(openaiRequest);
+      transformedRequest = transformToCommon(requestWithRedirectedModel);
       headers = getCommonHeaders(authHeader, clientHeaders);
     } else {
       return res.status(500).json({ error: `Unknown endpoint type: ${model.type}` });
@@ -228,10 +231,10 @@ async function handleChatCompletions(req, res) {
 // Forward OpenAI requests directly (no format conversion)
 async function handleDirectResponses(req, res) {
   logInfo('POST /v1/responses');
-  
+
   try {
     const openaiRequest = req.body;
-    const modelId = openaiRequest.model;
+    const modelId = getRedirectedModelId(openaiRequest.model);
 
     if (!modelId) {
       return res.status(400).json({ error: 'model is required' });
@@ -277,9 +280,9 @@ async function handleDirectResponses(req, res) {
     // Get headers
     const headers = getOpenAIHeaders(authHeader, clientHeaders);
 
-    // Inject the system prompt into the instructions field
+    // Inject the system prompt into the instructions field and apply the redirected model ID
     const systemPrompt = getSystemPrompt();
-    const modifiedRequest = { ...openaiRequest };
+    const modifiedRequest = { ...openaiRequest, model: modelId };
     if (systemPrompt) {
       // If instructions already exist, prepend the system prompt
      if (modifiedRequest.instructions) {
@@ -363,10 +366,10 @@ async function handleDirectResponses(req, res) {
 // Forward Anthropic requests directly (no format conversion)
 async function handleDirectMessages(req, res) {
   logInfo('POST /v1/messages');
-  
+
   try {
     const anthropicRequest = req.body;
-    const modelId = anthropicRequest.model;
+    const modelId = getRedirectedModelId(anthropicRequest.model);
 
     if (!modelId) {
       return res.status(400).json({ error: 'model is required' });
@@ -413,9 +416,9 @@ async function handleDirectMessages(req, res) {
     const isStreaming = anthropicRequest.stream === true;
     const headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
 
-    // Inject the system prompt into the system field
+    // Inject the system prompt into the system field and apply the redirected model ID
     const systemPrompt = getSystemPrompt();
-    const modifiedRequest = { ...anthropicRequest };
+    const modifiedRequest = { ...anthropicRequest, model: modelId };
     if (systemPrompt) {
       if (modifiedRequest.system && Array.isArray(modifiedRequest.system)) {
         // If a system array already exists, insert the system prompt at the front