Merge pull request #10 from Hwwwww-dev/main
Add a model redirect feature; it can be used to handle cases where the model id sent by Claude Code or Codex does not match the model id the backend expects.
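In short: every request handler in routes.js now passes the incoming model id through the new getRedirectedModelId() helper (backed by a model_redirects map in the JSON config) before the request is transformed and forwarded, so a client that hard-codes a model id the backend does not expose can still be served. A rough sketch of the flow, paraphrasing the diff below (the transformer call shown is just the Anthropic branch; this is not a literal excerpt from routes.js):

    // paraphrased flow
    const modelId = getRedirectedModelId(openaiRequest.model);               // config lookup, falls back to the original id
    const requestWithRedirectedModel = { ...openaiRequest, model: modelId }; // non-mutating copy of the incoming body
    const transformedRequest = transformToAnthropic(requestWithRedirectedModel);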
config.js (12 changed lines)

@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
 import { fileURLToPath } from 'url';
+import { logInfo } from './logger.js';
 
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
@@ -66,3 +67,14 @@ export function getUserAgent() {
   const cfg = getConfig();
   return cfg.user_agent || 'factory-cli/0.19.3';
 }
+
+export function getRedirectedModelId(modelId) {
+  const cfg = getConfig();
+  if (cfg.model_redirects && cfg.model_redirects[modelId]) {
+    const redirectedId = cfg.model_redirects[modelId];
+    console.log(`[REDIRECT] Model redirected: ${modelId} -> ${redirectedId}`);
+    logInfo(`Model redirected: ${modelId} -> ${redirectedId}`);
+    return redirectedId;
+  }
+  return modelId;
+}
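A minimal usage sketch of the new helper, assuming the model_redirects entry added to the JSON config in this PR; 'some-unmapped-model' is purely illustrative:

    import { getRedirectedModelId } from './config.js';

    getRedirectedModelId('claude-3-5-haiku-20241022'); // -> 'claude-sonnet-4-5-20250929' (logged via console.log and logInfo)
    getRedirectedModelId('some-unmapped-model');       // -> 'some-unmapped-model' (no entry, returned unchanged)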
The JSON config gains the new model_redirects block:

@@ -1,5 +1,8 @@
 {
   "port": 3000,
+  "model_redirects": {
+    "claude-3-5-haiku-20241022": "claude-sonnet-4-5-20250929"
+  },
   "endpoint": [
     {
       "name": "openai",
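Each key is the model id exactly as the client sends it; the value is the id the proxy substitutes before transforming and forwarding the request. Further entries can be added in the same way, for example to cover a Codex-side mismatch; the second mapping below uses placeholder ids, not values from this PR:

    "model_redirects": {
      "claude-3-5-haiku-20241022": "claude-sonnet-4-5-20250929",
      "<model-id-codex-sends>": "<model-id-actually-served>"
    }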
routes.js (31 changed lines)
@@ -1,6 +1,6 @@
 import express from 'express';
 import fetch from 'node-fetch';
-import { getConfig, getModelById, getEndpointByType, getSystemPrompt, getModelReasoning } from './config.js';
+import { getConfig, getModelById, getEndpointByType, getSystemPrompt, getModelReasoning, getRedirectedModelId } from './config.js';
 import { logInfo, logDebug, logError, logRequest, logResponse } from './logger.js';
 import { transformToAnthropic, getAnthropicHeaders } from './transformers/request-anthropic.js';
 import { transformToOpenAI, getOpenAIHeaders } from './transformers/request-openai.js';
@@ -80,10 +80,10 @@ router.get('/v1/models', (req, res) => {
 // Standard OpenAI chat-completions handler (with format conversion)
 async function handleChatCompletions(req, res) {
   logInfo('POST /v1/chat/completions');
 
   try {
     const openaiRequest = req.body;
-    const modelId = openaiRequest.model;
+    const modelId = getRedirectedModelId(openaiRequest.model);
 
     if (!modelId) {
       return res.status(400).json({ error: 'model is required' });
@@ -125,15 +125,18 @@ async function handleChatCompletions(req, res) {
       'user-agent': clientHeaders['user-agent']
     });
 
+    // Update request body with redirected model ID before transformation
+    const requestWithRedirectedModel = { ...openaiRequest, model: modelId };
+
     if (model.type === 'anthropic') {
-      transformedRequest = transformToAnthropic(openaiRequest);
+      transformedRequest = transformToAnthropic(requestWithRedirectedModel);
       const isStreaming = openaiRequest.stream === true;
       headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
     } else if (model.type === 'openai') {
-      transformedRequest = transformToOpenAI(openaiRequest);
+      transformedRequest = transformToOpenAI(requestWithRedirectedModel);
       headers = getOpenAIHeaders(authHeader, clientHeaders);
     } else if (model.type === 'common') {
-      transformedRequest = transformToCommon(openaiRequest);
+      transformedRequest = transformToCommon(requestWithRedirectedModel);
       headers = getCommonHeaders(authHeader, clientHeaders);
     } else {
       return res.status(500).json({ error: `Unknown endpoint type: ${model.type}` });
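Note the redirected id is applied on a copy of the incoming body rather than by mutating req.body: requestWithRedirectedModel carries the new model into the transformer, while openaiRequest (including its stream flag) is left untouched. The same redirected modelId is also the value passed to getAnthropicHeaders, so the outgoing headers and body stay consistent.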
@@ -228,10 +231,10 @@ async function handleChatCompletions(req, res) {
 // Forward OpenAI requests directly (no format conversion)
 async function handleDirectResponses(req, res) {
   logInfo('POST /v1/responses');
 
   try {
     const openaiRequest = req.body;
-    const modelId = openaiRequest.model;
+    const modelId = getRedirectedModelId(openaiRequest.model);
 
     if (!modelId) {
       return res.status(400).json({ error: 'model is required' });
@@ -277,9 +280,9 @@ async function handleDirectResponses(req, res) {
     // Get headers
     const headers = getOpenAIHeaders(authHeader, clientHeaders);
 
-    // Inject the system prompt into the instructions field
+    // Inject the system prompt into the instructions field and apply the redirected model ID
     const systemPrompt = getSystemPrompt();
-    const modifiedRequest = { ...openaiRequest };
+    const modifiedRequest = { ...openaiRequest, model: modelId };
     if (systemPrompt) {
       // If instructions already exist, prepend the system prompt
       if (modifiedRequest.instructions) {
@@ -363,10 +366,10 @@ async function handleDirectResponses(req, res) {
 // Forward Anthropic requests directly (no format conversion)
 async function handleDirectMessages(req, res) {
   logInfo('POST /v1/messages');
 
   try {
     const anthropicRequest = req.body;
-    const modelId = anthropicRequest.model;
+    const modelId = getRedirectedModelId(anthropicRequest.model);
 
     if (!modelId) {
       return res.status(400).json({ error: 'model is required' });
@@ -413,9 +416,9 @@ async function handleDirectMessages(req, res) {
     const isStreaming = anthropicRequest.stream === true;
     const headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
 
-    // Inject the system prompt into the system field
+    // Inject the system prompt into the system field and apply the redirected model ID
     const systemPrompt = getSystemPrompt();
-    const modifiedRequest = { ...anthropicRequest };
+    const modifiedRequest = { ...anthropicRequest, model: modelId };
     if (systemPrompt) {
       if (modifiedRequest.system && Array.isArray(modifiedRequest.system)) {
         // If a system array already exists, insert the system prompt at the front
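With the redirect configured, a client such as Claude Code that is pinned to claude-3-5-haiku-20241022 can keep sending that id and the proxy rewrites it before forwarding. A hedged end-to-end sketch (localhost:3000 comes from the sample config; the auth header name and value are placeholders, since the auth scheme is not part of this diff; the body follows the Anthropic-style shape that the /v1/messages route forwards):

    // hypothetical client call against the proxy's /v1/messages route
    const res = await fetch('http://localhost:3000/v1/messages', {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
        'authorization': 'Bearer <your-upstream-key>'   // placeholder; use whatever the proxy expects
      },
      body: JSON.stringify({
        model: 'claude-3-5-haiku-20241022',              // forwarded as claude-sonnet-4-5-20250929 per the sample config
        max_tokens: 256,
        messages: [{ role: 'user', content: 'ping' }]
      })
    });
    console.log(await res.json());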