diff --git a/routes.js b/routes.js
index 4c1f1a2..3d968c1 100644
--- a/routes.js
+++ b/routes.js
@@ -89,7 +89,7 @@ async function handleChatCompletions(req, res) {
 
   if (model.type === 'anthropic') {
     transformedRequest = transformToAnthropic(openaiRequest);
-    const isStreaming = openaiRequest.stream !== false;
+    const isStreaming = openaiRequest.stream === true;
     headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
   } else if (model.type === 'openai') {
     transformedRequest = transformToOpenAI(openaiRequest);
@@ -120,7 +120,7 @@ async function handleChatCompletions(req, res) {
     });
   }
 
-  const isStreaming = transformedRequest.stream !== false;
+  const isStreaming = transformedRequest.stream === true;
 
   if (isStreaming) {
     res.setHeader('Content-Type', 'text/event-stream');
@@ -268,7 +268,7 @@ async function handleDirectResponses(req, res) {
     });
   }
 
-  const isStreaming = openaiRequest.stream !== false;
+  const isStreaming = openaiRequest.stream === true;
 
   if (isStreaming) {
     // Forward the streaming response directly without any transformation
@@ -350,7 +350,7 @@ async function handleDirectMessages(req, res) {
 
   const clientHeaders = req.headers; // get headers
 
-  const isStreaming = anthropicRequest.stream !== false;
+  const isStreaming = anthropicRequest.stream === true;
   const headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
 
   // Inject the system prompt into the system field
diff --git a/transformers/request-anthropic.js b/transformers/request-anthropic.js
index 7f533ca..1fea716 100644
--- a/transformers/request-anthropic.js
+++ b/transformers/request-anthropic.js
@@ -6,10 +6,14 @@ export function transformToAnthropic(openaiRequest) {
 
   const anthropicRequest = {
     model: openaiRequest.model,
-    messages: [],
-    stream: openaiRequest.stream !== false
+    messages: []
   };
 
+  // Only add stream parameter if explicitly provided by client
+  if (openaiRequest.stream !== undefined) {
+    anthropicRequest.stream = openaiRequest.stream;
+  }
+
   // Handle max_tokens
   if (openaiRequest.max_tokens) {
     anthropicRequest.max_tokens = openaiRequest.max_tokens;
diff --git a/transformers/request-openai.js b/transformers/request-openai.js
index fb95ea1..2d6a44c 100644
--- a/transformers/request-openai.js
+++ b/transformers/request-openai.js
@@ -7,10 +7,14 @@ export function transformToOpenAI(openaiRequest) {
   const targetRequest = {
     model: openaiRequest.model,
     input: [],
-    store: false,
-    stream: openaiRequest.stream !== false
+    store: false
  };
 
+  // Only add stream parameter if explicitly provided by client
+  if (openaiRequest.stream !== undefined) {
+    targetRequest.stream = openaiRequest.stream;
+  }
+
   // Transform max_tokens to max_output_tokens
   if (openaiRequest.max_tokens) {
     targetRequest.max_output_tokens = openaiRequest.max_tokens;
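
A minimal usage sketch of the patched transformToAnthropic (not part of the diff): it assumes the function is imported from transformers/request-anthropic.js as shown above, that the rest of the function tolerates an empty messages array, and the model name is a placeholder.

    import { transformToAnthropic } from './transformers/request-anthropic.js';

    // Client did not set `stream`: the field is now omitted entirely,
    // so the upstream default applies instead of a forced stream value.
    const omitted = transformToAnthropic({ model: 'example-model', messages: [] });
    console.log('stream' in omitted); // false

    // Client explicitly asked for streaming: the flag is forwarded as-is.
    const explicit = transformToAnthropic({ model: 'example-model', messages: [], stream: true });
    console.log(explicit.stream); // true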