Fix stream parameter handling: respect the client's explicitly specified stream parameter

- Fix the incorrect logic in the transformers that forcibly added stream=true
- Forward the stream parameter only when the client explicitly specifies it
- When the client omits stream, do not force-add it; preserve the original intent (see the sketch after this list)
- Update the corresponding streaming checks in routes.js
- Ensure non-streaming requests are handled correctly
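
A minimal sketch of the behavioral difference; the request object and model name are hypothetical, only the two checks and the conditional forwarding come from this commit:

// Client omitted stream entirely
const openaiRequest = { model: 'example-model', messages: [] };

// Old check: an omitted stream was treated as streaming (undefined !== false).
const oldIsStreaming = openaiRequest.stream !== false; // true

// New check: only an explicit stream: true enables streaming.
const newIsStreaming = openaiRequest.stream === true;  // false

// Transformers now forward stream only when the client actually set it.
const transformed = { model: openaiRequest.model };
if (openaiRequest.stream !== undefined) {
  transformed.stream = openaiRequest.stream;
}
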
Author: 1e0n
Date: 2025-10-09 11:50:49 +08:00
Parent: 69fdb27b07
Commit: 1b1a25e68d
3 changed files with 16 additions and 8 deletions


@@ -89,7 +89,7 @@ async function handleChatCompletions(req, res) {
if (model.type === 'anthropic') {
transformedRequest = transformToAnthropic(openaiRequest);
-const isStreaming = openaiRequest.stream !== false;
+const isStreaming = openaiRequest.stream === true;
headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
} else if (model.type === 'openai') {
transformedRequest = transformToOpenAI(openaiRequest);
@@ -120,7 +120,7 @@ async function handleChatCompletions(req, res) {
});
}
-const isStreaming = transformedRequest.stream !== false;
+const isStreaming = transformedRequest.stream === true;
if (isStreaming) {
res.setHeader('Content-Type', 'text/event-stream');
@@ -268,7 +268,7 @@ async function handleDirectResponses(req, res) {
});
}
-const isStreaming = openaiRequest.stream !== false;
+const isStreaming = openaiRequest.stream === true;
if (isStreaming) {
// Forward the streaming response directly, without any transformation
@@ -350,7 +350,7 @@ async function handleDirectMessages(req, res) {
const clientHeaders = req.headers;
// Get headers
-const isStreaming = anthropicRequest.stream !== false;
+const isStreaming = anthropicRequest.stream === true;
const headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
// Inject the system prompt into the system field


@@ -6,10 +6,14 @@ export function transformToAnthropic(openaiRequest) {
const anthropicRequest = {
model: openaiRequest.model,
-messages: [],
-stream: openaiRequest.stream !== false
+messages: []
};
+// Only add stream parameter if explicitly provided by client
+if (openaiRequest.stream !== undefined) {
+  anthropicRequest.stream = openaiRequest.stream;
+}
// Handle max_tokens
if (openaiRequest.max_tokens) {
anthropicRequest.max_tokens = openaiRequest.max_tokens;


@@ -7,10 +7,14 @@ export function transformToOpenAI(openaiRequest) {
const targetRequest = {
model: openaiRequest.model,
input: [],
-store: false,
-stream: openaiRequest.stream !== false
+store: false
};
+// Only add stream parameter if explicitly provided by client
+if (openaiRequest.stream !== undefined) {
+  targetRequest.stream = openaiRequest.stream;
+}
// Transform max_tokens to max_output_tokens
if (openaiRequest.max_tokens) {
targetRequest.max_output_tokens = openaiRequest.max_tokens;
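
For illustration, a hedged usage sketch of the updated transformToOpenAI; the import path, model name, and input shapes are assumptions, while the stream handling follows the diff above:

import { transformToOpenAI } from './transformers.js'; // path is an assumption

// Client omitted stream: the transformed request carries no stream key at all.
const omitted = transformToOpenAI({ model: 'example-model', messages: [] });
console.log('stream' in omitted); // false

// Client explicitly disabled streaming: false is forwarded untouched.
const explicit = transformToOpenAI({ model: 'example-model', messages: [], stream: false });
console.log(explicit.stream); // false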