Fix stream parameter handling: respect the stream parameter explicitly specified by the client
- Fix the faulty logic in the transformers that forced stream=true onto every request
- Forward the stream parameter only when the client explicitly specifies it
- Do not inject stream when the client omits it, preserving the client's original intent
- Update the corresponding streaming checks in routes.js
- Ensure non-streaming requests are handled correctly (see the client-side sketch below)
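A quick client-side sketch of the visible effect: with this fix, a request that omits `stream` comes back as a single JSON completion instead of an SSE stream. The URL, port, route path, and API key below are placeholders for illustration, not values taken from this repository.

  // Illustrative only: a chat-completions call with no stream field.
  const res = await fetch('http://localhost:3000/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'Authorization': 'Bearer <key>' },
    body: JSON.stringify({
      model: 'example-model',
      messages: [{ role: 'user', content: 'hi' }],
      // no stream field: the proxy no longer injects stream=true upstream
    }),
  });
  // Before the fix this request was treated as streaming (text/event-stream);
  // with the fix it should come back as a regular JSON body.
  console.log(res.headers.get('content-type'));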
@@ -89,7 +89,7 @@ async function handleChatCompletions(req, res) {
 
   if (model.type === 'anthropic') {
     transformedRequest = transformToAnthropic(openaiRequest);
-    const isStreaming = openaiRequest.stream !== false;
+    const isStreaming = openaiRequest.stream === true;
     headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
   } else if (model.type === 'openai') {
     transformedRequest = transformToOpenAI(openaiRequest);
@@ -120,7 +120,7 @@ async function handleChatCompletions(req, res) {
     });
   }
 
-  const isStreaming = transformedRequest.stream !== false;
+  const isStreaming = transformedRequest.stream === true;
 
   if (isStreaming) {
     res.setHeader('Content-Type', 'text/event-stream');
@@ -268,7 +268,7 @@ async function handleDirectResponses(req, res) {
     });
   }
 
-  const isStreaming = openaiRequest.stream !== false;
+  const isStreaming = openaiRequest.stream === true;
 
   if (isStreaming) {
     // Forward the streaming response as-is, without any transformation
@@ -350,7 +350,7 @@ async function handleDirectMessages(req, res) {
   const clientHeaders = req.headers;
 
   // Get headers
-  const isStreaming = anthropicRequest.stream !== false;
+  const isStreaming = anthropicRequest.stream === true;
   const headers = getAnthropicHeaders(authHeader, clientHeaders, isStreaming, modelId);
 
   // Inject the system prompt into the system field
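The routes.js hunks above all make the same change: `stream !== false` treated a missing flag as streaming, while `stream === true` streams only when the client explicitly asked for it. A small stand-alone comparison of the two expressions (plain Node, illustrative only):

  // How the old and new checks classify the three possible client values.
  for (const stream of [undefined, true, false]) {
    const oldCheck = stream !== false; // old: a missing flag counted as streaming
    const newCheck = stream === true;  // new: only an explicit true streams
    console.log({ stream, oldCheck, newCheck });
  }
  // undefined -> old: true,  new: false   (this is the behaviour change)
  // true      -> old: true,  new: true
  // false     -> old: false, new: false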
@@ -6,10 +6,14 @@ export function transformToAnthropic(openaiRequest) {
 
   const anthropicRequest = {
     model: openaiRequest.model,
-    messages: [],
-    stream: openaiRequest.stream !== false
+    messages: []
   };
 
+  // Only add stream parameter if explicitly provided by client
+  if (openaiRequest.stream !== undefined) {
+    anthropicRequest.stream = openaiRequest.stream;
+  }
+
   // Handle max_tokens
   if (openaiRequest.max_tokens) {
     anthropicRequest.max_tokens = openaiRequest.max_tokens;
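After this change, transformToAnthropic only carries a `stream` key when the client supplied one. A usage sketch, assuming the rest of the function (message mapping, system handling) is unchanged and that the import path is adjusted to wherever the transformer lives in this repo:

  import { transformToAnthropic } from './transformers.js'; // path is an assumption

  const base = { model: 'example-model', messages: [{ role: 'user', content: 'hi' }], max_tokens: 256 };

  const a = transformToAnthropic(base);                       // no stream key at all
  const b = transformToAnthropic({ ...base, stream: true });  // stream: true forwarded
  const c = transformToAnthropic({ ...base, stream: false }); // stream: false forwarded as well

  console.log('stream' in a, b.stream, c.stream); // false true false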
@@ -7,10 +7,14 @@ export function transformToOpenAI(openaiRequest) {
   const targetRequest = {
     model: openaiRequest.model,
     input: [],
-    store: false,
-    stream: openaiRequest.stream !== false
+    store: false
   };
 
+  // Only add stream parameter if explicitly provided by client
+  if (openaiRequest.stream !== undefined) {
+    targetRequest.stream = openaiRequest.stream;
+  }
+
   // Transform max_tokens to max_output_tokens
   if (openaiRequest.max_tokens) {
     targetRequest.max_output_tokens = openaiRequest.max_tokens;