import { Readable } from 'stream';
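
// Registry of supported LLM providers. Each entry is resolved from environment
// variables and falls back to the provider's public endpoint and a default model.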
const providers = {
  deepseek: {
    id: 'deepseek',
    name: 'DeepSeek',
    description: '深度求索',
    apiUrl: process.env.DEEPSEEK_API_URL || 'https://api.deepseek.com/chat/completions',
    apiKey: process.env.DEEPSEEK_API_KEY || '',
    model: process.env.DEEPSEEK_MODEL || 'deepseek-chat'
  },
  qianfan: {
    id: 'qianfan',
    name: '千帆大模型',
    description: '百度文心一言',
    apiUrl: process.env.QIANFAN_API_URL || 'https://qianfan.baidubce.com/v2/chat/completions',
    apiKey: process.env.QIANFAN_API_KEY || '',
    appId: process.env.QIANFAN_APP_ID || '',
    model: process.env.QIANFAN_MODEL || 'ernie-4.0-8k'
  },
  openai: {
    id: 'openai',
    name: 'OpenAI',
    description: 'GPT 系列',
    apiUrl: process.env.OPENAI_API_URL || 'https://api.openai.com/v1/chat/completions',
    apiKey: process.env.OPENAI_API_KEY || '',
    model: process.env.OPENAI_MODEL || 'gpt-4o'
  },
  claude: {
    id: 'claude',
    name: 'Claude',
    description: 'Anthropic Claude',
    apiUrl: process.env.CLAUDE_API_URL || 'https://api.anthropic.com/v1/messages',
    apiKey: process.env.CLAUDE_API_KEY || '',
    model: process.env.CLAUDE_MODEL || 'claude-3-5-sonnet'
  },
  custom: {
    id: 'custom',
    name: '自定义',
    description: '自定义 API 端点',
    apiUrl: process.env.CUSTOM_API_URL || '',
    apiKey: process.env.CUSTOM_API_KEY || '',
    model: process.env.CUSTOM_MODEL || ''
  }
};
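
// Client-safe summary of each provider (id, display name, description, default model).
// API keys are never exposed; `enabled` only reports whether a key is configured.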
export const getProviderSummary = () => {
  return Object.values(providers).map(provider => ({
    id: provider.id,
    name: provider.name,
    description: provider.description,
    model: provider.model,
    enabled: Boolean(provider.apiKey)
  }));
};
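
// Builds the upstream request headers: JSON content type, a Bearer Authorization
// header derived from the API key, and an `appid` header when the provider has one.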
const buildHeaders = (provider) => {
  const headers = {
    'Content-Type': 'application/json'
  };

  if (provider.apiKey) {
    headers['Authorization'] = provider.apiKey.trim().startsWith('Bearer ')
      ? provider.apiKey.trim()
      : `Bearer ${provider.apiKey.trim()}`;
  }

  if (provider.appId) {
    headers['appid'] = provider.appId;
  }

  return headers;
};
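
// Proxies a chat completion request to the selected provider and streams the
// upstream response body straight through to the Express-style `res` object.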
export const streamChat = async ({
  providerId,
  model,
  appId,
  messages,
  options,
  res
}) => {
  const provider = providers[providerId];

  if (!provider) {
    res.status(400).json({ success: false, error: '未知模型服务商' });
    return;
  }

  if (!provider.apiKey) {
    res.status(400).json({ success: false, error: '模型服务商未配置 API Key' });
    return;
  }

  if (!provider.apiUrl) {
    res.status(400).json({ success: false, error: '模型服务商未配置 API 地址' });
    return;
  }
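
  // OpenAI-style chat payload; caller-supplied options are spread in and
  // streaming is always forced on.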
  const payload = {
    model: model || provider.model,
    messages,
    ...options,
    stream: true
  };

  const headers = buildHeaders({ ...provider, appId: appId || provider.appId });

  console.log('LLM 代理: 发起请求', { providerId, model: payload.model, apiUrl: provider.apiUrl });

  let response;
  try {
    response = await fetch(provider.apiUrl, {
      method: 'POST',
      headers,
      body: JSON.stringify(payload)
    });
  } catch (fetchError) {
    console.error('LLM 代理: Fetch 错误', fetchError);
    if (!res.headersSent) {
      res.status(500).json({ success: false, error: fetchError.message || '网络请求失败' });
    }
    return;
  }

  if (!response.ok) {
    const errorText = await response.text();
    console.error('LLM 代理: 上游响应错误', response.status, errorText);
    res.status(500).json({ success: false, error: errorText || `上游请求失败: ${response.status}` });
    return;
  }
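
  // Relay the upstream stream to the client: send the SSE response headers
  // immediately, then pipe the web ReadableStream through a Node Readable.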
  console.log('LLM 代理: 开始流式响应');
  res.status(200);
  res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();

  const readable = Readable.fromWeb(response.body);
  readable.on('data', (chunk) => {
    res.write(chunk);
  });
  readable.on('end', () => {
    console.log('LLM 代理: 流式响应完成');
    res.end();
  });
  readable.on('error', (error) => {
    console.error('LLM 流式代理错误:', error);
    res.end();
  });
};
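
// Usage sketch (assumptions: this module is saved as llmProxy.js and mounted in an
// Express app with JSON body parsing; the route paths and port are illustrative,
// not part of this module):
//
//   import express from 'express';
//   import { getProviderSummary, streamChat } from './llmProxy.js';
//
//   const app = express();
//   app.use(express.json());
//
//   // Let the client discover which providers are configured.
//   app.get('/api/providers', (req, res) => {
//     res.json({ success: true, data: getProviderSummary() });
//   });
//
//   // Forward a chat request; the response is streamed back as text/event-stream.
//   app.post('/api/chat/stream', (req, res) => {
//     const { providerId, model, appId, messages, options } = req.body;
//     streamChat({ providerId, model, appId, messages, options, res });
//   });
//
//   app.listen(3000);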