feat: add article edit-and-save functionality and a homepage entry point

empty
2026-01-21 23:50:35 +08:00
parent 94301c81a6
commit 44848cd40f
8 changed files with 1360 additions and 16 deletions


@@ -78,8 +78,7 @@ export const streamChat = async ({
   appId,
   messages,
   options,
-  res,
-  signal
+  res
 }) => {
   const provider = providers[providerId];
@@ -107,19 +106,31 @@ export const streamChat = async ({
   const headers = buildHeaders({ ...provider, appId: appId || provider.appId });
-  const response = await fetch(provider.apiUrl, {
-    method: 'POST',
-    headers,
-    body: JSON.stringify(payload),
-    signal
-  });
+  console.log('LLM proxy: sending request', { providerId, model: payload.model, apiUrl: provider.apiUrl });
+  let response;
+  try {
+    response = await fetch(provider.apiUrl, {
+      method: 'POST',
+      headers,
+      body: JSON.stringify(payload)
+    });
+  } catch (fetchError) {
+    console.error('LLM proxy: fetch error', fetchError);
+    if (!res.headersSent) {
+      res.status(500).json({ success: false, error: fetchError.message || 'Network request failed' });
+    }
+    return;
+  }
   if (!response.ok) {
     const errorText = await response.text();
+    console.error('LLM proxy: upstream response error', response.status, errorText);
     res.status(500).json({ success: false, error: errorText || `Upstream request failed: ${response.status}` });
     return;
   }
+  console.log('LLM proxy: starting streaming response');
   res.status(200);
   res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
   res.setHeader('Cache-Control', 'no-cache');
@@ -131,6 +142,7 @@ export const streamChat = async ({
     res.write(chunk);
   });
   readable.on('end', () => {
+    console.log('LLM proxy: streaming response finished');
     res.end();
   });
   readable.on('error', (error) => {
@@ -138,3 +150,4 @@ export const streamChat = async ({
     res.end();
   });
 };
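
Taken together, the hunks above wrap the upstream fetch in explicit error handling and only switch to a streaming response once the upstream call has succeeded. Below is a minimal consolidated sketch of that flow, assuming Node 18+ (built-in fetch) and an Express-style res; the way readable is obtained (Readable.fromWeb) and the proxyChatStream name are assumptions inferred from the identifiers visible in the diff, not the repository's actual code.

// Hedged sketch of the proxy flow shown in the diff above (not the repository's exact code).
// Assumes Node 18+ (global fetch) and an Express `res`; `provider`, `payload`, and `headers`
// stand in for the values built earlier in streamChat.
import { Readable } from 'node:stream';

export const proxyChatStream = async ({ provider, payload, headers, res }) => {
  let response;
  try {
    // Forward the chat payload to the upstream LLM API.
    response = await fetch(provider.apiUrl, {
      method: 'POST',
      headers,
      body: JSON.stringify(payload)
    });
  } catch (fetchError) {
    // Network-level failure: report JSON only if nothing has been written yet.
    if (!res.headersSent) {
      res.status(500).json({ success: false, error: fetchError.message || 'Network request failed' });
    }
    return;
  }

  if (!response.ok) {
    // Upstream returned a non-2xx status: surface its body as the error message.
    const errorText = await response.text();
    res.status(500).json({ success: false, error: errorText || `Upstream request failed: ${response.status}` });
    return;
  }

  // Only now commit to an event-stream response and pipe the upstream body through.
  res.status(200);
  res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
  res.setHeader('Cache-Control', 'no-cache');

  const readable = Readable.fromWeb(response.body); // WHATWG stream -> Node stream
  readable.on('data', (chunk) => res.write(chunk));
  readable.on('end', () => res.end());
  readable.on('error', () => res.end());
};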
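
On the caller's side, the stream can be consumed incrementally with the Fetch API. The sketch below is a hypothetical consumer: the '/api/chat/stream' route and the request body shape are illustrative only, since the commit does not show how the endpoint is mounted.

// Hypothetical browser-side consumer; the route and body shape are assumptions.
const consumeChatStream = async (messages, onChunk) => {
  const response = await fetch('/api/chat/stream', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages })
  });
  if (!response.ok) {
    // The proxy reports upstream failures as JSON before any stream is opened.
    const { error } = await response.json();
    throw new Error(error || `Request failed: ${response.status}`);
  }
  const reader = response.body.getReader();
  const decoder = new TextDecoder('utf-8');
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    onChunk(decoder.decode(value, { stream: true })); // raw event-stream text; parse `data:` lines as needed
  }
};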