Add common endpoint support and system prompt injection, v1.1.0

- Add common endpoint type for GLM-4.6 model
- Implement automatic system prompt injection for all requests
- Simplify README documentation for better user focus
- Update version to 1.1.0
- Add *.txt to .gitignore

Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com>
1e0n
2025-10-07 21:06:28 +08:00
parent 5fc2df4cd7
commit 43803ca9da
9 changed files with 260 additions and 363 deletions
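
Note: all three transformers below import getSystemPrompt from '../config.js', which is not part of this diff. A rough sketch of the kind of helper being assumed (the function body and the environment variable are hypothetical, not confirmed by this commit):

// Hypothetical sketch only -- the real config.js is not shown in this diff.
// Assumes the injected prompt is read from an environment variable.
export function getSystemPrompt() {
  return process.env.SYSTEM_PROMPT || '';
}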

View File

@@ -1,4 +1,5 @@
 import { logDebug } from '../logger.js';
+import { getSystemPrompt } from '../config.js';
 
 export function transformToAnthropic(openaiRequest) {
   logDebug('Transforming OpenAI request to Anthropic format');
@@ -77,9 +78,19 @@ export function transformToAnthropic(openaiRequest) {
     }
   }
 
-  // Add system parameter if system content exists
-  if (systemContent.length > 0) {
-    anthropicRequest.system = systemContent;
+  // Add system parameter with system prompt prepended
+  const systemPrompt = getSystemPrompt();
+  if (systemPrompt || systemContent.length > 0) {
+    anthropicRequest.system = [];
+    // Prepend system prompt as first element if it exists
+    if (systemPrompt) {
+      anthropicRequest.system.push({
+        type: 'text',
+        text: systemPrompt
+      });
+    }
+    // Add user-provided system content
+    anthropicRequest.system.push(...systemContent);
   }
 
   // Transform tools if present
@@ -125,11 +136,11 @@ export function getAnthropicHeaders(authHeader, clientHeaders = {}, isStreaming
     'anthropic-beta': 'interleaved-thinking-2025-05-14',
     'x-api-key': 'placeholder',
     'authorization': authHeader || '',
-    'x-model-provider': 'anthropic',
+    'x-api-provider': 'anthropic',
     'x-factory-client': 'cli',
     'x-session-id': sessionId,
     'x-assistant-message-id': messageId,
-    'user-agent': 'a$/JS 0.57.0',
+    'user-agent': 'uX/JS 0.57.0',
     'x-stainless-timeout': '600',
     'connection': 'keep-alive'
   };
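
With this change the Anthropic system parameter is always built as an array, with the injected prompt as the first text block. Illustrative shape only, assuming getSystemPrompt() returns a non-empty string and the client supplied one system block of its own:

// Example value of anthropicRequest.system after the hunk above:
anthropicRequest.system = [
  { type: 'text', text: '<injected prompt>' },               // prepended by the proxy
  { type: 'text', text: 'You are a helpful assistant.' }     // original systemContent from the client
];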

View File

@@ -0,0 +1,88 @@
import { logDebug } from '../logger.js';
import { getSystemPrompt } from '../config.js';

export function transformToCommon(openaiRequest) {
  logDebug('Transforming OpenAI request to Common format');

  // Keep the OpenAI format largely as-is; only insert a system message at the front of messages
  const commonRequest = {
    ...openaiRequest
  };

  const systemPrompt = getSystemPrompt();
  if (systemPrompt) {
    // Check whether a system message already exists
    const hasSystemMessage = commonRequest.messages?.some(m => m.role === 'system');

    if (hasSystemMessage) {
      // If a system message already exists, prepend our system prompt to the first one
      commonRequest.messages = commonRequest.messages.map((msg, index) => {
        if (msg.role === 'system' && index === commonRequest.messages.findIndex(m => m.role === 'system')) {
          // First system message found; prefix it with our prompt
          return {
            role: 'system',
            content: systemPrompt + (typeof msg.content === 'string' ? msg.content : '')
          };
        }
        return msg;
      });
    } else {
      // No system message yet; insert one at the very front of the messages array
      commonRequest.messages = [
        {
          role: 'system',
          content: systemPrompt
        },
        ...(commonRequest.messages || [])
      ];
    }
  }

  logDebug('Transformed Common request', commonRequest);
  return commonRequest;
}

export function getCommonHeaders(authHeader, clientHeaders = {}) {
  // Generate unique IDs if not provided
  const sessionId = clientHeaders['x-session-id'] || generateUUID();
  const messageId = clientHeaders['x-assistant-message-id'] || generateUUID();

  const headers = {
    'accept': 'application/json',
    'content-type': 'application/json',
    'authorization': authHeader || '',
    'x-api-provider': 'baseten',
    'x-factory-client': 'cli',
    'x-session-id': sessionId,
    'x-assistant-message-id': messageId,
    'user-agent': 'pB/JS 5.23.2',
    'connection': 'keep-alive'
  };

  // Pass through Stainless SDK headers with defaults
  const stainlessDefaults = {
    'x-stainless-arch': 'x64',
    'x-stainless-lang': 'js',
    'x-stainless-os': 'MacOS',
    'x-stainless-runtime': 'node',
    'x-stainless-retry-count': '0',
    'x-stainless-package-version': '5.23.2',
    'x-stainless-runtime-version': 'v24.3.0'
  };

  // Copy Stainless headers from client or use defaults
  Object.keys(stainlessDefaults).forEach(header => {
    headers[header] = clientHeaders[header] || stainlessDefaults[header];
  });

  return headers;
}

function generateUUID() {
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
    const r = Math.random() * 16 | 0;
    const v = c == 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}
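
A minimal usage sketch of transformToCommon as defined above; the prompt value is illustrative and would come from getSystemPrompt():

// Illustrative call, assuming getSystemPrompt() returns 'Injected prompt. ':
const out = transformToCommon({
  model: 'glm-4.6',
  messages: [{ role: 'user', content: 'Hello' }]
});
// out.messages[0] is { role: 'system', content: 'Injected prompt. ' } and the
// original user message follows unchanged. If the request already carries a
// system message, the prompt is instead concatenated in front of that
// message's string content.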

View File

@@ -1,4 +1,5 @@
 import { logDebug } from '../logger.js';
+import { getSystemPrompt } from '../config.js';
 
 export function transformToOpenAI(openaiRequest) {
   logDebug('Transforming OpenAI request to target OpenAI format');
@@ -66,18 +67,25 @@ export function transformToOpenAI(openaiRequest) {
     }));
   }
 
-  // Extract system message as instructions
+  // Extract system message as instructions and prepend system prompt
+  const systemPrompt = getSystemPrompt();
   const systemMessage = openaiRequest.messages?.find(m => m.role === 'system');
   if (systemMessage) {
+    let userInstructions = '';
     if (typeof systemMessage.content === 'string') {
-      targetRequest.instructions = systemMessage.content;
+      userInstructions = systemMessage.content;
     } else if (Array.isArray(systemMessage.content)) {
-      targetRequest.instructions = systemMessage.content
+      userInstructions = systemMessage.content
         .filter(p => p.type === 'text')
         .map(p => p.text)
         .join('\n');
     }
+    targetRequest.instructions = systemPrompt + userInstructions;
     targetRequest.input = targetRequest.input.filter(m => m.role !== 'system');
+  } else if (systemPrompt) {
+    // If no user-provided system message, just add the system prompt
+    targetRequest.instructions = systemPrompt;
   }
 
   // Pass through other parameters
@@ -109,11 +117,11 @@ export function getOpenAIHeaders(authHeader, clientHeaders = {}) {
   const headers = {
     'content-type': 'application/json',
     'authorization': authHeader || '',
     'x-api-key': 'placeholder',
     'x-api-provider': 'azure_openai',
     'x-factory-client': 'cli',
     'x-session-id': sessionId,
     'x-assistant-message-id': messageId,
-    'user-agent': 'cB/JS 5.22.0',
+    'user-agent': 'pB/JS 5.23.2',
     'connection': 'keep-alive'
   };
@@ -124,7 +132,7 @@ export function getOpenAIHeaders(authHeader, clientHeaders = {}) {
     'x-stainless-os': 'MacOS',
     'x-stainless-runtime': 'node',
     'x-stainless-retry-count': '0',
-    'x-stainless-package-version': '5.22.0',
+    'x-stainless-package-version': '5.23.2',
     'x-stainless-runtime-version': 'v24.3.0'
   };
@@ -133,8 +141,6 @@ export function getOpenAIHeaders(authHeader, clientHeaders = {}) {
     headers[header] = clientHeaders[header] || stainlessDefaults[header];
   });
   return headers;
 }
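
The net effect in this transformer: the injected prompt always lands at the start of the instructions field, ahead of any client-provided system text. Illustrative values only:

// Assuming getSystemPrompt() returns 'Injected prompt. ' and the client sent
// a system message of 'Answer briefly.':
//   targetRequest.instructions === 'Injected prompt. Answer briefly.'
// With no client system message, instructions is just the injected prompt.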