Initial commit: OpenAI-compatible API proxy with auto token refresh
- Implemented OpenAI-compatible API proxy server
- Support for Anthropic and custom OpenAI format conversion
- Automatic API key refresh with WorkOS OAuth
- SSE streaming response transformation
- Smart header management for Factory endpoints
- Chinese documentation
transformers/request-anthropic.js (new file, 172 lines)
@@ -0,0 +1,172 @@
import { logDebug } from '../logger.js';

export function transformToAnthropic(openaiRequest) {
  logDebug('Transforming OpenAI request to Anthropic format');

  const anthropicRequest = {
    model: openaiRequest.model,
    messages: [],
    stream: openaiRequest.stream !== false
  };

  // Handle max_tokens
  if (openaiRequest.max_tokens) {
    anthropicRequest.max_tokens = openaiRequest.max_tokens;
  } else if (openaiRequest.max_completion_tokens) {
    anthropicRequest.max_tokens = openaiRequest.max_completion_tokens;
  } else {
    anthropicRequest.max_tokens = 4096;
  }

  // Extract system message(s) and transform other messages
  let systemContent = [];

  if (openaiRequest.messages && Array.isArray(openaiRequest.messages)) {
    for (const msg of openaiRequest.messages) {
      // Handle system messages separately
      if (msg.role === 'system') {
        if (typeof msg.content === 'string') {
          systemContent.push({
            type: 'text',
            text: msg.content
          });
        } else if (Array.isArray(msg.content)) {
          for (const part of msg.content) {
            if (part.type === 'text') {
              systemContent.push({
                type: 'text',
                text: part.text
              });
            } else {
              systemContent.push(part);
            }
          }
        }
        continue; // Skip adding system messages to messages array
      }

      const anthropicMsg = {
        role: msg.role,
        content: []
      };

      if (typeof msg.content === 'string') {
        anthropicMsg.content.push({
          type: 'text',
          text: msg.content
        });
      } else if (Array.isArray(msg.content)) {
        for (const part of msg.content) {
          if (part.type === 'text') {
            anthropicMsg.content.push({
              type: 'text',
              text: part.text
            });
          } else if (part.type === 'image_url') {
            anthropicMsg.content.push({
              type: 'image',
              source: part.image_url
            });
          } else {
            anthropicMsg.content.push(part);
          }
        }
      }

      anthropicRequest.messages.push(anthropicMsg);
    }
  }

  // Add system parameter if system content exists
  if (systemContent.length > 0) {
    anthropicRequest.system = systemContent;
  }

  // Transform tools if present
  if (openaiRequest.tools && Array.isArray(openaiRequest.tools)) {
    anthropicRequest.tools = openaiRequest.tools.map(tool => {
      if (tool.type === 'function') {
        return {
          name: tool.function.name,
          description: tool.function.description,
          input_schema: tool.function.parameters || {}
        };
      }
      return tool;
    });
  }

  // Pass through other compatible parameters
  if (openaiRequest.temperature !== undefined) {
    anthropicRequest.temperature = openaiRequest.temperature;
  }
  if (openaiRequest.top_p !== undefined) {
    anthropicRequest.top_p = openaiRequest.top_p;
  }
  if (openaiRequest.stop !== undefined) {
    anthropicRequest.stop_sequences = Array.isArray(openaiRequest.stop)
      ? openaiRequest.stop
      : [openaiRequest.stop];
  }

  logDebug('Transformed Anthropic request', anthropicRequest);
  return anthropicRequest;
}

export function getAnthropicHeaders(authHeader, clientHeaders = {}, isStreaming = true) {
  // Generate unique IDs if not provided
  const sessionId = clientHeaders['x-session-id'] || generateUUID();
  const messageId = clientHeaders['x-assistant-message-id'] || generateUUID();

  const headers = {
    'accept': 'application/json',
    'content-type': 'application/json',
    'anthropic-version': '2023-06-01',
    'anthropic-beta': 'interleaved-thinking-2025-05-14',
    'x-api-key': 'placeholder',
    'authorization': authHeader || '',
    'x-model-provider': 'anthropic',
    'x-factory-client': 'cli',
    'x-session-id': sessionId,
    'x-assistant-message-id': messageId,
    'user-agent': 'a$/JS 0.57.0',
    'x-stainless-timeout': '600',
    'connection': 'keep-alive'
  };

  // Pass through Stainless SDK headers with defaults
  const stainlessDefaults = {
    'x-stainless-arch': 'x64',
    'x-stainless-lang': 'js',
    'x-stainless-os': 'MacOS',
    'x-stainless-runtime': 'node',
    'x-stainless-retry-count': '0',
    'x-stainless-package-version': '0.57.0',
    'x-stainless-runtime-version': 'v24.3.0'
  };

  // Set helper-method based on streaming
  if (isStreaming) {
    headers['x-stainless-helper-method'] = 'stream';
  }

  // Copy Stainless headers from client or use defaults
  Object.keys(stainlessDefaults).forEach(header => {
    headers[header] = clientHeaders[header] || stainlessDefaults[header];
  });

  // Override timeout from defaults if client provided
  if (clientHeaders['x-stainless-timeout']) {
    headers['x-stainless-timeout'] = clientHeaders['x-stainless-timeout'];
  }

  return headers;
}

function generateUUID() {
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
    const r = Math.random() * 16 | 0;
    const v = c === 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}
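For orientation, a minimal usage sketch of the two exports above; the upstream URL, accessToken variable, and forwardToAnthropic function are placeholders, not part of this commit, and Node 18+ global fetch is assumed.

import { transformToAnthropic, getAnthropicHeaders } from './transformers/request-anthropic.js';

// Hypothetical glue code: forward an OpenAI-style body to an Anthropic-style
// upstream. UPSTREAM_URL and accessToken are placeholders for illustration.
const UPSTREAM_URL = 'https://example.invalid/v1/messages';

async function forwardToAnthropic(openaiBody, accessToken, clientHeaders = {}) {
  const body = transformToAnthropic(openaiBody);
  // body.stream drives the x-stainless-helper-method header.
  const headers = getAnthropicHeaders(`Bearer ${accessToken}`, clientHeaders, body.stream);
  return fetch(UPSTREAM_URL, {
    method: 'POST',
    headers,
    body: JSON.stringify(body)
  });
}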
transformers/request-openai.js (new file, 147 lines)
@@ -0,0 +1,147 @@
import { logDebug } from '../logger.js';

export function transformToOpenAI(openaiRequest) {
  logDebug('Transforming OpenAI request to target OpenAI format');

  const targetRequest = {
    model: openaiRequest.model,
    input: [],
    store: false,
    stream: openaiRequest.stream !== false
  };

  // Transform max_tokens to max_output_tokens
  if (openaiRequest.max_tokens) {
    targetRequest.max_output_tokens = openaiRequest.max_tokens;
  } else if (openaiRequest.max_completion_tokens) {
    targetRequest.max_output_tokens = openaiRequest.max_completion_tokens;
  }

  // Transform messages to input
  if (openaiRequest.messages && Array.isArray(openaiRequest.messages)) {
    for (const msg of openaiRequest.messages) {
      const inputMsg = {
        role: msg.role,
        content: []
      };

      // Determine content type based on role:
      // user role uses 'input_text', assistant role uses 'output_text'
      const textType = msg.role === 'assistant' ? 'output_text' : 'input_text';
      const imageType = msg.role === 'assistant' ? 'output_image' : 'input_image';

      if (typeof msg.content === 'string') {
        inputMsg.content.push({
          type: textType,
          text: msg.content
        });
      } else if (Array.isArray(msg.content)) {
        for (const part of msg.content) {
          if (part.type === 'text') {
            inputMsg.content.push({
              type: textType,
              text: part.text
            });
          } else if (part.type === 'image_url') {
            inputMsg.content.push({
              type: imageType,
              image_url: part.image_url
            });
          } else {
            // Pass through other types as-is
            inputMsg.content.push(part);
          }
        }
      }

      targetRequest.input.push(inputMsg);
    }
  }

  // Transform tools if present
  if (openaiRequest.tools && Array.isArray(openaiRequest.tools)) {
    targetRequest.tools = openaiRequest.tools.map(tool => ({
      ...tool,
      strict: false
    }));
  }

  // Extract system message as instructions
  const systemMessage = openaiRequest.messages?.find(m => m.role === 'system');
  if (systemMessage) {
    if (typeof systemMessage.content === 'string') {
      targetRequest.instructions = systemMessage.content;
    } else if (Array.isArray(systemMessage.content)) {
      targetRequest.instructions = systemMessage.content
        .filter(p => p.type === 'text')
        .map(p => p.text)
        .join('\n');
    }
    targetRequest.input = targetRequest.input.filter(m => m.role !== 'system');
  }

  // Pass through other parameters
  if (openaiRequest.temperature !== undefined) {
    targetRequest.temperature = openaiRequest.temperature;
  }
  if (openaiRequest.top_p !== undefined) {
    targetRequest.top_p = openaiRequest.top_p;
  }
  if (openaiRequest.presence_penalty !== undefined) {
    targetRequest.presence_penalty = openaiRequest.presence_penalty;
  }
  if (openaiRequest.frequency_penalty !== undefined) {
    targetRequest.frequency_penalty = openaiRequest.frequency_penalty;
  }
  if (openaiRequest.parallel_tool_calls !== undefined) {
    targetRequest.parallel_tool_calls = openaiRequest.parallel_tool_calls;
  }

  logDebug('Transformed target OpenAI request', targetRequest);
  return targetRequest;
}

export function getOpenAIHeaders(authHeader, clientHeaders = {}) {
  // Generate unique IDs if not provided
  const sessionId = clientHeaders['x-session-id'] || generateUUID();
  const messageId = clientHeaders['x-assistant-message-id'] || generateUUID();

  const headers = {
    'content-type': 'application/json',
    'authorization': authHeader || '',
    'x-api-key': 'placeholder',
    'x-factory-client': 'cli',
    'x-session-id': sessionId,
    'x-assistant-message-id': messageId,
    'user-agent': 'cB/JS 5.22.0',
    'connection': 'keep-alive'
  };

  // Pass through Stainless SDK headers with defaults
  const stainlessDefaults = {
    'x-stainless-arch': 'x64',
    'x-stainless-lang': 'js',
    'x-stainless-os': 'MacOS',
    'x-stainless-runtime': 'node',
    'x-stainless-retry-count': '0',
    'x-stainless-package-version': '5.22.0',
    'x-stainless-runtime-version': 'v24.3.0'
  };

  // Copy Stainless headers from client or use defaults
  Object.keys(stainlessDefaults).forEach(header => {
    headers[header] = clientHeaders[header] || stainlessDefaults[header];
  });

  return headers;
}

function generateUUID() {
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
    const r = Math.random() * 16 | 0;
    const v = c === 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}
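To make the shape change concrete, a small illustrative example of transformToOpenAI on a typical Chat Completions body; the model name and message text are invented for the example.

import { transformToOpenAI } from './transformers/request-openai.js';

// Illustrative input (values are placeholders):
const chatRequest = {
  model: 'some-model',
  messages: [
    { role: 'system', content: 'Be terse.' },
    { role: 'user', content: 'Hello' }
  ],
  max_tokens: 256
};

const target = transformToOpenAI(chatRequest);
// target.instructions      -> 'Be terse.' (system message lifted out of input)
// target.input             -> [{ role: 'user', content: [{ type: 'input_text', text: 'Hello' }] }]
// target.max_output_tokens -> 256
// target.stream            -> true (streaming defaults on unless explicitly false)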
transformers/response-anthropic.js (new file, 138 lines)
@@ -0,0 +1,138 @@
import { logDebug } from '../logger.js';

export class AnthropicResponseTransformer {
  constructor(model, requestId) {
    this.model = model;
    this.requestId = requestId || `chatcmpl-${Date.now()}`;
    this.created = Math.floor(Date.now() / 1000);
    this.messageId = null;
    this.currentIndex = 0;
  }

  parseSSELine(line) {
    if (line.startsWith('event:')) {
      return { type: 'event', value: line.slice(6).trim() };
    }
    if (line.startsWith('data:')) {
      const dataStr = line.slice(5).trim();
      try {
        return { type: 'data', value: JSON.parse(dataStr) };
      } catch (e) {
        return { type: 'data', value: dataStr };
      }
    }
    return null;
  }

  transformEvent(eventType, eventData) {
    logDebug(`Anthropic event: ${eventType}`);

    if (eventType === 'message_start') {
      this.messageId = eventData.message?.id || this.requestId;
      return this.createOpenAIChunk('', 'assistant', false);
    }

    if (eventType === 'content_block_start') {
      return null;
    }

    if (eventType === 'content_block_delta') {
      const text = eventData.delta?.text || '';
      return this.createOpenAIChunk(text, null, false);
    }

    if (eventType === 'content_block_stop') {
      return null;
    }

    if (eventType === 'message_delta') {
      const stopReason = eventData.delta?.stop_reason;
      if (stopReason) {
        return this.createOpenAIChunk('', null, true, this.mapStopReason(stopReason));
      }
      return null;
    }

    if (eventType === 'message_stop') {
      return this.createDoneSignal();
    }

    if (eventType === 'ping') {
      return null;
    }

    return null;
  }

  createOpenAIChunk(content, role = null, finish = false, finishReason = null) {
    const chunk = {
      id: this.requestId,
      object: 'chat.completion.chunk',
      created: this.created,
      model: this.model,
      choices: [
        {
          index: 0,
          delta: {},
          finish_reason: finish ? finishReason : null
        }
      ]
    };

    if (role) {
      chunk.choices[0].delta.role = role;
    }
    if (content) {
      chunk.choices[0].delta.content = content;
    }

    return `data: ${JSON.stringify(chunk)}\n\n`;
  }

  createDoneSignal() {
    return 'data: [DONE]\n\n';
  }

  mapStopReason(anthropicReason) {
    const mapping = {
      'end_turn': 'stop',
      'max_tokens': 'length',
      'stop_sequence': 'stop',
      'tool_use': 'tool_calls'
    };
    return mapping[anthropicReason] || 'stop';
  }

  async *transformStream(sourceStream) {
    let buffer = '';
    let currentEvent = null;

    try {
      for await (const chunk of sourceStream) {
        buffer += chunk.toString();
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';

        for (const line of lines) {
          if (!line.trim()) continue;

          const parsed = this.parseSSELine(line);
          if (!parsed) continue;

          if (parsed.type === 'event') {
            currentEvent = parsed.value;
          } else if (parsed.type === 'data' && currentEvent) {
            const transformed = this.transformEvent(currentEvent, parsed.value);
            if (transformed) {
              yield transformed;
            }
            currentEvent = null;
          }
        }
      }
    } catch (error) {
      logDebug('Error in Anthropic stream transformation', error);
      throw error;
    }
  }
}
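A sketch of how this transformer might sit between an upstream SSE response and the client; pipeAnthropicToClient, the response headers, and the use of Readable.fromWeb are assumptions about the surrounding server code, not part of this commit.

import { Readable } from 'node:stream';
import { AnthropicResponseTransformer } from './transformers/response-anthropic.js';

// Hypothetical handler glue: `upstream` is a fetch() Response whose body is
// an Anthropic SSE stream; `res` is a Node http.ServerResponse.
async function pipeAnthropicToClient(upstream, res, model) {
  res.writeHead(200, {
    'content-type': 'text/event-stream',
    'cache-control': 'no-cache',
    'connection': 'keep-alive'
  });
  const transformer = new AnthropicResponseTransformer(model);
  // Bridge the WHATWG ReadableStream from fetch() into an async iterable.
  const source = Readable.fromWeb(upstream.body);
  for await (const chunk of transformer.transformStream(source)) {
    res.write(chunk);
  }
  res.end();
}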
transformers/response-openai.js (new file, 127 lines)
@@ -0,0 +1,127 @@
import { logDebug } from '../logger.js';

export class OpenAIResponseTransformer {
  constructor(model, requestId) {
    this.model = model;
    this.requestId = requestId || `chatcmpl-${Date.now()}`;
    this.created = Math.floor(Date.now() / 1000);
  }

  parseSSELine(line) {
    if (line.startsWith('event:')) {
      return { type: 'event', value: line.slice(6).trim() };
    }
    if (line.startsWith('data:')) {
      const dataStr = line.slice(5).trim();
      try {
        return { type: 'data', value: JSON.parse(dataStr) };
      } catch (e) {
        return { type: 'data', value: dataStr };
      }
    }
    return null;
  }

  transformEvent(eventType, eventData) {
    logDebug(`Target OpenAI event: ${eventType}`);

    if (eventType === 'response.created') {
      return this.createOpenAIChunk('', 'assistant', false);
    }

    if (eventType === 'response.in_progress') {
      return null;
    }

    if (eventType === 'response.output_text.delta') {
      const text = eventData.delta || eventData.text || '';
      return this.createOpenAIChunk(text, null, false);
    }

    if (eventType === 'response.output_text.done') {
      return null;
    }

    if (eventType === 'response.done') {
      const status = eventData.response?.status;
      let finishReason = 'stop';

      if (status === 'completed') {
        finishReason = 'stop';
      } else if (status === 'incomplete') {
        finishReason = 'length';
      }

      const finalChunk = this.createOpenAIChunk('', null, true, finishReason);
      const done = this.createDoneSignal();
      return finalChunk + done;
    }

    return null;
  }

  createOpenAIChunk(content, role = null, finish = false, finishReason = null) {
    const chunk = {
      id: this.requestId,
      object: 'chat.completion.chunk',
      created: this.created,
      model: this.model,
      choices: [
        {
          index: 0,
          delta: {},
          finish_reason: finish ? finishReason : null
        }
      ]
    };

    if (role) {
      chunk.choices[0].delta.role = role;
    }
    if (content) {
      chunk.choices[0].delta.content = content;
    }

    return `data: ${JSON.stringify(chunk)}\n\n`;
  }

  createDoneSignal() {
    return 'data: [DONE]\n\n';
  }

  async *transformStream(sourceStream) {
    let buffer = '';
    let currentEvent = null;

    try {
      for await (const chunk of sourceStream) {
        buffer += chunk.toString();
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';

        for (const line of lines) {
          if (!line.trim()) continue;

          const parsed = this.parseSSELine(line);
          if (!parsed) continue;

          if (parsed.type === 'event') {
            currentEvent = parsed.value;
          } else if (parsed.type === 'data' && currentEvent) {
            const transformed = this.transformEvent(currentEvent, parsed.value);
            if (transformed) {
              yield transformed;
            }
          }
        }
      }

      if (currentEvent === 'response.done' || currentEvent === 'response.completed') {
        yield this.createDoneSignal();
      }
    } catch (error) {
      logDebug('Error in OpenAI stream transformation', error);
      throw error;
    }
  }
}
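For a concrete view of the event mapping, a short illustrative example of transformEvent; the payload shapes mirror the cases handled above and the model name and request id are placeholders.

import { OpenAIResponseTransformer } from './transformers/response-openai.js';

const t = new OpenAIResponseTransformer('some-model', 'chatcmpl-demo');

// A text delta becomes a single Chat Completions chunk line:
const chunk = t.transformEvent('response.output_text.delta', { delta: 'Hel' });
// -> 'data: {"id":"chatcmpl-demo","object":"chat.completion.chunk",...}\n\n'
//    with choices[0].delta.content === 'Hel' and finish_reason === null.

// 'response.done' yields the closing chunk plus the [DONE] sentinel:
const tail = t.transformEvent('response.done', { response: { status: 'completed' } });
// -> final chunk with finish_reason 'stop', followed by 'data: [DONE]\n\n'.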