Gateway: disable OpenAI HTTP chat completions by default (#686)

* feat(gateway): disable OpenAI chat completions HTTP by default

* test(gateway): deflake mock OpenAI tool-calling

* docs(changelog): note OpenAI HTTP endpoint default-off
Author: Peter Steinberger
Date: 2026-01-10 21:55:54 +00:00 (committed by GitHub)
Parent: 06052640e8
Commit: 1c257f170a
8 changed files with 52 additions and 9 deletions
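The practical effect: `POST /v1/chat/completions` now stays off unless it is explicitly enabled. A minimal sketch of the opt-in, written as a TypeScript object literal that follows the `gateway.http.endpoints.chatCompletions.enabled` key path documented in this diff (the on-disk config format and any sibling fields are assumptions):

```ts
// Sketch: opting back in to the OpenAI-compatible HTTP endpoint.
// Only the chatCompletions.enabled leaf comes from this diff; the object
// nesting simply mirrors the documented key path.
const gatewayConfig = {
  gateway: {
    http: {
      endpoints: {
        chatCompletions: {
          // Default is now false when this field is absent.
          enabled: true,
        },
      },
    },
  },
};
```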

View File

@@ -161,7 +161,7 @@ const FIELD_HELP: Record<string, string> = {
"gateway.controlUi.basePath":
"Optional URL prefix where the Control UI is served (e.g. /clawdbot).",
"gateway.http.endpoints.chatCompletions.enabled":
"Enable the OpenAI-compatible `POST /v1/chat/completions` endpoint (default: true).",
"Enable the OpenAI-compatible `POST /v1/chat/completions` endpoint (default: false).",
"gateway.reload.mode":
'Hot reload strategy for config changes ("hybrid" recommended).',
"gateway.reload.debounceMs":

View File

@@ -1158,7 +1158,7 @@ export type GatewayReloadConfig = {
export type GatewayHttpChatCompletionsConfig = {
/**
* If false, the Gateway will not serve `POST /v1/chat/completions`.
- * Default: true when absent.
+ * Default: false when absent.
*/
enabled?: boolean;
};

View File

@@ -6,6 +6,9 @@ import path from "node:path";
import { describe, expect, it } from "vitest";
import { GatewayClient } from "./client.js";
import { startGatewayServer } from "./server.js";
type OpenAIResponsesParams = {
input?: unknown[];
};
@@ -227,7 +230,6 @@ function extractPayloadText(result: unknown): string {
}
async function connectClient(params: { url: string; token: string }) {
const { GatewayClient } = await import("./client.js");
return await new Promise<InstanceType<typeof GatewayClient>>(
(resolve, reject) => {
let settled = false;
@@ -368,7 +370,6 @@ describe("gateway (mock openai): tool calling", () => {
process.env.CLAWDBOT_CONFIG_PATH = configPath;
const port = await getFreeGatewayPort();
const { startGatewayServer } = await import("./server.js");
const server = await startGatewayServer(port, {
bind: "loopback",
auth: { mode: "token", token },

View File

@@ -9,6 +9,15 @@ import {
installGatewayTestHooks();
async function startServerWithDefaultConfig(port: number) {
const { startGatewayServer } = await import("./server.js");
return await startGatewayServer(port, {
host: "127.0.0.1",
auth: { mode: "token", token: "secret" },
controlUiEnabled: false,
});
}
async function startServer(
port: number,
opts?: { openAiChatCompletionsEnabled?: boolean },
@@ -18,7 +27,7 @@ async function startServer(
host: "127.0.0.1",
auth: { mode: "token", token: "secret" },
controlUiEnabled: false,
- openAiChatCompletionsEnabled: opts?.openAiChatCompletionsEnabled,
+ openAiChatCompletionsEnabled: opts?.openAiChatCompletionsEnabled ?? true,
});
}
@@ -48,6 +57,20 @@ function parseSseDataLines(text: string): string[] {
}
describe("OpenAI-compatible HTTP API (e2e)", () => {
it("is disabled by default (requires config)", async () => {
const port = await getFreePort();
const server = await startServerWithDefaultConfig(port);
try {
const res = await postChatCompletions(port, {
model: "clawdbot",
messages: [{ role: "user", content: "hi" }],
});
expect(res.status).toBe(404);
} finally {
await server.close({ reason: "test done" });
}
});
it("can be disabled via config (404)", async () => {
const port = await getFreePort();
const server = await startServer(port, {

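For reference, the new default-off test boils down to a wire-level check like the fetch sketch below. `postChatCompletions` is not shown in this diff, so the exact request it sends is an assumption; the path, method, body shape, and expected 404 come from the test above.

```ts
// Sketch: what "disabled by default" looks like on the wire. The port and
// the bare unauthenticated request are assumptions; path, method, body
// shape, and the 404 expectation come from the test above.
const port = 18789; // whatever port the gateway was started on (assumption)
const res = await fetch(`http://127.0.0.1:${port}/v1/chat/completions`, {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({
    model: "clawdbot",
    messages: [{ role: "user", content: "hi" }],
  }),
});
console.log(res.status); // 404 until chatCompletions.enabled is set to true
```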
View File

@@ -330,7 +330,7 @@ export type GatewayServerOptions = {
controlUiEnabled?: boolean;
/**
* If false, do not serve `POST /v1/chat/completions`.
- * Default: config `gateway.http.endpoints.chatCompletions.enabled` (or true when absent).
+ * Default: config `gateway.http.endpoints.chatCompletions.enabled` (or false when absent).
*/
openAiChatCompletionsEnabled?: boolean;
/**
@@ -440,7 +440,7 @@ export async function startGatewayServer(
const openAiChatCompletionsEnabled =
opts.openAiChatCompletionsEnabled ??
cfgAtStart.gateway?.http?.endpoints?.chatCompletions?.enabled ??
- true;
+ false;
const controlUiBasePath = normalizeControlUiBasePath(
cfgAtStart.gateway?.controlUi?.basePath,
);
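The hunk above is where the default actually flips: an explicit `startGatewayServer` option wins, then the config value, then the endpoint is off. A small sketch of that resolution order (`resolveEnabled` is illustrative, not a real export; the `??` chain mirrors the diff):

```ts
// Sketch of the resolution order this commit introduces.
function resolveEnabled(
  optsEnabled: boolean | undefined, // startGatewayServer option
  configEnabled: boolean | undefined, // gateway.http.endpoints.chatCompletions.enabled
): boolean {
  // Explicit server option wins, then config, then the new default: off.
  return optsEnabled ?? configEnabled ?? false;
}

resolveEnabled(undefined, undefined); // false (new default)
resolveEnabled(undefined, true); // true  (opted in via config)
resolveEnabled(false, true); // false (server option overrides config)
```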