chore(gate): fix lint and formatting

Peter Steinberger
2026-01-18 06:01:09 +00:00
parent d1c85cb32d
commit d4bd387e0e
15 changed files with 244 additions and 231 deletions

@@ -4,10 +4,7 @@ import path from "node:path";
 import { afterEach, beforeEach, describe, expect, it } from "vitest";
-import {
-  resolveBootstrapContextForRun,
-  resolveBootstrapFilesForRun,
-} from "./bootstrap-files.js";
+import { resolveBootstrapContextForRun, resolveBootstrapFilesForRun } from "./bootstrap-files.js";
 import {
   clearInternalHooks,
   registerInternalHook,

@@ -146,78 +146,82 @@ const readSessionMessages = async (sessionFile: string) => {
 };
 describe("runEmbeddedPiAgent", () => {
-  it("appends new user + assistant after existing transcript entries", { timeout: 90_000 }, async () => {
-    const { SessionManager } = await import("@mariozechner/pi-coding-agent");
+  it(
+    "appends new user + assistant after existing transcript entries",
+    { timeout: 90_000 },
+    async () => {
+      const { SessionManager } = await import("@mariozechner/pi-coding-agent");
 
-    const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-agent-"));
-    const workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-workspace-"));
-    const sessionFile = path.join(workspaceDir, "session.jsonl");
+      const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-agent-"));
+      const workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-workspace-"));
+      const sessionFile = path.join(workspaceDir, "session.jsonl");
 
-    const sessionManager = SessionManager.open(sessionFile);
-    sessionManager.appendMessage({
-      role: "user",
-      content: [{ type: "text", text: "seed user" }],
-    });
-    sessionManager.appendMessage({
-      role: "assistant",
-      content: [{ type: "text", text: "seed assistant" }],
-      stopReason: "stop",
-      api: "openai-responses",
-      provider: "openai",
-      model: "mock-1",
-      usage: {
-        input: 1,
-        output: 1,
-        cacheRead: 0,
-        cacheWrite: 0,
-        totalTokens: 2,
-        cost: {
-          input: 0,
-          output: 0,
-          cacheRead: 0,
-          cacheWrite: 0,
-          total: 0,
-        },
-      },
-      timestamp: Date.now(),
-    });
+      const sessionManager = SessionManager.open(sessionFile);
+      sessionManager.appendMessage({
+        role: "user",
+        content: [{ type: "text", text: "seed user" }],
+      });
+      sessionManager.appendMessage({
+        role: "assistant",
+        content: [{ type: "text", text: "seed assistant" }],
+        stopReason: "stop",
+        api: "openai-responses",
+        provider: "openai",
+        model: "mock-1",
+        usage: {
+          input: 1,
+          output: 1,
+          cacheRead: 0,
+          cacheWrite: 0,
+          totalTokens: 2,
+          cost: {
+            input: 0,
+            output: 0,
+            cacheRead: 0,
+            cacheWrite: 0,
+            total: 0,
+          },
+        },
+        timestamp: Date.now(),
+      });
 
-    const cfg = makeOpenAiConfig(["mock-1"]);
-    await ensureModels(cfg, agentDir);
+      const cfg = makeOpenAiConfig(["mock-1"]);
+      await ensureModels(cfg, agentDir);
 
-    await runEmbeddedPiAgent({
-      sessionId: "session:test",
-      sessionKey: testSessionKey,
-      sessionFile,
-      workspaceDir,
-      config: cfg,
-      prompt: "hello",
-      provider: "openai",
-      model: "mock-1",
-      timeoutMs: 5_000,
-      agentDir,
-      enqueue: immediateEnqueue,
-    });
+      await runEmbeddedPiAgent({
+        sessionId: "session:test",
+        sessionKey: testSessionKey,
+        sessionFile,
+        workspaceDir,
+        config: cfg,
+        prompt: "hello",
+        provider: "openai",
+        model: "mock-1",
+        timeoutMs: 5_000,
+        agentDir,
+        enqueue: immediateEnqueue,
+      });
 
-    const messages = await readSessionMessages(sessionFile);
-    const seedUserIndex = messages.findIndex(
-      (message) => message?.role === "user" && textFromContent(message.content) === "seed user",
-    );
-    const seedAssistantIndex = messages.findIndex(
-      (message) =>
-        message?.role === "assistant" && textFromContent(message.content) === "seed assistant",
-    );
-    const newUserIndex = messages.findIndex(
-      (message) => message?.role === "user" && textFromContent(message.content) === "hello",
-    );
-    const newAssistantIndex = messages.findIndex(
-      (message, index) => index > newUserIndex && message?.role === "assistant",
-    );
-    expect(seedUserIndex).toBeGreaterThanOrEqual(0);
-    expect(seedAssistantIndex).toBeGreaterThan(seedUserIndex);
-    expect(newUserIndex).toBeGreaterThan(seedAssistantIndex);
-    expect(newAssistantIndex).toBeGreaterThan(newUserIndex);
-  });
+      const messages = await readSessionMessages(sessionFile);
+      const seedUserIndex = messages.findIndex(
+        (message) => message?.role === "user" && textFromContent(message.content) === "seed user",
+      );
+      const seedAssistantIndex = messages.findIndex(
+        (message) =>
+          message?.role === "assistant" && textFromContent(message.content) === "seed assistant",
+      );
+      const newUserIndex = messages.findIndex(
+        (message) => message?.role === "user" && textFromContent(message.content) === "hello",
+      );
+      const newAssistantIndex = messages.findIndex(
+        (message, index) => index > newUserIndex && message?.role === "assistant",
+      );
+      expect(seedUserIndex).toBeGreaterThanOrEqual(0);
+      expect(seedAssistantIndex).toBeGreaterThan(seedUserIndex);
+      expect(newUserIndex).toBeGreaterThan(seedAssistantIndex);
+      expect(newAssistantIndex).toBeGreaterThan(newUserIndex);
+    },
+  );
 
   it("persists multi-turn user/assistant ordering across runs", async () => {
     const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-agent-"));
     const workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-workspace-"));

@@ -23,7 +23,6 @@ import { getApiKeyForModel, resolveModelAuthMode } from "../model-auth.js";
 import { ensureClawdbotModelsJson } from "../models-config.js";
 import {
   ensureSessionHeader,
-  resolveBootstrapMaxChars,
   validateAnthropicTurns,
   validateGeminiTurns,
 } from "../pi-embedded-helpers.js";

@@ -136,7 +136,7 @@ async function resolveContextReport(
     bootstrapMaxChars,
     sandbox: { mode: sandboxRuntime.mode, sandboxed: sandboxRuntime.sandboxed },
     systemPrompt,
-    bootstrapFiles: hookAdjustedBootstrapFiles,
+    bootstrapFiles,
     injectedFiles,
     skillsPrompt,
     tools,

@@ -12,7 +12,8 @@ export async function checkGatewayHealth(params: {
   timeoutMs?: number;
 }) {
   const gatewayDetails = buildGatewayConnectionDetails({ config: params.cfg });
-  const timeoutMs = typeof params.timeoutMs === "number" && params.timeoutMs > 0 ? params.timeoutMs : 10_000;
+  const timeoutMs =
+    typeof params.timeoutMs === "number" && params.timeoutMs > 0 ? params.timeoutMs : 10_000;
   let healthOk = false;
   try {
     await healthCommand({ json: false, timeoutMs }, params.runtime);

@@ -211,161 +211,157 @@ async function connectClient(params: { url: string; token: string }) {
 }
 
 describe("gateway (mock openai): tool calling", () => {
-  it(
-    "runs a Read tool call end-to-end via gateway agent loop",
-    { timeout: 90_000 },
-    async () => {
-      const prev = {
-        home: process.env.HOME,
-        configPath: process.env.CLAWDBOT_CONFIG_PATH,
-        token: process.env.CLAWDBOT_GATEWAY_TOKEN,
-        skipChannels: process.env.CLAWDBOT_SKIP_CHANNELS,
-        skipGmail: process.env.CLAWDBOT_SKIP_GMAIL_WATCHER,
-        skipCron: process.env.CLAWDBOT_SKIP_CRON,
-        skipCanvas: process.env.CLAWDBOT_SKIP_CANVAS_HOST,
-      };
+  it("runs a Read tool call end-to-end via gateway agent loop", { timeout: 90_000 }, async () => {
+    const prev = {
+      home: process.env.HOME,
+      configPath: process.env.CLAWDBOT_CONFIG_PATH,
+      token: process.env.CLAWDBOT_GATEWAY_TOKEN,
+      skipChannels: process.env.CLAWDBOT_SKIP_CHANNELS,
+      skipGmail: process.env.CLAWDBOT_SKIP_GMAIL_WATCHER,
+      skipCron: process.env.CLAWDBOT_SKIP_CRON,
+      skipCanvas: process.env.CLAWDBOT_SKIP_CANVAS_HOST,
+    };
 
-      const originalFetch = globalThis.fetch;
-      const openaiBaseUrl = "https://api.openai.com/v1";
-      const openaiResponsesUrl = `${openaiBaseUrl}/responses`;
-      const isOpenAIResponsesRequest = (url: string) =>
-        url === openaiResponsesUrl ||
-        url.startsWith(`${openaiResponsesUrl}/`) ||
-        url.startsWith(`${openaiResponsesUrl}?`);
-      const fetchImpl = async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
-        const url =
-          typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
+    const originalFetch = globalThis.fetch;
+    const openaiBaseUrl = "https://api.openai.com/v1";
+    const openaiResponsesUrl = `${openaiBaseUrl}/responses`;
+    const isOpenAIResponsesRequest = (url: string) =>
+      url === openaiResponsesUrl ||
+      url.startsWith(`${openaiResponsesUrl}/`) ||
+      url.startsWith(`${openaiResponsesUrl}?`);
+    const fetchImpl = async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
+      const url =
+        typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
 
-        if (isOpenAIResponsesRequest(url)) {
-          const bodyText =
-            typeof (init as { body?: unknown } | undefined)?.body !== "undefined"
-              ? decodeBodyText((init as { body?: unknown }).body)
-              : input instanceof Request
-                ? await input.clone().text()
-                : "";
+      if (isOpenAIResponsesRequest(url)) {
+        const bodyText =
+          typeof (init as { body?: unknown } | undefined)?.body !== "undefined"
+            ? decodeBodyText((init as { body?: unknown }).body)
+            : input instanceof Request
+              ? await input.clone().text()
+              : "";
 
-          const parsed = bodyText ? (JSON.parse(bodyText) as Record<string, unknown>) : {};
-          const inputItems = Array.isArray(parsed.input) ? parsed.input : [];
-          return await buildOpenAIResponsesSse({ input: inputItems });
-        }
-        if (url.startsWith(openaiBaseUrl)) {
-          throw new Error(`unexpected OpenAI request in mock test: ${url}`);
-        }
+        const parsed = bodyText ? (JSON.parse(bodyText) as Record<string, unknown>) : {};
+        const inputItems = Array.isArray(parsed.input) ? parsed.input : [];
+        return await buildOpenAIResponsesSse({ input: inputItems });
+      }
+      if (url.startsWith(openaiBaseUrl)) {
+        throw new Error(`unexpected OpenAI request in mock test: ${url}`);
+      }
 
-        if (!originalFetch) {
-          throw new Error(`fetch is not available (url=${url})`);
-        }
-        return await originalFetch(input, init);
-      };
-      // TypeScript: Bun's fetch typing includes extra properties; keep this test portable.
-      (globalThis as unknown as { fetch: unknown }).fetch = fetchImpl;
+      if (!originalFetch) {
+        throw new Error(`fetch is not available (url=${url})`);
+      }
+      return await originalFetch(input, init);
+    };
+    // TypeScript: Bun's fetch typing includes extra properties; keep this test portable.
+    (globalThis as unknown as { fetch: unknown }).fetch = fetchImpl;
 
-      const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-gw-mock-home-"));
-      process.env.HOME = tempHome;
-      process.env.CLAWDBOT_SKIP_CHANNELS = "1";
-      process.env.CLAWDBOT_SKIP_GMAIL_WATCHER = "1";
-      process.env.CLAWDBOT_SKIP_CRON = "1";
-      process.env.CLAWDBOT_SKIP_CANVAS_HOST = "1";
+    const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-gw-mock-home-"));
+    process.env.HOME = tempHome;
+    process.env.CLAWDBOT_SKIP_CHANNELS = "1";
+    process.env.CLAWDBOT_SKIP_GMAIL_WATCHER = "1";
+    process.env.CLAWDBOT_SKIP_CRON = "1";
+    process.env.CLAWDBOT_SKIP_CANVAS_HOST = "1";
 
-      const token = `test-${randomUUID()}`;
-      process.env.CLAWDBOT_GATEWAY_TOKEN = token;
+    const token = `test-${randomUUID()}`;
+    process.env.CLAWDBOT_GATEWAY_TOKEN = token;
 
-      const workspaceDir = path.join(tempHome, "clawd");
-      await fs.mkdir(workspaceDir, { recursive: true });
+    const workspaceDir = path.join(tempHome, "clawd");
+    await fs.mkdir(workspaceDir, { recursive: true });
 
-      const nonceA = randomUUID();
-      const nonceB = randomUUID();
-      const toolProbePath = path.join(workspaceDir, `.clawdbot-tool-probe.${nonceA}.txt`);
-      await fs.writeFile(toolProbePath, `nonceA=${nonceA}\nnonceB=${nonceB}\n`);
+    const nonceA = randomUUID();
+    const nonceB = randomUUID();
+    const toolProbePath = path.join(workspaceDir, `.clawdbot-tool-probe.${nonceA}.txt`);
+    await fs.writeFile(toolProbePath, `nonceA=${nonceA}\nnonceB=${nonceB}\n`);
 
-      const configDir = path.join(tempHome, ".clawdbot");
-      await fs.mkdir(configDir, { recursive: true });
-      const configPath = path.join(configDir, "clawdbot.json");
+    const configDir = path.join(tempHome, ".clawdbot");
+    await fs.mkdir(configDir, { recursive: true });
+    const configPath = path.join(configDir, "clawdbot.json");
 
-      const cfg = {
-        agents: { defaults: { workspace: workspaceDir } },
-        models: {
-          mode: "replace",
-          providers: {
-            openai: {
-              baseUrl: openaiBaseUrl,
-              apiKey: "test",
-              api: "openai-responses",
-              models: [
-                {
-                  id: "gpt-5.2",
-                  name: "gpt-5.2",
-                  api: "openai-responses",
-                  reasoning: false,
-                  input: ["text"],
-                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
-                  contextWindow: 128_000,
-                  maxTokens: 4096,
-                },
-              ],
-            },
-          },
-        },
-        gateway: { auth: { token } },
-      };
+    const cfg = {
+      agents: { defaults: { workspace: workspaceDir } },
+      models: {
+        mode: "replace",
+        providers: {
+          openai: {
+            baseUrl: openaiBaseUrl,
+            apiKey: "test",
+            api: "openai-responses",
+            models: [
+              {
+                id: "gpt-5.2",
+                name: "gpt-5.2",
+                api: "openai-responses",
+                reasoning: false,
+                input: ["text"],
+                cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
+                contextWindow: 128_000,
+                maxTokens: 4096,
+              },
+            ],
+          },
+        },
+      },
+      gateway: { auth: { token } },
+    };
 
-      await fs.writeFile(configPath, `${JSON.stringify(cfg, null, 2)}\n`);
-      process.env.CLAWDBOT_CONFIG_PATH = configPath;
+    await fs.writeFile(configPath, `${JSON.stringify(cfg, null, 2)}\n`);
+    process.env.CLAWDBOT_CONFIG_PATH = configPath;
 
-      const port = await getFreeGatewayPort();
-      const server = await startGatewayServer(port, {
-        bind: "loopback",
-        auth: { mode: "token", token },
-        controlUiEnabled: false,
-      });
+    const port = await getFreeGatewayPort();
+    const server = await startGatewayServer(port, {
+      bind: "loopback",
+      auth: { mode: "token", token },
+      controlUiEnabled: false,
+    });
 
-      const client = await connectClient({
-        url: `ws://127.0.0.1:${port}`,
-        token,
-      });
+    const client = await connectClient({
+      url: `ws://127.0.0.1:${port}`,
+      token,
+    });
 
-      try {
-        const sessionKey = "agent:dev:mock-openai";
-        await client.request<Record<string, unknown>>("sessions.patch", {
-          key: sessionKey,
-          model: "openai/gpt-5.2",
-        });
+    try {
+      const sessionKey = "agent:dev:mock-openai";
+      await client.request<Record<string, unknown>>("sessions.patch", {
+        key: sessionKey,
+        model: "openai/gpt-5.2",
+      });
 
-        const runId = randomUUID();
-        const payload = await client.request<{
-          status?: unknown;
-          result?: unknown;
-        }>(
-          "agent",
-          {
-            sessionKey,
-            idempotencyKey: `idem-${runId}`,
-            message:
-              `Call the read tool on "${toolProbePath}". ` +
-              `Then reply with exactly: ${nonceA} ${nonceB}. No extra text.`,
-            deliver: false,
-          },
-          { expectFinal: true },
-        );
+      const runId = randomUUID();
+      const payload = await client.request<{
+        status?: unknown;
+        result?: unknown;
+      }>(
+        "agent",
+        {
+          sessionKey,
+          idempotencyKey: `idem-${runId}`,
+          message:
+            `Call the read tool on "${toolProbePath}". ` +
+            `Then reply with exactly: ${nonceA} ${nonceB}. No extra text.`,
+          deliver: false,
+        },
+        { expectFinal: true },
+      );
 
-        expect(payload?.status).toBe("ok");
-        const text = extractPayloadText(payload?.result);
-        expect(text).toContain(nonceA);
-        expect(text).toContain(nonceB);
-      } finally {
-        client.stop();
-        await server.close({ reason: "mock openai test complete" });
-        await fs.rm(tempHome, { recursive: true, force: true });
-        (globalThis as unknown as { fetch: unknown }).fetch = originalFetch;
-        process.env.HOME = prev.home;
-        process.env.CLAWDBOT_CONFIG_PATH = prev.configPath;
-        process.env.CLAWDBOT_GATEWAY_TOKEN = prev.token;
-        process.env.CLAWDBOT_SKIP_CHANNELS = prev.skipChannels;
-        process.env.CLAWDBOT_SKIP_GMAIL_WATCHER = prev.skipGmail;
-        process.env.CLAWDBOT_SKIP_CRON = prev.skipCron;
-        process.env.CLAWDBOT_SKIP_CANVAS_HOST = prev.skipCanvas;
-      }
-    },
-  );
+      expect(payload?.status).toBe("ok");
+      const text = extractPayloadText(payload?.result);
+      expect(text).toContain(nonceA);
+      expect(text).toContain(nonceB);
+    } finally {
+      client.stop();
+      await server.close({ reason: "mock openai test complete" });
+      await fs.rm(tempHome, { recursive: true, force: true });
+      (globalThis as unknown as { fetch: unknown }).fetch = originalFetch;
+      process.env.HOME = prev.home;
+      process.env.CLAWDBOT_CONFIG_PATH = prev.configPath;
+      process.env.CLAWDBOT_GATEWAY_TOKEN = prev.token;
+      process.env.CLAWDBOT_SKIP_CHANNELS = prev.skipChannels;
+      process.env.CLAWDBOT_SKIP_GMAIL_WATCHER = prev.skipGmail;
+      process.env.CLAWDBOT_SKIP_CRON = prev.skipCron;
+      process.env.CLAWDBOT_SKIP_CANVAS_HOST = prev.skipCanvas;
+    }
+  });
 
 });

@@ -4,8 +4,8 @@ import { bm25RankToScore, buildFtsQuery, mergeHybridResults } from "./hybrid.js"
 describe("memory hybrid helpers", () => {
   it("buildFtsQuery tokenizes and AND-joins", () => {
-    expect(buildFtsQuery("hello world")).toBe("\"hello\" AND \"world\"");
-    expect(buildFtsQuery("FOO_bar baz-1")).toBe("\"FOO_bar\" AND \"baz\" AND \"1\"");
+    expect(buildFtsQuery("hello world")).toBe('"hello" AND "world"');
+    expect(buildFtsQuery("FOO_bar baz-1")).toBe('"FOO_bar" AND "baz" AND "1"');
     expect(buildFtsQuery(" ")).toBeNull();
   });
@@ -84,4 +84,3 @@ describe("memory hybrid helpers", () => {
     expect(merged[0]?.score).toBeCloseTo(0.5 * 0.2 + 0.5 * 1.0);
   });
 });
-

@@ -21,9 +21,13 @@ export type HybridKeywordResult = {
 };
 
 export function buildFtsQuery(raw: string): string | null {
-  const tokens = raw.match(/[A-Za-z0-9_]+/g)?.map((t) => t.trim()).filter(Boolean) ?? [];
+  const tokens =
+    raw
+      .match(/[A-Za-z0-9_]+/g)
+      ?.map((t) => t.trim())
+      .filter(Boolean) ?? [];
   if (tokens.length === 0) return null;
-  const quoted = tokens.map((t) => `"${t.replaceAll("\"", "")}"`);
+  const quoted = tokens.map((t) => `"${t.replaceAll('"', "")}"`);
   return quoted.join(" AND ");
 }
@@ -105,4 +109,3 @@ export function mergeHybridResults(params: {
   return merged.sort((a, b) => b.score - a.score);
 }
-

@@ -236,7 +236,12 @@ describe("memory index", () => {
           query: {
             minScore: 0,
             maxResults: 200,
-            hybrid: { enabled: true, vectorWeight: 0.99, textWeight: 0.01, candidateMultiplier: 10 },
+            hybrid: {
+              enabled: true,
+              vectorWeight: 0.99,
+              textWeight: 0.01,
+              candidateMultiplier: 10,
+            },
           },
         },
       },
@@ -284,7 +289,12 @@ describe("memory index", () => {
           query: {
             minScore: 0,
             maxResults: 200,
-            hybrid: { enabled: true, vectorWeight: 0.01, textWeight: 0.99, candidateMultiplier: 10 },
+            hybrid: {
+              enabled: true,
+              vectorWeight: 0.01,
+              textWeight: 0.99,
+              candidateMultiplier: 10,
+            },
           },
         },
       },

@@ -3,7 +3,8 @@ import type { DatabaseSync } from "node:sqlite";
 import { truncateUtf16Safe } from "../utils.js";
 import { cosineSimilarity, parseEmbedding } from "./internal.js";
 
-const vectorToBlob = (embedding: number[]): Buffer => Buffer.from(new Float32Array(embedding).buffer);
+const vectorToBlob = (embedding: number[]): Buffer =>
+  Buffer.from(new Float32Array(embedding).buffer);
 
 export type SearchSource = string;
@@ -47,9 +48,9 @@ export async function searchVector(params: {
       ...params.sourceFilterVec.params,
       params.limit,
     ) as Array<{
-    id: string;
-    path: string;
-    start_line: number;
+      id: string;
+      path: string;
+      start_line: number;
       end_line: number;
       text: string;
       source: SearchSource;

@@ -92,4 +92,3 @@ function ensureColumn(
   if (rows.some((row) => row.name === column)) return;
   db.exec(`ALTER TABLE ${table} ADD COLUMN ${column} ${definition}`);
 }
-

@@ -156,7 +156,10 @@ async function readOpenAiBatchError(params: {
   errorFileId: string;
 }): Promise<string | undefined> {
   try {
-    const content = await fetchOpenAiFileContent({ openAi: params.openAi, fileId: params.errorFileId });
+    const content = await fetchOpenAiFileContent({
+      openAi: params.openAi,
+      fileId: params.errorFileId,
+    });
     const lines = parseOpenAiBatchOutput(content);
     const first = lines.find((line) => line.error?.message || line.response?.body?.error);
     const message =
@@ -357,4 +360,3 @@ export async function runOpenAiEmbeddingBatches(params: {
   await runWithConcurrency(tasks, params.concurrency);
   return byCustomId;
 }
-

@@ -22,4 +22,3 @@ export async function loadSqliteVecExtension(params: {
     return { ok: false, error: message };
   }
 }
-

@@ -11,7 +11,10 @@ import { dispatchReplyWithBufferedBlockDispatcher } from "../../auto-reply/reply
 import { createReplyDispatcherWithTyping } from "../../auto-reply/reply/reply-dispatcher.js";
 import { resolveEffectiveMessagesConfig, resolveHumanDelayConfig } from "../../agents/identity.js";
 import { resolveCommandAuthorizedFromAuthorizers } from "../../channels/command-gating.js";
-import { resolveChannelGroupPolicy, resolveChannelGroupRequireMention } from "../../config/group-policy.js";
+import {
+  resolveChannelGroupPolicy,
+  resolveChannelGroupRequireMention,
+} from "../../config/group-policy.js";
 import { resolveStateDir } from "../../config/paths.js";
 import { shouldLogVerbose } from "../../globals.js";
 import { getChildLogger } from "../../logging.js";

@@ -63,9 +63,9 @@ export async function getDeterministicFreePortBlock(params?: {
   for (let attempt = 0; attempt < usable; attempt += 1) {
     const start = base + ((nextTestPortOffset + attempt) % usable);
     // eslint-disable-next-line no-await-in-loop
-    const ok = (
-      await Promise.all(offsets.map((offset) => isPortFree(start + offset)))
-    ).every(Boolean);
+    const ok = (await Promise.all(offsets.map((offset) => isPortFree(start + offset)))).every(
+      Boolean,
+    );
     if (!ok) continue;
     nextTestPortOffset = (nextTestPortOffset + attempt + blockSize) % usable;
     return start;