fix: stabilize ci protocol + openai batch retry

Peter Steinberger
2026-01-18 17:05:21 +00:00
parent a4ee933022
commit f16b0cf80d
5 changed files with 96 additions and 19 deletions

View File

@@ -23,8 +23,10 @@ vi.mock("../logger.js", () => {
 	};
 });
 
-vi.mock("../logging.js", () => {
+vi.mock("../logging.js", async () => {
+	const actual = await vi.importActual<typeof import("../logging.js")>("../logging.js");
 	return {
+		...actual,
 		getLogger: () => ({ info: (...args: unknown[]) => getLoggerInfo(...args) }),
 	};
 });
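
The async factory with vi.importActual turns this into a partial mock: every other export of ../logging.js stays real and only getLogger is replaced. A hypothetical assertion against the stub could look like the sketch below, assuming getLoggerInfo is the vi.fn() this test file declares earlier (the hunk only references it):

it("forwards info logs to the stub", async () => {
	const { getLogger } = await import("../logging.js");
	getLogger().info("session started");
	// The mocked getLogger().info() just forwards its arguments to getLoggerInfo.
	expect(getLoggerInfo).toHaveBeenCalledWith("session started");
});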

View File

@@ -36,7 +36,20 @@ function extractErrorMessage(err: unknown): string | undefined {
 	if (typeof err === "object" && "message" in err && typeof err.message === "string") {
 		return err.message;
 	}
-	return String(err);
+	if (typeof err === "number" || typeof err === "boolean" || typeof err === "bigint") {
+		return String(err);
+	}
+	if (typeof err === "symbol") {
+		return err.toString();
+	}
+	if (typeof err === "object") {
+		try {
+			return JSON.stringify(err);
+		} catch {
+			return undefined;
+		}
+	}
+	return undefined;
 }
 
 function logRemoteBinProbeFailure(nodeId: string, err: unknown) {
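
The net effect is that non-Error values no longer collapse into "[object Object]": numbers, booleans, and bigints are stringified explicitly, symbols go through toString(), and other objects are JSON-serialized with a try/catch guarding against circular structures. Assuming the earlier part of the function (not shown in this hunk) already handles strings and nullish input, the new behavior is roughly this, with expected return values shown as comments:

extractErrorMessage(new Error("boom")); // "boom" (the message string wins)
extractErrorMessage(404);               // "404"
extractErrorMessage(Symbol("x"));       // "Symbol(x)"
extractErrorMessage({ code: 1 });       // '{"code":1}'
const cyclic: { self?: unknown } = {};
cyclic.self = cyclic;
extractErrorMessage(cyclic);            // undefined (JSON.stringify throws on cycles)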

View File

@@ -1,3 +1,4 @@
+import { retryAsync } from "../infra/retry.js";
 import type { OpenAiEmbeddingClient } from "./embeddings-openai.js";
 import { hashText } from "./internal.js";
@@ -92,23 +93,42 @@ async function submitOpenAiBatch(params: {
throw new Error("openai batch file upload failed: missing file id");
}
const batchRes = await fetch(`${baseUrl}/batches`, {
method: "POST",
headers: getOpenAiHeaders(params.openAi, { json: true }),
body: JSON.stringify({
input_file_id: filePayload.id,
endpoint: OPENAI_BATCH_ENDPOINT,
completion_window: OPENAI_BATCH_COMPLETION_WINDOW,
metadata: {
source: "clawdbot-memory",
agent: params.agentId,
const batchRes = await retryAsync(
async () => {
const res = await fetch(`${baseUrl}/batches`, {
method: "POST",
headers: getOpenAiHeaders(params.openAi, { json: true }),
body: JSON.stringify({
input_file_id: filePayload.id,
endpoint: OPENAI_BATCH_ENDPOINT,
completion_window: OPENAI_BATCH_COMPLETION_WINDOW,
metadata: {
source: "clawdbot-memory",
agent: params.agentId,
},
}),
});
if (!res.ok) {
const text = await res.text();
const err = new Error(`openai batch create failed: ${res.status} ${text}`) as Error & {
status?: number;
};
err.status = res.status;
throw err;
}
return res;
},
{
attempts: 3,
minDelayMs: 300,
maxDelayMs: 2000,
jitter: 0.2,
shouldRetry: (err) => {
const status = (err as { status?: number }).status;
return status === 429 || (typeof status === "number" && status >= 500);
},
}),
});
if (!batchRes.ok) {
const text = await batchRes.text();
throw new Error(`openai batch create failed: ${batchRes.status} ${text}`);
}
},
);
return (await batchRes.json()) as OpenAiBatchStatus;
}
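
The retry policy itself lives in ../infra/retry.js, which this commit does not touch, so the helper is not shown here. From the call site alone its contract is roughly the following sketch; anything beyond what the options literal shows (such as whether attempts counts the first call) is an assumption, not the actual implementation:

// Assumed shape of retryAsync, inferred only from the call site above.
type RetryOptions = {
	attempts: number;                        // total tries, assumed to include the first call
	minDelayMs: number;                      // initial backoff delay
	maxDelayMs: number;                      // cap on the backoff delay
	jitter: number;                          // e.g. 0.2 = up to +/-20% randomization per delay
	shouldRetry?: (err: unknown) => boolean; // veto retries for non-retryable errors
};

async function retryAsync<T>(fn: () => Promise<T>, opts: RetryOptions): Promise<T> {
	let delay = opts.minDelayMs;
	for (let attempt = 1; ; attempt++) {
		try {
			return await fn();
		} catch (err) {
			// Give up on the last attempt or when the caller says the error is not retryable.
			if (attempt >= opts.attempts || (opts.shouldRetry && !opts.shouldRetry(err))) {
				throw err;
			}
			const jittered = delay * (1 + (Math.random() * 2 - 1) * opts.jitter);
			await new Promise((resolve) => setTimeout(resolve, jittered));
			delay = Math.min(delay * 2, opts.maxDelayMs);
		}
	}
}

The shouldRetry predicate is why the status code gets copied onto the thrown Error: only 429 and 5xx responses are retried, while other 4xx client errors still fail on the first attempt.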

View File

@@ -1,4 +1,4 @@
-import { describe, expect, it, vi } from "vitest";
+import { beforeEach, describe, expect, it, vi } from "vitest";
 
 const loadJsonFile = vi.fn();
 const saveJsonFile = vi.fn();
@@ -14,6 +14,14 @@ vi.mock("../config/paths.js", () => ({
 }));
 
 describe("github-copilot token", () => {
+	beforeEach(() => {
+		vi.resetModules();
+		loadJsonFile.mockReset();
+		saveJsonFile.mockReset();
+		resolveStateDir.mockReset();
+		resolveStateDir.mockReturnValue("/tmp/clawdbot-state");
+	});
+
 	it("derives baseUrl from token", async () => {
 		const { deriveCopilotApiBaseUrlFromToken } = await import("./github-copilot-token.js");
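
The new beforeEach matters because each test imports the module under test dynamically: without vi.resetModules(), every test after the first would receive the cached module instance along with whatever module-level state the previous test left behind. A minimal illustration of that Vitest behavior, using a hypothetical module with top-level state (not part of this repo):

// counter.ts (hypothetical): module-level state that would leak between tests
// export let loads = 0;
// loads += 1;

import { expect, it, vi } from "vitest";

it("re-imports a fresh module instance after resetModules", async () => {
	const first = await import("./counter.js");
	vi.resetModules();
	const second = await import("./counter.js");
	// A new namespace object: module-level state starts over for the second import.
	expect(second).not.toBe(first);
});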