import fs from "node:fs/promises";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";

import { withTempHome as withTempHomeBase } from "../../test/helpers/temp-home.js";
import type { ClawdbotConfig } from "../config/config.js";

// Wrap the shared temp-home helper with a test-specific directory prefix.
async function withTempHome(fn: (home: string) => Promise<void>): Promise<void> {
  return withTempHomeBase(fn, { prefix: "clawdbot-models-" });
}

const MODELS_CONFIG: ClawdbotConfig = {
  models: {
    providers: {
      "custom-proxy": {
        baseUrl: "http://localhost:4000/v1",
        apiKey: "TEST_KEY",
        api: "openai-completions",
        models: [
          {
            id: "llama-3.1-8b",
            name: "Llama 3.1 8B (Proxy)",
            api: "openai-completions",
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 128000,
            maxTokens: 32000,
          },
        ],
      },
    },
  },
};

describe("models config", () => {
  let previousHome: string | undefined;

  // Save and restore HOME so each test can point it at a temp directory.
  beforeEach(() => {
    previousHome = process.env.HOME;
  });

  afterEach(() => {
    process.env.HOME = previousHome;
  });

  it("writes models.json for configured providers", async () => {
    await withTempHome(async () => {
      // Reset the module cache so the dynamic imports resolve paths against the temp HOME.
      vi.resetModules();
      const { ensureClawdbotModelsJson } = await import("./models-config.js");
      const { resolveClawdbotAgentDir } = await import("./agent-paths.js");

      await ensureClawdbotModelsJson(MODELS_CONFIG);

      const modelPath = path.join(resolveClawdbotAgentDir(), "models.json");
      const raw = await fs.readFile(modelPath, "utf8");
      const parsed = JSON.parse(raw) as {
        providers: Record<string, { baseUrl?: string }>;
      };
      expect(parsed.providers["custom-proxy"]?.baseUrl).toBe(
        "http://localhost:4000/v1",
      );
    });
  });

  it("merges providers by default", async () => {
    await withTempHome(async () => {
      vi.resetModules();
      const { ensureClawdbotModelsJson } = await import("./models-config.js");
      const { resolveClawdbotAgentDir } = await import("./agent-paths.js");

      // Seed an existing models.json to verify it is merged rather than overwritten.
      const agentDir = resolveClawdbotAgentDir();
      await fs.mkdir(agentDir, { recursive: true });
      await fs.writeFile(
        path.join(agentDir, "models.json"),
        JSON.stringify(
          {
            providers: {
              existing: {
                baseUrl: "http://localhost:1234/v1",
                apiKey: "EXISTING_KEY",
                api: "openai-completions",
                models: [
                  {
                    id: "existing-model",
                    name: "Existing",
                    api: "openai-completions",
                    reasoning: false,
                    input: ["text"],
                    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                    contextWindow: 8192,
                    maxTokens: 2048,
                  },
                ],
              },
            },
          },
          null,
          2,
        ),
        "utf8",
      );

      await ensureClawdbotModelsJson(MODELS_CONFIG);

      const raw = await fs.readFile(path.join(agentDir, "models.json"), "utf8");
      const parsed = JSON.parse(raw) as {
        providers: Record<string, { baseUrl?: string }>;
      };
      expect(parsed.providers.existing?.baseUrl).toBe(
        "http://localhost:1234/v1",
      );
      expect(parsed.providers["custom-proxy"]?.baseUrl).toBe(
        "http://localhost:4000/v1",
      );
    });
  });

  it("normalizes gemini 3 ids to preview for google providers", async () => {
    await withTempHome(async () => {
      vi.resetModules();
      const { ensureClawdbotModelsJson } = await import("./models-config.js");
      const { resolveClawdbotAgentDir } = await import("./agent-paths.js");

      const cfg: ClawdbotConfig = {
        models: {
          providers: {
            google: {
              baseUrl: "https://generativelanguage.googleapis.com/v1beta",
              apiKey: "GEMINI_KEY",
              api: "google-generative-ai",
              models: [
                {
                  id: "gemini-3-pro",
                  name: "Gemini 3 Pro",
                  api: "google-generative-ai",
                  reasoning: true,
                  input: ["text", "image"],
                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                  contextWindow: 1048576,
                  maxTokens: 65536,
                },
                {
                  id: "gemini-3-flash",
                  name: "Gemini 3 Flash",
                  api: "google-generative-ai",
                  reasoning: false,
                  input: ["text", "image"],
                  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                  contextWindow: 1048576,
                  maxTokens: 65536,
                },
              ],
            },
          },
        },
      };

      await ensureClawdbotModelsJson(cfg);

      const modelPath = path.join(resolveClawdbotAgentDir(), "models.json");
      const raw = await fs.readFile(modelPath, "utf8");
      const parsed = JSON.parse(raw) as {
        providers: Record<string, { models?: Array<{ id: string }> }>;
      };
      const ids = parsed.providers.google?.models?.map((model) => model.id);
      expect(ids).toEqual(["gemini-3-pro-preview", "gemini-3-flash-preview"]);
    });
  });
});