import fs from "node:fs/promises";
import type { AgentMessage } from "@mariozechner/pi-agent-core";
import { SessionManager } from "@mariozechner/pi-coding-agent";
import { describe, expect, it, vi } from "vitest";
import type { ClawdbotConfig } from "../config/config.js";
import { ensureClawdbotModelsJson } from "./models-config.js";
import { applyGoogleTurnOrderingFix } from "./pi-embedded-runner.js";

// Stub out streamSimple with a deterministic fake: every model answers "ok"
// via a microtask-deferred "done" event, except the sentinel id "mock-error",
// which throws synchronously. The factory is hoisted by vitest, so it must
// stay self-contained (no references to outer test-scope variables).
vi.mock("@mariozechner/pi-ai", async () => {
	const real = await vi.importActual("@mariozechner/pi-ai");
	return {
		...real,
		streamSimple: (model: { api: string; provider: string; id: string }) => {
			if (model.id === "mock-error") {
				throw new Error("boom");
			}
			const eventStream = new real.AssistantMessageEventStream();
			// Defer the terminal event so consumers can attach listeners first.
			queueMicrotask(() => {
				eventStream.push({
					type: "done",
					reason: "stop",
					message: {
						role: "assistant",
						content: [{ type: "text", text: "ok" }],
						stopReason: "stop",
						api: model.api,
						provider: model.provider,
						model: model.id,
						usage: {
							input: 1,
							output: 1,
							cacheRead: 0,
							cacheWrite: 0,
							totalTokens: 2,
							cost: {
								input: 0,
								output: 0,
								cacheRead: 0,
								cacheWrite: 0,
								total: 0,
							},
						},
						timestamp: Date.now(),
					},
				});
			});
			return eventStream;
		},
	};
});

// Minimal config with one mocked OpenAI-compatible provider exposing the
// given model ids. Underscore-prefixed: parked for tests not yet written.
const _makeOpenAiConfig = (modelIds: string[]) =>
	({
		models: {
			providers: {
				openai: {
					api: "openai-responses",
					apiKey: "sk-test",
					baseUrl: "https://example.com",
					models: modelIds.map((id) => ({
						id,
						name: `Mock ${id}`,
						reasoning: false,
						input: ["text"],
						cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
						contextWindow: 16_000,
						maxTokens: 2048,
					})),
				},
			},
		},
	}) satisfies ClawdbotConfig;

// Thin alias over ensureClawdbotModelsJson, kept for future use.
const _ensureModels = (cfg: ClawdbotConfig, agentDir: string) =>
	ensureClawdbotModelsJson(cfg, agentDir);

// Extract plain text from a message content value: either a raw string or
// the text of a leading { type: "text" } block; undefined otherwise.
const _textFromContent = (content: unknown) => {
	if (typeof content === "string") {
		return content;
	}
	if (Array.isArray(content) && content[0]?.type === "text") {
		return (content[0] as { text?: string }).text;
	}
	return undefined;
};

// Read a JSONL session file and return the payloads of its "message" entries.
const _readSessionMessages = async (sessionFile: string) => {
	const raw = await fs.readFile(sessionFile, "utf-8");
	const entries = raw
		.split(/\r?\n/)
		.filter(Boolean)
		.map(
			(line) =>
				JSON.parse(line) as {
					type?: string;
					message?: { role?: string; content?: unknown };
				},
		);
	return entries
		.filter((entry) => entry.type === "message")
		.map((entry) => entry.message as { role?: string; content?: unknown });
};

describe("applyGoogleTurnOrderingFix", () => {
	// A conversation that illegally opens with an assistant tool call —
	// the exact shape the Google turn-ordering fix exists to repair.
	const makeAssistantFirst = () =>
		[
			{
				role: "assistant",
				content: [
					{ type: "toolCall", id: "call_1", name: "exec", arguments: {} },
				],
			},
		] satisfies AgentMessage[];

	it("prepends a bootstrap once and records a marker for Google models", () => {
		const sessionManager = SessionManager.inMemory();
		const warn = vi.fn();
		const history = makeAssistantFirst();

		const firstPass = applyGoogleTurnOrderingFix({
			messages: history,
			modelApi: "google-generative-ai",
			sessionManager,
			sessionId: "session:1",
			warn,
		});

		// A synthetic user turn is prepended ahead of the assistant turn.
		expect(firstPass.messages[0]?.role).toBe("user");
		expect(firstPass.messages[1]?.role).toBe("assistant");
		expect(warn).toHaveBeenCalledTimes(1);

		// The fix records a custom marker entry in the session.
		const hasBootstrapMarker = sessionManager
			.getEntries()
			.some(
				(entry) =>
					entry.type === "custom" &&
					entry.customType === "google-turn-ordering-bootstrap",
			);
		expect(hasBootstrapMarker).toBe(true);

		// Re-applying for the same session must be a no-op: no second warning.
		applyGoogleTurnOrderingFix({
			messages: history,
			modelApi: "google-generative-ai",
			sessionManager,
			sessionId: "session:1",
			warn,
		});
		expect(warn).toHaveBeenCalledTimes(1);
	});

	it("skips non-Google models", () => {
		const sessionManager = SessionManager.inMemory();
		const warn = vi.fn();
		const history = makeAssistantFirst();

		const outcome = applyGoogleTurnOrderingFix({
			messages: history,
			modelApi: "openai",
			sessionManager,
			sessionId: "session:2",
			warn,
		});

		// Input passes through untouched: same array reference, no warning.
		expect(outcome.messages).toBe(history);
		expect(warn).not.toHaveBeenCalled();
	});
});