import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it, vi } from "vitest";

import { buildStatusMessage } from "./status.js";

afterEach(() => {
  vi.restoreAllMocks();
});

describe("buildStatusMessage", () => {
  it("summarizes agent readiness and context usage", () => {
    const text = buildStatusMessage({
      agent: {
        model: "anthropic/pi:opus",
        contextTokens: 32_000,
      },
      sessionEntry: {
        sessionId: "abc",
        updatedAt: 0,
        totalTokens: 16_000,
        contextTokens: 32_000,
        thinkingLevel: "low",
        verboseLevel: "on",
        compactionCount: 2,
      },
      sessionKey: "agent:main:main",
      sessionScope: "per-sender",
      resolvedThink: "medium",
      resolvedVerbose: "off",
      queue: { mode: "collect", depth: 0 },
      now: 10 * 60_000, // 10 minutes later
    });

    expect(text).toContain("🦞 ClawdBot");
    expect(text).toContain("🧠 Model:");
    expect(text).toContain("Runtime: direct");
    expect(text).toContain("Context: 16k/32k (50%)");
    expect(text).toContain("🧹 Compactions: 2");
    expect(text).toContain("Session: agent:main:main");
    expect(text).toContain("updated 10m ago");
    expect(text).toContain("Think: medium");
    expect(text).toContain("Verbose: off");
    expect(text).toContain("Elevated: on");
    expect(text).toContain("Queue: collect");
  });

  it("prefers model overrides over last-run model", () => {
    const text = buildStatusMessage({
      agent: {
        model: "anthropic/claude-opus-4-5",
        contextTokens: 32_000,
      },
      sessionEntry: {
        sessionId: "override-1",
        updatedAt: 0,
        providerOverride: "openai",
        modelOverride: "gpt-4.1-mini",
        modelProvider: "anthropic",
        model: "claude-haiku-4-5",
        contextTokens: 32_000,
      },
      sessionKey: "agent:main:main",
      sessionScope: "per-sender",
      queue: { mode: "collect", depth: 0 },
    });

    expect(text).toContain("🧠 Model: openai/gpt-4.1-mini");
  });

  it("handles missing agent config gracefully", () => {
    const text = buildStatusMessage({
      agent: {},
      sessionScope: "per-sender",
      webLinked: false,
    });

    expect(text).toContain("🧠 Model:");
    expect(text).toContain("Context:");
    expect(text).toContain("Queue:");
  });

  it("includes group activation for group sessions", () => {
    const text = buildStatusMessage({
      agent: {},
      sessionEntry: {
        sessionId: "g1",
        updatedAt: 0,
        groupActivation: "always",
        chatType: "group",
      },
      sessionKey: "agent:main:whatsapp:group:123@g.us",
      sessionScope: "per-sender",
      queue: { mode: "collect", depth: 0 },
    });

    expect(text).toContain("Activation: always");
  });

  it("shows queue details when overridden", () => {
    const text = buildStatusMessage({
      agent: {},
      sessionEntry: { sessionId: "q1", updatedAt: 0 },
      sessionKey: "agent:main:main",
      sessionScope: "per-sender",
      queue: {
        mode: "collect",
        depth: 3,
        debounceMs: 2000,
        cap: 5,
        dropPolicy: "old",
        showDetails: true,
      },
    });

    expect(text).toContain(
      "Queue: collect (depth 3 · debounce 2s · cap 5 · drop old)",
    );
  });

  it("inserts usage summary beneath context line", () => {
    const text = buildStatusMessage({
      agent: { model: "anthropic/claude-opus-4-5", contextTokens: 32_000 },
      sessionEntry: { sessionId: "u1", updatedAt: 0, totalTokens: 1000 },
      sessionKey: "agent:main:main",
      sessionScope: "per-sender",
      queue: { mode: "collect", depth: 0 },
      usageLine: "📊 Usage: Claude 80% left (5h)",
    });

    const lines = text.split("\n");
    const contextIndex = lines.findIndex((line) => line.startsWith("📚 "));
    expect(contextIndex).toBeGreaterThan(-1);
    expect(lines[contextIndex + 1]).toBe("📊 Usage: Claude 80% left (5h)");
  });

  it("prefers cached prompt tokens from the session log", async () => {
    const dir = fs.mkdtempSync(path.join(os.tmpdir(), "clawdbot-status-"));
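    // Point HOME at the temp dir before re-importing so the freshly loaded
    // module resolves the session log fixture below instead of ~/.clawdbot.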
"clawdbot-status-")); const previousHome = process.env.HOME; process.env.HOME = dir; try { vi.resetModules(); const { buildStatusMessage: buildStatusMessageDynamic } = await import( "./status.js" ); const sessionId = "sess-1"; const logPath = path.join( dir, ".clawdbot", "agents", "main", "sessions", `${sessionId}.jsonl`, ); fs.mkdirSync(path.dirname(logPath), { recursive: true }); fs.writeFileSync( logPath, [ JSON.stringify({ type: "message", message: { role: "assistant", model: "claude-opus-4-5", usage: { input: 1, output: 2, cacheRead: 1000, cacheWrite: 0, totalTokens: 1003, }, }, }), ].join("\n"), "utf-8", ); const text = buildStatusMessageDynamic({ agent: { model: "anthropic/claude-opus-4-5", contextTokens: 32_000, }, sessionEntry: { sessionId, updatedAt: 0, totalTokens: 3, // would be wrong if cached prompt tokens exist contextTokens: 32_000, }, sessionKey: "agent:main:main", sessionScope: "per-sender", queue: { mode: "collect", depth: 0 }, includeTranscriptUsage: true, }); expect(text).toContain("Context: 1.0k/32k"); } finally { process.env.HOME = previousHome; fs.rmSync(dir, { recursive: true, force: true }); } }); });