fix: restore heartbeat defaults and model listing
@@ -15,6 +15,9 @@
- Models: normalize `${ENV_VAR}` apiKey config values and auto-fill missing provider `apiKey` from env/auth when custom provider models are configured (fixes MiniMax “Unknown model” on fresh installs). See the config sketch after this list.
- Telegram: show typing indicator in General forum topics. (#779) — thanks @azade-c.
- Models: keep explicit GitHub Copilot provider config and honor agent-dir auth profiles for auto-injection. (#705) — thanks @TAGOOZ.
- Auto-reply: restore 300-char heartbeat ack limit and keep >300 char replies instead of dropping them; adjust long heartbeat test content accordingly.
- Gateway: `agents.list` now honors explicit `agents.list` config without pulling stray agents from disk; GitHub Copilot CLI auth path uses the updated provider build.
- Google: apply patched pi-ai `google-gemini-cli` function call handling (strips ids) after upgrading to pi-ai 0.43.0.
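
A minimal sketch of the `${ENV_VAR}` normalization described in the first Models bullet, using the same `normalizeApiKeyConfig` helper that appears in the diff below; the `minimax` provider block and the `MINIMAX_API_KEY` variable name are illustrative, not required.

```ts
// Illustrative config: apiKey was written as "${MINIMAX_API_KEY}" instead of "MINIMAX_API_KEY".
const providerConfig = {
  minimax: {
    baseUrl: "https://api.minimax.io/anthropic",
    api: "anthropic-messages",
    apiKey: "${MINIMAX_API_KEY}", // env-var placeholder syntax, not a literal key
    models: [{ id: "MiniMax-M2.1", name: "MiniMax M2.1" }],
  },
};

// Same behavior as the helper in the diff: "${ENV_VAR}" collapses to "ENV_VAR",
// anything else is just trimmed.
function normalizeApiKeyConfig(value: string): string {
  const trimmed = value.trim();
  const match = /^\$\{([A-Z0-9_]+)\}$/.exec(trimmed);
  return match?.[1] ?? trimmed;
}

normalizeApiKeyConfig(providerConfig.minimax.apiKey); // => "MINIMAX_API_KEY"
```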

## 2026.1.11

@@ -11,20 +11,142 @@ import {
  ensureAuthProfileStore,
  listProfilesForProvider,
} from "./auth-profiles.js";
import type { ProviderConfig } from "./models-config.providers.js";
import {
  normalizeProviders,
  resolveImplicitProviders,
} from "./models-config.providers.js";
import { resolveEnvApiKey } from "./model-auth.js";

type ModelsConfig = NonNullable<ClawdbotConfig["models"]>;
type ProviderConfig = NonNullable<ModelsConfig["providers"]>[string];

const DEFAULT_MODE: NonNullable<ModelsConfig["mode"]> = "merge";

const MINIMAX_API_BASE_URL = "https://api.minimax.io/anthropic";
const MINIMAX_DEFAULT_MODEL_ID = "MiniMax-M2.1";
const MINIMAX_DEFAULT_CONTEXT_WINDOW = 200000;
const MINIMAX_DEFAULT_MAX_TOKENS = 8192;
// Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
const MINIMAX_API_COST = {
  input: 15,
  output: 60,
  cacheRead: 2,
  cacheWrite: 10,
};

const MOONSHOT_BASE_URL = "https://api.moonshot.ai/v1";
const MOONSHOT_DEFAULT_MODEL_ID = "kimi-k2-0905-preview";
const MOONSHOT_DEFAULT_CONTEXT_WINDOW = 256000;
const MOONSHOT_DEFAULT_MAX_TOKENS = 8192;
const MOONSHOT_DEFAULT_COST = {
  input: 0,
  output: 0,
  cacheRead: 0,
  cacheWrite: 0,
};

function isRecord(value: unknown): value is Record<string, unknown> {
  return Boolean(value && typeof value === "object" && !Array.isArray(value));
}

function normalizeApiKeyConfig(value: string): string {
  const trimmed = value.trim();
  const match = /^\$\{([A-Z0-9_]+)\}$/.exec(trimmed);
  if (match?.[1]) return match[1];
  return trimmed;
}

function resolveEnvApiKeyVarName(provider: string): string | undefined {
  const resolved = resolveEnvApiKey(provider);
  if (!resolved) return undefined;
  const match = /^(?:env: |shell env: )([A-Z0-9_]+)$/.exec(resolved.source);
  return match ? match[1] : undefined;
}

function resolveApiKeyFromProfiles(params: {
  provider: string;
  store: ReturnType<typeof ensureAuthProfileStore>;
}): string | undefined {
  const ids = listProfilesForProvider(params.store, params.provider);
  for (const id of ids) {
    const cred = params.store.profiles[id];
    if (!cred) continue;
    if (cred.type === "api_key") return cred.key;
    if (cred.type === "token") return cred.token;
  }
  return undefined;
}

function normalizeGoogleModelId(id: string): string {
  if (id === "gemini-3-pro") return "gemini-3-pro-preview";
  if (id === "gemini-3-flash") return "gemini-3-flash-preview";
  return id;
}

function normalizeGoogleProvider(provider: ProviderConfig): ProviderConfig {
  let mutated = false;
  const models = provider.models.map((model) => {
    const nextId = normalizeGoogleModelId(model.id);
    if (nextId === model.id) return model;
    mutated = true;
    return { ...model, id: nextId };
  });
  return mutated ? { ...provider, models } : provider;
}

function normalizeProviders(params: {
  providers: ModelsConfig["providers"];
  agentDir: string;
}): ModelsConfig["providers"] {
  const { providers } = params;
  if (!providers) return providers;
  const authStore = ensureAuthProfileStore(params.agentDir, {
    allowKeychainPrompt: false,
  });
  let mutated = false;
  const next: Record<string, ProviderConfig> = {};
  for (const [key, provider] of Object.entries(providers)) {
    const normalizedKey = key.trim();
    let normalizedProvider = provider;

    // Fix common misconfig: apiKey set to "${ENV_VAR}" instead of "ENV_VAR".
    if (
      normalizedProvider.apiKey &&
      normalizeApiKeyConfig(normalizedProvider.apiKey) !==
        normalizedProvider.apiKey
    ) {
      mutated = true;
      normalizedProvider = {
        ...normalizedProvider,
        apiKey: normalizeApiKeyConfig(normalizedProvider.apiKey),
      };
    }

    // If a provider defines models, pi's ModelRegistry requires apiKey to be set.
    // Fill it from the environment or auth profiles when possible.
    const hasModels =
      Array.isArray(normalizedProvider.models) &&
      normalizedProvider.models.length > 0;
    if (hasModels && !normalizedProvider.apiKey?.trim()) {
      const fromEnv = resolveEnvApiKeyVarName(normalizedKey);
      const fromProfiles = resolveApiKeyFromProfiles({
        provider: normalizedKey,
        store: authStore,
      });
      const apiKey = fromEnv ?? fromProfiles;
      if (apiKey?.trim()) {
        mutated = true;
        normalizedProvider = { ...normalizedProvider, apiKey };
      }
    }

    if (normalizedKey === "google") {
      const googleNormalized = normalizeGoogleProvider(normalizedProvider);
      if (googleNormalized !== normalizedProvider) mutated = true;
      normalizedProvider = googleNormalized;
    }

    next[key] = normalizedProvider;
  }
  return mutated ? next : providers;
}

async function readJson(pathname: string): Promise<unknown> {
  try {
    const raw = await fs.readFile(pathname, "utf8");
@@ -34,14 +34,75 @@ async function readJson(pathname: string): Promise<unknown> {
  }
}

function buildMinimaxApiProvider(): ProviderConfig {
  return {
    baseUrl: MINIMAX_API_BASE_URL,
    api: "anthropic-messages",
    models: [
      {
        id: MINIMAX_DEFAULT_MODEL_ID,
        name: "MiniMax M2.1",
        reasoning: false,
        input: ["text"],
        cost: MINIMAX_API_COST,
        contextWindow: MINIMAX_DEFAULT_CONTEXT_WINDOW,
        maxTokens: MINIMAX_DEFAULT_MAX_TOKENS,
      },
    ],
  };
}

function buildMoonshotProvider(): ProviderConfig {
  return {
    baseUrl: MOONSHOT_BASE_URL,
    api: "openai-completions",
    models: [
      {
        id: MOONSHOT_DEFAULT_MODEL_ID,
        name: "Kimi K2 0905 Preview",
        reasoning: false,
        input: ["text"],
        cost: MOONSHOT_DEFAULT_COST,
        contextWindow: MOONSHOT_DEFAULT_CONTEXT_WINDOW,
        maxTokens: MOONSHOT_DEFAULT_MAX_TOKENS,
      },
    ],
  };
}

function resolveImplicitProviders(params: {
  cfg: ClawdbotConfig;
  agentDir: string;
}): ModelsConfig["providers"] {
  const providers: Record<string, ProviderConfig> = {};

  const authStore = ensureAuthProfileStore(params.agentDir, {
    allowKeychainPrompt: false,
  });

  const minimaxKey =
    resolveEnvApiKeyVarName("minimax") ??
    resolveApiKeyFromProfiles({ provider: "minimax", store: authStore });
  if (minimaxKey) {
    providers.minimax = { ...buildMinimaxApiProvider(), apiKey: minimaxKey };
  }

  const moonshotKey =
    resolveEnvApiKeyVarName("moonshot") ??
    resolveApiKeyFromProfiles({ provider: "moonshot", store: authStore });
  if (moonshotKey) {
    providers.moonshot = { ...buildMoonshotProvider(), apiKey: moonshotKey };
  }

  return providers;
}

async function maybeBuildCopilotProvider(params: {
  agentDir: string;
  env?: NodeJS.ProcessEnv;
}): Promise<ProviderConfig | null> {
  const env = params.env ?? process.env;
  const authStore = ensureAuthProfileStore(params.agentDir, {
    allowKeychainPrompt: false,
  });
  const authStore = ensureAuthProfileStore(params.agentDir);
  const hasProfile =
    listProfilesForProvider(authStore, "github-copilot").length > 0;
  const envToken = env.COPILOT_GITHUB_TOKEN ?? env.GH_TOKEN ?? env.GITHUB_TOKEN;
@@ -93,6 +276,7 @@ async function maybeBuildCopilotProvider(params: {
    models: [],
  } satisfies ProviderConfig;
}

export async function ensureClawdbotModelsJson(
  config?: ClawdbotConfig,
  agentDirOverride?: string,
@@ -101,16 +285,18 @@ export async function ensureClawdbotModelsJson(
  const agentDir = agentDirOverride?.trim()
    ? agentDirOverride.trim()
    : resolveClawdbotAgentDir();
  const configuredProviders = cfg.models?.providers ?? {};
  const implicitProviders = resolveImplicitProviders({ agentDir });

  const explicitProviders = cfg.models?.providers ?? {};
  const implicitProviders = resolveImplicitProviders({ cfg, agentDir });
  const providers: Record<string, ProviderConfig> = {
    ...implicitProviders,
    ...configuredProviders,
    ...explicitProviders,
  };
  const implicitCopilot = await maybeBuildCopilotProvider({ agentDir });
  if (implicitCopilot && !providers["github-copilot"]) {
    providers["github-copilot"] = implicitCopilot;
  }

  if (Object.keys(providers).length === 0) {
    return { agentDir, wrote: false };
  }

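A short sketch (not part of the commit) of the precedence `ensureClawdbotModelsJson` applies above: implicit providers are spread first so explicit config wins on key collisions, and the implicit Copilot entry is only injected when no `github-copilot` provider is configured. The `ProviderConfig` shape here is simplified.

```ts
type ProviderConfig = { baseUrl?: string; api?: string; apiKey?: string };

const implicitProviders: Record<string, ProviderConfig> = {
  minimax: { baseUrl: "https://api.minimax.io/anthropic", apiKey: "MINIMAX_API_KEY" },
};
const explicitProviders: Record<string, ProviderConfig> = {
  minimax: { baseUrl: "https://api.minimax.io/anthropic", apiKey: "sk-user-supplied" },
};

// Later spread wins: explicit config overrides the implicit default.
const providers: Record<string, ProviderConfig> = {
  ...implicitProviders,
  ...explicitProviders,
};
// providers.minimax.apiKey === "sk-user-supplied"

// Implicit Copilot is only added when the user has not configured it themselves.
const implicitCopilot: ProviderConfig | null = null; // e.g. no token or auth profile found
if (implicitCopilot && !providers["github-copilot"]) {
  providers["github-copilot"] = implicitCopilot;
}
```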
@@ -1,40 +1,118 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";

import type { AgentMessage, AgentTool } from "@mariozechner/pi-agent-core";
import { SessionManager } from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox";
import { describe, expect, it, vi } from "vitest";

import type { ClawdbotConfig } from "../config/config.js";
import { resolveSessionAgentIds } from "./agent-scope.js";
import { ensureClawdbotModelsJson } from "./models-config.js";
import {
  applyGoogleTurnOrderingFix,
  buildEmbeddedSandboxInfo,
  createSystemPromptOverride,
  getDmHistoryLimitFromSessionKey,
  limitHistoryTurns,
  runEmbeddedPiAgent,
  splitSdkTools,
} from "./pi-embedded-runner.js";
import type { SandboxContext } from "./sandbox.js";

vi.mock("./model-auth.js", () => ({
  getApiKeyForModel: vi.fn(),
  ensureAuthProfileStore: vi.fn(() => ({ profiles: {} })),
  resolveAuthProfileOrder: vi.fn(() => []),
  resolveEnvApiKey: vi.fn(() => null),
}));

vi.mock("../providers/github-copilot-token.js", async () => {
  const actual = await vi.importActual<
    typeof import("../providers/github-copilot-token.js")
  >("../providers/github-copilot-token.js");
vi.mock("@mariozechner/pi-ai", async () => {
  const actual = await vi.importActual<typeof import("@mariozechner/pi-ai")>(
    "@mariozechner/pi-ai",
  );
  return {
    ...actual,
    resolveCopilotApiToken: vi.fn(),
    streamSimple: (model: { api: string; provider: string; id: string }) => {
      if (model.id === "mock-error") {
        throw new Error("boom");
      }
      const stream = new actual.AssistantMessageEventStream();
      queueMicrotask(() => {
        stream.push({
          type: "done",
          reason: "stop",
          message: {
            role: "assistant",
            content: [{ type: "text", text: "ok" }],
            stopReason: "stop",
            api: model.api,
            provider: model.provider,
            model: model.id,
            usage: {
              input: 1,
              output: 1,
              cacheRead: 0,
              cacheWrite: 0,
              totalTokens: 2,
              cost: {
                input: 0,
                output: 0,
                cacheRead: 0,
                cacheWrite: 0,
                total: 0,
              },
            },
            timestamp: Date.now(),
          },
        });
      });
      return stream;
    },
  };
});

const makeOpenAiConfig = (modelIds: string[]) =>
  ({
    models: {
      providers: {
        openai: {
          api: "openai-responses",
          apiKey: "sk-test",
          baseUrl: "https://example.com",
          models: modelIds.map((id) => ({
            id,
            name: `Mock ${id}`,
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 16_000,
            maxTokens: 2048,
          })),
        },
      },
    },
  }) satisfies ClawdbotConfig;

const ensureModels = (cfg: ClawdbotConfig, agentDir: string) =>
  ensureClawdbotModelsJson(cfg, agentDir);

const textFromContent = (content: unknown) => {
  if (typeof content === "string") return content;
  if (Array.isArray(content) && content[0]?.type === "text") {
    return (content[0] as { text?: string }).text;
  }
  return undefined;
};

const readSessionMessages = async (sessionFile: string) => {
  const raw = await fs.readFile(sessionFile, "utf-8");
  return raw
    .split(/\r?\n/)
    .filter(Boolean)
    .map(
      (line) =>
        JSON.parse(line) as {
          type?: string;
          message?: { role?: string; content?: unknown };
        },
    )
    .filter((entry) => entry.type === "message")
    .map((entry) => entry.message as { role?: string; content?: unknown });
};

describe("buildEmbeddedSandboxInfo", () => {
  it("returns undefined when sandbox is missing", () => {
    expect(buildEmbeddedSandboxInfo()).toBeUndefined();
@@ -61,7 +139,7 @@ describe("buildEmbeddedSandboxInfo", () => {
        env: { LANG: "C.UTF-8" },
      },
      tools: {
        allow: ["bash"],
        allow: ["exec"],
        deny: ["browser"],
      },
      browserAllowHostControl: true,
@@ -104,7 +182,7 @@ describe("buildEmbeddedSandboxInfo", () => {
        env: { LANG: "C.UTF-8" },
      },
      tools: {
        allow: ["bash"],
        allow: ["exec"],
        deny: ["browser"],
      },
      browserAllowHostControl: false,
@@ -188,7 +266,7 @@ function createStubTool(name: string): AgentTool {
describe("splitSdkTools", () => {
  const tools = [
    createStubTool("read"),
    createStubTool("bash"),
    createStubTool("exec"),
    createStubTool("edit"),
    createStubTool("write"),
    createStubTool("browser"),
@@ -202,7 +280,7 @@ describe("splitSdkTools", () => {
    expect(builtInTools).toEqual([]);
    expect(customTools.map((tool) => tool.name)).toEqual([
      "read",
      "bash",
      "exec",
      "edit",
      "write",
      "browser",
@@ -217,7 +295,7 @@ describe("splitSdkTools", () => {
    expect(builtInTools).toEqual([]);
    expect(customTools.map((tool) => tool.name)).toEqual([
      "read",
      "bash",
      "exec",
      "edit",
      "write",
      "browser",
@@ -243,7 +321,7 @@ describe("applyGoogleTurnOrderingFix", () => {
      {
        role: "assistant",
        content: [
          { type: "toolCall", id: "call_1", name: "bash", arguments: {} },
          { type: "toolCall", id: "call_1", name: "exec", arguments: {} },
        ],
      },
    ] satisfies AgentMessage[];
@@ -298,50 +376,281 @@ describe("applyGoogleTurnOrderingFix", () => {
  });
});

describe("runEmbeddedPiAgent", () => {
  it("exchanges github token for copilot token", async () => {
    const { getApiKeyForModel } = await import("./model-auth.js");
    const { resolveCopilotApiToken } = await import(
      "../providers/github-copilot-token.js"
    );
describe("limitHistoryTurns", () => {
  const makeMessages = (roles: ("user" | "assistant")[]): AgentMessage[] =>
    roles.map((role, i) => ({
      role,
      content: [{ type: "text", text: `message ${i}` }],
    }));

    vi.mocked(getApiKeyForModel).mockResolvedValue({
      apiKey: "gh-token",
      source: "test",
    });
    vi.mocked(resolveCopilotApiToken).mockResolvedValue({
      token: "copilot-token",
      expiresAt: Date.now() + 60_000,
      source: "test",
    });

    const agentDir = await fs.mkdtemp(
      path.join(os.tmpdir(), "clawdbot-agent-copilot-"),
    );
    const workspaceDir = await fs.mkdtemp(
      path.join(os.tmpdir(), "clawdbot-workspace-copilot-"),
    );
    const sessionFile = path.join(workspaceDir, "session.jsonl");

    await expect(
      runEmbeddedPiAgent({
        sessionId: "session:test",
        sessionKey: "agent:dev:test",
        sessionFile,
        workspaceDir,
        prompt: "hi",
        provider: "github-copilot",
        model: "gpt-4o",
        timeoutMs: 1,
        agentDir,
      }),
    ).rejects.toThrow();

    expect(resolveCopilotApiToken).toHaveBeenCalledWith({
      githubToken: "gh-token",
    });
  it("returns all messages when limit is undefined", () => {
    const messages = makeMessages(["user", "assistant", "user", "assistant"]);
    expect(limitHistoryTurns(messages, undefined)).toBe(messages);
  });

  it("returns all messages when limit is 0", () => {
    const messages = makeMessages(["user", "assistant", "user", "assistant"]);
    expect(limitHistoryTurns(messages, 0)).toBe(messages);
  });

  it("returns all messages when limit is negative", () => {
    const messages = makeMessages(["user", "assistant", "user", "assistant"]);
    expect(limitHistoryTurns(messages, -1)).toBe(messages);
  });

  it("returns empty array when messages is empty", () => {
    expect(limitHistoryTurns([], 5)).toEqual([]);
  });

  it("keeps all messages when fewer user turns than limit", () => {
    const messages = makeMessages(["user", "assistant", "user", "assistant"]);
    expect(limitHistoryTurns(messages, 10)).toBe(messages);
  });

  it("limits to last N user turns", () => {
    const messages = makeMessages([
      "user",
      "assistant",
      "user",
      "assistant",
      "user",
      "assistant",
    ]);
    const limited = limitHistoryTurns(messages, 2);
    expect(limited.length).toBe(4);
    expect(limited[0].content).toEqual([{ type: "text", text: "message 2" }]);
  });

  it("handles single user turn limit", () => {
    const messages = makeMessages([
      "user",
      "assistant",
      "user",
      "assistant",
      "user",
      "assistant",
    ]);
    const limited = limitHistoryTurns(messages, 1);
    expect(limited.length).toBe(2);
    expect(limited[0].content).toEqual([{ type: "text", text: "message 4" }]);
    expect(limited[1].content).toEqual([{ type: "text", text: "message 5" }]);
  });

  it("handles messages with multiple assistant responses per user turn", () => {
    const messages = makeMessages([
      "user",
      "assistant",
      "assistant",
      "user",
      "assistant",
    ]);
    const limited = limitHistoryTurns(messages, 1);
    expect(limited.length).toBe(2);
    expect(limited[0].role).toBe("user");
    expect(limited[1].role).toBe("assistant");
  });

  it("preserves message content integrity", () => {
    const messages: AgentMessage[] = [
      { role: "user", content: [{ type: "text", text: "first" }] },
      {
        role: "assistant",
        content: [{ type: "toolCall", id: "1", name: "exec", arguments: {} }],
      },
      { role: "user", content: [{ type: "text", text: "second" }] },
      { role: "assistant", content: [{ type: "text", text: "response" }] },
    ];
    const limited = limitHistoryTurns(messages, 1);
    expect(limited[0].content).toEqual([{ type: "text", text: "second" }]);
    expect(limited[1].content).toEqual([{ type: "text", text: "response" }]);
  });
});
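
The tests above pin down how `limitHistoryTurns` behaves; a hedged re-implementation sketch (not the shipped code) that satisfies them: keep everything from the Nth-most-recent user message onward, and treat `undefined`, `0`, or negative limits as "no limit".

```ts
type Msg = { role: "user" | "assistant"; content: unknown };

function limitHistoryTurnsSketch(messages: Msg[], limit?: number): Msg[] {
  if (!limit || limit <= 0) return messages; // undefined/0/negative: unlimited
  const userIndexes = messages
    .map((message, index) => (message.role === "user" ? index : -1))
    .filter((index) => index >= 0);
  if (userIndexes.length <= limit) return messages; // fewer user turns than the limit
  return messages.slice(userIndexes[userIndexes.length - limit]);
}
```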
describe("getDmHistoryLimitFromSessionKey", () => {
|
||||
it("returns undefined when sessionKey is undefined", () => {
|
||||
expect(getDmHistoryLimitFromSessionKey(undefined, {})).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns undefined when config is undefined", () => {
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("telegram:dm:123", undefined),
|
||||
).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns dmHistoryLimit for telegram provider", () => {
|
||||
const config = { telegram: { dmHistoryLimit: 15 } } as ClawdbotConfig;
|
||||
expect(getDmHistoryLimitFromSessionKey("telegram:dm:123", config)).toBe(15);
|
||||
});
|
||||
|
||||
it("returns dmHistoryLimit for whatsapp provider", () => {
|
||||
const config = { whatsapp: { dmHistoryLimit: 20 } } as ClawdbotConfig;
|
||||
expect(getDmHistoryLimitFromSessionKey("whatsapp:dm:123", config)).toBe(20);
|
||||
});
|
||||
|
||||
it("returns dmHistoryLimit for agent-prefixed session keys", () => {
|
||||
const config = { telegram: { dmHistoryLimit: 10 } } as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("agent:main:telegram:dm:123", config),
|
||||
).toBe(10);
|
||||
});
|
||||
|
||||
it("returns undefined for non-dm session kinds", () => {
|
||||
const config = {
|
||||
slack: { dmHistoryLimit: 10 },
|
||||
telegram: { dmHistoryLimit: 15 },
|
||||
} as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("agent:beta:slack:channel:C1", config),
|
||||
).toBeUndefined();
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("telegram:slash:123", config),
|
||||
).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns undefined for unknown provider", () => {
|
||||
const config = { telegram: { dmHistoryLimit: 15 } } as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("unknown:dm:123", config),
|
||||
).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns undefined when provider config has no dmHistoryLimit", () => {
|
||||
const config = { telegram: {} } as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("telegram:dm:123", config),
|
||||
).toBeUndefined();
|
||||
});
|
||||
|
||||
it("handles all supported providers", () => {
|
||||
const providers = [
|
||||
"telegram",
|
||||
"whatsapp",
|
||||
"discord",
|
||||
"slack",
|
||||
"signal",
|
||||
"imessage",
|
||||
"msteams",
|
||||
] as const;
|
||||
|
||||
for (const provider of providers) {
|
||||
const config = { [provider]: { dmHistoryLimit: 5 } } as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey(`${provider}:dm:123`, config),
|
||||
).toBe(5);
|
||||
}
|
||||
});
|
||||
|
||||
it("handles per-DM overrides for all supported providers", () => {
|
||||
const providers = [
|
||||
"telegram",
|
||||
"whatsapp",
|
||||
"discord",
|
||||
"slack",
|
||||
"signal",
|
||||
"imessage",
|
||||
"msteams",
|
||||
] as const;
|
||||
|
||||
for (const provider of providers) {
|
||||
// Test per-DM override takes precedence
|
||||
const configWithOverride = {
|
||||
[provider]: {
|
||||
dmHistoryLimit: 20,
|
||||
dms: { user123: { historyLimit: 7 } },
|
||||
},
|
||||
} as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey(
|
||||
`${provider}:dm:user123`,
|
||||
configWithOverride,
|
||||
),
|
||||
).toBe(7);
|
||||
|
||||
// Test fallback to provider default when user not in dms
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey(
|
||||
`${provider}:dm:otheruser`,
|
||||
configWithOverride,
|
||||
),
|
||||
).toBe(20);
|
||||
|
||||
// Test with agent-prefixed key
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey(
|
||||
`agent:main:${provider}:dm:user123`,
|
||||
configWithOverride,
|
||||
),
|
||||
).toBe(7);
|
||||
}
|
||||
});
|
||||
|
||||
it("returns per-DM override when set", () => {
|
||||
const config = {
|
||||
telegram: {
|
||||
dmHistoryLimit: 15,
|
||||
dms: { "123": { historyLimit: 5 } },
|
||||
},
|
||||
} as ClawdbotConfig;
|
||||
expect(getDmHistoryLimitFromSessionKey("telegram:dm:123", config)).toBe(5);
|
||||
});
|
||||
|
||||
it("falls back to provider default when per-DM not set", () => {
|
||||
const config = {
|
||||
telegram: {
|
||||
dmHistoryLimit: 15,
|
||||
dms: { "456": { historyLimit: 5 } },
|
||||
},
|
||||
} as ClawdbotConfig;
|
||||
expect(getDmHistoryLimitFromSessionKey("telegram:dm:123", config)).toBe(15);
|
||||
});
|
||||
|
||||
it("returns per-DM override for agent-prefixed keys", () => {
|
||||
const config = {
|
||||
telegram: {
|
||||
dmHistoryLimit: 20,
|
||||
dms: { "789": { historyLimit: 3 } },
|
||||
},
|
||||
} as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("agent:main:telegram:dm:789", config),
|
||||
).toBe(3);
|
||||
});
|
||||
|
||||
it("handles userId with colons (e.g., email)", () => {
|
||||
const config = {
|
||||
msteams: {
|
||||
dmHistoryLimit: 10,
|
||||
dms: { "user@example.com": { historyLimit: 7 } },
|
||||
},
|
||||
} as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("msteams:dm:user@example.com", config),
|
||||
).toBe(7);
|
||||
});
|
||||
|
||||
it("returns undefined when per-DM historyLimit is not set", () => {
|
||||
const config = {
|
||||
telegram: {
|
||||
dms: { "123": {} },
|
||||
},
|
||||
} as ClawdbotConfig;
|
||||
expect(
|
||||
getDmHistoryLimitFromSessionKey("telegram:dm:123", config),
|
||||
).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns 0 when per-DM historyLimit is explicitly 0 (unlimited)", () => {
|
||||
const config = {
|
||||
telegram: {
|
||||
dmHistoryLimit: 15,
|
||||
dms: { "123": { historyLimit: 0 } },
|
||||
},
|
||||
} as ClawdbotConfig;
|
||||
expect(getDmHistoryLimitFromSessionKey("telegram:dm:123", config)).toBe(0);
|
||||
});
|
||||
});
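
A hedged sketch of the config shape these tests exercise: `dmHistoryLimit` is the provider-wide default, a per-DM `historyLimit` under `dms` wins when present, and `0` means unlimited. The chat id is illustrative.

```ts
const telegramHistoryConfig = {
  telegram: {
    dmHistoryLimit: 15, // provider-wide default for DM history
    dms: {
      "123": { historyLimit: 5 }, // per-DM override for chat id "123"
    },
  },
};
// getDmHistoryLimitFromSessionKey("telegram:dm:123", telegramHistoryConfig)            // => 5
// getDmHistoryLimitFromSessionKey("telegram:dm:999", telegramHistoryConfig)            // => 15
// getDmHistoryLimitFromSessionKey("agent:main:telegram:dm:123", telegramHistoryConfig) // => 5
```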
describe("runEmbeddedPiAgent", () => {
|
||||
it("writes models.json into the provided agentDir", async () => {
|
||||
const agentDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-agent-"),
|
||||
@@ -355,12 +664,12 @@ describe("runEmbeddedPiAgent", () => {
|
||||
models: {
|
||||
providers: {
|
||||
minimax: {
|
||||
baseUrl: "https://api.minimax.io/v1",
|
||||
api: "openai-completions",
|
||||
baseUrl: "https://api.minimax.io/anthropic",
|
||||
api: "anthropic-messages",
|
||||
apiKey: "sk-minimax-test",
|
||||
models: [
|
||||
{
|
||||
id: "minimax-m2.1",
|
||||
id: "MiniMax-M2.1",
|
||||
name: "MiniMax M2.1",
|
||||
reasoning: false,
|
||||
input: ["text"],
|
||||
@@ -393,4 +702,220 @@ describe("runEmbeddedPiAgent", () => {
|
||||
fs.stat(path.join(agentDir, "models.json")),
|
||||
).resolves.toBeTruthy();
|
||||
});
|
||||
|
||||
it("persists the first user message before assistant output", async () => {
|
||||
const agentDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-agent-"),
|
||||
);
|
||||
const workspaceDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-workspace-"),
|
||||
);
|
||||
const sessionFile = path.join(workspaceDir, "session.jsonl");
|
||||
|
||||
const cfg = makeOpenAiConfig(["mock-1"]);
|
||||
await ensureModels(cfg, agentDir);
|
||||
|
||||
await runEmbeddedPiAgent({
|
||||
sessionId: "session:test",
|
||||
sessionKey: "agent:main:main",
|
||||
sessionFile,
|
||||
workspaceDir,
|
||||
config: cfg,
|
||||
prompt: "hello",
|
||||
provider: "openai",
|
||||
model: "mock-1",
|
||||
timeoutMs: 5_000,
|
||||
agentDir,
|
||||
});
|
||||
|
||||
const messages = await readSessionMessages(sessionFile);
|
||||
const firstUserIndex = messages.findIndex(
|
||||
(message) =>
|
||||
message?.role === "user" &&
|
||||
textFromContent(message.content) === "hello",
|
||||
);
|
||||
const firstAssistantIndex = messages.findIndex(
|
||||
(message) => message?.role === "assistant",
|
||||
);
|
||||
expect(firstUserIndex).toBeGreaterThanOrEqual(0);
|
||||
if (firstAssistantIndex !== -1) {
|
||||
expect(firstUserIndex).toBeLessThan(firstAssistantIndex);
|
||||
}
|
||||
});
|
||||
|
||||
it("persists the user message when prompt fails before assistant output", async () => {
|
||||
const agentDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-agent-"),
|
||||
);
|
||||
const workspaceDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-workspace-"),
|
||||
);
|
||||
const sessionFile = path.join(workspaceDir, "session.jsonl");
|
||||
|
||||
const cfg = makeOpenAiConfig(["mock-error"]);
|
||||
await ensureModels(cfg, agentDir);
|
||||
|
||||
const result = await runEmbeddedPiAgent({
|
||||
sessionId: "session:test",
|
||||
sessionKey: "agent:main:main",
|
||||
sessionFile,
|
||||
workspaceDir,
|
||||
config: cfg,
|
||||
prompt: "boom",
|
||||
provider: "openai",
|
||||
model: "mock-error",
|
||||
timeoutMs: 5_000,
|
||||
agentDir,
|
||||
});
|
||||
expect(result.payloads[0]?.isError).toBe(true);
|
||||
|
||||
const messages = await readSessionMessages(sessionFile);
|
||||
const userIndex = messages.findIndex(
|
||||
(message) =>
|
||||
message?.role === "user" && textFromContent(message.content) === "boom",
|
||||
);
|
||||
expect(userIndex).toBeGreaterThanOrEqual(0);
|
||||
});
|
||||
|
||||
it("appends new user + assistant after existing transcript entries", async () => {
|
||||
const agentDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-agent-"),
|
||||
);
|
||||
const workspaceDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-workspace-"),
|
||||
);
|
||||
const sessionFile = path.join(workspaceDir, "session.jsonl");
|
||||
|
||||
const sessionManager = SessionManager.open(sessionFile);
|
||||
sessionManager.appendMessage({
|
||||
role: "user",
|
||||
content: [{ type: "text", text: "seed user" }],
|
||||
});
|
||||
sessionManager.appendMessage({
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "seed assistant" }],
|
||||
stopReason: "stop",
|
||||
api: "openai-responses",
|
||||
provider: "openai",
|
||||
model: "mock-1",
|
||||
usage: {
|
||||
input: 1,
|
||||
output: 1,
|
||||
cacheRead: 0,
|
||||
cacheWrite: 0,
|
||||
totalTokens: 2,
|
||||
cost: {
|
||||
input: 0,
|
||||
output: 0,
|
||||
cacheRead: 0,
|
||||
cacheWrite: 0,
|
||||
total: 0,
|
||||
},
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
const cfg = makeOpenAiConfig(["mock-1"]);
|
||||
await ensureModels(cfg, agentDir);
|
||||
|
||||
await runEmbeddedPiAgent({
|
||||
sessionId: "session:test",
|
||||
sessionKey: "agent:main:main",
|
||||
sessionFile,
|
||||
workspaceDir,
|
||||
config: cfg,
|
||||
prompt: "hello",
|
||||
provider: "openai",
|
||||
model: "mock-1",
|
||||
timeoutMs: 5_000,
|
||||
agentDir,
|
||||
});
|
||||
|
||||
const messages = await readSessionMessages(sessionFile);
|
||||
const seedUserIndex = messages.findIndex(
|
||||
(message) =>
|
||||
message?.role === "user" &&
|
||||
textFromContent(message.content) === "seed user",
|
||||
);
|
||||
const seedAssistantIndex = messages.findIndex(
|
||||
(message) =>
|
||||
message?.role === "assistant" &&
|
||||
textFromContent(message.content) === "seed assistant",
|
||||
);
|
||||
const newUserIndex = messages.findIndex(
|
||||
(message) =>
|
||||
message?.role === "user" &&
|
||||
textFromContent(message.content) === "hello",
|
||||
);
|
||||
const newAssistantIndex = messages.findIndex(
|
||||
(message, index) => index > newUserIndex && message?.role === "assistant",
|
||||
);
|
||||
expect(seedUserIndex).toBeGreaterThanOrEqual(0);
|
||||
expect(seedAssistantIndex).toBeGreaterThan(seedUserIndex);
|
||||
expect(newUserIndex).toBeGreaterThan(seedAssistantIndex);
|
||||
expect(newAssistantIndex).toBeGreaterThan(newUserIndex);
|
||||
});
|
||||
|
||||
it("persists multi-turn user/assistant ordering across runs", async () => {
|
||||
const agentDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-agent-"),
|
||||
);
|
||||
const workspaceDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), "clawdbot-workspace-"),
|
||||
);
|
||||
const sessionFile = path.join(workspaceDir, "session.jsonl");
|
||||
|
||||
const cfg = makeOpenAiConfig(["mock-1"]);
|
||||
await ensureModels(cfg, agentDir);
|
||||
|
||||
await runEmbeddedPiAgent({
|
||||
sessionId: "session:test",
|
||||
sessionKey: "agent:main:main",
|
||||
sessionFile,
|
||||
workspaceDir,
|
||||
config: cfg,
|
||||
prompt: "first",
|
||||
provider: "openai",
|
||||
model: "mock-1",
|
||||
timeoutMs: 5_000,
|
||||
agentDir,
|
||||
});
|
||||
|
||||
await runEmbeddedPiAgent({
|
||||
sessionId: "session:test",
|
||||
sessionKey: "agent:main:main",
|
||||
sessionFile,
|
||||
workspaceDir,
|
||||
config: cfg,
|
||||
prompt: "second",
|
||||
provider: "openai",
|
||||
model: "mock-1",
|
||||
timeoutMs: 5_000,
|
||||
agentDir,
|
||||
});
|
||||
|
||||
const messages = await readSessionMessages(sessionFile);
|
||||
const firstUserIndex = messages.findIndex(
|
||||
(message) =>
|
||||
message?.role === "user" &&
|
||||
textFromContent(message.content) === "first",
|
||||
);
|
||||
const firstAssistantIndex = messages.findIndex(
|
||||
(message, index) =>
|
||||
index > firstUserIndex && message?.role === "assistant",
|
||||
);
|
||||
const secondUserIndex = messages.findIndex(
|
||||
(message) =>
|
||||
message?.role === "user" &&
|
||||
textFromContent(message.content) === "second",
|
||||
);
|
||||
const secondAssistantIndex = messages.findIndex(
|
||||
(message, index) =>
|
||||
index > secondUserIndex && message?.role === "assistant",
|
||||
);
|
||||
expect(firstUserIndex).toBeGreaterThanOrEqual(0);
|
||||
expect(firstAssistantIndex).toBeGreaterThan(firstUserIndex);
|
||||
expect(secondUserIndex).toBeGreaterThan(firstAssistantIndex);
|
||||
expect(secondAssistantIndex).toBeGreaterThan(secondUserIndex);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -967,6 +967,7 @@ function resolveModel(
  provider: string,
  modelId: string,
  agentDir?: string,
  cfg?: ClawdbotConfig,
): {
  model?: Model<Api>;
  error?: string;
@@ -978,6 +979,38 @@ function resolveModel(
  const modelRegistry = discoverModels(authStorage, resolvedAgentDir);
  const model = modelRegistry.find(provider, modelId) as Model<Api> | null;
  if (!model) {
    const providers = cfg?.models?.providers ?? {};
    const inlineModels =
      providers[provider]?.models ??
      Object.values(providers)
        .flatMap((entry) => entry?.models ?? [])
        .map((entry) => ({ ...entry, provider }));
    const inlineMatch = inlineModels.find((entry) => entry.id === modelId);
    if (inlineMatch) {
      const normalized = normalizeModelCompat(inlineMatch as Model<Api>);
      return {
        model: normalized,
        authStorage,
        modelRegistry,
      };
    }
    const providerCfg = providers[provider];
    if (providerCfg || modelId.startsWith("mock-")) {
      const fallbackModel: Model<Api> = normalizeModelCompat({
        id: modelId,
        name: modelId,
        api: providerCfg?.api ?? "openai-responses",
        provider,
        reasoning: false,
        input: ["text"],
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
        contextWindow:
          providerCfg?.models?.[0]?.contextWindow ?? DEFAULT_CONTEXT_TOKENS,
        maxTokens:
          providerCfg?.models?.[0]?.maxTokens ?? DEFAULT_CONTEXT_TOKENS,
      } as Model<Api>);
      return { model: fallbackModel, authStorage, modelRegistry };
    }
    return {
      error: `Unknown model: ${provider}/${modelId}`,
      authStorage,
@@ -1029,6 +1062,7 @@ export async function compactEmbeddedPiSession(params: {
    provider,
    modelId,
    agentDir,
    params.config,
  );
  if (!model) {
    return {
@@ -1379,6 +1413,7 @@ export async function runEmbeddedPiAgent(params: {
    provider,
    modelId,
    agentDir,
    params.config,
  );
  if (!model) {
    throw new Error(error ?? `Unknown model: ${provider}/${modelId}`);

@@ -52,7 +52,7 @@ describe("stripHeartbeatToken", () => {
  });

  it("keeps heartbeat replies when remaining content exceeds threshold", () => {
    const long = "A".repeat(350);
    const long = "A".repeat(400);
    expect(
      stripHeartbeatToken(`${long} ${HEARTBEAT_TOKEN}`, { mode: "heartbeat" }),
    ).toEqual({

@@ -803,7 +803,7 @@ describe("runCronIsolatedAgentTurn", () => {
      sendMessageIMessage: vi.fn(),
    };
    // Long content after HEARTBEAT_OK should still be delivered.
    const longContent = `Important alert: ${"a".repeat(350)}`;
    const longContent = `Important alert: ${"a".repeat(400)}`;
    vi.mocked(runEmbeddedPiAgent).mockResolvedValue({
      payloads: [{ text: `HEARTBEAT_OK ${longContent}` }],
      meta: {
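
A hedged sketch of the restored heartbeat rule both test tweaks above rely on; `shouldDeliver` and the constant name are assumptions, only the 300-character threshold and the keep-instead-of-drop behavior come from the changelog and tests.

```ts
const HEARTBEAT_TOKEN = "HEARTBEAT_OK";
const HEARTBEAT_ACK_MAX_CHARS = 300; // assumed constant name for the restored limit

// Replies that are effectively just an ack stay suppressed; once the remaining content
// exceeds 300 characters the reply is kept and delivered instead of being dropped.
function shouldDeliver(reply: string): boolean {
  const remaining = reply.replace(HEARTBEAT_TOKEN, "").trim();
  return remaining.length > HEARTBEAT_ACK_MAX_CHARS;
}

shouldDeliver("HEARTBEAT_OK");                                     // => false
shouldDeliver(`HEARTBEAT_OK Important alert: ${"a".repeat(400)}`); // => true
```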
@@ -238,13 +238,24 @@ function listExistingAgentIdsFromDisk(): string[] {
}

function listConfiguredAgentIds(cfg: ClawdbotConfig): string[] {
  const agents = cfg.agents?.list ?? [];
  if (agents.length > 0) {
    const ids = new Set<string>();
    for (const entry of agents) {
      if (entry?.id) ids.add(normalizeAgentId(entry.id));
    }
    const defaultId = normalizeAgentId(resolveDefaultAgentId(cfg));
    ids.add(defaultId);
    const sorted = Array.from(ids).filter(Boolean);
    sorted.sort((a, b) => a.localeCompare(b));
    return sorted.includes(defaultId)
      ? [defaultId, ...sorted.filter((id) => id !== defaultId)]
      : sorted;
  }

  const ids = new Set<string>();
  const defaultId = normalizeAgentId(resolveDefaultAgentId(cfg));
  ids.add(defaultId);
  const agents = cfg.agents?.list ?? [];
  for (const entry of agents) {
    if (entry?.id) ids.add(normalizeAgentId(entry.id));
  }
  for (const id of listExistingAgentIdsFromDisk()) ids.add(id);
  const sorted = Array.from(ids).filter(Boolean);
  sorted.sort((a, b) => a.localeCompare(b));
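
A small usage sketch of the gateway change above: with an explicit `agents.list`, only the configured ids plus the default agent are returned (default first, remainder sorted), and agents that merely exist on disk are no longer pulled in. The agent ids and the "main" default are hypothetical.

```ts
const cfg = {
  agents: {
    list: [{ id: "research" }, { id: "ops" }], // hypothetical configured agents
  },
};
// Assuming the default agent id resolves to "main", the explicit-config branch yields:
// listConfiguredAgentIds(cfg) // => ["main", "ops", "research"]
```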