Files
clawdbot/src/agents/pi-embedded-runner.resolvesessionagentids.test.ts
Abhay 51e3d16be9 feat: Add Ollama provider with automatic model discovery (#1606)
* feat: Add Ollama provider with automatic model discovery

- Add Ollama provider builder with automatic model detection
- Discover available models from the local Ollama instance via the /api/tags API (sketched below)
- Make resolveImplicitProviders async to support dynamic model discovery
- Add comprehensive Ollama documentation with setup and usage guide
- Add tests for Ollama provider integration
- Update provider index and model providers documentation
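
For reference, the discovery call is essentially a single GET against a local
Ollama instance's /api/tags endpoint. A minimal sketch, assuming the default
localhost port; the function name and response typing are illustrative, not
necessarily the merged implementation:

    interface OllamaTagsResponse {
      models?: Array<{ name: string }>;
    }

    async function discoverOllamaModels(baseUrl = "http://localhost:11434"): Promise<string[]> {
      try {
        const res = await fetch(`${baseUrl}/api/tags`);
        if (!res.ok) return [];
        const data = (await res.json()) as OllamaTagsResponse;
        // Each entry's `name` (e.g. "llama3:latest") doubles as the model id.
        return (data.models ?? []).map((m) => m.name);
      } catch {
        // No local Ollama instance running is an expected case: report no models.
        return [];
      }
    }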

Closes #1531

* fix: Correct Ollama provider type definitions and error handling

- Fix input property type to match ModelDefinitionConfig
- Import ModelDefinitionConfig type properly
- Fix error template literal to use String() for type safety
- Simplify return type signature of discoverOllamaModels
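
The String() change refers to the usual pattern for interpolating a caught
`unknown` value under strict TypeScript, roughly (message text illustrative):

    try {
      // ... call the discovery endpoint ...
    } catch (error) {
      // `error` is typed as unknown, so coerce it before interpolating.
      throw new Error(`Ollama model discovery failed: ${String(error)}`);
    }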

* fix: Suppress unhandled promise warnings from ensureClawdbotModelsJson in tests

- Cast unused promise returns to 'unknown' to suppress TypeScript warnings
- The tests that skip awaiting the promise do so deliberately
- This fixes the failing test suite caused by unawaited async calls

* fix: Skip Ollama model discovery during tests

- Check for VITEST or NODE_ENV=test before making HTTP requests
- Prevents test timeouts and hangs from network calls
- Ollama discovery will still work in production/normal usage
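
A hypothetical shape of that guard (the exact environment checks in the merged code may differ):

    function shouldSkipOllamaDiscovery(): boolean {
      // Never make network calls from inside a test runner.
      return Boolean(process.env.VITEST) || process.env.NODE_ENV === "test";
    }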

* fix: Set VITEST environment variable in test setup

- Ensures Ollama discovery is skipped in all test runs
- Prevents network calls during tests that could cause timeouts
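
In a shared vitest setup file this can be as small as (illustrative):

    // Ensure the discovery guard always sees a test environment.
    process.env.VITEST = process.env.VITEST ?? "true";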

* test: Temporarily skip Ollama provider tests to diagnose CI failures

* fix: Make Ollama provider opt-in to avoid breaking existing tests

**Root Cause:**
The Ollama provider was being added to ALL configurations by default
(with a fallback API key of 'ollama-local'), which broke tests that
expected NO providers when no API keys were configured.

**Solution:**
- Removed the default fallback API key for Ollama
- Ollama provider now requires explicit configuration via:
  - OLLAMA_API_KEY environment variable, OR
  - Ollama profile in auth store
- Updated documentation to reflect the explicit configuration requirement
- Added a test to verify Ollama is not added by default
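
A minimal sketch of the opt-in check, using hypothetical names for the auth-store
lookup (the real resolution lives in the provider-building code):

    type AuthProfiles = Record<string, { apiKey?: string } | undefined>;

    function resolveOllamaApiKey(profiles: AuthProfiles): string | undefined {
      // Explicit opt-in only: an env var or a stored profile, never a default fallback key.
      return process.env.OLLAMA_API_KEY ?? profiles["ollama"]?.apiKey;
    }

    // The Ollama provider is only registered when this returns a value.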

This fixes all 4 failing test suites:
- checks (node, test, pnpm test)
- checks (bun, test, bunx vitest run)
- checks-windows (node, test, pnpm test)
- checks-macos (test, pnpm test)

Closes #1531
2026-01-24 22:38:52 +00:00

142 lines
4.1 KiB
TypeScript

import fs from "node:fs/promises";
import { describe, expect, it, vi } from "vitest";
import type { ClawdbotConfig } from "../config/config.js";
import { resolveSessionAgentIds } from "./agent-scope.js";
import { ensureClawdbotModelsJson } from "./models-config.js";
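
// Mock pi-ai's streamSimple so no real model provider is called: it throws for the
// "mock-error" model id and otherwise emits a single canned "done" event.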
vi.mock("@mariozechner/pi-ai", async () => {
const actual = await vi.importActual<typeof import("@mariozechner/pi-ai")>("@mariozechner/pi-ai");
return {
...actual,
streamSimple: (model: { api: string; provider: string; id: string }) => {
if (model.id === "mock-error") {
throw new Error("boom");
}
const stream = new actual.AssistantMessageEventStream();
queueMicrotask(() => {
stream.push({
type: "done",
reason: "stop",
message: {
role: "assistant",
content: [{ type: "text", text: "ok" }],
stopReason: "stop",
api: model.api,
provider: model.provider,
model: model.id,
usage: {
input: 1,
output: 1,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 2,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
},
timestamp: Date.now(),
},
});
});
return stream;
},
};
});
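
// Underscore-prefixed helpers below are not referenced by the tests in this file;
// they build a mock OpenAI provider config and read back JSONL session transcripts.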
const _makeOpenAiConfig = (modelIds: string[]) =>
  ({
    models: {
      providers: {
        openai: {
          api: "openai-responses",
          apiKey: "sk-test",
          baseUrl: "https://example.com",
          models: modelIds.map((id) => ({
            id,
            name: `Mock ${id}`,
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 16_000,
            maxTokens: 2048,
          })),
        },
      },
    },
  }) satisfies ClawdbotConfig;
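
// The cast to `unknown` lets callers intentionally skip awaiting the returned
// promise (see the commit notes above about unawaited async calls).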
const _ensureModels = (cfg: ClawdbotConfig, agentDir: string) =>
  ensureClawdbotModelsJson(cfg, agentDir) as unknown;

const _textFromContent = (content: unknown) => {
  if (typeof content === "string") return content;
  if (Array.isArray(content) && content[0]?.type === "text") {
    return (content[0] as { text?: string }).text;
  }
  return undefined;
};

const _readSessionMessages = async (sessionFile: string) => {
  const raw = await fs.readFile(sessionFile, "utf-8");
  return raw
    .split(/\r?\n/)
    .filter(Boolean)
    .map(
      (line) =>
        JSON.parse(line) as {
          type?: string;
          message?: { role?: string; content?: unknown };
        },
    )
    .filter((entry) => entry.type === "message")
    .map((entry) => entry.message as { role?: string; content?: unknown });
};
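
// resolveSessionAgentIds should honour the agent id embedded in "agent:*" session
// keys and fall back to the configured default agent for everything else.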
describe("resolveSessionAgentIds", () => {
const cfg = {
agents: {
list: [{ id: "main" }, { id: "beta", default: true }],
},
} as ClawdbotConfig;
it("falls back to the configured default when sessionKey is missing", () => {
const { defaultAgentId, sessionAgentId } = resolveSessionAgentIds({
config: cfg,
});
expect(defaultAgentId).toBe("beta");
expect(sessionAgentId).toBe("beta");
});
it("falls back to the configured default when sessionKey is non-agent", () => {
const { sessionAgentId } = resolveSessionAgentIds({
sessionKey: "telegram:slash:123",
config: cfg,
});
expect(sessionAgentId).toBe("beta");
});
it("falls back to the configured default for global sessions", () => {
const { sessionAgentId } = resolveSessionAgentIds({
sessionKey: "global",
config: cfg,
});
expect(sessionAgentId).toBe("beta");
});
it("keeps the agent id for provider-qualified agent sessions", () => {
const { sessionAgentId } = resolveSessionAgentIds({
sessionKey: "agent:beta:slack:channel:c1",
config: cfg,
});
expect(sessionAgentId).toBe("beta");
});
it("uses the agent id from agent session keys", () => {
const { sessionAgentId } = resolveSessionAgentIds({
sessionKey: "agent:main:main",
config: cfg,
});
expect(sessionAgentId).toBe("main");
});
});