fix: make node-llama-cpp optional

Peter Steinberger
2026-01-15 18:37:02 +00:00
parent 316e8b2eb2
commit cb78fa46a1
12 changed files with 277 additions and 87 deletions


@@ -1,6 +1,7 @@
 import type { Llama, LlamaEmbeddingContext, LlamaModel } from "node-llama-cpp";
 import { resolveApiKeyForProvider } from "../agents/model-auth.js";
 import type { ClawdbotConfig } from "../config/config.js";
+import { importNodeLlamaCpp } from "./node-llama.js";
 
 export type EmbeddingProvider = {
   id: string;
@@ -105,7 +106,7 @@ async function createLocalEmbeddingProvider(
   const modelCacheDir = options.local?.modelCacheDir?.trim();
 
   // Lazy-load node-llama-cpp to keep startup light unless local is enabled.
-  const { getLlama, resolveModelFile, LlamaLogLevel } = await import("node-llama-cpp");
+  const { getLlama, resolveModelFile, LlamaLogLevel } = await importNodeLlamaCpp();
 
   let llama: Llama | null = null;
   let embeddingModel: LlamaModel | null = null;
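
Note: the node-llama.js module that supplies importNodeLlamaCpp is one of the other changed files and is not shown in this excerpt. A minimal sketch of what such a wrapper could look like, assuming it does nothing more than defer the dynamic import of the optional dependency (the names and shape here are illustrative, not the actual file):

// Hypothetical sketch of ./node-llama.ts (the real module is not shown above).
// Defers loading node-llama-cpp so the optional dependency is only resolved
// when local embeddings are actually requested.
export type NodeLlamaCppModule = typeof import("node-llama-cpp");

export async function importNodeLlamaCpp(): Promise<NodeLlamaCppModule> {
  // Throws ERR_MODULE_NOT_FOUND when the optional dependency is absent;
  // the caller turns that into the setup message built further down.
  return await import("node-llama-cpp");
}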
@@ -181,15 +182,32 @@ function formatError(err: unknown): string {
   return String(err);
 }
 
+function isNodeLlamaCppMissing(err: unknown): boolean {
+  if (!(err instanceof Error)) return false;
+  const code = (err as Error & { code?: unknown }).code;
+  if (code === "ERR_MODULE_NOT_FOUND") {
+    return err.message.includes("node-llama-cpp");
+  }
+  return false;
+}
+
 function formatLocalSetupError(err: unknown): string {
   const detail = formatError(err);
+  const missing = isNodeLlamaCppMissing(err);
   return [
     "Local embeddings unavailable.",
-    detail ? `Reason: ${detail}` : undefined,
+    missing
+      ? "Reason: optional dependency node-llama-cpp is missing (or failed to install)."
+      : detail
+        ? `Reason: ${detail}`
+        : undefined,
+    missing && detail ? `Detail: ${detail}` : null,
     "To enable local embeddings:",
-    "1) pnpm approve-builds",
-    "2) select node-llama-cpp",
-    "3) pnpm rebuild node-llama-cpp",
+    "1) Use Node 22 LTS (recommended for installs/updates)",
+    missing
+      ? "2) Reinstall Clawdbot (this should install node-llama-cpp): npm i -g clawdbot@latest"
+      : null,
+    "3) If you use pnpm: pnpm approve-builds (select node-llama-cpp), then pnpm rebuild node-llama-cpp",
     'Or set agents.defaults.memorySearch.provider = "openai" (remote).',
   ]
     .filter(Boolean)
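
Taken together, the new branches change what users see when the optional dependency is absent. A rough illustration, assuming the two helpers above were reachable from a test (in the diff they are module-private) and that the array cut off at .filter(Boolean) is ultimately joined into a single message:

// Illustrative only: simulate the error Node throws when an optional dependency
// cannot be resolved, then run it through the new formatting path.
const err = Object.assign(
  new Error('Cannot find package "node-llama-cpp" imported from embeddings.js'),
  { code: "ERR_MODULE_NOT_FOUND" },
);

// isNodeLlamaCppMissing(err) is true for this shape, so formatLocalSetupError(err)
// leads with "Reason: optional dependency node-llama-cpp is missing (or failed to
// install).", keeps the underlying error text as a separate Detail line, and then
// lists the Node 22 / reinstall / pnpm steps plus the remote-provider fallback.
console.log(formatLocalSetupError(err));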