fix: make node-llama-cpp optional
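In short: the bare `await import("node-llama-cpp")` in `createLocalEmbeddingProvider` moves behind a one-function wrapper module (`src/memory/node-llama.ts`) so the dependency can be absent at runtime and mockable in tests, and a missing module is detected and turned into either a fallback to the remote provider or an actionable setup error. A minimal sketch of the consuming pattern, reusing the diff's own `ERR_MODULE_NOT_FOUND` check (`loadLocalBackend` is a hypothetical name, not part of the commit):

```ts
import { importNodeLlamaCpp } from "./node-llama.js";

// Sketch only. Returns null when the optional dependency is not installed,
// so the caller can fall back to a remote embedding provider.
async function loadLocalBackend() {
  try {
    return await importNodeLlamaCpp();
  } catch (err) {
    const missing =
      err instanceof Error &&
      (err as Error & { code?: unknown }).code === "ERR_MODULE_NOT_FOUND" &&
      err.message.includes("node-llama-cpp");
    if (missing) return null; // caller decides: remote fallback or setup error
    throw err;
  }
}
```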
@@ -13,7 +13,6 @@ export function isAntigravityClaude(api?: string | null, modelId?: string): bool
   return modelId?.toLowerCase().includes("claude") ?? false;
 }
 
-
 export { sanitizeGoogleTurnOrdering };
 
 /**
@@ -30,7 +30,11 @@ function isEmptyAssistantErrorMessage(
 export async function sanitizeSessionMessagesImages(
   messages: AgentMessage[],
   label: string,
-  options?: { sanitizeToolCallIds?: boolean; enforceToolCallLast?: boolean; preserveSignatures?: boolean },
+  options?: {
+    sanitizeToolCallIds?: boolean;
+    enforceToolCallLast?: boolean;
+    preserveSignatures?: boolean;
+  },
 ): Promise<AgentMessage[]> {
   // We sanitize historical session messages because Anthropic can reject a request
   // if the transcript contains oversized base64 images (see MAX_IMAGE_DIMENSION_PX).
@@ -77,8 +81,8 @@ export async function sanitizeSessionMessagesImages(
     const content = assistantMsg.content;
     if (Array.isArray(content)) {
       const strippedContent = options?.preserveSignatures
-        ? content // Keep signatures for Antigravity Claude
-        : stripThoughtSignatures(content); // Strip for Gemini
+        ? content // Keep signatures for Antigravity Claude
+        : stripThoughtSignatures(content); // Strip for Gemini
 
       const filteredContent = strippedContent.filter((block) => {
         if (!block || typeof block !== "object") return true;
@@ -140,19 +140,8 @@ function formatPositionalArgs(
     let rendered: string;
     if (typeof value === "string") {
       rendered = value.trim();
-    } else if (
-      typeof value === "number" ||
-      typeof value === "boolean" ||
-      typeof value === "bigint"
-    ) {
-      rendered = String(value);
-    } else if (typeof value === "symbol") {
-      rendered = value.toString();
-    } else if (typeof value === "function") {
-      rendered = value.toString();
     } else {
-      // Objects and arrays
-      rendered = JSON.stringify(value);
+      rendered = String(value);
     }
     if (!rendered) continue;
     parts.push(rendered);
@@ -8,9 +8,7 @@ describe("applyTemplate", () => {
     overrides.MessageSid = 42;
     overrides.IsNewSession = true;
 
-    expect(applyTemplate("sid={{MessageSid}} new={{IsNewSession}}", ctx)).toBe(
-      "sid=42 new=true",
-    );
+    expect(applyTemplate("sid={{MessageSid}} new={{IsNewSession}}", ctx)).toBe("sid=42 new=true");
   });
 
   it("renders arrays of primitives", () => {
@@ -100,7 +100,7 @@ function formatTemplateValue(value: unknown): string {
       .join(",");
   }
   if (typeof value === "object") {
-    return JSON.stringify(value);
+    return "";
   }
   return "";
 }
@@ -118,7 +118,7 @@ function readDiscordCommandArgs(
   if (!definitions || definitions.length === 0) return undefined;
   const values: CommandArgValues = {};
   for (const definition of definitions) {
-    let value: string | number | boolean | null;
+    let value: string | number | boolean | null | undefined;
     if (definition.type === "number") {
       value = interaction.options.getNumber(definition.name);
     } else if (definition.type === "boolean") {
@@ -14,6 +14,7 @@ const createFetchMock = () =>
 describe("embedding provider remote overrides", () => {
   afterEach(() => {
     vi.resetAllMocks();
+    vi.resetModules();
     vi.unstubAllGlobals();
   });
 
@@ -107,3 +108,63 @@ describe("embedding provider remote overrides", () => {
     expect(headers.Authorization).toBe("Bearer provider-key");
   });
 });
+
+describe("embedding provider local fallback", () => {
+  afterEach(() => {
+    vi.resetAllMocks();
+    vi.resetModules();
+    vi.unstubAllGlobals();
+    vi.doUnmock("./node-llama.js");
+  });
+
+  it("falls back to openai when node-llama-cpp is missing", async () => {
+    vi.doMock("./node-llama.js", () => ({
+      importNodeLlamaCpp: async () => {
+        throw Object.assign(new Error("Cannot find package 'node-llama-cpp'"), {
+          code: "ERR_MODULE_NOT_FOUND",
+        });
+      },
+    }));
+
+    const fetchMock = createFetchMock();
+    vi.stubGlobal("fetch", fetchMock);
+
+    const { createEmbeddingProvider } = await import("./embeddings.js");
+    const authModule = await import("../agents/model-auth.js");
+    vi.mocked(authModule.resolveApiKeyForProvider).mockResolvedValue({
+      apiKey: "provider-key",
+    });
+
+    const result = await createEmbeddingProvider({
+      config: {} as never,
+      provider: "local",
+      model: "text-embedding-3-small",
+      fallback: "openai",
+    });
+
+    expect(result.provider.id).toBe("openai");
+    expect(result.fallbackFrom).toBe("local");
+    expect(result.fallbackReason).toContain("node-llama-cpp");
+  });
+
+  it("throws a helpful error when local is requested and fallback is none", async () => {
+    vi.doMock("./node-llama.js", () => ({
+      importNodeLlamaCpp: async () => {
+        throw Object.assign(new Error("Cannot find package 'node-llama-cpp'"), {
+          code: "ERR_MODULE_NOT_FOUND",
+        });
+      },
+    }));
+
+    const { createEmbeddingProvider } = await import("./embeddings.js");
+
+    await expect(
+      createEmbeddingProvider({
+        config: {} as never,
+        provider: "local",
+        model: "text-embedding-3-small",
+        fallback: "none",
+      }),
+    ).rejects.toThrow(/optional dependency node-llama-cpp/i);
+  });
+});
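Why the wrapper module matters for these tests: `vi.doMock` intercepts module specifiers, so a bare `import("node-llama-cpp")` buried inside `embeddings.ts` would be awkward to stub when the package may not even be installed; routing it through `./node-llama.js` gives the suite a seam. A condensed sketch of the pattern both tests above rely on (vitest APIs as used in the diff):

```ts
import { vi } from "vitest";

// Replace the seam before importing the module under test.
vi.doMock("./node-llama.js", () => ({
  importNodeLlamaCpp: async () => {
    // Simulate the optional dependency being absent.
    throw Object.assign(new Error("Cannot find package 'node-llama-cpp'"), {
      code: "ERR_MODULE_NOT_FOUND",
    });
  },
}));

// Import *after* doMock so embeddings.js sees the mocked wrapper.
const { createEmbeddingProvider } = await import("./embeddings.js");
```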
@@ -1,6 +1,7 @@
 import type { Llama, LlamaEmbeddingContext, LlamaModel } from "node-llama-cpp";
 import { resolveApiKeyForProvider } from "../agents/model-auth.js";
 import type { ClawdbotConfig } from "../config/config.js";
+import { importNodeLlamaCpp } from "./node-llama.js";
 
 export type EmbeddingProvider = {
   id: string;
@@ -105,7 +106,7 @@ async function createLocalEmbeddingProvider(
   const modelCacheDir = options.local?.modelCacheDir?.trim();
 
   // Lazy-load node-llama-cpp to keep startup light unless local is enabled.
-  const { getLlama, resolveModelFile, LlamaLogLevel } = await import("node-llama-cpp");
+  const { getLlama, resolveModelFile, LlamaLogLevel } = await importNodeLlamaCpp();
 
   let llama: Llama | null = null;
   let embeddingModel: LlamaModel | null = null;
@@ -181,15 +182,32 @@ function formatError(err: unknown): string {
   return String(err);
 }
 
+function isNodeLlamaCppMissing(err: unknown): boolean {
+  if (!(err instanceof Error)) return false;
+  const code = (err as Error & { code?: unknown }).code;
+  if (code === "ERR_MODULE_NOT_FOUND") {
+    return err.message.includes("node-llama-cpp");
+  }
+  return false;
+}
+
 function formatLocalSetupError(err: unknown): string {
   const detail = formatError(err);
+  const missing = isNodeLlamaCppMissing(err);
   return [
     "Local embeddings unavailable.",
-    detail ? `Reason: ${detail}` : undefined,
+    missing
+      ? "Reason: optional dependency node-llama-cpp is missing (or failed to install)."
+      : detail
+        ? `Reason: ${detail}`
+        : undefined,
+    missing && detail ? `Detail: ${detail}` : null,
     "To enable local embeddings:",
-    "1) pnpm approve-builds",
-    "2) select node-llama-cpp",
-    "3) pnpm rebuild node-llama-cpp",
+    "1) Use Node 22 LTS (recommended for installs/updates)",
+    missing
+      ? "2) Reinstall Clawdbot (this should install node-llama-cpp): npm i -g clawdbot@latest"
+      : null,
+    "3) If you use pnpm: pnpm approve-builds (select node-llama-cpp), then pnpm rebuild node-llama-cpp",
     'Or set agents.defaults.memorySearch.provider = "openai" (remote).',
   ]
     .filter(Boolean)

src/memory/node-llama.ts (new file, +3)
@@ -0,0 +1,3 @@
+export async function importNodeLlamaCpp() {
+  return import("node-llama-cpp");
+}
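Pieced together from the tests, the runtime flow in `createEmbeddingProvider` is presumably: attempt the local provider, classify the failure, then either fall back or throw the formatted setup error. A hedged sketch of that wiring (`createOpenAiEmbeddingProvider` is a hypothetical stand-in; only `createLocalEmbeddingProvider` and `formatLocalSetupError` appear in the diff):

```ts
// Inferred sketch, not the literal embeddings.ts implementation.
declare function createLocalEmbeddingProvider(): Promise<{ id: string }>;
declare function createOpenAiEmbeddingProvider(): Promise<{ id: string }>;
declare function formatLocalSetupError(err: unknown): string;

async function resolveLocalProvider(fallback: "openai" | "none") {
  try {
    return { provider: await createLocalEmbeddingProvider() };
  } catch (err) {
    // formatLocalSetupError mentions node-llama-cpp when the module is missing.
    const reason = formatLocalSetupError(err);
    if (fallback === "none") throw new Error(reason);
    return {
      provider: await createOpenAiEmbeddingProvider(),
      fallbackFrom: "local" as const,
      fallbackReason: reason,
    };
  }
}
```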