fix: memory search remote overrides (#819) (thanks @mukhtharcm)

This commit is contained in:
Peter Steinberger
2026-01-13 03:11:03 +00:00
parent ba316a10cc
commit da0a062fa7
7 changed files with 171 additions and 11 deletions

View File

@@ -4,6 +4,7 @@
### Changes
- Models/Moonshot: add Kimi K2 turbo + thinking variants to the preset + docs. (#818 — thanks @mickahouan)
- Memory: allow custom OpenAI-compatible embedding endpoints for memory search (remote baseUrl/apiKey/headers). (#819 — thanks @mukhtharcm)
### Fixes
- Onboarding/Configure: refuse to proceed with invalid configs; run `clawdbot doctor` first to avoid wiping custom fields. (#764 — thanks @mukhtharcm)

View File

@@ -79,9 +79,11 @@ Defaults:
- Uses remote embeddings (OpenAI) unless configured for local.
- Local mode uses node-llama-cpp and may require `pnpm approve-builds`.
Remote embeddings **require** an OpenAI API key (`OPENAI_API_KEY` or
`models.providers.openai.apiKey`). Codex OAuth only covers chat/completions and
does **not** satisfy embeddings for memory search.
Remote embeddings **require** an API key for the embedding provider. By default
this is OpenAI (`OPENAI_API_KEY` or `models.providers.openai.apiKey`). Codex
OAuth only covers chat/completions and does **not** satisfy embeddings for
memory search. When using a custom OpenAI-compatible endpoint, set
`memorySearch.remote.apiKey` (and optionally `memorySearch.remote.headers`).
If you want to use a **custom OpenAI-compatible endpoint** (like Gemini, OpenRouter, or a proxy),
you can use the `remote` configuration:

View File

@@ -53,4 +53,37 @@ describe("memory search config", () => {
expect(resolved?.query.maxResults).toBe(8);
expect(resolved?.query.minScore).toBe(0.2);
});
// Verifies that per-agent memorySearch.remote overrides are merged
// field-by-field with agents.defaults: the agent-supplied baseUrl wins,
// while apiKey and headers are inherited from the defaults rather than
// being dropped wholesale.
it("merges remote defaults with agent overrides", () => {
// Defaults carry a full remote config; the "main" agent overrides only baseUrl.
const cfg = {
agents: {
defaults: {
memorySearch: {
remote: {
baseUrl: "https://default.example/v1",
apiKey: "default-key",
headers: { "X-Default": "on" },
},
},
},
list: [
{
id: "main",
default: true,
memorySearch: {
remote: {
baseUrl: "https://agent.example/v1",
},
},
},
],
},
};
const resolved = resolveMemorySearchConfig(cfg, "main");
// Expect a deep merge: baseUrl from the agent override, apiKey/headers
// carried over from defaults (field-wise ?? fallback, not object replacement).
expect(resolved?.remote).toEqual({
baseUrl: "https://agent.example/v1",
apiKey: "default-key",
headers: { "X-Default": "on" },
});
});
});

View File

@@ -65,7 +65,14 @@ function mergeConfig(
): ResolvedMemorySearchConfig {
const enabled = overrides?.enabled ?? defaults?.enabled ?? true;
const provider = overrides?.provider ?? defaults?.provider ?? "openai";
const remote = overrides?.remote ?? defaults?.remote;
const hasRemote = Boolean(defaults?.remote || overrides?.remote);
const remote = hasRemote
? {
baseUrl: overrides?.remote?.baseUrl ?? defaults?.remote?.baseUrl,
apiKey: overrides?.remote?.apiKey ?? defaults?.remote?.apiKey,
headers: overrides?.remote?.headers ?? defaults?.remote?.headers,
}
: undefined;
const fallback = overrides?.fallback ?? defaults?.fallback ?? "openai";
const model = overrides?.model ?? defaults?.model ?? DEFAULT_MODEL;
const local = {

View File

@@ -120,6 +120,7 @@ const FIELD_LABELS: Record<string, string> = {
"agents.defaults.memorySearch.provider": "Memory Search Provider",
"agents.defaults.memorySearch.remote.baseUrl": "Remote Embedding Base URL",
"agents.defaults.memorySearch.remote.apiKey": "Remote Embedding API Key",
"agents.defaults.memorySearch.remote.headers": "Remote Embedding Headers",
"agents.defaults.memorySearch.model": "Memory Search Model",
"agents.defaults.memorySearch.fallback": "Memory Search Fallback",
"agents.defaults.memorySearch.local.modelPath": "Local Embedding Model Path",
@@ -242,6 +243,8 @@ const FIELD_HELP: Record<string, string> = {
"Custom OpenAI-compatible base URL (e.g. for Gemini/OpenRouter proxies).",
"agents.defaults.memorySearch.remote.apiKey":
"Custom API key for the remote embedding provider.",
"agents.defaults.memorySearch.remote.headers":
"Extra headers for remote embeddings (merged; remote overrides OpenAI headers).",
"agents.defaults.memorySearch.local.modelPath":
"Local GGUF model path or hf: URI (node-llama-cpp).",
"agents.defaults.memorySearch.fallback":

View File

@@ -0,0 +1,110 @@
import { afterEach, describe, expect, it, vi } from "vitest";
vi.mock("../agents/model-auth.js", () => ({
resolveApiKeyForProvider: vi.fn(),
}));
// Builds a stubbed `fetch` that always resolves with a 200 response whose
// JSON body contains a single fake embedding vector.
const createFetchMock = () => {
  // Fresh response object per invocation, mirroring real fetch semantics.
  const respond = async () => ({
    ok: true,
    status: 200,
    json: async () => ({ data: [{ embedding: [1, 2, 3] }] }),
  });
  return vi.fn(respond) as unknown as typeof fetch;
};
// Tests for createEmbeddingProvider's `remote` override handling: a custom
// remote baseUrl/apiKey should take precedence over the configured OpenAI
// provider settings, and remote headers should be merged on top of (and win
// over) the provider's headers.
describe("embedding provider remote overrides", () => {
afterEach(() => {
// Clear mock call history and restore the real global `fetch` between tests.
vi.resetAllMocks();
vi.unstubAllGlobals();
});
it("uses remote baseUrl/apiKey and merges headers", async () => {
const fetchMock = createFetchMock();
vi.stubGlobal("fetch", fetchMock);
// Dynamic imports so the vi.mock of model-auth (registered at the top of
// this file) is in effect when the module under test loads.
const { createEmbeddingProvider } = await import("./embeddings.js");
const authModule = await import("../agents/model-auth.js");
// Provider-level key that must NOT be used when remote.apiKey is present.
vi.mocked(authModule.resolveApiKeyForProvider).mockResolvedValue({
apiKey: "provider-key",
});
// Provider config supplies its own baseUrl and headers; "X-Shared" collides
// with the remote headers to prove remote wins on conflict.
const cfg = {
models: {
providers: {
openai: {
baseUrl: "https://provider.example/v1",
headers: {
"X-Provider": "p",
"X-Shared": "provider",
},
},
},
},
};
const result = await createEmbeddingProvider({
config: cfg as never,
provider: "openai",
remote: {
baseUrl: "https://remote.example/v1",
// Padded with whitespace to exercise trimming of the remote key.
apiKey: " remote-key ",
headers: {
"X-Shared": "remote",
"X-Remote": "r",
},
},
model: "text-embedding-3-small",
fallback: "openai",
});
await result.provider.embedQuery("hello");
// A non-blank remote apiKey short-circuits provider key resolution entirely.
expect(authModule.resolveApiKeyForProvider).not.toHaveBeenCalled();
const [url, init] = fetchMock.mock.calls[0] ?? [];
// The remote baseUrl (not the provider's) forms the embeddings endpoint.
expect(url).toBe("https://remote.example/v1/embeddings");
const headers = (init?.headers ?? {}) as Record<string, string>;
// The trimmed remote key is used for bearer auth.
expect(headers.Authorization).toBe("Bearer remote-key");
expect(headers["Content-Type"]).toBe("application/json");
// Provider headers survive the merge; remote overrides on key collision.
expect(headers["X-Provider"]).toBe("p");
expect(headers["X-Shared"]).toBe("remote");
expect(headers["X-Remote"]).toBe("r");
});
it("falls back to resolved api key when remote apiKey is blank", async () => {
const fetchMock = createFetchMock();
vi.stubGlobal("fetch", fetchMock);
const { createEmbeddingProvider } = await import("./embeddings.js");
const authModule = await import("../agents/model-auth.js");
vi.mocked(authModule.resolveApiKeyForProvider).mockResolvedValue({
apiKey: "provider-key",
});
const cfg = {
models: {
providers: {
openai: {
baseUrl: "https://provider.example/v1",
},
},
},
};
const result = await createEmbeddingProvider({
config: cfg as never,
provider: "openai",
remote: {
baseUrl: "https://remote.example/v1",
// Whitespace-only key must be treated as absent, not as a real key.
apiKey: " ",
},
model: "text-embedding-3-small",
fallback: "openai",
});
await result.provider.embedQuery("hello");
// The blank remote key falls through to resolveApiKeyForProvider exactly once.
expect(authModule.resolveApiKeyForProvider).toHaveBeenCalledTimes(1);
const headers =
(fetchMock.mock.calls[0]?.[1]?.headers as Record<string, string>) ?? {};
// Auth header carries the provider-resolved key instead.
expect(headers.Authorization).toBe("Bearer provider-key");
});
});

View File

@@ -47,10 +47,12 @@ function normalizeOpenAiModel(model: string): string {
async function createOpenAiEmbeddingProvider(
options: EmbeddingProviderOptions,
): Promise<EmbeddingProvider> {
const remote = options.config.agents?.defaults?.memorySearch?.remote;
const remote = options.remote;
const remoteApiKey = remote?.apiKey?.trim();
const remoteBaseUrl = remote?.baseUrl?.trim();
const { apiKey } = remote?.apiKey
? { apiKey: remote.apiKey }
const { apiKey } = remoteApiKey
? { apiKey: remoteApiKey }
: await resolveApiKeyForProvider({
provider: "openai",
cfg: options.config,
@@ -59,11 +61,13 @@ async function createOpenAiEmbeddingProvider(
const providerConfig = options.config.models?.providers?.openai;
const baseUrl =
remote?.baseUrl?.trim() ||
providerConfig?.baseUrl?.trim() ||
DEFAULT_OPENAI_BASE_URL;
remoteBaseUrl || providerConfig?.baseUrl?.trim() || DEFAULT_OPENAI_BASE_URL;
const url = `${baseUrl.replace(/\/$/, "")}/embeddings`;
const headerOverrides = remote?.headers ?? providerConfig?.headers ?? {};
const headerOverrides = Object.assign(
{},
providerConfig?.headers,
remote?.headers,
);
const headers: Record<string, string> = {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,