From ba316a10cc627308b615561cad81c607d685f472 Mon Sep 17 00:00:00 2001 From: Muhammed Mukhthar CM Date: Mon, 12 Jan 2026 15:33:35 +0000 Subject: [PATCH] feat: add remote config overrides to memorySearch --- docs/concepts/memory.md | 24 ++++++++++++++++++++++-- docs/gateway/configuration-examples.md | 8 ++++++++ src/agents/memory-search.ts | 7 +++++++ src/config/schema.ts | 6 ++++++ src/config/types.ts | 5 +++++ src/config/zod-schema.ts | 7 +++++++ src/memory/embeddings.ts | 26 +++++++++++++++++++------- src/memory/index.ts | 1 + 8 files changed, 75 insertions(+), 9 deletions(-) diff --git a/docs/concepts/memory.md b/docs/concepts/memory.md index 6da6be1d4..d7b72e30f 100644 --- a/docs/concepts/memory.md +++ b/docs/concepts/memory.md @@ -81,8 +81,28 @@ Defaults: Remote embeddings **require** an OpenAI API key (`OPENAI_API_KEY` or `models.providers.openai.apiKey`). Codex OAuth only covers chat/completions and -does **not** satisfy embeddings for memory search. If you don't want to set an -API key, use `memorySearch.provider = "local"` or set +does **not** satisfy embeddings for memory search. + +If you want to use a **custom OpenAI-compatible endpoint** (like Gemini, OpenRouter, or a proxy), +you can use the `remote` configuration: + +```json5 +agents: { + defaults: { + memorySearch: { + provider: "openai", + model: "text-embedding-004", + remote: { + baseUrl: "https://generativelanguage.googleapis.com/v1beta/openai/", + apiKey: "YOUR_GEMINI_API_KEY", + headers: { "X-Custom-Header": "value" } + } + } + } +} +``` + +If you don't want to set an API key, use `memorySearch.provider = "local"` or set +`memorySearch.fallback = "none"`. Config example: diff --git a/docs/gateway/configuration-examples.md b/docs/gateway/configuration-examples.md index 3456077d8..fbb828e49 100644 --- a/docs/gateway/configuration-examples.md +++ b/docs/gateway/configuration-examples.md @@ -241,6 +241,14 @@ Save to `~/.clawdbot/clawdbot.json` and you can DM the bot from that number. 
prompt: "HEARTBEAT", ackMaxChars: 300 }, + memorySearch: { + provider: "openai", + model: "text-embedding-004", + remote: { + baseUrl: "https://generativelanguage.googleapis.com/v1beta/openai/", + apiKey: "${GEMINI_API_KEY}" + } + }, sandbox: { mode: "non-main", perSession: true, diff --git a/src/agents/memory-search.ts b/src/agents/memory-search.ts index 02ba369a0..c32b7975b 100644 --- a/src/agents/memory-search.ts +++ b/src/agents/memory-search.ts @@ -9,6 +9,11 @@ import { resolveAgentConfig } from "./agent-scope.js"; export type ResolvedMemorySearchConfig = { enabled: boolean; provider: "openai" | "local"; + remote?: { + baseUrl?: string; + apiKey?: string; + headers?: Record<string, string>; + }; fallback: "openai" | "none"; model: string; local: { @@ -60,6 +65,7 @@ function mergeConfig( ): ResolvedMemorySearchConfig { const enabled = overrides?.enabled ?? defaults?.enabled ?? true; const provider = overrides?.provider ?? defaults?.provider ?? "openai"; + const remote = overrides?.remote ?? defaults?.remote; const fallback = overrides?.fallback ?? defaults?.fallback ?? "openai"; const model = overrides?.model ?? defaults?.model ?? 
DEFAULT_MODEL; const local = { @@ -112,6 +118,7 @@ function mergeConfig( return { enabled, provider, + remote, fallback, model, local, diff --git a/src/config/schema.ts b/src/config/schema.ts index 2d43ae840..6c7a2e826 100644 --- a/src/config/schema.ts +++ b/src/config/schema.ts @@ -118,6 +118,8 @@ const FIELD_LABELS: Record<string, string> = { "agents.defaults.memorySearch": "Memory Search", "agents.defaults.memorySearch.enabled": "Enable Memory Search", "agents.defaults.memorySearch.provider": "Memory Search Provider", + "agents.defaults.memorySearch.remote.baseUrl": "Remote Embedding Base URL", + "agents.defaults.memorySearch.remote.apiKey": "Remote Embedding API Key", "agents.defaults.memorySearch.model": "Memory Search Model", "agents.defaults.memorySearch.fallback": "Memory Search Fallback", "agents.defaults.memorySearch.local.modelPath": "Local Embedding Model Path", @@ -236,6 +238,10 @@ const FIELD_HELP: Record<string, string> = { "Vector search over MEMORY.md and memory/*.md (per-agent overrides supported).", "agents.defaults.memorySearch.provider": 'Embedding provider ("openai" or "local").', + "agents.defaults.memorySearch.remote.baseUrl": + "Custom OpenAI-compatible base URL (e.g. for Gemini/OpenRouter proxies).", + "agents.defaults.memorySearch.remote.apiKey": + "Custom API key for the remote embedding provider.", "agents.defaults.memorySearch.local.modelPath": "Local GGUF model path or hf: URI (node-llama-cpp).", "agents.defaults.memorySearch.fallback": diff --git a/src/config/types.ts b/src/config/types.ts index 83e161490..ba008a610 100644 --- a/src/config/types.ts +++ b/src/config/types.ts @@ -1011,6 +1011,11 @@ export type MemorySearchConfig = { enabled?: boolean; /** Embedding provider mode. */ provider?: "openai" | "local"; + remote?: { + baseUrl?: string; + apiKey?: string; + headers?: Record<string, string>; + }; /** Fallback behavior when local embeddings fail. */ fallback?: "openai" | "none"; /** Embedding model id (remote) or alias (local). 
*/ diff --git a/src/config/zod-schema.ts b/src/config/zod-schema.ts index d72b855f4..3f78fad7f 100644 --- a/src/config/zod-schema.ts +++ b/src/config/zod-schema.ts @@ -886,6 +886,13 @@ const MemorySearchSchema = z .object({ enabled: z.boolean().optional(), provider: z.union([z.literal("openai"), z.literal("local")]).optional(), + remote: z + .object({ + baseUrl: z.string().optional(), + apiKey: z.string().optional(), + headers: z.record(z.string(), z.string()).optional(), + }) + .optional(), fallback: z.union([z.literal("openai"), z.literal("none")]).optional(), model: z.string().optional(), local: z diff --git a/src/memory/embeddings.ts b/src/memory/embeddings.ts index 3a5b1cb02..661d4f63e 100644 --- a/src/memory/embeddings.ts +++ b/src/memory/embeddings.ts @@ -20,6 +20,11 @@ export type EmbeddingProviderOptions = { config: ClawdbotConfig; agentDir?: string; provider: "openai" | "local"; + remote?: { + baseUrl?: string; + apiKey?: string; + headers?: Record<string, string>; + }; model: string; fallback: "openai" | "none"; local?: { @@ -42,16 +47,23 @@ function normalizeOpenAiModel(model: string): string { async function createOpenAiEmbeddingProvider( options: EmbeddingProviderOptions, ): Promise { - const { apiKey } = await resolveApiKeyForProvider({ - provider: "openai", - cfg: options.config, - agentDir: options.agentDir, - }); + const remote = options.remote; + + const { apiKey } = remote?.apiKey ? { apiKey: remote.apiKey } : await resolveApiKeyForProvider({ provider: "openai", cfg: options.config, agentDir: options.agentDir, }); const providerConfig = options.config.models?.providers?.openai; - const baseUrl = providerConfig?.baseUrl?.trim() || DEFAULT_OPENAI_BASE_URL; + const baseUrl = + remote?.baseUrl?.trim() || + providerConfig?.baseUrl?.trim() || + DEFAULT_OPENAI_BASE_URL; const url = `${baseUrl.replace(/\/$/, "")}/embeddings`; - const headerOverrides = providerConfig?.headers ?? 
{}; + const headerOverrides = remote?.headers ?? providerConfig?.headers ?? {}; + const headers: Record<string, string> = { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}`, diff --git a/src/memory/index.ts b/src/memory/index.ts index b81b10597..c45490fb5 100644 --- a/src/memory/index.ts +++ b/src/memory/index.ts @@ -88,6 +88,7 @@ export class MemoryIndexManager { config: cfg, agentDir: resolveAgentDir(cfg, agentId), provider: settings.provider, + remote: settings.remote, model: settings.model, fallback: settings.fallback, local: settings.local,