feat: add remote config overrides to memorySearch

Muhammed Mukhthar CM
2026-01-12 15:33:35 +00:00
committed by Peter Steinberger
parent 4086408b10
commit ba316a10cc
8 changed files with 75 additions and 9 deletions

View File

@@ -81,8 +81,28 @@ Defaults:
Remote embeddings **require** an OpenAI API key (`OPENAI_API_KEY` or
`models.providers.openai.apiKey`). Codex OAuth only covers chat/completions and
does **not** satisfy embeddings for memory search.
If you want to use a **custom OpenAI-compatible endpoint** (like Gemini, OpenRouter, or a proxy),
you can use the `remote` configuration:
```json5
agents: {
  defaults: {
    memorySearch: {
      provider: "openai",
      model: "text-embedding-004",
      remote: {
        baseUrl: "https://generativelanguage.googleapis.com/v1beta/openai/",
        apiKey: "YOUR_GEMINI_API_KEY",
        headers: { "X-Custom-Header": "value" }
      }
    }
  }
}
```
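Under the hood, the `remote` block only changes where the embeddings request goes and which credentials it carries; the request shape follows the standard OpenAI embeddings API. A minimal sketch of the equivalent raw call, assuming the Gemini endpoint above (the base URL, env var, and model are placeholders, not values the gateway resolves for you):
```ts
// Sketch only: what a memorySearch.remote override boils down to.
// Base URL, API key source, and model are placeholder assumptions.
const baseUrl = "https://generativelanguage.googleapis.com/v1beta/openai/";
const apiKey = process.env.GEMINI_API_KEY ?? "";

async function embed(input: string[]): Promise<number[][]> {
  const res = await fetch(`${baseUrl.replace(/\/$/, "")}/embeddings`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
      // Entries from memorySearch.remote.headers are sent alongside these.
      "X-Custom-Header": "value",
    },
    body: JSON.stringify({ model: "text-embedding-004", input }),
  });
  if (!res.ok) throw new Error(`Embeddings request failed: ${res.status}`);
  const json = (await res.json()) as { data: { embedding: number[] }[] };
  return json.data.map((d) => d.embedding);
}
```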
If you don't want to set an API key, use `memorySearch.provider = "local"` or set
`memorySearch.fallback = "none"`.
Config example:

View File

@@ -241,6 +241,14 @@ Save to `~/.clawdbot/clawdbot.json` and you can DM the bot from that number.
prompt: "HEARTBEAT",
ackMaxChars: 300
},
memorySearch: {
provider: "openai",
model: "text-embedding-004",
remote: {
baseUrl: "https://generativelanguage.googleapis.com/v1beta/openai/",
apiKey: "${GEMINI_API_KEY}"
}
},
sandbox: {
mode: "non-main",
perSession: true,

View File

@@ -9,6 +9,11 @@ import { resolveAgentConfig } from "./agent-scope.js";
export type ResolvedMemorySearchConfig = {
  enabled: boolean;
  provider: "openai" | "local";
  remote?: {
    baseUrl?: string;
    apiKey?: string;
    headers?: Record<string, string>;
  };
  fallback: "openai" | "none";
  model: string;
  local: {
@@ -60,6 +65,7 @@ function mergeConfig(
): ResolvedMemorySearchConfig {
  const enabled = overrides?.enabled ?? defaults?.enabled ?? true;
  const provider = overrides?.provider ?? defaults?.provider ?? "openai";
  const remote = overrides?.remote ?? defaults?.remote;
  const fallback = overrides?.fallback ?? defaults?.fallback ?? "openai";
  const model = overrides?.model ?? defaults?.model ?? DEFAULT_MODEL;
  const local = {
@@ -112,6 +118,7 @@ function mergeConfig(
  return {
    enabled,
    provider,
    remote,
    fallback,
    model,
    local,
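One behavioural note that falls out of the `??` merge above: when a per-agent override defines `remote`, that object replaces the one from `agents.defaults` wholesale rather than being deep-merged. An illustrative sketch with made-up values:
```ts
// Illustrative only: ?? picks one remote object or the other; it does not merge fields.
type RemoteConfig = { baseUrl?: string; apiKey?: string; headers?: Record<string, string> };

const defaults: { remote?: RemoteConfig } = {
  remote: { baseUrl: "https://openrouter.ai/api/v1", apiKey: "DEFAULT_KEY" },
};
const overrides: { remote?: RemoteConfig } = {
  remote: { baseUrl: "https://generativelanguage.googleapis.com/v1beta/openai/" },
};

const remote = overrides?.remote ?? defaults?.remote;
console.log(remote?.apiKey); // undefined: a per-agent remote block must repeat any field it still needs
```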

View File

@@ -118,6 +118,8 @@ const FIELD_LABELS: Record<string, string> = {
"agents.defaults.memorySearch": "Memory Search",
"agents.defaults.memorySearch.enabled": "Enable Memory Search",
"agents.defaults.memorySearch.provider": "Memory Search Provider",
"agents.defaults.memorySearch.remote.baseUrl": "Remote Embedding Base URL",
"agents.defaults.memorySearch.remote.apiKey": "Remote Embedding API Key",
"agents.defaults.memorySearch.model": "Memory Search Model",
"agents.defaults.memorySearch.fallback": "Memory Search Fallback",
"agents.defaults.memorySearch.local.modelPath": "Local Embedding Model Path",
@@ -236,6 +238,10 @@ const FIELD_HELP: Record<string, string> = {
"Vector search over MEMORY.md and memory/*.md (per-agent overrides supported).",
"agents.defaults.memorySearch.provider":
'Embedding provider ("openai" or "local").',
"agents.defaults.memorySearch.remote.baseUrl":
"Custom OpenAI-compatible base URL (e.g. for Gemini/OpenRouter proxies).",
"agents.defaults.memorySearch.remote.apiKey":
"Custom API key for the remote embedding provider.",
"agents.defaults.memorySearch.local.modelPath":
"Local GGUF model path or hf: URI (node-llama-cpp).",
"agents.defaults.memorySearch.fallback":

View File

@@ -1011,6 +1011,11 @@ export type MemorySearchConfig = {
  enabled?: boolean;
  /** Embedding provider mode. */
  provider?: "openai" | "local";
  /** Remote endpoint overrides for OpenAI-compatible embedding providers. */
  remote?: {
    baseUrl?: string;
    apiKey?: string;
    headers?: Record<string, string>;
  };
  /** Fallback behavior when local embeddings fail. */
  fallback?: "openai" | "none";
  /** Embedding model id (remote) or alias (local). */

View File

@@ -886,6 +886,13 @@ const MemorySearchSchema = z
  .object({
    enabled: z.boolean().optional(),
    provider: z.union([z.literal("openai"), z.literal("local")]).optional(),
    remote: z
      .object({
        baseUrl: z.string().optional(),
        apiKey: z.string().optional(),
        headers: z.record(z.string(), z.string()).optional(),
      })
      .optional(),
    fallback: z.union([z.literal("openai"), z.literal("none")]).optional(),
    model: z.string().optional(),
    local: z

View File

@@ -20,6 +20,11 @@ export type EmbeddingProviderOptions = {
  config: ClawdbotConfig;
  agentDir?: string;
  provider: "openai" | "local";
  remote?: {
    baseUrl?: string;
    apiKey?: string;
    headers?: Record<string, string>;
  };
  model: string;
  fallback: "openai" | "none";
  local?: {
@@ -42,16 +47,23 @@ function normalizeOpenAiModel(model: string): string {
async function createOpenAiEmbeddingProvider(
  options: EmbeddingProviderOptions,
): Promise<EmbeddingProvider> {
  const remote =
    options.remote ?? options.config.agents?.defaults?.memorySearch?.remote;
  const { apiKey } = remote?.apiKey
    ? { apiKey: remote.apiKey }
    : await resolveApiKeyForProvider({
        provider: "openai",
        cfg: options.config,
        agentDir: options.agentDir,
      });
  const providerConfig = options.config.models?.providers?.openai;
  const baseUrl =
    remote?.baseUrl?.trim() ||
    providerConfig?.baseUrl?.trim() ||
    DEFAULT_OPENAI_BASE_URL;
  const url = `${baseUrl.replace(/\/$/, "")}/embeddings`;
  const headerOverrides = remote?.headers ?? providerConfig?.headers ?? {};
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Authorization: `Bearer ${apiKey}`,
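Taken together, the endpoint resolution order is now `memorySearch.remote.baseUrl`, then `models.providers.openai.baseUrl`, then the stock OpenAI URL, and headers follow the same pattern. A hedged sketch of the outcomes (the default constant's value is an assumption, not quoted from the module):
```ts
// Sketch of the precedence above; not the literal module code.
const DEFAULT_OPENAI_BASE_URL = "https://api.openai.com/v1"; // assumed default value

function resolveBaseUrl(remoteBaseUrl?: string, providerBaseUrl?: string): string {
  return remoteBaseUrl?.trim() || providerBaseUrl?.trim() || DEFAULT_OPENAI_BASE_URL;
}

resolveBaseUrl("https://generativelanguage.googleapis.com/v1beta/openai/", "https://proxy.internal/v1");
// -> remote wins: "https://generativelanguage.googleapis.com/v1beta/openai/"

resolveBaseUrl(undefined, "https://proxy.internal/v1");
// -> no remote block: "https://proxy.internal/v1"

resolveBaseUrl(undefined, undefined);
// -> neither set: "https://api.openai.com/v1"
```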

View File

@@ -88,6 +88,7 @@ export class MemoryIndexManager {
      config: cfg,
      agentDir: resolveAgentDir(cfg, agentId),
      provider: settings.provider,
      remote: settings.remote,
      model: settings.model,
      fallback: settings.fallback,
      local: settings.local,