diff --git a/docs/concepts/model-providers.md b/docs/concepts/model-providers.md
new file mode 100644
index 000000000..e7e86695b
--- /dev/null
+++ b/docs/concepts/model-providers.md
@@ -0,0 +1,162 @@
+---
+summary: "Model provider overview with example configs + CLI flows"
+read_when:
+  - You need a provider-by-provider model setup reference
+  - You want example configs or CLI onboarding commands for model providers
+---
+# Model providers
+
+This page covers **LLM/model providers** (not chat providers like WhatsApp/Telegram).
+For model selection rules, see [/concepts/models](/concepts/models).
+
+## Quick rules
+
+- Model refs use `provider/model` (example: `opencode/claude-opus-4-5`).
+- If you set `agents.defaults.models`, it becomes the allowlist.
+- CLI helpers: `clawdbot onboard`, `clawdbot models list`, `clawdbot models set <provider/model>`.
+
+## Built-in providers (pi-ai catalog)
+
+Clawdbot ships with the pi‑ai catalog. These providers require **no**
+`models.providers` config; just set auth + pick a model.
+
+### OpenAI
+
+- Provider: `openai`
+- Auth: `OPENAI_API_KEY`
+- Example model: `openai/gpt-5.2`
+- CLI: `clawdbot onboard --auth-choice openai-api-key`
+
+```json5
+{
+  agents: { defaults: { model: { primary: "openai/gpt-5.2" } } }
+}
+```
+
+### Anthropic
+
+- Provider: `anthropic`
+- Auth: `ANTHROPIC_API_KEY` or `claude setup-token`
+- Example model: `anthropic/claude-opus-4-5`
+- CLI: `clawdbot onboard --auth-choice setup-token`
+
+```json5
+{
+  agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } }
+}
+```
+
+### OpenAI Code (Codex)
+
+- Provider: `openai-codex`
+- Auth: OAuth or Codex CLI (`~/.codex/auth.json`)
+- Example model: `openai-codex/gpt-5.2`
+- CLI: `clawdbot onboard --auth-choice openai-codex` or `codex-cli`
+
+```json5
+{
+  agents: { defaults: { model: { primary: "openai-codex/gpt-5.2" } } }
+}
+```
+
+### OpenCode Zen
+
+- Provider: `opencode`
+- Auth: `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`)
+- Example model: `opencode/claude-opus-4-5`
+- CLI: `clawdbot onboard --auth-choice opencode-zen`
+
+```json5
+{
+  agents: { defaults: { model: { primary: "opencode/claude-opus-4-5" } } }
+}
+```
+
+### Google Gemini (API key)
+
+- Provider: `google`
+- Auth: `GEMINI_API_KEY`
+- Example model: `google/gemini-3-pro`
+- CLI: `clawdbot onboard --auth-choice gemini-api-key`
+
+### Google Vertex / Antigravity / Gemini CLI
+
+- Providers: `google-vertex`, `google-antigravity`, `google-gemini-cli`
+- Auth: Vertex uses gcloud ADC; Antigravity/Gemini CLI use their respective auth flows
+- CLI: `clawdbot onboard --auth-choice antigravity` (others via interactive wizard)
+
+### Z.AI (GLM)
+
+- Provider: `zai`
+- Auth: `ZAI_API_KEY`
+- Example model: `zai/glm-4.7`
+- CLI: `clawdbot onboard --auth-choice zai-api-key`
+  - Aliases: `z.ai/*` and `z-ai/*` normalize to `zai/*`
+
+### Other built-in providers
+
+- OpenRouter: `openrouter` (`OPENROUTER_API_KEY`)
+  - Example model: `openrouter/anthropic/claude-sonnet-4-5`
+- xAI: `xai` (`XAI_API_KEY`)
+- Groq: `groq` (`GROQ_API_KEY`)
+- Cerebras: `cerebras` (`CEREBRAS_API_KEY`)
+- Mistral: `mistral` (`MISTRAL_API_KEY`)
+- GitHub Copilot: `github-copilot` (`COPILOT_GITHUB_TOKEN` / `GH_TOKEN` / `GITHUB_TOKEN`)
+
+## Providers via `models.providers` (custom/base URL)
+
+Use `models.providers` (or `models.json`) to add **custom** providers or
+OpenAI/Anthropic‑compatible proxies.
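+
+A minimal sketch of the shape of a `models.providers` entry (the `myproxy` id, URL,
+and model values below are placeholders, not a real endpoint; see the LM Studio
+example further down for the remaining per-model fields such as `reasoning`,
+`input`, and `cost`):
+
+```json5
+{
+  models: {
+    providers: {
+      // hypothetical provider id; reference its models as "myproxy/<model id>"
+      myproxy: {
+        baseUrl: "https://proxy.example.com/v1",
+        apiKey: "MYPROXY_API_KEY",
+        // "openai-completions" for OpenAI-compatible proxies;
+        // "anthropic-messages" for Anthropic-compatible endpoints
+        api: "openai-completions",
+        models: [
+          { id: "my-model", name: "My Model", contextWindow: 128000, maxTokens: 8192 }
+        ]
+      }
+    }
+  }
+}
+```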
+
+### MiniMax
+
+MiniMax is configured via `models.providers` because it uses custom endpoints:
+
+- MiniMax Cloud (OpenAI‑compatible): `--auth-choice minimax-cloud`
+- MiniMax API (Anthropic‑compatible): `--auth-choice minimax-api`
+- Auth: `MINIMAX_API_KEY`
+
+### Local proxies (LM Studio, vLLM, LiteLLM, etc.)
+
+Example (OpenAI‑compatible):
+
+```json5
+{
+  agents: {
+    defaults: {
+      model: { primary: "lmstudio/minimax-m2.1-gs32" },
+      models: { "lmstudio/minimax-m2.1-gs32": { alias: "Minimax" } }
+    }
+  },
+  models: {
+    providers: {
+      lmstudio: {
+        baseUrl: "http://localhost:1234/v1",
+        apiKey: "LMSTUDIO_KEY",
+        api: "openai-completions",
+        models: [
+          {
+            id: "minimax-m2.1-gs32",
+            name: "MiniMax M2.1",
+            reasoning: false,
+            input: ["text"],
+            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
+            contextWindow: 200000,
+            maxTokens: 8192
+          }
+        ]
+      }
+    }
+  }
+}
+```
+
+## CLI examples
+
+```bash
+clawdbot onboard --auth-choice opencode-zen
+clawdbot models set opencode/claude-opus-4-5
+clawdbot models list
+```
+
+See also: [/gateway/configuration](/gateway/configuration) for full configuration examples.
diff --git a/docs/concepts/models.md b/docs/concepts/models.md
index eefd31565..776dda8ba 100644
--- a/docs/concepts/models.md
+++ b/docs/concepts/models.md
@@ -9,6 +9,7 @@ read_when:
 
 See [/concepts/model-failover](/concepts/model-failover) for auth profile rotation,
 cooldowns, and how that interacts with fallbacks.
+Quick provider overview + examples: [/concepts/model-providers](/concepts/model-providers).
 
 ## How model selection works
 
diff --git a/docs/docs.json b/docs/docs.json
index 33cc7302b..d5ef8248b 100644
--- a/docs/docs.json
+++ b/docs/docs.json
@@ -580,6 +580,7 @@ "group": "Install & Updates",
       "pages": [
         "install/updating",
+        "install/ansible",
         "install/nix",
         "install/docker",
         "install/bun"
       ]
@@ -589,7 +590,9 @@
       "group": "CLI",
       "pages": [
         "cli/index",
+        "cli/message",
         "cli/gateway",
+        "cli/update",
         "cli/sandbox"
       ]
     },
@@ -612,12 +615,16 @@
         "concepts/presence",
         "concepts/provider-routing",
         "concepts/messages",
+        "concepts/streaming",
         "concepts/groups",
         "concepts/group-messages",
         "concepts/typing-indicators",
         "concepts/queue",
+        "concepts/retry",
+        "concepts/model-providers",
         "concepts/models",
         "concepts/model-failover",
+        "concepts/usage-tracking",
         "concepts/timezone",
         "concepts/typebox"
       ]
@@ -628,6 +635,7 @@
         "gateway",
         "gateway/pairing",
         "gateway/gateway-lock",
+        "environment",
         "gateway/configuration",
         "gateway/configuration-examples",
         "gateway/authentication",
@@ -637,7 +645,10 @@
         "gateway/doctor",
         "gateway/logging",
         "gateway/security",
+        "gateway/sandbox-vs-tool-policy-vs-elevated",
+        "gateway/sandboxing",
         "gateway/troubleshooting",
+        "debugging",
         "gateway/remote",
         "gateway/remote-gateway-readme",
         "gateway/discovery",
@@ -659,12 +670,15 @@
       "group": "Providers",
       "pages": [
         "providers/whatsapp",
+        "broadcast-groups",
         "providers/telegram",
         "providers/grammy",
         "providers/discord",
         "providers/slack",
         "providers/signal",
         "providers/imessage",
+        "providers/msteams",
+        "providers/troubleshooting",
         "providers/location"
       ]
     },
@@ -690,6 +704,8 @@
         "tools/thinking",
         "tools/agent-send",
         "tools/subagents",
+        "multi-agent-sandbox-tools",
+        "tools/reactions",
         "tools/skills",
         "tools/skills-config",
         "tools/clawdhub"
diff --git a/docs/gateway/configuration.md b/docs/gateway/configuration.md
index 3d8121ff4..90030a20e 100644
--- a/docs/gateway/configuration.md
+++ b/docs/gateway/configuration.md
@@ -1423,6 +1423,7 @@ Clawdbot uses the **pi-coding-agent** model catalog.
 
 You can add custom provider (LiteLLM, local OpenAI-compatible servers, Anthropic proxies, etc.)
 by writing `~/.clawdbot/agents/<agentId>/agent/models.json` or by defining the same schema inside your Clawdbot config under `models.providers`.
+Provider-by-provider overview + examples: [/concepts/model-providers](/concepts/model-providers).
 
 When `models.providers` is present, Clawdbot writes/merges a `models.json` into
 `~/.clawdbot/agents/<agentId>/agent/` on startup:
@@ -1467,10 +1468,12 @@ Select the model via `agents.defaults.model.primary` (provider/model).
 
 ### OpenCode Zen (multi-model proxy)
 
-OpenCode Zen is an OpenAI-compatible proxy at `https://opencode.ai/zen/v1`. Get an API key at https://opencode.ai/auth and set `OPENCODE_ZEN_API_KEY`.
+OpenCode Zen is a multi-model gateway with per-model endpoints. Clawdbot uses
+the built-in `opencode` provider from pi-ai; get a key at https://opencode.ai/auth
+and set `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`).
 
 Notes:
 
-- Model refs use `opencode-zen/` (example: `opencode-zen/claude-opus-4-5`).
+- Model refs use `opencode/` (example: `opencode/claude-opus-4-5`).
 - If you enable an allowlist via `agents.defaults.models`, add each model you plan to use.
 - Shortcut: `clawdbot onboard --auth-choice opencode-zen`.
@@ -1478,29 +1481,8 @@ Notes:
 {
   agents: {
     defaults: {
-      model: { primary: "opencode-zen/claude-opus-4-5" },
-      models: { "opencode-zen/claude-opus-4-5": { alias: "Opus" } }
-    }
-  },
-  models: {
-    mode: "merge",
-    providers: {
-      "opencode-zen": {
-        baseUrl: "https://opencode.ai/zen/v1",
-        apiKey: "${OPENCODE_ZEN_API_KEY}",
-        api: "openai-completions",
-        models: [
-          {
-            id: "claude-opus-4-5",
-            name: "Claude Opus 4.5",
-            reasoning: true,
-            input: ["text", "image"],
-            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
-            contextWindow: 200000,
-            maxTokens: 32000
-          }
-        ]
-      }
+      model: { primary: "opencode/claude-opus-4-5" },
+      models: { "opencode/claude-opus-4-5": { alias: "Opus" } }
     }
   }
 }
diff --git a/docs/start/wizard.md b/docs/start/wizard.md
index 7b9abab5f..e2ede7fa6 100644
--- a/docs/start/wizard.md
+++ b/docs/start/wizard.md
@@ -78,7 +78,7 @@ Tip: `--json` does **not** imply non-interactive mode. Use `--non-interactive` (
 - **OpenAI Code (Codex) subscription (OAuth)**: browser flow; paste the `code#state`.
   - Sets `agents.defaults.model` to `openai-codex/gpt-5.2` when model is unset or `openai/*`.
 - **OpenAI API key**: uses `OPENAI_API_KEY` if present or prompts for a key, then saves it to `~/.clawdbot/.env` so launchd can read it.
-- **OpenCode Zen (multi-model proxy)**: prompts for `OPENCODE_ZEN_API_KEY` (get it at https://opencode.ai/auth).
+- **OpenCode Zen (multi-model proxy)**: prompts for `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`, get it at https://opencode.ai/auth).
   - **API key**: stores the key for you.
 - **MiniMax M2.1 (minimax.io)**: config is auto‑written for the OpenAI-compatible `/v1` endpoint.
 - **MiniMax API (platform.minimax.io)**: config is auto‑written for the Anthropic-compatible `/anthropic` endpoint.
@@ -205,7 +205,7 @@ OpenCode Zen example:
 clawdbot onboard --non-interactive \
   --mode local \
   --auth-choice opencode-zen \
-  --opencode-zen-api-key "$OPENCODE_ZEN_API_KEY" \
+  --opencode-zen-api-key "$OPENCODE_API_KEY" \
   --gateway-port 18789 \
   --gateway-bind loopback
 ```
diff --git a/src/agents/model-auth.ts b/src/agents/model-auth.ts
index fe9b164ba..ac648caad 100644
--- a/src/agents/model-auth.ts
+++ b/src/agents/model-auth.ts
@@ -2,6 +2,7 @@ import { type Api, getEnvApiKey, type Model } from "@mariozechner/pi-ai";
 import type { ClawdbotConfig } from "../config/config.js";
 import type { ModelProviderConfig } from "../config/types.js";
 import { getShellEnvAppliedKeys } from "../infra/shell-env.js";
+import { normalizeProviderId } from "./model-selection.js";
 import {
   type AuthProfileStore,
   ensureAuthProfileStore,
@@ -103,6 +104,7 @@ export type EnvApiKeyResult = { apiKey: string; source: string };
 export type ModelAuthMode = "api-key" | "oauth" | "token" | "mixed" | "unknown";
 
 export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
+  const normalized = normalizeProviderId(provider);
   const applied = new Set(getShellEnvAppliedKeys());
   const pick = (envVar: string): EnvApiKeyResult | null => {
     const value = process.env[envVar]?.trim();
@@ -113,26 +115,30 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
     return { apiKey: value, source };
   };
 
-  if (provider === "github-copilot") {
+  if (normalized === "github-copilot") {
     return (
       pick("COPILOT_GITHUB_TOKEN") ?? pick("GH_TOKEN") ?? pick("GITHUB_TOKEN")
     );
   }
 
-  if (provider === "anthropic") {
+  if (normalized === "anthropic") {
     return pick("ANTHROPIC_OAUTH_TOKEN") ?? pick("ANTHROPIC_API_KEY");
   }
 
-  if (provider === "zai") {
+  if (normalized === "zai") {
     return pick("ZAI_API_KEY") ?? pick("Z_AI_API_KEY");
   }
 
-  if (provider === "google-vertex") {
-    const envKey = getEnvApiKey(provider);
+  if (normalized === "google-vertex") {
+    const envKey = getEnvApiKey(normalized);
     if (!envKey) return null;
     return { apiKey: envKey, source: "gcloud adc" };
   }
 
+  if (normalized === "opencode") {
+    return pick("OPENCODE_API_KEY") ?? pick("OPENCODE_ZEN_API_KEY");
pick("OPENCODE_ZEN_API_KEY"); + } + const envMap: Record = { openai: "OPENAI_API_KEY", google: "GEMINI_API_KEY", @@ -142,9 +148,9 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null { openrouter: "OPENROUTER_API_KEY", minimax: "MINIMAX_API_KEY", mistral: "MISTRAL_API_KEY", - "opencode-zen": "OPENCODE_ZEN_API_KEY", + opencode: "OPENCODE_API_KEY", }; - const envVar = envMap[provider]; + const envVar = envMap[normalized]; if (!envVar) return null; return pick(envVar); } diff --git a/src/agents/model-selection.ts b/src/agents/model-selection.ts index 57841c6fa..56616ca08 100644 --- a/src/agents/model-selection.ts +++ b/src/agents/model-selection.ts @@ -24,6 +24,7 @@ export function modelKey(provider: string, model: string) { export function normalizeProviderId(provider: string): string { const normalized = provider.trim().toLowerCase(); if (normalized === "z.ai" || normalized === "z-ai") return "zai"; + if (normalized === "opencode-zen") return "opencode"; return normalized; } diff --git a/src/agents/opencode-zen-models.test.ts b/src/agents/opencode-zen-models.test.ts index 4fcbe3395..f1c1efd7a 100644 --- a/src/agents/opencode-zen-models.test.ts +++ b/src/agents/opencode-zen-models.test.ts @@ -41,12 +41,18 @@ describe("resolveOpencodeZenAlias", () => { }); describe("resolveOpencodeZenModelApi", () => { - it("returns openai-completions for all models (OpenCode Zen is OpenAI-compatible)", () => { + it("maps APIs by model family", () => { expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe( - "openai-completions", + "anthropic-messages", + ); + expect(resolveOpencodeZenModelApi("minimax-m2.1-free")).toBe( + "anthropic-messages", ); - expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-completions"); expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe( + "google-generative-ai", + ); + expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-responses"); + expect(resolveOpencodeZenModelApi("glm-4.7-free")).toBe( "openai-completions", ); expect(resolveOpencodeZenModelApi("some-unknown-model")).toBe( diff --git a/src/agents/opencode-zen-models.ts b/src/agents/opencode-zen-models.ts index 3cdcd0753..df906606d 100644 --- a/src/agents/opencode-zen-models.ts +++ b/src/agents/opencode-zen-models.ts @@ -12,7 +12,7 @@ import type { ModelApi, ModelDefinitionConfig } from "../config/types.js"; export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1"; export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5"; -export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode-zen/${OPENCODE_ZEN_DEFAULT_MODEL}`; +export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode/${OPENCODE_ZEN_DEFAULT_MODEL}`; // Cache for fetched models (1 hour TTL) let cachedModels: ModelDefinitionConfig[] | null = null; @@ -87,10 +87,23 @@ export function resolveOpencodeZenAlias(modelIdOrAlias: string): string { } /** - * OpenCode Zen is an OpenAI-compatible proxy for all models. - * All requests go through /chat/completions regardless of the underlying model. + * OpenCode Zen routes models to different APIs based on model family. 
  */
-export function resolveOpencodeZenModelApi(_modelId: string): ModelApi {
+export function resolveOpencodeZenModelApi(modelId: string): ModelApi {
+  const lower = modelId.toLowerCase();
+  if (
+    lower.startsWith("claude-") ||
+    lower.startsWith("minimax") ||
+    lower.startsWith("alpha-gd4")
+  ) {
+    return "anthropic-messages";
+  }
+  if (lower.startsWith("gemini-")) {
+    return "google-generative-ai";
+  }
+  if (lower.startsWith("gpt-")) {
+    return "openai-responses";
+  }
   return "openai-completions";
 }
 
diff --git a/src/commands/auth-choice.test.ts b/src/commands/auth-choice.test.ts
index f903bfc3b..4cf6c8dc3 100644
--- a/src/commands/auth-choice.test.ts
+++ b/src/commands/auth-choice.test.ts
@@ -147,7 +147,7 @@ describe("applyAuthChoice", () => {
     expect(result.config.agents?.defaults?.model?.primary).toBe(
       "anthropic/claude-opus-4-5",
     );
-    expect(result.config.models?.providers?.["opencode-zen"]).toBeDefined();
-    expect(result.agentModelOverride).toBe("opencode-zen/claude-opus-4-5");
+    expect(result.config.models?.providers?.["opencode-zen"]).toBeUndefined();
+    expect(result.agentModelOverride).toBe("opencode/claude-opus-4-5");
   });
 });
diff --git a/src/commands/auth-choice.ts b/src/commands/auth-choice.ts
index 02744c5dc..7c82b1c40 100644
--- a/src/commands/auth-choice.ts
+++ b/src/commands/auth-choice.ts
@@ -710,8 +710,8 @@ export async function applyAuthChoice(params: {
       });
       await setOpencodeZenApiKey(String(key).trim(), params.agentDir);
       nextConfig = applyAuthProfileConfig(nextConfig, {
-        profileId: "opencode-zen:default",
-        provider: "opencode-zen",
+        profileId: "opencode:default",
+        provider: "opencode",
         mode: "api_key",
       });
       if (params.setDefaultModel) {
@@ -755,7 +755,7 @@ export function resolvePreferredProviderForAuthChoice(
     case "minimax":
       return "lmstudio";
     case "opencode-zen":
-      return "opencode-zen";
+      return "opencode";
     default:
       return undefined;
   }
diff --git a/src/commands/onboard-auth.test.ts b/src/commands/onboard-auth.test.ts
index 495ae029d..8791ed0e0 100644
--- a/src/commands/onboard-auth.test.ts
+++ b/src/commands/onboard-auth.test.ts
@@ -258,23 +258,10 @@ describe("applyMinimaxApiProviderConfig", () => {
 });
 
 describe("applyOpencodeZenProviderConfig", () => {
-  it("adds opencode-zen provider with correct settings", () => {
-    const cfg = applyOpencodeZenProviderConfig({});
-    expect(cfg.models?.providers?.["opencode-zen"]).toMatchObject({
-      baseUrl: "https://opencode.ai/zen/v1",
-      apiKey: "opencode-zen",
-      api: "openai-completions",
-    });
-    expect(
-      cfg.models?.providers?.["opencode-zen"]?.models.length,
-    ).toBeGreaterThan(0);
-  });
-
-  it("adds allowlist entries for fallback models", () => {
+  it("adds allowlist entry for the default model", () => {
     const cfg = applyOpencodeZenProviderConfig({});
     const models = cfg.agents?.defaults?.models ?? {};
-    expect(Object.keys(models)).toContain("opencode-zen/claude-opus-4-5");
-    expect(Object.keys(models)).toContain("opencode-zen/gpt-5.2");
+    expect(Object.keys(models)).toContain("opencode/claude-opus-4-5");
   });
 
   it("preserves existing alias for the default model", () => {
@@ -282,13 +269,13 @@ describe("applyOpencodeZenProviderConfig", () => {
       agents: {
         defaults: {
           models: {
-            "opencode-zen/claude-opus-4-5": { alias: "My Opus" },
+            "opencode/claude-opus-4-5": { alias: "My Opus" },
           },
         },
       },
     });
     expect(
-      cfg.agents?.defaults?.models?.["opencode-zen/claude-opus-4-5"]?.alias,
+      cfg.agents?.defaults?.models?.["opencode/claude-opus-4-5"]?.alias,
     ).toBe("My Opus");
   });
 });
@@ -297,7 +284,7 @@ describe("applyOpencodeZenConfig", () => {
   it("sets correct primary model", () => {
     const cfg = applyOpencodeZenConfig({});
     expect(cfg.agents?.defaults?.model?.primary).toBe(
-      "opencode-zen/claude-opus-4-5",
+      "opencode/claude-opus-4-5",
     );
   });
 
diff --git a/src/commands/onboard-auth.ts b/src/commands/onboard-auth.ts
index 65af6913c..e6eca06c1 100644
--- a/src/commands/onboard-auth.ts
+++ b/src/commands/onboard-auth.ts
@@ -1,11 +1,7 @@
 import type { OAuthCredentials, OAuthProvider } from "@mariozechner/pi-ai";
 import { resolveDefaultAgentDir } from "../agents/agent-scope.js";
 import { upsertAuthProfile } from "../agents/auth-profiles.js";
-import {
-  getOpencodeZenStaticFallbackModels,
-  OPENCODE_ZEN_API_BASE_URL,
-  OPENCODE_ZEN_DEFAULT_MODEL_REF,
-} from "../agents/opencode-zen-models.js";
+import { OPENCODE_ZEN_DEFAULT_MODEL_REF } from "../agents/opencode-zen-models.js";
 import type { ClawdbotConfig } from "../config/config.js";
 import type { ModelDefinitionConfig } from "../config/types.js";
 
@@ -450,10 +446,10 @@ export function applyMinimaxApiConfig(
 
 export async function setOpencodeZenApiKey(key: string, agentDir?: string) {
   upsertAuthProfile({
-    profileId: "opencode-zen:default",
+    profileId: "opencode:default",
     credential: {
       type: "api_key",
-      provider: "opencode-zen",
+      provider: "opencode",
       key,
     },
     agentDir: agentDir ?? resolveDefaultAgentDir(),
@@ -463,21 +459,8 @@ export async function setOpencodeZenApiKey(key: string, agentDir?: string) {
 export function applyOpencodeZenProviderConfig(
   cfg: ClawdbotConfig,
 ): ClawdbotConfig {
-  const opencodeModels = getOpencodeZenStaticFallbackModels();
-
-  const providers = { ...cfg.models?.providers };
-  providers["opencode-zen"] = {
-    baseUrl: OPENCODE_ZEN_API_BASE_URL,
-    apiKey: "opencode-zen",
-    api: "openai-completions",
-    models: opencodeModels,
-  };
-
+  // Use the built-in opencode provider from pi-ai; only seed the allowlist alias.
   const models = { ...cfg.agents?.defaults?.models };
-  for (const model of opencodeModels) {
-    const key = `opencode-zen/${model.id}`;
-    models[key] = models[key] ?? {};
-  }
   models[OPENCODE_ZEN_DEFAULT_MODEL_REF] = {
     ...models[OPENCODE_ZEN_DEFAULT_MODEL_REF],
     alias: models[OPENCODE_ZEN_DEFAULT_MODEL_REF]?.alias ?? "Opus",
@@ -492,10 +475,6 @@ export function applyOpencodeZenProviderConfig(
         models,
       },
     },
-    models: {
-      mode: cfg.models?.mode ?? "merge",
"merge", - providers, - }, }; } diff --git a/src/commands/onboard-non-interactive.ts b/src/commands/onboard-non-interactive.ts index de6293296..c37f35cd7 100644 --- a/src/commands/onboard-non-interactive.ts +++ b/src/commands/onboard-non-interactive.ts @@ -337,11 +337,11 @@ export async function runNonInteractiveOnboarding( nextConfig = applyMinimaxConfig(nextConfig); } else if (authChoice === "opencode-zen") { const resolved = await resolveNonInteractiveApiKey({ - provider: "opencode-zen", + provider: "opencode", cfg: baseConfig, flagValue: opts.opencodeZenApiKey, flagName: "--opencode-zen-api-key", - envVar: "OPENCODE_ZEN_API_KEY", + envVar: "OPENCODE_API_KEY (or OPENCODE_ZEN_API_KEY)", runtime, }); if (!resolved) return; @@ -349,8 +349,8 @@ export async function runNonInteractiveOnboarding( await setOpencodeZenApiKey(resolved.key); } nextConfig = applyAuthProfileConfig(nextConfig, { - profileId: "opencode-zen:default", - provider: "opencode-zen", + profileId: "opencode:default", + provider: "opencode", mode: "api_key", }); nextConfig = applyOpencodeZenConfig(nextConfig); diff --git a/src/commands/opencode-zen-model-default.test.ts b/src/commands/opencode-zen-model-default.test.ts index da6db0ac4..62f8f6ded 100644 --- a/src/commands/opencode-zen-model-default.test.ts +++ b/src/commands/opencode-zen-model-default.test.ts @@ -7,7 +7,7 @@ import { } from "./opencode-zen-model-default.js"; describe("applyOpencodeZenModelDefault", () => { - it("sets opencode-zen default when model is unset", () => { + it("sets opencode default when model is unset", () => { const cfg: ClawdbotConfig = { agents: { defaults: {} } }; const applied = applyOpencodeZenModelDefault(cfg); expect(applied.changed).toBe(true); @@ -36,6 +36,15 @@ describe("applyOpencodeZenModelDefault", () => { expect(applied.next).toEqual(cfg); }); + it("no-ops when already legacy opencode-zen default", () => { + const cfg = { + agents: { defaults: { model: "opencode-zen/claude-opus-4-5" } }, + } as ClawdbotConfig; + const applied = applyOpencodeZenModelDefault(cfg); + expect(applied.changed).toBe(false); + expect(applied.next).toEqual(cfg); + }); + it("preserves fallbacks when setting primary", () => { const cfg: ClawdbotConfig = { agents: { diff --git a/src/commands/opencode-zen-model-default.ts b/src/commands/opencode-zen-model-default.ts index c34d2f425..70d67acbd 100644 --- a/src/commands/opencode-zen-model-default.ts +++ b/src/commands/opencode-zen-model-default.ts @@ -1,7 +1,8 @@ import type { ClawdbotConfig } from "../config/config.js"; import type { AgentModelListConfig } from "../config/types.js"; -export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5"; +export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-5"; +const LEGACY_OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5"; function resolvePrimaryModel( model?: AgentModelListConfig | string, @@ -18,7 +19,11 @@ export function applyOpencodeZenModelDefault(cfg: ClawdbotConfig): { changed: boolean; } { const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim(); - if (current === OPENCODE_ZEN_DEFAULT_MODEL) { + const normalizedCurrent = + current === LEGACY_OPENCODE_ZEN_DEFAULT_MODEL + ? OPENCODE_ZEN_DEFAULT_MODEL + : current; + if (normalizedCurrent === OPENCODE_ZEN_DEFAULT_MODEL) { return { next: cfg, changed: false }; }