fix: align opencode-zen provider setup

This commit is contained in:
Peter Steinberger
2026-01-10 21:37:38 +01:00
parent 46e00ad5e7
commit 8a194b4abc
16 changed files with 263 additions and 96 deletions

View File

@@ -0,0 +1,162 @@
---
summary: "Model provider overview with example configs + CLI flows"
read_when:
- You need a provider-by-provider model setup reference
- You want example configs or CLI onboarding commands for model providers
---
# Model providers
This page covers **LLM/model providers** (not chat providers like WhatsApp/Telegram).
For model selection rules, see [/concepts/models](/concepts/models).
## Quick rules
- Model refs use `provider/model` (example: `opencode/claude-opus-4-5`).
- If you set `agents.defaults.models`, it becomes the allowlist.
- CLI helpers: `clawdbot onboard`, `clawdbot models list`, `clawdbot models set <provider/model>`.
## Built-in providers (pi-ai catalog)
Clawdbot ships with the pi-ai catalog. These providers require **no**
`models.providers` config; just set auth + pick a model.
### OpenAI
- Provider: `openai`
- Auth: `OPENAI_API_KEY`
- Example model: `openai/gpt-5.2`
- CLI: `clawdbot onboard --auth-choice openai-api-key`
```json5
{
agents: { defaults: { model: { primary: "openai/gpt-5.2" } } }
}
```
### Anthropic
- Provider: `anthropic`
- Auth: `ANTHROPIC_API_KEY` or `claude setup-token`
- Example model: `anthropic/claude-opus-4-5`
- CLI: `clawdbot onboard --auth-choice setup-token`
```json5
{
agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } }
}
```
### OpenAI Code (Codex)
- Provider: `openai-codex`
- Auth: OAuth or Codex CLI (`~/.codex/auth.json`)
- Example model: `openai-codex/gpt-5.2`
- CLI: `clawdbot onboard --auth-choice openai-codex` or `codex-cli`
```json5
{
agents: { defaults: { model: { primary: "openai-codex/gpt-5.2" } } }
}
```
### OpenCode Zen
- Provider: `opencode`
- Auth: `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`)
- Example model: `opencode/claude-opus-4-5`
- CLI: `clawdbot onboard --auth-choice opencode-zen`
```json5
{
agents: { defaults: { model: { primary: "opencode/claude-opus-4-5" } } }
}
```
### Google Gemini (API key)
- Provider: `google`
- Auth: `GEMINI_API_KEY`
- Example model: `google/gemini-3-pro`
- CLI: `clawdbot onboard --auth-choice gemini-api-key`
### Google Vertex / Antigravity / Gemini CLI
- Providers: `google-vertex`, `google-antigravity`, `google-gemini-cli`
- Auth: Vertex uses gcloud ADC; Antigravity/Gemini CLI use their respective auth flows
- CLI: `clawdbot onboard --auth-choice antigravity` (others via interactive wizard)
### Z.AI (GLM)
- Provider: `zai`
- Auth: `ZAI_API_KEY`
- Example model: `zai/glm-4.7`
- CLI: `clawdbot onboard --auth-choice zai-api-key`
- Aliases: `z.ai/*` and `z-ai/*` normalize to `zai/*`
### Other built-in providers
- OpenRouter: `openrouter` (`OPENROUTER_API_KEY`)
- Example model: `openrouter/anthropic/claude-sonnet-4-5`
- xAI: `xai` (`XAI_API_KEY`)
- Groq: `groq` (`GROQ_API_KEY`)
- Cerebras: `cerebras` (`CEREBRAS_API_KEY`)
- Mistral: `mistral` (`MISTRAL_API_KEY`)
- GitHub Copilot: `github-copilot` (`COPILOT_GITHUB_TOKEN` / `GH_TOKEN` / `GITHUB_TOKEN`)
## Providers via `models.providers` (custom/base URL)
Use `models.providers` (or `models.json`) to add **custom** providers or
OpenAI/Anthropic-compatible proxies.
### MiniMax
MiniMax is configured via `models.providers` because it uses custom endpoints:
- MiniMax Cloud (OpenAI-compatible): `--auth-choice minimax-cloud`
- MiniMax API (Anthropic-compatible): `--auth-choice minimax-api`
- Auth: `MINIMAX_API_KEY`
### Local proxies (LM Studio, vLLM, LiteLLM, etc.)
Example (OpenAI-compatible):
```json5
{
agents: {
defaults: {
model: { primary: "lmstudio/minimax-m2.1-gs32" },
models: { "lmstudio/minimax-m2.1-gs32": { alias: "Minimax" } }
}
},
models: {
providers: {
lmstudio: {
baseUrl: "http://localhost:1234/v1",
apiKey: "LMSTUDIO_KEY",
api: "openai-completions",
models: [
{
id: "minimax-m2.1-gs32",
name: "MiniMax M2.1",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192
}
]
}
}
}
}
```
## CLI examples
```bash
clawdbot onboard --auth-choice opencode-zen
clawdbot models set opencode/claude-opus-4-5
clawdbot models list
```
See also: [/gateway/configuration](/gateway/configuration) for full configuration examples.

View File

@@ -9,6 +9,7 @@ read_when:
See [/concepts/model-failover](/concepts/model-failover) for auth profile See [/concepts/model-failover](/concepts/model-failover) for auth profile
rotation, cooldowns, and how that interacts with fallbacks. rotation, cooldowns, and how that interacts with fallbacks.
Quick provider overview + examples: [/concepts/model-providers](/concepts/model-providers).
## How model selection works ## How model selection works

View File

@@ -580,6 +580,7 @@
"group": "Install & Updates", "group": "Install & Updates",
"pages": [ "pages": [
"install/updating", "install/updating",
"install/ansible",
"install/nix", "install/nix",
"install/docker", "install/docker",
"install/bun" "install/bun"
@@ -589,7 +590,9 @@
"group": "CLI", "group": "CLI",
"pages": [ "pages": [
"cli/index", "cli/index",
"cli/message",
"cli/gateway", "cli/gateway",
"cli/update",
"cli/sandbox" "cli/sandbox"
] ]
}, },
@@ -612,12 +615,16 @@
"concepts/presence", "concepts/presence",
"concepts/provider-routing", "concepts/provider-routing",
"concepts/messages", "concepts/messages",
"concepts/streaming",
"concepts/groups", "concepts/groups",
"concepts/group-messages", "concepts/group-messages",
"concepts/typing-indicators", "concepts/typing-indicators",
"concepts/queue", "concepts/queue",
"concepts/retry",
"concepts/model-providers",
"concepts/models", "concepts/models",
"concepts/model-failover", "concepts/model-failover",
"concepts/usage-tracking",
"concepts/timezone", "concepts/timezone",
"concepts/typebox" "concepts/typebox"
] ]
@@ -628,6 +635,7 @@
"gateway", "gateway",
"gateway/pairing", "gateway/pairing",
"gateway/gateway-lock", "gateway/gateway-lock",
"environment",
"gateway/configuration", "gateway/configuration",
"gateway/configuration-examples", "gateway/configuration-examples",
"gateway/authentication", "gateway/authentication",
@@ -637,7 +645,10 @@
"gateway/doctor", "gateway/doctor",
"gateway/logging", "gateway/logging",
"gateway/security", "gateway/security",
"gateway/sandbox-vs-tool-policy-vs-elevated",
"gateway/sandboxing",
"gateway/troubleshooting", "gateway/troubleshooting",
"debugging",
"gateway/remote", "gateway/remote",
"gateway/remote-gateway-readme", "gateway/remote-gateway-readme",
"gateway/discovery", "gateway/discovery",
@@ -659,12 +670,15 @@
"group": "Providers", "group": "Providers",
"pages": [ "pages": [
"providers/whatsapp", "providers/whatsapp",
"broadcast-groups",
"providers/telegram", "providers/telegram",
"providers/grammy", "providers/grammy",
"providers/discord", "providers/discord",
"providers/slack", "providers/slack",
"providers/signal", "providers/signal",
"providers/imessage", "providers/imessage",
"providers/msteams",
"providers/troubleshooting",
"providers/location" "providers/location"
] ]
}, },
@@ -690,6 +704,8 @@
"tools/thinking", "tools/thinking",
"tools/agent-send", "tools/agent-send",
"tools/subagents", "tools/subagents",
"multi-agent-sandbox-tools",
"tools/reactions",
"tools/skills", "tools/skills",
"tools/skills-config", "tools/skills-config",
"tools/clawdhub" "tools/clawdhub"

View File

@@ -1423,6 +1423,7 @@ Clawdbot uses the **pi-coding-agent** model catalog. You can add custom provider
(LiteLLM, local OpenAI-compatible servers, Anthropic proxies, etc.) by writing (LiteLLM, local OpenAI-compatible servers, Anthropic proxies, etc.) by writing
`~/.clawdbot/agents/<agentId>/agent/models.json` or by defining the same schema inside your `~/.clawdbot/agents/<agentId>/agent/models.json` or by defining the same schema inside your
Clawdbot config under `models.providers`. Clawdbot config under `models.providers`.
Provider-by-provider overview + examples: [/concepts/model-providers](/concepts/model-providers).
When `models.providers` is present, Clawdbot writes/merges a `models.json` into When `models.providers` is present, Clawdbot writes/merges a `models.json` into
`~/.clawdbot/agents/<agentId>/agent/` on startup: `~/.clawdbot/agents/<agentId>/agent/` on startup:
@@ -1467,10 +1468,12 @@ Select the model via `agents.defaults.model.primary` (provider/model).
### OpenCode Zen (multi-model proxy) ### OpenCode Zen (multi-model proxy)
OpenCode Zen is an OpenAI-compatible proxy at `https://opencode.ai/zen/v1`. Get an API key at https://opencode.ai/auth and set `OPENCODE_ZEN_API_KEY`. OpenCode Zen is a multi-model gateway with per-model endpoints. Clawdbot uses
the built-in `opencode` provider from pi-ai; set `OPENCODE_API_KEY` (or
`OPENCODE_ZEN_API_KEY`) from https://opencode.ai/auth.
Notes: Notes:
- Model refs use `opencode-zen/<modelId>` (example: `opencode-zen/claude-opus-4-5`). - Model refs use `opencode/<modelId>` (example: `opencode/claude-opus-4-5`).
- If you enable an allowlist via `agents.defaults.models`, add each model you plan to use. - If you enable an allowlist via `agents.defaults.models`, add each model you plan to use.
- Shortcut: `clawdbot onboard --auth-choice opencode-zen`. - Shortcut: `clawdbot onboard --auth-choice opencode-zen`.
@@ -1478,29 +1481,8 @@ Notes:
{ {
agents: { agents: {
defaults: { defaults: {
model: { primary: "opencode-zen/claude-opus-4-5" }, model: { primary: "opencode/claude-opus-4-5" },
models: { "opencode-zen/claude-opus-4-5": { alias: "Opus" } } models: { "opencode/claude-opus-4-5": { alias: "Opus" } }
}
},
models: {
mode: "merge",
providers: {
"opencode-zen": {
baseUrl: "https://opencode.ai/zen/v1",
apiKey: "${OPENCODE_ZEN_API_KEY}",
api: "openai-completions",
models: [
{
id: "claude-opus-4-5",
name: "Claude Opus 4.5",
reasoning: true,
input: ["text", "image"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 32000
}
]
}
} }
} }
} }

View File

@@ -78,7 +78,7 @@ Tip: `--json` does **not** imply non-interactive mode. Use `--non-interactive` (
- **OpenAI Code (Codex) subscription (OAuth)**: browser flow; paste the `code#state`. - **OpenAI Code (Codex) subscription (OAuth)**: browser flow; paste the `code#state`.
- Sets `agents.defaults.model` to `openai-codex/gpt-5.2` when model is unset or `openai/*`. - Sets `agents.defaults.model` to `openai-codex/gpt-5.2` when model is unset or `openai/*`.
- **OpenAI API key**: uses `OPENAI_API_KEY` if present or prompts for a key, then saves it to `~/.clawdbot/.env` so launchd can read it. - **OpenAI API key**: uses `OPENAI_API_KEY` if present or prompts for a key, then saves it to `~/.clawdbot/.env` so launchd can read it.
- **OpenCode Zen (multi-model proxy)**: prompts for `OPENCODE_ZEN_API_KEY` (get it at https://opencode.ai/auth). - **OpenCode Zen (multi-model proxy)**: prompts for `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`, get it at https://opencode.ai/auth).
- **API key**: stores the key for you. - **API key**: stores the key for you.
- **MiniMax M2.1 (minimax.io)**: config is autowritten for the OpenAI-compatible `/v1` endpoint. - **MiniMax M2.1 (minimax.io)**: config is autowritten for the OpenAI-compatible `/v1` endpoint.
- **MiniMax API (platform.minimax.io)**: config is autowritten for the Anthropic-compatible `/anthropic` endpoint. - **MiniMax API (platform.minimax.io)**: config is autowritten for the Anthropic-compatible `/anthropic` endpoint.
@@ -205,7 +205,7 @@ OpenCode Zen example:
clawdbot onboard --non-interactive \ clawdbot onboard --non-interactive \
--mode local \ --mode local \
--auth-choice opencode-zen \ --auth-choice opencode-zen \
--opencode-zen-api-key "$OPENCODE_ZEN_API_KEY" \ --opencode-zen-api-key "$OPENCODE_API_KEY" \
--gateway-port 18789 \ --gateway-port 18789 \
--gateway-bind loopback --gateway-bind loopback
``` ```

View File

@@ -2,6 +2,7 @@ import { type Api, getEnvApiKey, type Model } from "@mariozechner/pi-ai";
import type { ClawdbotConfig } from "../config/config.js"; import type { ClawdbotConfig } from "../config/config.js";
import type { ModelProviderConfig } from "../config/types.js"; import type { ModelProviderConfig } from "../config/types.js";
import { getShellEnvAppliedKeys } from "../infra/shell-env.js"; import { getShellEnvAppliedKeys } from "../infra/shell-env.js";
import { normalizeProviderId } from "./model-selection.js";
import { import {
type AuthProfileStore, type AuthProfileStore,
ensureAuthProfileStore, ensureAuthProfileStore,
@@ -103,6 +104,7 @@ export type EnvApiKeyResult = { apiKey: string; source: string };
export type ModelAuthMode = "api-key" | "oauth" | "token" | "mixed" | "unknown"; export type ModelAuthMode = "api-key" | "oauth" | "token" | "mixed" | "unknown";
export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null { export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
const normalized = normalizeProviderId(provider);
const applied = new Set(getShellEnvAppliedKeys()); const applied = new Set(getShellEnvAppliedKeys());
const pick = (envVar: string): EnvApiKeyResult | null => { const pick = (envVar: string): EnvApiKeyResult | null => {
const value = process.env[envVar]?.trim(); const value = process.env[envVar]?.trim();
@@ -113,26 +115,30 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
return { apiKey: value, source }; return { apiKey: value, source };
}; };
if (provider === "github-copilot") { if (normalized === "github-copilot") {
return ( return (
pick("COPILOT_GITHUB_TOKEN") ?? pick("GH_TOKEN") ?? pick("GITHUB_TOKEN") pick("COPILOT_GITHUB_TOKEN") ?? pick("GH_TOKEN") ?? pick("GITHUB_TOKEN")
); );
} }
if (provider === "anthropic") { if (normalized === "anthropic") {
return pick("ANTHROPIC_OAUTH_TOKEN") ?? pick("ANTHROPIC_API_KEY"); return pick("ANTHROPIC_OAUTH_TOKEN") ?? pick("ANTHROPIC_API_KEY");
} }
if (provider === "zai") { if (normalized === "zai") {
return pick("ZAI_API_KEY") ?? pick("Z_AI_API_KEY"); return pick("ZAI_API_KEY") ?? pick("Z_AI_API_KEY");
} }
if (provider === "google-vertex") { if (normalized === "google-vertex") {
const envKey = getEnvApiKey(provider); const envKey = getEnvApiKey(normalized);
if (!envKey) return null; if (!envKey) return null;
return { apiKey: envKey, source: "gcloud adc" }; return { apiKey: envKey, source: "gcloud adc" };
} }
if (normalized === "opencode") {
return pick("OPENCODE_API_KEY") ?? pick("OPENCODE_ZEN_API_KEY");
}
const envMap: Record<string, string> = { const envMap: Record<string, string> = {
openai: "OPENAI_API_KEY", openai: "OPENAI_API_KEY",
google: "GEMINI_API_KEY", google: "GEMINI_API_KEY",
@@ -142,9 +148,9 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
openrouter: "OPENROUTER_API_KEY", openrouter: "OPENROUTER_API_KEY",
minimax: "MINIMAX_API_KEY", minimax: "MINIMAX_API_KEY",
mistral: "MISTRAL_API_KEY", mistral: "MISTRAL_API_KEY",
"opencode-zen": "OPENCODE_ZEN_API_KEY", opencode: "OPENCODE_API_KEY",
}; };
const envVar = envMap[provider]; const envVar = envMap[normalized];
if (!envVar) return null; if (!envVar) return null;
return pick(envVar); return pick(envVar);
} }

View File

@@ -24,6 +24,7 @@ export function modelKey(provider: string, model: string) {
export function normalizeProviderId(provider: string): string { export function normalizeProviderId(provider: string): string {
const normalized = provider.trim().toLowerCase(); const normalized = provider.trim().toLowerCase();
if (normalized === "z.ai" || normalized === "z-ai") return "zai"; if (normalized === "z.ai" || normalized === "z-ai") return "zai";
if (normalized === "opencode-zen") return "opencode";
return normalized; return normalized;
} }

View File

@@ -41,12 +41,18 @@ describe("resolveOpencodeZenAlias", () => {
}); });
describe("resolveOpencodeZenModelApi", () => { describe("resolveOpencodeZenModelApi", () => {
it("returns openai-completions for all models (OpenCode Zen is OpenAI-compatible)", () => { it("maps APIs by model family", () => {
expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe( expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe(
"openai-completions", "anthropic-messages",
);
expect(resolveOpencodeZenModelApi("minimax-m2.1-free")).toBe(
"anthropic-messages",
); );
expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-completions");
expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe( expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe(
"google-generative-ai",
);
expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-responses");
expect(resolveOpencodeZenModelApi("glm-4.7-free")).toBe(
"openai-completions", "openai-completions",
); );
expect(resolveOpencodeZenModelApi("some-unknown-model")).toBe( expect(resolveOpencodeZenModelApi("some-unknown-model")).toBe(

View File

@@ -12,7 +12,7 @@ import type { ModelApi, ModelDefinitionConfig } from "../config/types.js";
export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1"; export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1";
export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5"; export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5";
export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode-zen/${OPENCODE_ZEN_DEFAULT_MODEL}`; export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode/${OPENCODE_ZEN_DEFAULT_MODEL}`;
// Cache for fetched models (1 hour TTL) // Cache for fetched models (1 hour TTL)
let cachedModels: ModelDefinitionConfig[] | null = null; let cachedModels: ModelDefinitionConfig[] | null = null;
@@ -87,10 +87,23 @@ export function resolveOpencodeZenAlias(modelIdOrAlias: string): string {
} }
/** /**
* OpenCode Zen is an OpenAI-compatible proxy for all models. * OpenCode Zen routes models to different APIs based on model family.
* All requests go through /chat/completions regardless of the underlying model.
*/ */
export function resolveOpencodeZenModelApi(_modelId: string): ModelApi { export function resolveOpencodeZenModelApi(modelId: string): ModelApi {
const lower = modelId.toLowerCase();
if (
lower.startsWith("claude-") ||
lower.startsWith("minimax") ||
lower.startsWith("alpha-gd4")
) {
return "anthropic-messages";
}
if (lower.startsWith("gemini-")) {
return "google-generative-ai";
}
if (lower.startsWith("gpt-")) {
return "openai-responses";
}
return "openai-completions"; return "openai-completions";
} }

View File

@@ -147,7 +147,7 @@ describe("applyAuthChoice", () => {
expect(result.config.agents?.defaults?.model?.primary).toBe( expect(result.config.agents?.defaults?.model?.primary).toBe(
"anthropic/claude-opus-4-5", "anthropic/claude-opus-4-5",
); );
expect(result.config.models?.providers?.["opencode-zen"]).toBeDefined(); expect(result.config.models?.providers?.["opencode-zen"]).toBeUndefined();
expect(result.agentModelOverride).toBe("opencode-zen/claude-opus-4-5"); expect(result.agentModelOverride).toBe("opencode/claude-opus-4-5");
}); });
}); });

View File

@@ -710,8 +710,8 @@ export async function applyAuthChoice(params: {
}); });
await setOpencodeZenApiKey(String(key).trim(), params.agentDir); await setOpencodeZenApiKey(String(key).trim(), params.agentDir);
nextConfig = applyAuthProfileConfig(nextConfig, { nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "opencode-zen:default", profileId: "opencode:default",
provider: "opencode-zen", provider: "opencode",
mode: "api_key", mode: "api_key",
}); });
if (params.setDefaultModel) { if (params.setDefaultModel) {
@@ -755,7 +755,7 @@ export function resolvePreferredProviderForAuthChoice(
case "minimax": case "minimax":
return "lmstudio"; return "lmstudio";
case "opencode-zen": case "opencode-zen":
return "opencode-zen"; return "opencode";
default: default:
return undefined; return undefined;
} }

View File

@@ -258,23 +258,10 @@ describe("applyMinimaxApiProviderConfig", () => {
}); });
describe("applyOpencodeZenProviderConfig", () => { describe("applyOpencodeZenProviderConfig", () => {
it("adds opencode-zen provider with correct settings", () => { it("adds allowlist entry for the default model", () => {
const cfg = applyOpencodeZenProviderConfig({});
expect(cfg.models?.providers?.["opencode-zen"]).toMatchObject({
baseUrl: "https://opencode.ai/zen/v1",
apiKey: "opencode-zen",
api: "openai-completions",
});
expect(
cfg.models?.providers?.["opencode-zen"]?.models.length,
).toBeGreaterThan(0);
});
it("adds allowlist entries for fallback models", () => {
const cfg = applyOpencodeZenProviderConfig({}); const cfg = applyOpencodeZenProviderConfig({});
const models = cfg.agents?.defaults?.models ?? {}; const models = cfg.agents?.defaults?.models ?? {};
expect(Object.keys(models)).toContain("opencode-zen/claude-opus-4-5"); expect(Object.keys(models)).toContain("opencode/claude-opus-4-5");
expect(Object.keys(models)).toContain("opencode-zen/gpt-5.2");
}); });
it("preserves existing alias for the default model", () => { it("preserves existing alias for the default model", () => {
@@ -282,13 +269,13 @@ describe("applyOpencodeZenProviderConfig", () => {
agents: { agents: {
defaults: { defaults: {
models: { models: {
"opencode-zen/claude-opus-4-5": { alias: "My Opus" }, "opencode/claude-opus-4-5": { alias: "My Opus" },
}, },
}, },
}, },
}); });
expect( expect(
cfg.agents?.defaults?.models?.["opencode-zen/claude-opus-4-5"]?.alias, cfg.agents?.defaults?.models?.["opencode/claude-opus-4-5"]?.alias,
).toBe("My Opus"); ).toBe("My Opus");
}); });
}); });
@@ -297,7 +284,7 @@ describe("applyOpencodeZenConfig", () => {
it("sets correct primary model", () => { it("sets correct primary model", () => {
const cfg = applyOpencodeZenConfig({}); const cfg = applyOpencodeZenConfig({});
expect(cfg.agents?.defaults?.model?.primary).toBe( expect(cfg.agents?.defaults?.model?.primary).toBe(
"opencode-zen/claude-opus-4-5", "opencode/claude-opus-4-5",
); );
}); });

View File

@@ -1,11 +1,7 @@
import type { OAuthCredentials, OAuthProvider } from "@mariozechner/pi-ai"; import type { OAuthCredentials, OAuthProvider } from "@mariozechner/pi-ai";
import { resolveDefaultAgentDir } from "../agents/agent-scope.js"; import { resolveDefaultAgentDir } from "../agents/agent-scope.js";
import { upsertAuthProfile } from "../agents/auth-profiles.js"; import { upsertAuthProfile } from "../agents/auth-profiles.js";
import { import { OPENCODE_ZEN_DEFAULT_MODEL_REF } from "../agents/opencode-zen-models.js";
getOpencodeZenStaticFallbackModels,
OPENCODE_ZEN_API_BASE_URL,
OPENCODE_ZEN_DEFAULT_MODEL_REF,
} from "../agents/opencode-zen-models.js";
import type { ClawdbotConfig } from "../config/config.js"; import type { ClawdbotConfig } from "../config/config.js";
import type { ModelDefinitionConfig } from "../config/types.js"; import type { ModelDefinitionConfig } from "../config/types.js";
@@ -450,10 +446,10 @@ export function applyMinimaxApiConfig(
export async function setOpencodeZenApiKey(key: string, agentDir?: string) { export async function setOpencodeZenApiKey(key: string, agentDir?: string) {
upsertAuthProfile({ upsertAuthProfile({
profileId: "opencode-zen:default", profileId: "opencode:default",
credential: { credential: {
type: "api_key", type: "api_key",
provider: "opencode-zen", provider: "opencode",
key, key,
}, },
agentDir: agentDir ?? resolveDefaultAgentDir(), agentDir: agentDir ?? resolveDefaultAgentDir(),
@@ -463,21 +459,8 @@ export async function setOpencodeZenApiKey(key: string, agentDir?: string) {
export function applyOpencodeZenProviderConfig( export function applyOpencodeZenProviderConfig(
cfg: ClawdbotConfig, cfg: ClawdbotConfig,
): ClawdbotConfig { ): ClawdbotConfig {
const opencodeModels = getOpencodeZenStaticFallbackModels(); // Use the built-in opencode provider from pi-ai; only seed the allowlist alias.
const providers = { ...cfg.models?.providers };
providers["opencode-zen"] = {
baseUrl: OPENCODE_ZEN_API_BASE_URL,
apiKey: "opencode-zen",
api: "openai-completions",
models: opencodeModels,
};
const models = { ...cfg.agents?.defaults?.models }; const models = { ...cfg.agents?.defaults?.models };
for (const model of opencodeModels) {
const key = `opencode-zen/${model.id}`;
models[key] = models[key] ?? {};
}
models[OPENCODE_ZEN_DEFAULT_MODEL_REF] = { models[OPENCODE_ZEN_DEFAULT_MODEL_REF] = {
...models[OPENCODE_ZEN_DEFAULT_MODEL_REF], ...models[OPENCODE_ZEN_DEFAULT_MODEL_REF],
alias: models[OPENCODE_ZEN_DEFAULT_MODEL_REF]?.alias ?? "Opus", alias: models[OPENCODE_ZEN_DEFAULT_MODEL_REF]?.alias ?? "Opus",
@@ -492,10 +475,6 @@ export function applyOpencodeZenProviderConfig(
models, models,
}, },
}, },
models: {
mode: cfg.models?.mode ?? "merge",
providers,
},
}; };
} }

View File

@@ -337,11 +337,11 @@ export async function runNonInteractiveOnboarding(
nextConfig = applyMinimaxConfig(nextConfig); nextConfig = applyMinimaxConfig(nextConfig);
} else if (authChoice === "opencode-zen") { } else if (authChoice === "opencode-zen") {
const resolved = await resolveNonInteractiveApiKey({ const resolved = await resolveNonInteractiveApiKey({
provider: "opencode-zen", provider: "opencode",
cfg: baseConfig, cfg: baseConfig,
flagValue: opts.opencodeZenApiKey, flagValue: opts.opencodeZenApiKey,
flagName: "--opencode-zen-api-key", flagName: "--opencode-zen-api-key",
envVar: "OPENCODE_ZEN_API_KEY", envVar: "OPENCODE_API_KEY (or OPENCODE_ZEN_API_KEY)",
runtime, runtime,
}); });
if (!resolved) return; if (!resolved) return;
@@ -349,8 +349,8 @@ export async function runNonInteractiveOnboarding(
await setOpencodeZenApiKey(resolved.key); await setOpencodeZenApiKey(resolved.key);
} }
nextConfig = applyAuthProfileConfig(nextConfig, { nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "opencode-zen:default", profileId: "opencode:default",
provider: "opencode-zen", provider: "opencode",
mode: "api_key", mode: "api_key",
}); });
nextConfig = applyOpencodeZenConfig(nextConfig); nextConfig = applyOpencodeZenConfig(nextConfig);

View File

@@ -7,7 +7,7 @@ import {
} from "./opencode-zen-model-default.js"; } from "./opencode-zen-model-default.js";
describe("applyOpencodeZenModelDefault", () => { describe("applyOpencodeZenModelDefault", () => {
it("sets opencode-zen default when model is unset", () => { it("sets opencode default when model is unset", () => {
const cfg: ClawdbotConfig = { agents: { defaults: {} } }; const cfg: ClawdbotConfig = { agents: { defaults: {} } };
const applied = applyOpencodeZenModelDefault(cfg); const applied = applyOpencodeZenModelDefault(cfg);
expect(applied.changed).toBe(true); expect(applied.changed).toBe(true);
@@ -36,6 +36,15 @@ describe("applyOpencodeZenModelDefault", () => {
expect(applied.next).toEqual(cfg); expect(applied.next).toEqual(cfg);
}); });
it("no-ops when already legacy opencode-zen default", () => {
const cfg = {
agents: { defaults: { model: "opencode-zen/claude-opus-4-5" } },
} as ClawdbotConfig;
const applied = applyOpencodeZenModelDefault(cfg);
expect(applied.changed).toBe(false);
expect(applied.next).toEqual(cfg);
});
it("preserves fallbacks when setting primary", () => { it("preserves fallbacks when setting primary", () => {
const cfg: ClawdbotConfig = { const cfg: ClawdbotConfig = {
agents: { agents: {

View File

@@ -1,7 +1,8 @@
import type { ClawdbotConfig } from "../config/config.js"; import type { ClawdbotConfig } from "../config/config.js";
import type { AgentModelListConfig } from "../config/types.js"; import type { AgentModelListConfig } from "../config/types.js";
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5"; export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-5";
const LEGACY_OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5";
function resolvePrimaryModel( function resolvePrimaryModel(
model?: AgentModelListConfig | string, model?: AgentModelListConfig | string,
@@ -18,7 +19,11 @@ export function applyOpencodeZenModelDefault(cfg: ClawdbotConfig): {
changed: boolean; changed: boolean;
} { } {
const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim(); const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim();
if (current === OPENCODE_ZEN_DEFAULT_MODEL) { const normalizedCurrent =
current === LEGACY_OPENCODE_ZEN_DEFAULT_MODEL
? OPENCODE_ZEN_DEFAULT_MODEL
: current;
if (normalizedCurrent === OPENCODE_ZEN_DEFAULT_MODEL) {
return { next: cfg, changed: false }; return { next: cfg, changed: false };
} }