feat(onboard): add OpenCode Zen as model provider

This commit is contained in:
Magi Metal
2026-01-09 18:12:07 -05:00
committed by Peter Steinberger
parent 9b1f164447
commit a399fa36c8
11 changed files with 676 additions and 35 deletions

View File

@@ -139,6 +139,7 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
minimax: "MINIMAX_API_KEY", minimax: "MINIMAX_API_KEY",
zai: "ZAI_API_KEY", zai: "ZAI_API_KEY",
mistral: "MISTRAL_API_KEY", mistral: "MISTRAL_API_KEY",
"opencode-zen": "OPENCODE_ZEN_API_KEY",
}; };
const envVar = envMap[provider]; const envVar = envMap[provider];
if (!envVar) return null; if (!envVar) return null;

View File

@@ -0,0 +1,88 @@
// Unit tests for the OpenCode Zen model catalog helpers:
// alias resolution, API mapping, the static fallback catalog,
// and the exported alias table itself.
import { describe, expect, it } from "vitest";
import {
  getOpencodeZenStaticFallbackModels,
  OPENCODE_ZEN_MODEL_ALIASES,
  resolveOpencodeZenAlias,
  resolveOpencodeZenModelApi,
} from "./opencode-zen-models.js";
// Known shortcuts resolve to canonical model IDs; unknown inputs pass
// through unchanged; matching is case-insensitive.
describe("resolveOpencodeZenAlias", () => {
  it("resolves opus alias", () => {
    expect(resolveOpencodeZenAlias("opus")).toBe("claude-opus-4-5");
  });
  it("resolves gpt5 alias", () => {
    expect(resolveOpencodeZenAlias("gpt5")).toBe("gpt-5.2");
  });
  it("resolves gemini alias", () => {
    expect(resolveOpencodeZenAlias("gemini")).toBe("gemini-3-pro");
  });
  it("returns input if no alias exists", () => {
    // Unknown IDs must be returned verbatim (not lowercased/trimmed).
    expect(resolveOpencodeZenAlias("some-unknown-model")).toBe(
      "some-unknown-model",
    );
  });
  it("is case-insensitive", () => {
    expect(resolveOpencodeZenAlias("OPUS")).toBe("claude-opus-4-5");
    expect(resolveOpencodeZenAlias("Gpt5")).toBe("gpt-5.2");
  });
});
// The proxy fronts every model behind one OpenAI-compatible endpoint,
// so the API kind is constant regardless of the model ID.
describe("resolveOpencodeZenModelApi", () => {
  it("returns openai-completions for all models (OpenCode Zen is OpenAI-compatible)", () => {
    expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe(
      "openai-completions",
    );
    expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-completions");
    expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe(
      "openai-completions",
    );
    expect(resolveOpencodeZenModelApi("some-unknown-model")).toBe(
      "openai-completions",
    );
  });
});
// The static fallback catalog must be non-empty, span the three major
// model families, and produce structurally complete definitions.
describe("getOpencodeZenStaticFallbackModels", () => {
  it("returns an array of models", () => {
    const models = getOpencodeZenStaticFallbackModels();
    expect(Array.isArray(models)).toBe(true);
    expect(models.length).toBeGreaterThan(0);
  });
  it("includes Claude, GPT, and Gemini models", () => {
    const models = getOpencodeZenStaticFallbackModels();
    const ids = models.map((m) => m.id);
    expect(ids).toContain("claude-opus-4-5");
    expect(ids).toContain("gpt-5.2");
    expect(ids).toContain("gemini-3-pro");
  });
  it("returns valid ModelDefinitionConfig objects", () => {
    const models = getOpencodeZenStaticFallbackModels();
    // Shape check only — field values are heuristic defaults.
    for (const model of models) {
      expect(model.id).toBeDefined();
      expect(model.name).toBeDefined();
      expect(typeof model.reasoning).toBe("boolean");
      expect(Array.isArray(model.input)).toBe(true);
      expect(model.cost).toBeDefined();
      expect(typeof model.contextWindow).toBe("number");
      expect(typeof model.maxTokens).toBe("number");
    }
  });
});
// Spot-check a handful of entries in the exported alias table directly.
describe("OPENCODE_ZEN_MODEL_ALIASES", () => {
  it("has expected aliases", () => {
    expect(OPENCODE_ZEN_MODEL_ALIASES.opus).toBe("claude-opus-4-5");
    expect(OPENCODE_ZEN_MODEL_ALIASES.sonnet).toBe("claude-sonnet-4-20250514");
    expect(OPENCODE_ZEN_MODEL_ALIASES.gpt5).toBe("gpt-5.2");
    expect(OPENCODE_ZEN_MODEL_ALIASES.o1).toBe("o1-2025-04-16");
    expect(OPENCODE_ZEN_MODEL_ALIASES.gemini).toBe("gemini-3-pro");
  });
});

View File

@@ -0,0 +1,285 @@
/**
* OpenCode Zen model catalog with dynamic fetching, caching, and static fallback.
*
* OpenCode Zen is a $200/month subscription that provides proxy access to multiple
* AI models (Claude, GPT, Gemini, etc.) through a single API endpoint.
*
* API endpoint: https://opencode.ai/zen/v1
* Auth URL: https://opencode.ai/auth
*/
import type { ModelApi, ModelDefinitionConfig } from "../config/types.js";
// Base URL of OpenCode Zen's OpenAI-compatible proxy API.
export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1";
// Model selected by default when OpenCode Zen is chosen during onboarding.
export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5";
// Fully-qualified "<provider>/<model>" reference for the default model.
export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode-zen/${OPENCODE_ZEN_DEFAULT_MODEL}`;
// Module-level cache for fetched models (1 hour TTL).
let cachedModels: ModelDefinitionConfig[] | null = null;
let cacheTimestamp = 0; // epoch ms of the last successful fetch
const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
/**
 * Alias → canonical model ID lookup table.
 * Lets users write short names ("opus", "gpt5") instead of full model IDs.
 */
export const OPENCODE_ZEN_MODEL_ALIASES: Record<string, string> = {
  // Anthropic / Claude
  opus: "claude-opus-4-5",
  "opus-4.5": "claude-opus-4-5",
  "opus-4": "claude-opus-4-5",
  sonnet: "claude-sonnet-4-20250514",
  "sonnet-4": "claude-sonnet-4-20250514",
  haiku: "claude-haiku-3-5-20241022",
  "haiku-3.5": "claude-haiku-3-5-20241022",
  // OpenAI GPT family
  gpt5: "gpt-5.2",
  "gpt-5": "gpt-5.2",
  gpt4: "gpt-4.1",
  "gpt-4": "gpt-4.1",
  "gpt-mini": "gpt-4.1-mini",
  // OpenAI o-series (reasoning)
  o1: "o1-2025-04-16",
  o3: "o3-2025-04-16",
  "o3-mini": "o3-mini-2025-04-16",
  // Google Gemini family
  gemini: "gemini-3-pro",
  "gemini-pro": "gemini-3-pro",
  "gemini-3": "gemini-3-pro",
  "gemini-2.5": "gemini-2.5-pro",
};
/**
 * Map a user-supplied alias to its canonical model ID.
 * Matching is case-insensitive and ignores surrounding whitespace.
 * Inputs with no alias entry are returned unchanged (verbatim, not
 * lowercased), so full model IDs always pass through.
 */
export function resolveOpencodeZenAlias(modelIdOrAlias: string): string {
  const key = modelIdOrAlias.trim().toLowerCase();
  return OPENCODE_ZEN_MODEL_ALIASES[key] ?? modelIdOrAlias;
}
/**
 * Resolve the wire API used to talk to a given model.
 *
 * OpenCode Zen fronts every model behind the same OpenAI-compatible
 * /chat/completions endpoint, so the answer is constant regardless of
 * the underlying model (hence the unused parameter).
 */
export function resolveOpencodeZenModelApi(_modelId: string): ModelApi {
  const api: ModelApi = "openai-completions";
  return api;
}
/**
 * Heuristic: does this model ID denote a reasoning (extended-thinking)
 * model? Matches opus models, the o1/o3/o4 series, and any ID carrying
 * a "-thinking" marker. Purely string-based — no API call.
 */
function isReasoningModel(modelId: string): boolean {
  const id = modelId.toLowerCase();
  if (id.includes("opus") || id.includes("-thinking")) {
    return true;
  }
  return ["o1-", "o3-", "o4-"].some((prefix) => id.startsWith(prefix));
}
/**
 * Heuristic: does this model ID accept image input?
 * Defaults to true; only the o1-/o3- series are treated as text-only.
 * NOTE(review): o4-* counts as reasoning in isReasoningModel() but is
 * treated as image-capable here — confirm that asymmetry is intended.
 */
function supportsImageInput(modelId: string): boolean {
  const id = modelId.toLowerCase();
  const textOnly = id.startsWith("o1-") || id.startsWith("o3-");
  return !textOnly;
}
// Default cost structure shared by every model definition.
// All zeros: usage is covered by the flat OpenCode Zen subscription,
// so per-token costs are not metered here. (The units are moot while
// everything is 0 — the original comment said both "per million tokens"
// and "USD cents"; confirm ModelDefinitionConfig's cost convention
// before making these non-zero.)
const DEFAULT_COST = {
  input: 0, // Included in subscription
  output: 0,
  cacheRead: 0,
  cacheWrite: 0,
};
/**
 * Best-effort context-window size (tokens) by model family, inferred
 * from the model ID. Rules are evaluated in order, so more specific
 * families ("gemini-3") must precede their generic fallback ("gemini").
 */
function getDefaultContextWindow(modelId: string): number {
  const id = modelId.toLowerCase();
  const rules: Array<[(s: string) => boolean, number]> = [
    [(s) => s.includes("opus"), 200000],
    [(s) => s.includes("sonnet"), 200000],
    [(s) => s.includes("haiku"), 200000],
    [(s) => s.includes("gpt-5"), 256000],
    [(s) => s.includes("gpt-4"), 128000],
    [(s) => s.startsWith("o1-") || s.startsWith("o3-"), 200000],
    [(s) => s.includes("gemini-3"), 1000000],
    [(s) => s.includes("gemini-2.5"), 1000000],
    [(s) => s.includes("gemini"), 128000],
  ];
  for (const [matches, tokens] of rules) {
    if (matches(id)) {
      return tokens;
    }
  }
  return 128000; // Conservative default for unknown families
}
/**
 * Best-effort max output tokens by model family, inferred from the
 * model ID. Checked in order of specificity; unknown families get a
 * conservative 8192.
 */
function getDefaultMaxTokens(modelId: string): number {
  const id = modelId.toLowerCase();
  switch (true) {
    case id.includes("opus"):
      return 32000;
    case id.includes("sonnet"):
      return 16000;
    case id.includes("haiku"):
      return 8192;
    case id.startsWith("o1-") || id.startsWith("o3-"):
      return 100000;
    case id.includes("gpt"):
      return 16384;
    case id.includes("gemini"):
      return 8192;
    default:
      return 8192;
  }
}
/**
 * Assemble a complete ModelDefinitionConfig for a model ID using the
 * family heuristics above (display name, reasoning flag, input modes,
 * context window, max tokens) plus the zero-cost subscription pricing.
 */
function buildModelDefinition(modelId: string): ModelDefinitionConfig {
  const acceptsImages = supportsImageInput(modelId);
  return {
    id: modelId,
    name: formatModelName(modelId),
    api: resolveOpencodeZenModelApi(modelId),
    reasoning: isReasoningModel(modelId),
    input: acceptsImages ? ["text", "image"] : ["text"],
    cost: DEFAULT_COST,
    contextWindow: getDefaultContextWindow(modelId),
    maxTokens: getDefaultMaxTokens(modelId),
  };
}
/**
 * Human-readable display name for a model ID.
 * Known IDs come from a curated table; anything else falls back to
 * title-casing each dash-separated segment ("foo-bar" → "Foo Bar").
 */
function formatModelName(modelId: string): string {
  const curated: Record<string, string> = {
    "claude-opus-4-5": "Claude Opus 4.5",
    "claude-sonnet-4-20250514": "Claude Sonnet 4",
    "claude-haiku-3-5-20241022": "Claude Haiku 3.5",
    "gpt-5.2": "GPT-5.2",
    "gpt-4.1": "GPT-4.1",
    "gpt-4.1-mini": "GPT-4.1 Mini",
    "o1-2025-04-16": "O1",
    "o3-2025-04-16": "O3",
    "o3-mini-2025-04-16": "O3 Mini",
    "gemini-3-pro": "Gemini 3 Pro",
    "gemini-2.5-pro": "Gemini 2.5 Pro",
    "gemini-2.5-flash": "Gemini 2.5 Flash",
  };
  const known = curated[modelId];
  if (known !== undefined) {
    return known;
  }
  // Generic fallback: capitalize each dash-separated segment.
  return modelId
    .split("-")
    .map((segment) => segment.charAt(0).toUpperCase() + segment.slice(1))
    .join(" ");
}
/**
 * Static fallback catalog used when the /models endpoint is unreachable.
 * Covers the most commonly used Claude, GPT, o-series and Gemini models.
 */
export function getOpencodeZenStaticFallbackModels(): ModelDefinitionConfig[] {
  const claudeModels = [
    "claude-opus-4-5",
    "claude-sonnet-4-20250514",
    "claude-haiku-3-5-20241022",
  ];
  const gptModels = ["gpt-5.2", "gpt-4.1", "gpt-4.1-mini"];
  const oSeriesModels = ["o1-2025-04-16", "o3-2025-04-16", "o3-mini-2025-04-16"];
  const geminiModels = ["gemini-3-pro", "gemini-2.5-pro", "gemini-2.5-flash"];
  return [
    ...claudeModels,
    ...gptModels,
    ...oSeriesModels,
    ...geminiModels,
  ].map(buildModelDefinition);
}
/**
 * Response shape from the OpenCode Zen /models endpoint.
 * Mirrors the OpenAI "list models" format: a `data` array of model
 * objects, of which only `id` is consumed downstream.
 */
interface ZenModelsResponse {
  data: Array<{
    id: string;
    object: "model";
    created?: number; // Unix timestamp, when provided
    owned_by?: string; // upstream owner label, when provided
  }>;
}
/**
 * Fetch the model catalog from the OpenCode Zen API.
 *
 * Successful results are cached in-module for CACHE_TTL_MS (1 hour).
 * Any failure — network error, non-2xx status, malformed payload, or an
 * empty model list — logs a warning and returns the static fallback
 * catalog instead. Failures are NOT cached, so the next call retries.
 *
 * @param apiKey - OpenCode Zen API key; sent as a Bearer token when provided
 * @returns Model definitions from the API, or the static fallback on failure
 */
export async function fetchOpencodeZenModels(
  apiKey?: string,
): Promise<ModelDefinitionConfig[]> {
  // Serve from cache while the TTL has not elapsed.
  const now = Date.now();
  if (cachedModels && now - cacheTimestamp < CACHE_TTL_MS) {
    return cachedModels;
  }
  try {
    const headers: Record<string, string> = {
      Accept: "application/json",
    };
    if (apiKey) {
      headers.Authorization = `Bearer ${apiKey}`;
    }
    const response = await fetch(`${OPENCODE_ZEN_API_BASE_URL}/models`, {
      method: "GET",
      headers,
      signal: AbortSignal.timeout(10000), // 10 second timeout
    });
    if (!response.ok) {
      throw new Error(
        `API returned ${response.status}: ${response.statusText}`,
      );
    }
    // Untrusted JSON: validate the envelope before using it.
    const data = (await response.json()) as ZenModelsResponse;
    if (!data.data || !Array.isArray(data.data)) {
      throw new Error("Invalid response format from /models endpoint");
    }
    // Skip entries without a usable string id (the cast above does not
    // actually validate individual records).
    const models = data.data
      .filter((model) => typeof model?.id === "string" && model.id.length > 0)
      .map((model) => buildModelDefinition(model.id));
    // An empty catalog is useless and would otherwise be cached for an
    // hour; treat it as a failure so callers still get the fallback.
    if (models.length === 0) {
      throw new Error("/models endpoint returned no usable models");
    }
    cachedModels = models;
    cacheTimestamp = now;
    return models;
  } catch (error) {
    console.warn(
      `[opencode-zen] Failed to fetch models, using static fallback: ${String(error)}`,
    );
    return getOpencodeZenStaticFallbackModels();
  }
}
/**
 * Drop the cached model catalog so the next fetch hits the API again.
 * Intended for tests and for forcing a manual refresh.
 */
export function clearOpencodeZenModelCache(): void {
  cacheTimestamp = 0;
  cachedModels = null;
}

View File

@@ -14,7 +14,7 @@ export type AuthChoiceOption = {
function formatOAuthHint( function formatOAuthHint(
expires?: number, expires?: number,
opts?: { allowStale?: boolean }, opts?: { allowStale?: boolean }
): string { ): string {
const rich = isRich(); const rich = isRich();
if (!expires) { if (!expires) {
@@ -33,8 +33,8 @@ function formatOAuthHint(
minutes >= 120 minutes >= 120
? `${Math.round(minutes / 60)}h` ? `${Math.round(minutes / 60)}h`
: minutes >= 60 : minutes >= 60
? "1h" ? "1h"
: `${Math.max(minutes, 1)}m`; : `${Math.max(minutes, 1)}m`;
const label = `token ok · expires in ${duration}`; const label = `token ok · expires in ${duration}`;
if (minutes <= 10) { if (minutes <= 10) {
return colorize(rich, theme.warn, label); return colorize(rich, theme.warn, label);
@@ -99,6 +99,11 @@ export function buildAuthChoiceOptions(params: {
options.push({ value: "gemini-api-key", label: "Google Gemini API key" }); options.push({ value: "gemini-api-key", label: "Google Gemini API key" });
options.push({ value: "apiKey", label: "Anthropic API key" }); options.push({ value: "apiKey", label: "Anthropic API key" });
// Token flow is currently Anthropic-only; use CLI for advanced providers. // Token flow is currently Anthropic-only; use CLI for advanced providers.
options.push({
value: "opencode-zen",
label: "OpenCode Zen (multi-model proxy)",
hint: "Claude, GPT, Gemini via opencode.ai/zen",
});
options.push({ value: "minimax-cloud", label: "MiniMax M2.1 (minimax.io)" }); options.push({ value: "minimax-cloud", label: "MiniMax M2.1 (minimax.io)" });
options.push({ value: "minimax", label: "Minimax M2.1 (LM Studio)" }); options.push({ value: "minimax", label: "Minimax M2.1 (LM Studio)" });
options.push({ options.push({

View File

@@ -42,10 +42,12 @@ import {
applyMinimaxHostedConfig, applyMinimaxHostedConfig,
applyMinimaxHostedProviderConfig, applyMinimaxHostedProviderConfig,
applyMinimaxProviderConfig, applyMinimaxProviderConfig,
applyOpencodeZenConfig,
MINIMAX_HOSTED_MODEL_REF, MINIMAX_HOSTED_MODEL_REF,
setAnthropicApiKey, setAnthropicApiKey,
setGeminiApiKey, setGeminiApiKey,
setMinimaxApiKey, setMinimaxApiKey,
setOpencodeZenApiKey,
writeOAuthCredentials, writeOAuthCredentials,
} from "./onboard-auth.js"; } from "./onboard-auth.js";
import { openUrl } from "./onboard-helpers.js"; import { openUrl } from "./onboard-helpers.js";
@@ -54,11 +56,12 @@ import {
applyOpenAICodexModelDefault, applyOpenAICodexModelDefault,
OPENAI_CODEX_DEFAULT_MODEL, OPENAI_CODEX_DEFAULT_MODEL,
} from "./openai-codex-model-default.js"; } from "./openai-codex-model-default.js";
import { OPENCODE_ZEN_DEFAULT_MODEL } from "./opencode-zen-model-default.js";
export async function warnIfModelConfigLooksOff( export async function warnIfModelConfigLooksOff(
config: ClawdbotConfig, config: ClawdbotConfig,
prompter: WizardPrompter, prompter: WizardPrompter,
options?: { agentId?: string; agentDir?: string }, options?: { agentId?: string; agentDir?: string }
) { ) {
const agentModelOverride = options?.agentId const agentModelOverride = options?.agentId
? resolveAgentConfig(config, options.agentId)?.model?.trim() ? resolveAgentConfig(config, options.agentId)?.model?.trim()
@@ -93,11 +96,11 @@ export async function warnIfModelConfigLooksOff(
}); });
if (catalog.length > 0) { if (catalog.length > 0) {
const known = catalog.some( const known = catalog.some(
(entry) => entry.provider === ref.provider && entry.id === ref.model, (entry) => entry.provider === ref.provider && entry.id === ref.model
); );
if (!known) { if (!known) {
warnings.push( warnings.push(
`Model not found: ${ref.provider}/${ref.model}. Update agents.defaults.model or run /models list.`, `Model not found: ${ref.provider}/${ref.model}. Update agents.defaults.model or run /models list.`
); );
} }
} }
@@ -108,7 +111,7 @@ export async function warnIfModelConfigLooksOff(
const customKey = getCustomProviderApiKey(config, ref.provider); const customKey = getCustomProviderApiKey(config, ref.provider);
if (!hasProfile && !envKey && !customKey) { if (!hasProfile && !envKey && !customKey) {
warnings.push( warnings.push(
`No auth configured for provider "${ref.provider}". The agent may fail until credentials are added.`, `No auth configured for provider "${ref.provider}". The agent may fail until credentials are added.`
); );
} }
@@ -116,7 +119,7 @@ export async function warnIfModelConfigLooksOff(
const hasCodex = listProfilesForProvider(store, "openai-codex").length > 0; const hasCodex = listProfilesForProvider(store, "openai-codex").length > 0;
if (hasCodex) { if (hasCodex) {
warnings.push( warnings.push(
`Detected OpenAI Codex OAuth. Consider setting agents.defaults.model to ${OPENAI_CODEX_DEFAULT_MODEL}.`, `Detected OpenAI Codex OAuth. Consider setting agents.defaults.model to ${OPENAI_CODEX_DEFAULT_MODEL}.`
); );
} }
} }
@@ -142,7 +145,7 @@ export async function applyAuthChoice(params: {
if (!params.agentId) return; if (!params.agentId) return;
await params.prompter.note( await params.prompter.note(
`Default model set to ${model} for agent "${params.agentId}".`, `Default model set to ${model} for agent "${params.agentId}".`,
"Model configured", "Model configured"
); );
}; };
@@ -158,7 +161,7 @@ export async function applyAuthChoice(params: {
'Choose "Always Allow" so the launchd gateway can start without prompts.', 'Choose "Always Allow" so the launchd gateway can start without prompts.',
'If you choose "Allow" or "Deny", each restart will block on a Keychain alert.', 'If you choose "Allow" or "Deny", each restart will block on a Keychain alert.',
].join("\n"), ].join("\n"),
"Claude CLI Keychain", "Claude CLI Keychain"
); );
const proceed = await params.prompter.confirm({ const proceed = await params.prompter.confirm({
message: "Check Keychain for Claude CLI credentials now?", message: "Check Keychain for Claude CLI credentials now?",
@@ -189,14 +192,14 @@ export async function applyAuthChoice(params: {
if (res.error) { if (res.error) {
await params.prompter.note( await params.prompter.note(
`Failed to run claude: ${String(res.error)}`, `Failed to run claude: ${String(res.error)}`,
"Claude setup-token", "Claude setup-token"
); );
} }
} }
} else { } else {
await params.prompter.note( await params.prompter.note(
"`claude setup-token` requires an interactive TTY.", "`claude setup-token` requires an interactive TTY.",
"Claude setup-token", "Claude setup-token"
); );
} }
@@ -208,7 +211,7 @@ export async function applyAuthChoice(params: {
process.platform === "darwin" process.platform === "darwin"
? 'No Claude CLI credentials found in Keychain ("Claude Code-credentials") or ~/.claude/.credentials.json.' ? 'No Claude CLI credentials found in Keychain ("Claude Code-credentials") or ~/.claude/.credentials.json.'
: "No Claude CLI credentials found at ~/.claude/.credentials.json.", : "No Claude CLI credentials found at ~/.claude/.credentials.json.",
"Claude CLI OAuth", "Claude CLI OAuth"
); );
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };
} }
@@ -227,13 +230,13 @@ export async function applyAuthChoice(params: {
"This will run `claude setup-token` to create a long-lived Anthropic token.", "This will run `claude setup-token` to create a long-lived Anthropic token.",
"Requires an interactive TTY and a Claude Pro/Max subscription.", "Requires an interactive TTY and a Claude Pro/Max subscription.",
].join("\n"), ].join("\n"),
"Anthropic setup-token", "Anthropic setup-token"
); );
if (!process.stdin.isTTY) { if (!process.stdin.isTTY) {
await params.prompter.note( await params.prompter.note(
"`claude setup-token` requires an interactive TTY.", "`claude setup-token` requires an interactive TTY.",
"Anthropic setup-token", "Anthropic setup-token"
); );
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };
} }
@@ -251,14 +254,14 @@ export async function applyAuthChoice(params: {
if (res.error) { if (res.error) {
await params.prompter.note( await params.prompter.note(
`Failed to run claude: ${String(res.error)}`, `Failed to run claude: ${String(res.error)}`,
"Anthropic setup-token", "Anthropic setup-token"
); );
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };
} }
if (typeof res.status === "number" && res.status !== 0) { if (typeof res.status === "number" && res.status !== 0) {
await params.prompter.note( await params.prompter.note(
`claude setup-token failed (exit ${res.status})`, `claude setup-token failed (exit ${res.status})`,
"Anthropic setup-token", "Anthropic setup-token"
); );
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };
} }
@@ -269,7 +272,7 @@ export async function applyAuthChoice(params: {
if (!store.profiles[CLAUDE_CLI_PROFILE_ID]) { if (!store.profiles[CLAUDE_CLI_PROFILE_ID]) {
await params.prompter.note( await params.prompter.note(
`No Claude CLI credentials found after setup-token. Expected ${CLAUDE_CLI_PROFILE_ID}.`, `No Claude CLI credentials found after setup-token. Expected ${CLAUDE_CLI_PROFILE_ID}.`,
"Anthropic setup-token", "Anthropic setup-token"
); );
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };
} }
@@ -289,7 +292,7 @@ export async function applyAuthChoice(params: {
"Run `claude setup-token` in your terminal.", "Run `claude setup-token` in your terminal.",
"Then paste the generated token below.", "Then paste the generated token below.",
].join("\n"), ].join("\n"),
"Anthropic token", "Anthropic token"
); );
const tokenRaw = await params.prompter.text({ const tokenRaw = await params.prompter.text({
@@ -339,7 +342,7 @@ export async function applyAuthChoice(params: {
} }
await params.prompter.note( await params.prompter.note(
`Copied OPENAI_API_KEY to ${result.path} for launchd compatibility.`, `Copied OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
"OpenAI API key", "OpenAI API key"
); );
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };
} }
@@ -357,7 +360,7 @@ export async function applyAuthChoice(params: {
process.env.OPENAI_API_KEY = trimmed; process.env.OPENAI_API_KEY = trimmed;
await params.prompter.note( await params.prompter.note(
`Saved OPENAI_API_KEY to ${result.path} for launchd compatibility.`, `Saved OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
"OpenAI API key", "OpenAI API key"
); );
} else if (params.authChoice === "openai-codex") { } else if (params.authChoice === "openai-codex") {
const isRemote = isRemoteEnvironment(); const isRemote = isRemoteEnvironment();
@@ -373,7 +376,7 @@ export async function applyAuthChoice(params: {
"If the callback doesn't auto-complete, paste the redirect URL.", "If the callback doesn't auto-complete, paste the redirect URL.",
"OpenAI OAuth uses localhost:1455 for the callback.", "OpenAI OAuth uses localhost:1455 for the callback.",
].join("\n"), ].join("\n"),
"OpenAI Codex OAuth", "OpenAI Codex OAuth"
); );
const spin = params.prompter.progress("Starting OAuth flow…"); const spin = params.prompter.progress("Starting OAuth flow…");
let manualCodePromise: Promise<string> | undefined; let manualCodePromise: Promise<string> | undefined;
@@ -383,7 +386,7 @@ export async function applyAuthChoice(params: {
if (isRemote) { if (isRemote) {
spin.stop("OAuth URL ready"); spin.stop("OAuth URL ready");
params.runtime.log( params.runtime.log(
`\nOpen this URL in your LOCAL browser:\n\n${url}\n`, `\nOpen this URL in your LOCAL browser:\n\n${url}\n`
); );
manualCodePromise = params.prompter manualCodePromise = params.prompter
.text({ .text({
@@ -415,7 +418,7 @@ export async function applyAuthChoice(params: {
await writeOAuthCredentials( await writeOAuthCredentials(
"openai-codex" as unknown as OAuthProvider, "openai-codex" as unknown as OAuthProvider,
creds, creds,
params.agentDir, params.agentDir
); );
nextConfig = applyAuthProfileConfig(nextConfig, { nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "openai-codex:default", profileId: "openai-codex:default",
@@ -428,7 +431,7 @@ export async function applyAuthChoice(params: {
if (applied.changed) { if (applied.changed) {
await params.prompter.note( await params.prompter.note(
`Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`, `Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`,
"Model configured", "Model configured"
); );
} }
} else { } else {
@@ -441,7 +444,7 @@ export async function applyAuthChoice(params: {
params.runtime.error(String(err)); params.runtime.error(String(err));
await params.prompter.note( await params.prompter.note(
"Trouble with OAuth? See https://docs.clawd.bot/start/faq", "Trouble with OAuth? See https://docs.clawd.bot/start/faq",
"OAuth help", "OAuth help"
); );
} }
} else if (params.authChoice === "codex-cli") { } else if (params.authChoice === "codex-cli") {
@@ -449,7 +452,7 @@ export async function applyAuthChoice(params: {
if (!store.profiles[CODEX_CLI_PROFILE_ID]) { if (!store.profiles[CODEX_CLI_PROFILE_ID]) {
await params.prompter.note( await params.prompter.note(
"No Codex CLI credentials found at ~/.codex/auth.json.", "No Codex CLI credentials found at ~/.codex/auth.json.",
"Codex CLI OAuth", "Codex CLI OAuth"
); );
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };
} }
@@ -464,7 +467,7 @@ export async function applyAuthChoice(params: {
if (applied.changed) { if (applied.changed) {
await params.prompter.note( await params.prompter.note(
`Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`, `Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`,
"Model configured", "Model configured"
); );
} }
} else { } else {
@@ -485,7 +488,7 @@ export async function applyAuthChoice(params: {
"Sign in with your Google account that has Antigravity access.", "Sign in with your Google account that has Antigravity access.",
"The callback will be captured automatically on localhost:51121.", "The callback will be captured automatically on localhost:51121.",
].join("\n"), ].join("\n"),
"Google Antigravity OAuth", "Google Antigravity OAuth"
); );
const spin = params.prompter.progress("Starting OAuth flow…"); const spin = params.prompter.progress("Starting OAuth flow…");
let oauthCreds: OAuthCredentials | null = null; let oauthCreds: OAuthCredentials | null = null;
@@ -495,7 +498,7 @@ export async function applyAuthChoice(params: {
if (isRemote) { if (isRemote) {
spin.stop("OAuth URL ready"); spin.stop("OAuth URL ready");
params.runtime.log( params.runtime.log(
`\nOpen this URL in your LOCAL browser:\n\n${url}\n`, `\nOpen this URL in your LOCAL browser:\n\n${url}\n`
); );
} else { } else {
spin.update("Complete sign-in in browser…"); spin.update("Complete sign-in in browser…");
@@ -503,14 +506,14 @@ export async function applyAuthChoice(params: {
params.runtime.log(`Open: ${url}`); params.runtime.log(`Open: ${url}`);
} }
}, },
(msg) => spin.update(msg), (msg) => spin.update(msg)
); );
spin.stop("Antigravity OAuth complete"); spin.stop("Antigravity OAuth complete");
if (oauthCreds) { if (oauthCreds) {
await writeOAuthCredentials( await writeOAuthCredentials(
"google-antigravity", "google-antigravity",
oauthCreds, oauthCreds,
params.agentDir, params.agentDir
); );
nextConfig = applyAuthProfileConfig(nextConfig, { nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: `google-antigravity:${oauthCreds.email ?? "default"}`, profileId: `google-antigravity:${oauthCreds.email ?? "default"}`,
@@ -555,7 +558,7 @@ export async function applyAuthChoice(params: {
}; };
await params.prompter.note( await params.prompter.note(
`Default model set to ${modelKey}`, `Default model set to ${modelKey}`,
"Model configured", "Model configured"
); );
} else { } else {
agentModelOverride = modelKey; agentModelOverride = modelKey;
@@ -567,7 +570,7 @@ export async function applyAuthChoice(params: {
params.runtime.error(String(err)); params.runtime.error(String(err));
await params.prompter.note( await params.prompter.note(
"Trouble with OAuth? See https://docs.clawd.bot/start/faq", "Trouble with OAuth? See https://docs.clawd.bot/start/faq",
"OAuth help", "OAuth help"
); );
} }
} else if (params.authChoice === "gemini-api-key") { } else if (params.authChoice === "gemini-api-key") {
@@ -587,7 +590,7 @@ export async function applyAuthChoice(params: {
if (applied.changed) { if (applied.changed) {
await params.prompter.note( await params.prompter.note(
`Default model set to ${GOOGLE_GEMINI_DEFAULT_MODEL}`, `Default model set to ${GOOGLE_GEMINI_DEFAULT_MODEL}`,
"Model configured", "Model configured"
); );
} }
} else { } else {
@@ -649,6 +652,36 @@ export async function applyAuthChoice(params: {
agentModelOverride = "minimax/MiniMax-M2.1"; agentModelOverride = "minimax/MiniMax-M2.1";
await noteAgentModel("minimax/MiniMax-M2.1"); await noteAgentModel("minimax/MiniMax-M2.1");
} }
} else if (params.authChoice === "opencode-zen") {
await params.prompter.note(
[
"OpenCode Zen provides access to Claude, GPT, Gemini, and more models.",
"Get your API key at: https://opencode.ai/auth",
"Requires an active OpenCode Zen subscription.",
].join("\n"),
"OpenCode Zen"
);
const key = await params.prompter.text({
message: "Enter OpenCode Zen API key",
validate: (value) => (value?.trim() ? undefined : "Required"),
});
await setOpencodeZenApiKey(String(key).trim(), params.agentDir);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "opencode-zen:default",
provider: "opencode-zen",
mode: "api_key",
});
if (params.setDefaultModel) {
nextConfig = applyOpencodeZenConfig(nextConfig);
await params.prompter.note(
`Default model set to ${OPENCODE_ZEN_DEFAULT_MODEL}`,
"Model configured"
);
} else {
nextConfig = applyOpencodeZenConfig(nextConfig);
agentModelOverride = OPENCODE_ZEN_DEFAULT_MODEL;
await noteAgentModel(OPENCODE_ZEN_DEFAULT_MODEL);
}
} }
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };

View File

@@ -71,9 +71,11 @@ import {
applyAuthProfileConfig, applyAuthProfileConfig,
applyMinimaxConfig, applyMinimaxConfig,
applyMinimaxHostedConfig, applyMinimaxHostedConfig,
applyOpencodeZenConfig,
setAnthropicApiKey, setAnthropicApiKey,
setGeminiApiKey, setGeminiApiKey,
setMinimaxApiKey, setMinimaxApiKey,
setOpencodeZenApiKey,
writeOAuthCredentials, writeOAuthCredentials,
} from "./onboard-auth.js"; } from "./onboard-auth.js";
import { import {
@@ -95,6 +97,7 @@ import {
applyOpenAICodexModelDefault, applyOpenAICodexModelDefault,
OPENAI_CODEX_DEFAULT_MODEL, OPENAI_CODEX_DEFAULT_MODEL,
} from "./openai-codex-model-default.js"; } from "./openai-codex-model-default.js";
import { OPENCODE_ZEN_DEFAULT_MODEL } from "./opencode-zen-model-default.js";
import { ensureSystemdUserLingerInteractive } from "./systemd-linger.js"; import { ensureSystemdUserLingerInteractive } from "./systemd-linger.js";
export const CONFIGURE_WIZARD_SECTIONS = [ export const CONFIGURE_WIZARD_SECTIONS = [
@@ -366,6 +369,7 @@ async function promptAuthConfig(
| "apiKey" | "apiKey"
| "minimax-cloud" | "minimax-cloud"
| "minimax" | "minimax"
| "opencode-zen"
| "skip"; | "skip";
let next = cfg; let next = cfg;
@@ -783,6 +787,32 @@ async function promptAuthConfig(
next = applyMinimaxHostedConfig(next); next = applyMinimaxHostedConfig(next);
} else if (authChoice === "minimax") { } else if (authChoice === "minimax") {
next = applyMinimaxConfig(next); next = applyMinimaxConfig(next);
} else if (authChoice === "opencode-zen") {
note(
[
"OpenCode Zen provides access to Claude, GPT, Gemini, and more models.",
"Get your API key at: https://opencode.ai/auth",
].join("\n"),
"OpenCode Zen",
);
const key = guardCancel(
await text({
message: "Enter OpenCode Zen API key",
validate: (value) => (value?.trim() ? undefined : "Required"),
}),
runtime,
);
await setOpencodeZenApiKey(String(key).trim());
next = applyAuthProfileConfig(next, {
profileId: "opencode-zen:default",
provider: "opencode-zen",
mode: "api_key",
});
next = applyOpencodeZenConfig(next);
note(
`Default model set to ${OPENCODE_ZEN_DEFAULT_MODEL}`,
"Model configured",
);
} }
const currentModel = const currentModel =

View File

@@ -1,6 +1,11 @@
import type { OAuthCredentials, OAuthProvider } from "@mariozechner/pi-ai"; import type { OAuthCredentials, OAuthProvider } from "@mariozechner/pi-ai";
import { resolveDefaultAgentDir } from "../agents/agent-scope.js"; import { resolveDefaultAgentDir } from "../agents/agent-scope.js";
import { upsertAuthProfile } from "../agents/auth-profiles.js"; import { upsertAuthProfile } from "../agents/auth-profiles.js";
import {
getOpencodeZenStaticFallbackModels,
OPENCODE_ZEN_API_BASE_URL,
OPENCODE_ZEN_DEFAULT_MODEL_REF,
} from "../agents/opencode-zen-models.js";
import type { ClawdbotConfig } from "../config/config.js"; import type { ClawdbotConfig } from "../config/config.js";
import type { ModelDefinitionConfig } from "../config/types.js"; import type { ModelDefinitionConfig } from "../config/types.js";
@@ -381,3 +386,72 @@ export function applyMinimaxApiConfig(
}, },
}; };
} }
/**
 * Persists an OpenCode Zen API key as the default auth profile
 * (`opencode-zen:default`).
 *
 * @param key - The raw API key to store.
 * @param agentDir - Optional agent directory override; defaults to the
 *   resolved default agent directory.
 */
export async function setOpencodeZenApiKey(key: string, agentDir?: string) {
  const targetDir = agentDir ?? resolveDefaultAgentDir();
  upsertAuthProfile({
    profileId: "opencode-zen:default",
    credential: { type: "api_key", provider: "opencode-zen", key },
    agentDir: targetDir,
  });
}
/**
 * Registers the OpenCode Zen provider (an OpenAI-compatible endpoint) in the
 * models config and gives the default model ref a human-friendly "Opus" alias.
 *
 * Pure: returns a new config object; `cfg` is not mutated.
 *
 * @param cfg - Current config to extend.
 * @returns A copy of `cfg` with the provider and model alias applied.
 */
export function applyOpencodeZenProviderConfig(
  cfg: ClawdbotConfig,
): ClawdbotConfig {
  const providers = { ...cfg.models?.providers };
  providers["opencode-zen"] = {
    baseUrl: OPENCODE_ZEN_API_BASE_URL,
    // NOTE(review): "opencode-zen" here looks like a reference to the auth
    // profile rather than a literal key value — confirm against credential
    // resolution before changing.
    apiKey: "opencode-zen",
    api: "openai-completions",
    models: getOpencodeZenStaticFallbackModels(),
  };
  const models = { ...cfg.agents?.defaults?.models };
  models[OPENCODE_ZEN_DEFAULT_MODEL_REF] = {
    ...models[OPENCODE_ZEN_DEFAULT_MODEL_REF],
    alias: "Opus",
  };
  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        models,
      },
    },
    models: {
      // Spread the existing models config first so unrelated settings are
      // preserved; the previous version rebuilt this object from only
      // `mode` + `providers`, discarding any other fields.
      ...cfg.models,
      mode: cfg.models?.mode ?? "merge",
      providers,
    },
  };
}
/**
 * Makes OpenCode Zen the primary default model, carrying over any previously
 * configured fallbacks, and registers the provider config.
 *
 * Pure: returns a new config object; `cfg` is not mutated.
 *
 * @param cfg - Current config to extend.
 * @returns A copy of `cfg` with provider config applied and
 *   `agents.defaults.model.primary` set to the OpenCode Zen default.
 */
export function applyOpencodeZenConfig(cfg: ClawdbotConfig): ClawdbotConfig {
  const next = applyOpencodeZenProviderConfig(cfg);
  const prevModel = next.agents?.defaults?.model;
  // Guard with `typeof`: the configured model may be a plain string ref, and
  // the `in` operator throws a TypeError when its right operand is a string
  // primitive. The previous version only checked truthiness before `in`.
  const carriedFallbacks =
    prevModel && typeof prevModel === "object" && "fallbacks" in prevModel
      ? { fallbacks: (prevModel as { fallbacks?: string[] }).fallbacks }
      : undefined;
  return {
    ...next,
    agents: {
      ...next.agents,
      defaults: {
        ...next.agents?.defaults,
        model: {
          ...carriedFallbacks,
          primary: OPENCODE_ZEN_DEFAULT_MODEL_REF,
        },
      },
    },
  };
}

View File

@@ -35,9 +35,11 @@ import {
applyMinimaxApiConfig, applyMinimaxApiConfig,
applyMinimaxConfig, applyMinimaxConfig,
applyMinimaxHostedConfig, applyMinimaxHostedConfig,
applyOpencodeZenConfig,
setAnthropicApiKey, setAnthropicApiKey,
setGeminiApiKey, setGeminiApiKey,
setMinimaxApiKey, setMinimaxApiKey,
setOpencodeZenApiKey,
} from "./onboard-auth.js"; } from "./onboard-auth.js";
import { import {
applyWizardMetadata, applyWizardMetadata,
@@ -312,6 +314,25 @@ export async function runNonInteractiveOnboarding(
nextConfig = applyOpenAICodexModelDefault(nextConfig).next; nextConfig = applyOpenAICodexModelDefault(nextConfig).next;
} else if (authChoice === "minimax") { } else if (authChoice === "minimax") {
nextConfig = applyMinimaxConfig(nextConfig); nextConfig = applyMinimaxConfig(nextConfig);
} else if (authChoice === "opencode-zen") {
const resolved = await resolveNonInteractiveApiKey({
provider: "opencode-zen",
cfg: baseConfig,
flagValue: opts.opencodeZenApiKey,
flagName: "--opencode-zen-api-key",
envVar: "OPENCODE_ZEN_API_KEY",
runtime,
});
if (!resolved) return;
if (resolved.source !== "profile") {
await setOpencodeZenApiKey(resolved.key);
}
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "opencode-zen:default",
provider: "opencode-zen",
mode: "api_key",
});
nextConfig = applyOpencodeZenConfig(nextConfig);
} else if ( } else if (
authChoice === "token" || authChoice === "token" ||
authChoice === "oauth" || authChoice === "oauth" ||

View File

@@ -17,6 +17,7 @@ export type AuthChoice =
| "minimax-cloud" | "minimax-cloud"
| "minimax" | "minimax"
| "minimax-api" | "minimax-api"
| "opencode-zen"
| "skip"; | "skip";
export type GatewayAuthChoice = "off" | "token" | "password"; export type GatewayAuthChoice = "off" | "token" | "password";
export type ResetScope = "config" | "config+creds+sessions" | "full"; export type ResetScope = "config" | "config+creds+sessions" | "full";
@@ -43,6 +44,7 @@ export type OnboardOptions = {
openaiApiKey?: string; openaiApiKey?: string;
geminiApiKey?: string; geminiApiKey?: string;
minimaxApiKey?: string; minimaxApiKey?: string;
opencodeZenApiKey?: string;
gatewayPort?: number; gatewayPort?: number;
gatewayBind?: GatewayBind; gatewayBind?: GatewayBind;
gatewayAuth?: GatewayAuthChoice; gatewayAuth?: GatewayAuthChoice;

View File

@@ -0,0 +1,57 @@
import { describe, expect, it } from "vitest";
import type { ClawdbotConfig } from "../config/config.js";
import {
applyOpencodeZenModelDefault,
OPENCODE_ZEN_DEFAULT_MODEL,
} from "./opencode-zen-model-default.js";
describe("applyOpencodeZenModelDefault", () => {
  it("sets opencode-zen default when model is unset", () => {
    const config: ClawdbotConfig = { agents: { defaults: {} } };
    const { changed, next } = applyOpencodeZenModelDefault(config);
    expect(changed).toBe(true);
    expect(next.agents?.defaults?.model).toEqual({
      primary: OPENCODE_ZEN_DEFAULT_MODEL,
    });
  });

  it("overrides existing model", () => {
    const config = {
      agents: { defaults: { model: "anthropic/claude-opus-4-5" } },
    } as ClawdbotConfig;
    const { changed, next } = applyOpencodeZenModelDefault(config);
    expect(changed).toBe(true);
    expect(next.agents?.defaults?.model).toEqual({
      primary: OPENCODE_ZEN_DEFAULT_MODEL,
    });
  });

  it("no-ops when already opencode-zen default", () => {
    const config = {
      agents: { defaults: { model: OPENCODE_ZEN_DEFAULT_MODEL } },
    } as ClawdbotConfig;
    const { changed, next } = applyOpencodeZenModelDefault(config);
    expect(changed).toBe(false);
    expect(next).toEqual(config);
  });

  it("preserves fallbacks when setting primary", () => {
    const config: ClawdbotConfig = {
      agents: {
        defaults: {
          model: {
            primary: "anthropic/claude-opus-4-5",
            fallbacks: ["google/gemini-3-pro"],
          },
        },
      },
    };
    const { changed, next } = applyOpencodeZenModelDefault(config);
    expect(changed).toBe(true);
    expect(next.agents?.defaults?.model).toEqual({
      primary: OPENCODE_ZEN_DEFAULT_MODEL,
      fallbacks: ["google/gemini-3-pro"],
    });
  });
});

View File

@@ -0,0 +1,45 @@
import type { ClawdbotConfig } from "../config/config.js";
import type { AgentModelListConfig } from "../config/types.js";
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5";

/**
 * Extracts the primary model ref from either config form: a plain string
 * ref, or an object carrying a `primary` field.
 */
function resolvePrimaryModel(
  model?: AgentModelListConfig | string,
): string | undefined {
  if (typeof model === "string") {
    return model;
  }
  const primary = model?.primary;
  return typeof primary === "string" ? primary : undefined;
}

/**
 * Sets the OpenCode Zen default as the primary model, keeping any other
 * fields (e.g. fallbacks) of an object-form model config intact.
 *
 * @param cfg - Current config.
 * @returns The (possibly unchanged) config plus a `changed` flag; no-ops
 *   when the primary already matches the OpenCode Zen default.
 */
export function applyOpencodeZenModelDefault(cfg: ClawdbotConfig): {
  next: ClawdbotConfig;
  changed: boolean;
} {
  const active = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim();
  if (active === OPENCODE_ZEN_DEFAULT_MODEL) {
    return { next: cfg, changed: false };
  }
  const prior = cfg.agents?.defaults?.model;
  const model =
    prior && typeof prior === "object"
      ? { ...prior, primary: OPENCODE_ZEN_DEFAULT_MODEL }
      : { primary: OPENCODE_ZEN_DEFAULT_MODEL };
  return {
    changed: true,
    next: {
      ...cfg,
      agents: {
        ...cfg.agents,
        defaults: {
          ...cfg.agents?.defaults,
          model,
        },
      },
    },
  };
}