fix: align opencode-zen provider setup
@@ -2,6 +2,7 @@ import { type Api, getEnvApiKey, type Model } from "@mariozechner/pi-ai";
 import type { ClawdbotConfig } from "../config/config.js";
 import type { ModelProviderConfig } from "../config/types.js";
 import { getShellEnvAppliedKeys } from "../infra/shell-env.js";
+import { normalizeProviderId } from "./model-selection.js";
 import {
   type AuthProfileStore,
   ensureAuthProfileStore,
@@ -103,6 +104,7 @@ export type EnvApiKeyResult = { apiKey: string; source: string };
 export type ModelAuthMode = "api-key" | "oauth" | "token" | "mixed" | "unknown";
 
 export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
+  const normalized = normalizeProviderId(provider);
   const applied = new Set(getShellEnvAppliedKeys());
   const pick = (envVar: string): EnvApiKeyResult | null => {
     const value = process.env[envVar]?.trim();
@@ -113,26 +115,30 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
     return { apiKey: value, source };
   };
 
-  if (provider === "github-copilot") {
+  if (normalized === "github-copilot") {
     return (
       pick("COPILOT_GITHUB_TOKEN") ?? pick("GH_TOKEN") ?? pick("GITHUB_TOKEN")
     );
   }
 
-  if (provider === "anthropic") {
+  if (normalized === "anthropic") {
     return pick("ANTHROPIC_OAUTH_TOKEN") ?? pick("ANTHROPIC_API_KEY");
   }
 
-  if (provider === "zai") {
+  if (normalized === "zai") {
     return pick("ZAI_API_KEY") ?? pick("Z_AI_API_KEY");
   }
 
-  if (provider === "google-vertex") {
-    const envKey = getEnvApiKey(provider);
+  if (normalized === "google-vertex") {
+    const envKey = getEnvApiKey(normalized);
     if (!envKey) return null;
     return { apiKey: envKey, source: "gcloud adc" };
   }
 
+  if (normalized === "opencode") {
+    return pick("OPENCODE_API_KEY") ?? pick("OPENCODE_ZEN_API_KEY");
+  }
+
   const envMap: Record<string, string> = {
     openai: "OPENAI_API_KEY",
     google: "GEMINI_API_KEY",
@@ -142,9 +148,9 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
     openrouter: "OPENROUTER_API_KEY",
     minimax: "MINIMAX_API_KEY",
     mistral: "MISTRAL_API_KEY",
-    "opencode-zen": "OPENCODE_ZEN_API_KEY",
+    opencode: "OPENCODE_API_KEY",
   };
-  const envVar = envMap[provider];
+  const envVar = envMap[normalized];
   if (!envVar) return null;
   return pick(envVar);
 }
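
For reference, a minimal usage sketch of how the normalized lookup behaves after this change; it is not part of the commit, and the placeholder key value is invented. The provider strings and env-var names come from the diff above.

// Both spellings now normalize to "opencode" and hit the dedicated branch,
// which tries OPENCODE_API_KEY first and falls back to OPENCODE_ZEN_API_KEY.
process.env.OPENCODE_ZEN_API_KEY = "sk-example"; // hypothetical placeholder
resolveEnvApiKey("opencode-zen"); // resolves via the OPENCODE_ZEN_API_KEY fallback
resolveEnvApiKey("opencode");     // same branch, same result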
@@ -24,6 +24,7 @@ export function modelKey(provider: string, model: string) {
 export function normalizeProviderId(provider: string): string {
   const normalized = provider.trim().toLowerCase();
   if (normalized === "z.ai" || normalized === "z-ai") return "zai";
+  if (normalized === "opencode-zen") return "opencode";
   return normalized;
 }
 
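
A quick sketch of the alias mapping this hunk introduces (example inputs only, chosen for illustration):

normalizeProviderId("opencode-zen"); // => "opencode"
normalizeProviderId("OpenCode-Zen"); // => "opencode" (trimmed and lowercased first)
normalizeProviderId("z.ai");         // => "zai"
normalizeProviderId("openai");       // => "openai" (passes through unchanged)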
@@ -41,12 +41,18 @@ describe("resolveOpencodeZenAlias", () => {
 });
 
 describe("resolveOpencodeZenModelApi", () => {
-  it("returns openai-completions for all models (OpenCode Zen is OpenAI-compatible)", () => {
+  it("maps APIs by model family", () => {
     expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe(
-      "openai-completions",
+      "anthropic-messages",
     );
+    expect(resolveOpencodeZenModelApi("minimax-m2.1-free")).toBe(
+      "anthropic-messages",
+    );
-    expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-completions");
+    expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe(
+      "google-generative-ai",
+    );
+    expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-responses");
     expect(resolveOpencodeZenModelApi("glm-4.7-free")).toBe(
       "openai-completions",
     );
     expect(resolveOpencodeZenModelApi("some-unknown-model")).toBe(
@@ -12,7 +12,7 @@ import type { ModelApi, ModelDefinitionConfig } from "../config/types.js";
 
 export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1";
 export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5";
-export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode-zen/${OPENCODE_ZEN_DEFAULT_MODEL}`;
+export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode/${OPENCODE_ZEN_DEFAULT_MODEL}`;
 
 // Cache for fetched models (1 hour TTL)
 let cachedModels: ModelDefinitionConfig[] | null = null;
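
With the alias in place, the default model ref now carries the normalized provider id:

OPENCODE_ZEN_DEFAULT_MODEL_REF; // => "opencode/claude-opus-4-5"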
@@ -87,10 +87,23 @@ export function resolveOpencodeZenAlias(modelIdOrAlias: string): string {
 }
 
 /**
- * OpenCode Zen is an OpenAI-compatible proxy for all models.
- * All requests go through /chat/completions regardless of the underlying model.
+ * OpenCode Zen routes models to different APIs based on model family.
  */
-export function resolveOpencodeZenModelApi(_modelId: string): ModelApi {
+export function resolveOpencodeZenModelApi(modelId: string): ModelApi {
+  const lower = modelId.toLowerCase();
+  if (
+    lower.startsWith("claude-") ||
+    lower.startsWith("minimax") ||
+    lower.startsWith("alpha-gd4")
+  ) {
+    return "anthropic-messages";
+  }
+  if (lower.startsWith("gemini-")) {
+    return "google-generative-ai";
+  }
+  if (lower.startsWith("gpt-")) {
+    return "openai-responses";
+  }
   return "openai-completions";
 }
 
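
For reference, the resulting model-family mapping, mirroring the updated tests above (the model ids are the ones exercised in the diff):

resolveOpencodeZenModelApi("claude-opus-4-5");   // => "anthropic-messages"
resolveOpencodeZenModelApi("minimax-m2.1-free"); // => "anthropic-messages"
resolveOpencodeZenModelApi("gemini-3-pro");      // => "google-generative-ai"
resolveOpencodeZenModelApi("gpt-5.2");           // => "openai-responses"
resolveOpencodeZenModelApi("glm-4.7-free");      // => "openai-completions" (fallback)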