refactor: centralize minimax onboarding + keys

Peter Steinberger
2026-01-09 18:32:53 +01:00
parent dac3b675cc
commit 42ead1499f
5 changed files with 295 additions and 56 deletions

View File

@@ -177,7 +177,7 @@ Options:
- `--workspace <dir>`
- `--non-interactive`
- `--mode <local|remote>`
-- `--auth-choice <setup-token|claude-cli|token|openai-codex|openai-api-key|codex-cli|antigravity|gemini-api-key|apiKey|minimax-cloud|minimax|skip>`
+- `--auth-choice <setup-token|claude-cli|token|openai-codex|openai-api-key|codex-cli|antigravity|gemini-api-key|apiKey|minimax-cloud|minimax-api|minimax|skip>`
- `--token-provider <id>` (non-interactive; used with `--auth-choice token`)
- `--token <token>` (non-interactive; used with `--auth-choice token`)
- `--token-profile-id <id>` (non-interactive; default: `<provider>:manual`)
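
In non-interactive mode the API-key auth choices resolve the key from the flag, the provider's environment variable, or an existing API-key auth profile (see `resolveNonInteractiveApiKey` below). A minimal sketch of driving the new `minimax-api` choice programmatically, assuming the module path and that `OnboardOptions` carries the option fields used in this diff:

```ts
import { runNonInteractiveOnboarding } from "./onboard-non-interactive.js"; // path assumed

// Key resolution order (per resolveNonInteractiveApiKey): the --minimax-api-key flag,
// then MINIMAX_API_KEY in the environment, then an existing minimax API-key profile.
await runNonInteractiveOnboarding({
  authChoice: "minimax-api",
  minimaxApiKey: process.env.MINIMAX_API_KEY, // optional; omit to fall back to env/profile
});
```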

View File

@@ -78,7 +78,9 @@ Tip: `--json` does **not** imply non-interactive mode. Use `--non-interactive` (
- Sets `agents.defaults.model` to `openai-codex/gpt-5.2` when model is unset or `openai/*`.
- **OpenAI API key**: uses `OPENAI_API_KEY` if present or prompts for a key, then saves it to `~/.clawdbot/.env` so launchd can read it.
- **API key**: stores the key for you.
-- **Minimax M2.1 (LM Studio)**: config is autowritten for the LM Studio endpoint.
+- **MiniMax M2.1 (minimax.io)**: config is autowritten for the OpenAI-compatible `/v1` endpoint.
+- **MiniMax API (platform.minimax.io)**: config is autowritten for the Anthropic-compatible `/anthropic` endpoint.
+- **MiniMax M2.1 (LM Studio)**: config is autowritten for the LM Studio endpoint.
- **Skip**: no auth configured yet.
- Wizard runs a model check and warns if the configured model is unknown or missing auth.
- OAuth credentials live in `~/.clawdbot/credentials/oauth.json`; auth profiles live in `~/.clawdbot/agents/<agentId>/agent/auth-profiles.json` (API keys + OAuth).
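
For reference, the provider block auto-written for the **MiniMax API** choice looks roughly like this sketch (values taken from `applyMinimaxApiProviderConfig` below; the surrounding `ClawdbotConfig` shape is assumed):

```ts
// Sketch of cfg.models.providers.minimax for the "MiniMax API" choice.
// Pricing values are placeholders; override them in models.json for accurate costs.
const minimaxApiProvider = {
  baseUrl: "https://api.minimax.io/anthropic",
  apiKey: "", // resolved at runtime via MINIMAX_API_KEY or the minimax auth profile
  api: "anthropic-messages",
  models: [
    {
      id: "MiniMax-M2.1",
      name: "MiniMax M2.1",
      reasoning: false,
      input: ["text"],
      cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
      contextWindow: 200000,
      maxTokens: 8192,
    },
  ],
};
```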

View File

@@ -0,0 +1,100 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it, vi } from "vitest";
import type { RuntimeEnv } from "../runtime.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { applyAuthChoice } from "./auth-choice.js";
const noopAsync = async () => {};
const noop = () => {};
describe("applyAuthChoice", () => {
const previousStateDir = process.env.CLAWDBOT_STATE_DIR;
const previousAgentDir = process.env.CLAWDBOT_AGENT_DIR;
const previousPiAgentDir = process.env.PI_CODING_AGENT_DIR;
let tempStateDir: string | null = null;
afterEach(async () => {
if (tempStateDir) {
await fs.rm(tempStateDir, { recursive: true, force: true });
tempStateDir = null;
}
if (previousStateDir === undefined) {
delete process.env.CLAWDBOT_STATE_DIR;
} else {
process.env.CLAWDBOT_STATE_DIR = previousStateDir;
}
if (previousAgentDir === undefined) {
delete process.env.CLAWDBOT_AGENT_DIR;
} else {
process.env.CLAWDBOT_AGENT_DIR = previousAgentDir;
}
if (previousPiAgentDir === undefined) {
delete process.env.PI_CODING_AGENT_DIR;
} else {
process.env.PI_CODING_AGENT_DIR = previousPiAgentDir;
}
});
it("prompts and writes MiniMax API key when selecting minimax-api", async () => {
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-auth-"));
process.env.CLAWDBOT_STATE_DIR = tempStateDir;
process.env.CLAWDBOT_AGENT_DIR = path.join(tempStateDir, "agent");
process.env.PI_CODING_AGENT_DIR = process.env.CLAWDBOT_AGENT_DIR;
const text = vi.fn().mockResolvedValue("sk-minimax-test");
const select: WizardPrompter["select"] = vi.fn(
async (params) => params.options[0]?.value as never,
);
const multiselect: WizardPrompter["multiselect"] = vi.fn(async () => []);
const prompter: WizardPrompter = {
intro: vi.fn(noopAsync),
outro: vi.fn(noopAsync),
note: vi.fn(noopAsync),
select,
multiselect,
text,
confirm: vi.fn(async () => false),
progress: vi.fn(() => ({ update: noop, stop: noop })),
};
const runtime: RuntimeEnv = {
log: vi.fn(),
error: vi.fn(),
exit: vi.fn((code: number) => {
throw new Error(`exit:${code}`);
}),
};
const result = await applyAuthChoice({
authChoice: "minimax-api",
config: {},
prompter,
runtime,
setDefaultModel: true,
});
expect(text).toHaveBeenCalledWith(
expect.objectContaining({ message: "Enter MiniMax API key" }),
);
expect(result.config.auth?.profiles?.["minimax:default"]).toMatchObject({
provider: "minimax",
mode: "api_key",
});
const authProfilePath = path.join(
tempStateDir,
"agents",
"main",
"agent",
"auth-profiles.json",
);
const raw = await fs.readFile(authProfilePath, "utf8");
const parsed = JSON.parse(raw) as {
profiles?: Record<string, { key?: string }>;
};
expect(parsed.profiles?.["minimax:default"]?.key).toBe("sk-minimax-test");
});
});
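
The test above pins down only the persisted essentials: the config gains a `minimax:default` profile and the key lands in `agents/main/agent/auth-profiles.json` under the state dir. A sketch of just the asserted shape (the real file likely carries more metadata, such as provider and mode):

```ts
// Only the fields the test asserts; other fields in auth-profiles.json are assumed.
const expectedAuthProfiles = {
  profiles: {
    "minimax:default": { key: "sk-minimax-test" },
  },
};
```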

View File

@@ -5,10 +5,73 @@ import type { ClawdbotConfig } from "../config/config.js";
import type { ModelDefinitionConfig } from "../config/types.js";
const DEFAULT_MINIMAX_BASE_URL = "https://api.minimax.io/v1";
const MINIMAX_API_BASE_URL = "https://api.minimax.io/anthropic";
export const MINIMAX_HOSTED_MODEL_ID = "MiniMax-M2.1";
const DEFAULT_MINIMAX_CONTEXT_WINDOW = 200000;
const DEFAULT_MINIMAX_MAX_TOKENS = 8192;
export const MINIMAX_HOSTED_MODEL_REF = `minimax/${MINIMAX_HOSTED_MODEL_ID}`;
// Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
const MINIMAX_API_COST = {
input: 15,
output: 60,
cacheRead: 2,
cacheWrite: 10,
};
const MINIMAX_HOSTED_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
const MINIMAX_LM_STUDIO_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
const MINIMAX_MODEL_CATALOG = {
"MiniMax-M2.1": { name: "MiniMax M2.1", reasoning: false },
"MiniMax-M2.1-lightning": {
name: "MiniMax M2.1 Lightning",
reasoning: false,
},
"MiniMax-M2": { name: "MiniMax M2", reasoning: true },
} as const;
type MinimaxCatalogId = keyof typeof MINIMAX_MODEL_CATALOG;
function buildMinimaxModelDefinition(params: {
id: string;
name?: string;
reasoning?: boolean;
cost: ModelDefinitionConfig["cost"];
contextWindow: number;
maxTokens: number;
}): ModelDefinitionConfig {
const catalog = MINIMAX_MODEL_CATALOG[params.id as MinimaxCatalogId];
const fallbackReasoning = params.id === "MiniMax-M2";
return {
id: params.id,
name: params.name ?? catalog?.name ?? `MiniMax ${params.id}`,
reasoning: params.reasoning ?? catalog?.reasoning ?? fallbackReasoning,
input: ["text"],
cost: params.cost,
contextWindow: params.contextWindow,
maxTokens: params.maxTokens,
};
}
function buildMinimaxApiModelDefinition(
modelId: string,
): ModelDefinitionConfig {
return buildMinimaxModelDefinition({
id: modelId,
cost: MINIMAX_API_COST,
contextWindow: DEFAULT_MINIMAX_CONTEXT_WINDOW,
maxTokens: DEFAULT_MINIMAX_MAX_TOKENS,
});
}
export async function writeOAuthCredentials(
provider: OAuthProvider,
@@ -137,15 +200,14 @@ export function applyMinimaxProviderConfig(
apiKey: "lmstudio",
api: "openai-responses",
models: [
-{
+buildMinimaxModelDefinition({
id: "minimax-m2.1-gs32",
name: "MiniMax M2.1 GS32",
reasoning: false,
-input: ["text"],
-cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
+cost: MINIMAX_LM_STUDIO_COST,
contextWindow: 196608,
maxTokens: 8192,
-},
+}),
],
};
}
@@ -177,15 +239,12 @@ export function applyMinimaxHostedProviderConfig(
};
const providers = { ...cfg.models?.providers };
-const hostedModel: ModelDefinitionConfig = {
+const hostedModel = buildMinimaxModelDefinition({
id: MINIMAX_HOSTED_MODEL_ID,
-name: "MiniMax M2.1",
-reasoning: false,
-input: ["text"],
-cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
+cost: MINIMAX_HOSTED_COST,
contextWindow: DEFAULT_MINIMAX_CONTEXT_WINDOW,
maxTokens: DEFAULT_MINIMAX_MAX_TOKENS,
-};
+});
const existingProvider = providers.minimax;
const existingModels = Array.isArray(existingProvider?.models)
? existingProvider.models
@@ -271,21 +330,10 @@ export function applyMinimaxApiProviderConfig(
): ClawdbotConfig {
const providers = { ...cfg.models?.providers };
providers.minimax = {
baseUrl: "https://api.minimax.io/anthropic",
baseUrl: MINIMAX_API_BASE_URL,
apiKey: "", // Resolved via MINIMAX_API_KEY env var or auth profile
api: "anthropic-messages",
models: [
{
id: modelId,
name: `MiniMax ${modelId}`,
reasoning: modelId === "MiniMax-M2",
input: ["text"],
// Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192,
},
],
models: [buildMinimaxApiModelDefinition(modelId)],
};
const models = { ...cfg.agents?.defaults?.models };

View File

@@ -3,6 +3,8 @@ import {
CLAUDE_CLI_PROFILE_ID,
CODEX_CLI_PROFILE_ID,
ensureAuthProfileStore,
resolveApiKeyForProfile,
resolveAuthProfileOrder,
} from "../agents/auth-profiles.js";
import { resolveEnvApiKey } from "../agents/model-auth.js";
import {
@@ -46,6 +48,69 @@ import type { AuthChoice, OnboardOptions } from "./onboard-types.js";
import { applyOpenAICodexModelDefault } from "./openai-codex-model-default.js";
import { ensureSystemdUserLingerNonInteractive } from "./systemd-linger.js";
type NonInteractiveApiKeySource = "flag" | "env" | "profile";
async function resolveApiKeyFromProfiles(params: {
provider: string;
cfg: ClawdbotConfig;
agentDir?: string;
}): Promise<string | null> {
const store = ensureAuthProfileStore(params.agentDir);
const order = resolveAuthProfileOrder({
cfg: params.cfg,
store,
provider: params.provider,
});
for (const profileId of order) {
const cred = store.profiles[profileId];
if (cred?.type !== "api_key") continue;
const resolved = await resolveApiKeyForProfile({
cfg: params.cfg,
store,
profileId,
agentDir: params.agentDir,
});
if (resolved?.apiKey) return resolved.apiKey;
}
return null;
}
async function resolveNonInteractiveApiKey(params: {
provider: string;
cfg: ClawdbotConfig;
flagValue?: string;
flagName: string;
envVar: string;
runtime: RuntimeEnv;
agentDir?: string;
allowProfile?: boolean;
}): Promise<{ key: string; source: NonInteractiveApiKeySource } | null> {
const flagKey = params.flagValue?.trim();
if (flagKey) return { key: flagKey, source: "flag" };
const envResolved = resolveEnvApiKey(params.provider);
if (envResolved?.apiKey) return { key: envResolved.apiKey, source: "env" };
if (params.allowProfile ?? true) {
const profileKey = await resolveApiKeyFromProfiles({
provider: params.provider,
cfg: params.cfg,
agentDir: params.agentDir,
});
if (profileKey) return { key: profileKey, source: "profile" };
}
const profileHint =
params.allowProfile === false
? ""
: `, or existing ${params.provider} API-key profile`;
params.runtime.error(
`Missing ${params.flagName} (or ${params.envVar} in env${profileHint}).`,
);
params.runtime.exit(1);
return null;
}
export async function runNonInteractiveOnboarding(
opts: OnboardOptions,
runtime: RuntimeEnv = defaultRuntime,
@@ -121,26 +186,36 @@ export async function runNonInteractiveOnboarding(
const authChoice: AuthChoice = opts.authChoice ?? "skip";
if (authChoice === "apiKey") {
-const key = opts.anthropicApiKey?.trim();
-if (!key) {
-runtime.error("Missing --anthropic-api-key");
-runtime.exit(1);
-return;
+const resolved = await resolveNonInteractiveApiKey({
+provider: "anthropic",
+cfg: baseConfig,
+flagValue: opts.anthropicApiKey,
+flagName: "--anthropic-api-key",
+envVar: "ANTHROPIC_API_KEY",
+runtime,
+});
+if (!resolved) return;
+if (resolved.source !== "profile") {
+await setAnthropicApiKey(resolved.key);
}
-await setAnthropicApiKey(key);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "anthropic:default",
provider: "anthropic",
mode: "api_key",
});
} else if (authChoice === "gemini-api-key") {
-const key = opts.geminiApiKey?.trim();
-if (!key) {
-runtime.error("Missing --gemini-api-key");
-runtime.exit(1);
-return;
+const resolved = await resolveNonInteractiveApiKey({
+provider: "google",
+cfg: baseConfig,
+flagValue: opts.geminiApiKey,
+flagName: "--gemini-api-key",
+envVar: "GEMINI_API_KEY",
+runtime,
+});
+if (!resolved) return;
+if (resolved.source !== "profile") {
+await setGeminiApiKey(resolved.key);
}
-await setGeminiApiKey(key);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "google:default",
provider: "google",
@@ -148,12 +223,17 @@ export async function runNonInteractiveOnboarding(
});
nextConfig = applyGoogleGeminiModelDefault(nextConfig).next;
} else if (authChoice === "openai-api-key") {
-const key = opts.openaiApiKey?.trim() || resolveEnvApiKey("openai")?.apiKey;
-if (!key) {
-runtime.error("Missing --openai-api-key (or OPENAI_API_KEY in env).");
-runtime.exit(1);
-return;
-}
+const resolved = await resolveNonInteractiveApiKey({
+provider: "openai",
+cfg: baseConfig,
+flagValue: opts.openaiApiKey,
+flagName: "--openai-api-key",
+envVar: "OPENAI_API_KEY",
+runtime,
+allowProfile: false,
+});
+if (!resolved) return;
+const key = resolved.key;
const result = upsertSharedEnvVar({
key: "OPENAI_API_KEY",
value: key,
@@ -161,13 +241,18 @@ export async function runNonInteractiveOnboarding(
process.env.OPENAI_API_KEY = key;
runtime.log(`Saved OPENAI_API_KEY to ${result.path}`);
} else if (authChoice === "minimax-cloud") {
-const key = opts.minimaxApiKey?.trim();
-if (!key) {
-runtime.error("Missing --minimax-api-key");
-runtime.exit(1);
-return;
+const resolved = await resolveNonInteractiveApiKey({
+provider: "minimax",
+cfg: baseConfig,
+flagValue: opts.minimaxApiKey,
+flagName: "--minimax-api-key",
+envVar: "MINIMAX_API_KEY",
+runtime,
+});
+if (!resolved) return;
+if (resolved.source !== "profile") {
+await setMinimaxApiKey(resolved.key);
}
-await setMinimaxApiKey(key);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "minimax:default",
provider: "minimax",
@@ -175,14 +260,18 @@ export async function runNonInteractiveOnboarding(
});
nextConfig = applyMinimaxHostedConfig(nextConfig);
} else if (authChoice === "minimax-api") {
-const key =
-opts.minimaxApiKey?.trim() || resolveEnvApiKey("minimax")?.apiKey;
-if (!key) {
-runtime.error("Missing --minimax-api-key (or MINIMAX_API_KEY in env).");
-runtime.exit(1);
-return;
+const resolved = await resolveNonInteractiveApiKey({
+provider: "minimax",
+cfg: baseConfig,
+flagValue: opts.minimaxApiKey,
+flagName: "--minimax-api-key",
+envVar: "MINIMAX_API_KEY",
+runtime,
+});
+if (!resolved) return;
+if (resolved.source !== "profile") {
+await setMinimaxApiKey(resolved.key);
}
-await setMinimaxApiKey(key);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "minimax:default",
provider: "minimax",