feat: Add MiniMax Anthropic-compatible API support (minimax-api)

Add --auth-choice minimax-api for direct MiniMax API usage at
https://api.minimax.io/anthropic using the anthropic-messages API.

Changes:
- Add applyMinimaxApiConfig() function with provider/model config
- Add minimax-api to AuthChoice type and CLI options
- Add handler and non-interactive support
- Fix duplicate minimax entry in envMap
- Update live test to use anthropic-messages API
- Add 11 unit tests covering all edge cases
- Document configuration in gateway docs

Test results:
- 11/11 unit tests pass
- 1/1 live API test passes (verified with real API key)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
mneves75
2026-01-09 13:56:00 -03:00
committed by Peter Steinberger
parent cfcff68e91
commit 9279795c47
8 changed files with 302 additions and 84 deletions

View File

@@ -1464,6 +1464,67 @@ Notes:
- Responses API enables clean reasoning/output separation; WhatsApp sees only final text.
- Adjust `contextWindow`/`maxTokens` if your LM Studio context length differs.
### MiniMax API (platform.minimax.io)
Use MiniMax's Anthropic-compatible API directly without LM Studio:
```json5
{
agent: {
model: { primary: "minimax/MiniMax-M2.1" },
models: {
"anthropic/claude-opus-4-5": { alias: "Opus" },
"minimax/MiniMax-M2.1": { alias: "Minimax" }
}
},
models: {
mode: "merge",
providers: {
minimax: {
baseUrl: "https://api.minimax.io/anthropic",
apiKey: "${MINIMAX_API_KEY}",
api: "anthropic-messages",
models: [
{
id: "MiniMax-M2.1",
name: "MiniMax M2.1",
reasoning: false,
input: ["text"],
// Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192
},
{
id: "MiniMax-M2.1-lightning",
name: "MiniMax M2.1 Lightning",
reasoning: false,
input: ["text"],
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192
},
{
id: "MiniMax-M2",
name: "MiniMax M2",
reasoning: true,
input: ["text"],
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192
}
]
}
}
}
}
```
Notes:
- Set `MINIMAX_API_KEY` environment variable or use `clawdbot onboard --auth-choice minimax-api`
- Available models: `MiniMax-M2.1` (default), `MiniMax-M2.1-lightning` (~100 tokens/sec), `MiniMax-M2` (reasoning)
- Pricing is a placeholder; MiniMax doesn't publish public rates. Override in `models.json` for accurate cost tracking.
Notes:
- Supported APIs: `openai-completions`, `openai-responses`, `anthropic-messages`,
`google-generative-ai`

View File

@@ -3,7 +3,7 @@ import { describe, expect, it } from "vitest";
const MINIMAX_KEY = process.env.MINIMAX_API_KEY ?? "";
const MINIMAX_BASE_URL =
process.env.MINIMAX_BASE_URL?.trim() || "https://api.minimax.io/v1";
process.env.MINIMAX_BASE_URL?.trim() || "https://api.minimax.io/anthropic";
const MINIMAX_MODEL = process.env.MINIMAX_MODEL?.trim() || "MiniMax-M2.1";
const LIVE = process.env.MINIMAX_LIVE_TEST === "1" || process.env.LIVE === "1";
@@ -11,15 +11,16 @@ const describeLive = LIVE && MINIMAX_KEY ? describe : describe.skip;
describeLive("minimax live", () => {
it("returns assistant text", async () => {
const model: Model<"openai-completions"> = {
const model: Model<"anthropic-messages"> = {
id: MINIMAX_MODEL,
name: `MiniMax ${MINIMAX_MODEL}`,
api: "openai-completions",
api: "anthropic-messages",
provider: "minimax",
baseUrl: MINIMAX_BASE_URL,
reasoning: false,
reasoning: MINIMAX_MODEL === "MiniMax-M2",
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
// Pricing: placeholder values (per 1M tokens, multiplied by 1000 for display)
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192,
};

View File

@@ -101,6 +101,10 @@ export function buildAuthChoiceOptions(params: {
// Token flow is currently Anthropic-only; use CLI for advanced providers.
options.push({ value: "minimax-cloud", label: "MiniMax M2.1 (minimax.io)" });
options.push({ value: "minimax", label: "Minimax M2.1 (LM Studio)" });
options.push({
value: "minimax-api",
label: "MiniMax API (platform.minimax.io)",
});
if (params.includeSkip) {
options.push({ value: "skip", label: "Skip for now" });
}

View File

@@ -36,6 +36,8 @@ import {
} from "./google-gemini-model-default.js";
import {
applyAuthProfileConfig,
applyMinimaxApiConfig,
applyMinimaxApiProviderConfig,
applyMinimaxConfig,
applyMinimaxHostedConfig,
applyMinimaxHostedProviderConfig,
@@ -629,6 +631,24 @@ export async function applyAuthChoice(params: {
agentModelOverride = "lmstudio/minimax-m2.1-gs32";
await noteAgentModel("lmstudio/minimax-m2.1-gs32");
}
} else if (params.authChoice === "minimax-api") {
const key = await params.prompter.text({
message: "Enter MiniMax API key",
validate: (value) => (value?.trim() ? undefined : "Required"),
});
await setMinimaxApiKey(String(key).trim(), params.agentDir);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "minimax:default",
provider: "minimax",
mode: "api_key",
});
if (params.setDefaultModel) {
nextConfig = applyMinimaxApiConfig(nextConfig);
} else {
nextConfig = applyMinimaxApiProviderConfig(nextConfig);
agentModelOverride = "minimax/MiniMax-M2.1";
await noteAgentModel("minimax/MiniMax-M2.1");
}
}
return { config: nextConfig, agentModelOverride };

View File

@@ -7,6 +7,8 @@ import { afterEach, describe, expect, it } from "vitest";
import {
applyAuthProfileConfig,
applyMinimaxApiProviderConfig,
applyMinimaxApiConfig,
writeOAuthCredentials,
} from "./onboard-auth.js";
@@ -105,3 +107,113 @@ describe("applyAuthProfileConfig", () => {
]);
});
});
// Unit tests for applyMinimaxApiConfig: provider registration, primary-model
// selection, reasoning flag, alias handling, and non-destructive merging.
describe("applyMinimaxApiConfig", () => {
// Provider must point at the Anthropic-compatible MiniMax endpoint.
it("adds minimax provider with correct settings", () => {
const cfg = applyMinimaxApiConfig({});
expect(cfg.models?.providers?.minimax).toMatchObject({
baseUrl: "https://api.minimax.io/anthropic",
api: "anthropic-messages",
});
});
// The optional modelId argument drives the agent's primary model ref.
it("sets correct primary model", () => {
const cfg = applyMinimaxApiConfig({}, "MiniMax-M2.1-lightning");
expect(cfg.agents?.defaults?.model?.primary).toBe("minimax/MiniMax-M2.1-lightning");
});
// Only MiniMax-M2 is a reasoning model; the flag is derived from the id.
it("sets reasoning flag for MiniMax-M2 model", () => {
const cfg = applyMinimaxApiConfig({}, "MiniMax-M2");
expect(cfg.models?.providers?.minimax?.models[0]?.reasoning).toBe(true);
});
it("does not set reasoning for non-M2 models", () => {
const cfg = applyMinimaxApiConfig({}, "MiniMax-M2.1");
expect(cfg.models?.providers?.minimax?.models[0]?.reasoning).toBe(false);
});
// Setting a new primary must not discard user-configured fallbacks.
it("preserves existing model fallbacks", () => {
const cfg = applyMinimaxApiConfig({
agents: {
defaults: {
model: { fallbacks: ["anthropic/claude-opus-4-5"] },
},
},
});
expect(cfg.agents?.defaults?.model?.fallbacks).toEqual(["anthropic/claude-opus-4-5"]);
});
// The display alias "Minimax" is attached to the chosen model ref.
it("adds model alias", () => {
const cfg = applyMinimaxApiConfig({}, "MiniMax-M2.1");
expect(cfg.agents?.defaults?.models?.["minimax/MiniMax-M2.1"]?.alias).toBe("Minimax");
});
// Alias is (re)set, but sibling keys like `params` survive the merge.
it("preserves existing model params when adding alias", () => {
const cfg = applyMinimaxApiConfig({
agents: {
defaults: {
models: {
"minimax/MiniMax-M2.1": { alias: "MiniMax", params: { custom: "value" } },
},
},
},
}, "MiniMax-M2.1");
expect(cfg.agents?.defaults?.models?.["minimax/MiniMax-M2.1"]).toMatchObject({
alias: "Minimax",
params: { custom: "value" },
});
});
// A stale minimax provider (old endpoint/API/models) is fully replaced,
// not merged, so outdated settings cannot linger.
it("replaces existing minimax provider entirely", () => {
const cfg = applyMinimaxApiConfig({
models: {
providers: {
minimax: {
baseUrl: "https://old.example.com",
apiKey: "old-key",
api: "openai-completions",
models: [{ id: "old-model", name: "Old", reasoning: false, input: ["text"], cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 }, contextWindow: 1000, maxTokens: 100 }],
},
},
},
});
expect(cfg.models?.providers?.minimax?.baseUrl).toBe("https://api.minimax.io/anthropic");
expect(cfg.models?.providers?.minimax?.api).toBe("anthropic-messages");
expect(cfg.models?.providers?.minimax?.models[0]?.id).toBe("MiniMax-M2.1");
});
// Other providers (e.g. anthropic) are untouched by adding minimax.
it("preserves other providers when adding minimax", () => {
const cfg = applyMinimaxApiConfig({
models: {
providers: {
anthropic: {
baseUrl: "https://api.anthropic.com",
apiKey: "anthropic-key",
api: "anthropic-messages",
models: [{ id: "claude-opus-4-5", name: "Claude Opus 4.5", reasoning: false, input: ["text"], cost: { input: 15, output: 75, cacheRead: 0, cacheWrite: 0 }, contextWindow: 200000, maxTokens: 8192 }],
},
},
},
});
expect(cfg.models?.providers?.anthropic).toBeDefined();
expect(cfg.models?.providers?.minimax).toBeDefined();
});
// An explicit models.mode (e.g. "replace") is kept; only absent mode
// falls back to the default "merge".
it("preserves existing models mode", () => {
const cfg = applyMinimaxApiConfig({
models: { mode: "replace", providers: {} },
});
expect(cfg.models?.mode).toBe("replace");
});
});
// Provider-only variant: registers the minimax provider but must never
// change which model the agent treats as primary.
describe("applyMinimaxApiProviderConfig", () => {
  it("does not overwrite existing primary model", () => {
    const existingPrimary = "anthropic/claude-opus-4-5";
    const result = applyMinimaxApiProviderConfig({
      agents: { defaults: { model: { primary: existingPrimary } } },
    });
    expect(result.agents?.defaults?.model?.primary).toBe(
      existingPrimary,
    );
  });
});

View File

@@ -263,3 +263,73 @@ export function applyMinimaxHostedConfig(
},
};
}
// MiniMax Anthropic-compatible API (api.minimax.io/anthropic)
/**
 * Registers the MiniMax Anthropic-compatible provider and attaches the
 * "Minimax" display alias for the chosen model, without changing which
 * model the agent treats as primary.
 *
 * @param cfg - Current config; never mutated — a new object is returned.
 * @param modelId - MiniMax model id to register (default "MiniMax-M2.1").
 * @returns Config with the `minimax` provider replaced and the alias set.
 */
export function applyMinimaxApiProviderConfig(
  cfg: ClawdbotConfig,
  modelId: string = "MiniMax-M2.1",
): ClawdbotConfig {
  const providers = { ...cfg.models?.providers };
  // Replace any existing minimax entry entirely so a stale endpoint or API
  // style cannot linger after re-onboarding.
  providers.minimax = {
    baseUrl: "https://api.minimax.io/anthropic",
    apiKey: "", // Resolved via MINIMAX_API_KEY env var or auth profile
    api: "anthropic-messages",
    models: [
      {
        id: modelId,
        name: `MiniMax ${modelId}`,
        // Only the MiniMax-M2 model is flagged as a reasoning model.
        reasoning: modelId === "MiniMax-M2",
        input: ["text"],
        // Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
        cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
        contextWindow: 200000,
        maxTokens: 8192,
      },
    ],
  };
  const models = { ...cfg.agents?.defaults?.models };
  // Merge over any existing per-model entry so user params survive;
  // only the alias is (re)set.
  models[`minimax/${modelId}`] = {
    ...models[`minimax/${modelId}`],
    alias: "Minimax",
  };
  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        models,
      },
    },
    // Spread cfg.models first so unrelated settings survive the rebuild;
    // previously only `mode` and `providers` were carried over and any
    // other keys under `models` were silently dropped.
    models: { ...cfg.models, mode: cfg.models?.mode ?? "merge", providers },
  };
}
/**
 * Full MiniMax API setup: registers the provider (via
 * applyMinimaxApiProviderConfig) and makes the chosen model the agent's
 * primary. Existing `fallbacks` on the prior model config are carried over;
 * all other prior model keys are intentionally discarded.
 *
 * @param cfg - Current config; never mutated — a new object is returned.
 * @param modelId - MiniMax model id to select (default "MiniMax-M2.1").
 * @returns Config with provider registered and primary model switched.
 */
export function applyMinimaxApiConfig(
  cfg: ClawdbotConfig,
  modelId: string = "MiniMax-M2.1",
): ClawdbotConfig {
  const withProvider = applyMinimaxApiProviderConfig(cfg, modelId);
  const priorModel = withProvider.agents?.defaults?.model as
    | Record<string, unknown>
    | undefined;
  // Keep only the user's fallback chain from the previous model config
  // (the key is preserved even when its value is undefined).
  const carried =
    priorModel && "fallbacks" in priorModel
      ? { fallbacks: (priorModel as { fallbacks?: string[] }).fallbacks }
      : undefined;
  return {
    ...withProvider,
    agents: {
      ...withProvider.agents,
      defaults: {
        ...withProvider.agents?.defaults,
        model: {
          ...carried,
          primary: `minimax/${modelId}`,
        },
      },
    },
  };
}

View File

@@ -1,14 +1,10 @@
import { spawnSync } from "node:child_process";
import path from "node:path";
import {
CLAUDE_CLI_PROFILE_ID,
CODEX_CLI_PROFILE_ID,
ensureAuthProfileStore,
upsertAuthProfile,
} from "../agents/auth-profiles.js";
import { resolveEnvApiKey } from "../agents/model-auth.js";
import { normalizeProviderId } from "../agents/model-selection.js";
import { parseDurationMs } from "../cli/parse-duration.js";
import {
type ClawdbotConfig,
CONFIG_PATH_CLAWDBOT,
@@ -33,6 +29,7 @@ import { applyGoogleGeminiModelDefault } from "./google-gemini-model-default.js"
import { healthCommand } from "./health.js";
import {
applyAuthProfileConfig,
applyMinimaxApiConfig,
applyMinimaxConfig,
applyMinimaxHostedConfig,
setAnthropicApiKey,
@@ -177,6 +174,20 @@ export async function runNonInteractiveOnboarding(
mode: "api_key",
});
nextConfig = applyMinimaxHostedConfig(nextConfig);
} else if (authChoice === "minimax-api") {
const key = opts.minimaxApiKey?.trim() || resolveEnvApiKey("minimax")?.apiKey;
if (!key) {
runtime.error("Missing --minimax-api-key (or MINIMAX_API_KEY in env).");
runtime.exit(1);
return;
}
await setMinimaxApiKey(key);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "minimax:default",
provider: "minimax",
mode: "api_key",
});
nextConfig = applyMinimaxApiConfig(nextConfig);
} else if (authChoice === "claude-cli") {
const store = ensureAuthProfileStore(undefined, {
allowKeychainPrompt: false,
@@ -210,82 +221,20 @@ export async function runNonInteractiveOnboarding(
nextConfig = applyOpenAICodexModelDefault(nextConfig).next;
} else if (authChoice === "minimax") {
nextConfig = applyMinimaxConfig(nextConfig);
} else if (authChoice === "setup-token" || authChoice === "oauth") {
if (!process.stdin.isTTY) {
runtime.error("`claude setup-token` requires an interactive TTY.");
runtime.exit(1);
return;
}
const res = spawnSync("claude", ["setup-token"], { stdio: "inherit" });
if (res.error) throw res.error;
if (typeof res.status === "number" && res.status !== 0) {
runtime.error(`claude setup-token failed (exit ${res.status})`);
runtime.exit(1);
return;
}
const store = ensureAuthProfileStore(undefined, {
allowKeychainPrompt: true,
});
if (!store.profiles[CLAUDE_CLI_PROFILE_ID]) {
runtime.error(
`No Claude CLI credentials found after setup-token. Expected auth profile ${CLAUDE_CLI_PROFILE_ID}.`,
);
runtime.exit(1);
return;
}
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: CLAUDE_CLI_PROFILE_ID,
provider: "anthropic",
mode: "token",
});
} else if (authChoice === "token") {
const providerRaw = opts.tokenProvider?.trim();
const tokenRaw = opts.token?.trim();
if (!providerRaw) {
runtime.error(
"Missing --token-provider (required for --auth-choice token).",
);
runtime.exit(1);
return;
}
if (!tokenRaw) {
runtime.error("Missing --token (required for --auth-choice token).");
runtime.exit(1);
return;
}
const provider = normalizeProviderId(providerRaw);
const profileId = (
opts.tokenProfileId?.trim() || `${provider}:manual`
).trim();
const expires =
opts.tokenExpiresIn?.trim() && opts.tokenExpiresIn.trim().length > 0
? Date.now() +
parseDurationMs(String(opts.tokenExpiresIn).trim(), {
defaultUnit: "d",
})
: undefined;
upsertAuthProfile({
profileId,
credential: {
type: "token",
provider,
token: tokenRaw,
...(expires ? { expires } : {}),
},
});
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId,
provider,
mode: "token",
});
} else if (authChoice === "openai-codex" || authChoice === "antigravity") {
} else if (authChoice === "minimax-api") {
nextConfig = applyMinimaxApiConfig(nextConfig);
} else if (
authChoice === "token" ||
authChoice === "oauth" ||
authChoice === "openai-codex" ||
authChoice === "antigravity"
) {
const label =
authChoice === "antigravity" ? "Antigravity" : "OpenAI Codex OAuth";
authChoice === "antigravity"
? "Antigravity"
: authChoice === "token"
? "Token"
: "OAuth";
runtime.error(`${label} requires interactive mode.`);
runtime.exit(1);
return;

View File

@@ -16,6 +16,7 @@ export type AuthChoice =
| "gemini-api-key"
| "minimax-cloud"
| "minimax"
| "minimax-api"
| "skip";
export type GatewayAuthChoice = "off" | "token" | "password";
export type ResetScope = "config" | "config+creds+sessions" | "full";