Add Synthetic provider support
committed by Peter Steinberger
parent 25297ce3f5
commit 8b5cd97ceb
@@ -143,6 +143,34 @@ Moonshot uses OpenAI-compatible endpoints, so configure it as a custom provider:
}
```

### Synthetic

Synthetic provides Anthropic-compatible models behind the `synthetic` provider:

- Provider: `synthetic`
- Auth: `SYNTHETIC_API_KEY`
- Example model: `synthetic/hf:MiniMaxAI/MiniMax-M2.1`
- CLI: `clawdbot onboard --auth-choice synthetic-api-key`

```json5
{
  agents: {
    defaults: { model: { primary: "synthetic/hf:MiniMaxAI/MiniMax-M2.1" } }
  },
  models: {
    mode: "merge",
    providers: {
      synthetic: {
        baseUrl: "https://api.synthetic.new/anthropic",
        apiKey: "${SYNTHETIC_API_KEY}",
        api: "anthropic-messages",
        models: [{ id: "hf:MiniMaxAI/MiniMax-M2.1", name: "MiniMax M2.1" }]
      }
    }
  }
}
```

### MiniMax

MiniMax is configured via `models.providers` because it uses custom endpoints:
@@ -1819,6 +1819,48 @@ Notes:
- Model ref: `moonshot/kimi-k2-0905-preview`.
- Use `https://api.moonshot.cn/v1` if you need the China endpoint.

### Synthetic (Anthropic-compatible)

Use Synthetic's Anthropic-compatible endpoint:

```json5
{
  env: { SYNTHETIC_API_KEY: "sk-..." },
  agents: {
    defaults: {
      model: { primary: "synthetic/hf:MiniMaxAI/MiniMax-M2.1" },
      models: { "synthetic/hf:MiniMaxAI/MiniMax-M2.1": { alias: "MiniMax M2.1" } }
    }
  },
  models: {
    mode: "merge",
    providers: {
      synthetic: {
        baseUrl: "https://api.synthetic.new/anthropic",
        apiKey: "${SYNTHETIC_API_KEY}",
        api: "anthropic-messages",
        models: [
          {
            id: "hf:MiniMaxAI/MiniMax-M2.1",
            name: "MiniMax M2.1",
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 192000,
            maxTokens: 65536
          }
        ]
      }
    }
  }
}
```

Notes:
- Set `SYNTHETIC_API_KEY` or use `clawdbot onboard --auth-choice synthetic-api-key`.
- Model ref: `synthetic/hf:MiniMaxAI/MiniMax-M2.1`.
- Base URL should omit `/v1` because the Anthropic client appends it.

### Local models (LM Studio) — recommended setup

See [/gateway/local-models](/gateway/local-models) for the current local guidance. TL;DR: run MiniMax M2.1 via LM Studio Responses API on serious hardware; keep hosted models merged for fallback.
@@ -26,6 +26,7 @@ model as `provider/model`.
- [Anthropic (API + Claude CLI)](/providers/anthropic)
- [OpenRouter](/providers/openrouter)
- [Moonshot AI (Kimi)](/providers/moonshot)
- [Synthetic](/providers/synthetic)
- [OpenCode Zen](/providers/opencode)
- [Z.AI](/providers/zai)
- [GLM models](/providers/glm)
docs/providers/synthetic.md (new file, 98 lines)
@@ -0,0 +1,98 @@
---
summary: "Use Synthetic's Anthropic-compatible API in Clawdbot"
read_when:
  - You want to use Synthetic as a model provider
  - You need a Synthetic API key or base URL setup
---
# Synthetic

Synthetic exposes Anthropic-compatible endpoints. Clawdbot registers it as the `synthetic` provider and uses the Anthropic Messages API.

## Quick setup

1) Set `SYNTHETIC_API_KEY` (or run the wizard below).
2) Run onboarding:

```bash
clawdbot onboard --auth-choice synthetic-api-key
```

The default model is set to:

```
synthetic/hf:MiniMaxAI/MiniMax-M2.1
```

## Config example

```json5
{
  env: { SYNTHETIC_API_KEY: "sk-..." },
  agents: {
    defaults: {
      model: { primary: "synthetic/hf:MiniMaxAI/MiniMax-M2.1" },
      models: { "synthetic/hf:MiniMaxAI/MiniMax-M2.1": { alias: "MiniMax M2.1" } }
    }
  },
  models: {
    mode: "merge",
    providers: {
      synthetic: {
        baseUrl: "https://api.synthetic.new/anthropic",
        apiKey: "${SYNTHETIC_API_KEY}",
        api: "anthropic-messages",
        models: [
          {
            id: "hf:MiniMaxAI/MiniMax-M2.1",
            name: "MiniMax M2.1",
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 192000,
            maxTokens: 65536
          }
        ]
      }
    }
  }
}
```

Note: Clawdbot's Anthropic client appends `/v1` to the base URL, so use `https://api.synthetic.new/anthropic` (not `/anthropic/v1`). If Synthetic changes its base URL, override `models.providers.synthetic.baseUrl`.
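As a rough illustration of that override (the replacement host below is hypothetical), only the `baseUrl` needs to change:

```json5
{
  models: {
    providers: {
      synthetic: {
        // hypothetical endpoint; keep the path without a trailing /v1,
        // since Clawdbot's Anthropic client appends /v1 itself
        baseUrl: "https://api.synthetic.example/anthropic"
      }
    }
  }
}
```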
## Model catalog

All models below use cost `0` (input/output/cache).

| Model ID | Context window | Max tokens | Reasoning | Input |
| --- | --- | --- | --- | --- |
| `hf:MiniMaxAI/MiniMax-M2.1` | 192000 | 65536 | false | text |
| `hf:moonshotai/Kimi-K2-Thinking` | 256000 | 8192 | true | text |
| `hf:zai-org/GLM-4.7` | 198000 | 128000 | false | text |
| `hf:deepseek-ai/DeepSeek-R1-0528` | 128000 | 8192 | false | text |
| `hf:deepseek-ai/DeepSeek-V3-0324` | 128000 | 8192 | false | text |
| `hf:deepseek-ai/DeepSeek-V3.1` | 128000 | 8192 | false | text |
| `hf:deepseek-ai/DeepSeek-V3.1-Terminus` | 128000 | 8192 | false | text |
| `hf:deepseek-ai/DeepSeek-V3.2` | 159000 | 8192 | false | text |
| `hf:meta-llama/Llama-3.3-70B-Instruct` | 128000 | 8192 | false | text |
| `hf:meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8` | 524000 | 8192 | false | text |
| `hf:MiniMaxAI/MiniMax-M2` | 192000 | 65536 | false | text |
| `hf:moonshotai/Kimi-K2-Instruct-0905` | 256000 | 8192 | false | text |
| `hf:openai/gpt-oss-120b` | 128000 | 8192 | false | text |
| `hf:Qwen/Qwen3-235B-A22B-Instruct-2507` | 256000 | 8192 | false | text |
| `hf:Qwen/Qwen3-Coder-480B-A35B-Instruct` | 256000 | 8192 | false | text |
| `hf:Qwen/Qwen3-VL-235B-A22B-Instruct` | 250000 | 8192 | false | text + image |
| `hf:zai-org/GLM-4.5` | 128000 | 128000 | false | text |
| `hf:zai-org/GLM-4.6` | 198000 | 128000 | false | text |
| `hf:deepseek-ai/DeepSeek-V3` | 128000 | 8192 | false | text |
| `hf:Qwen/Qwen3-235B-A22B-Thinking-2507` | 256000 | 8192 | true | text |

## Notes

- Model refs use `synthetic/<modelId>`.
- If you enable a model allowlist (`agents.defaults.models`), add every model you plan to use (see the sketch after this list).
- See [Model providers](/concepts/model-providers) for provider rules.
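A minimal allowlist sketch, with model IDs taken from the catalog above (the alias values are illustrative):

```json5
{
  agents: {
    defaults: {
      models: {
        // once an allowlist exists, every model you plan to use must be listed
        "synthetic/hf:MiniMaxAI/MiniMax-M2.1": { alias: "MiniMax M2.1" },
        "synthetic/hf:moonshotai/Kimi-K2-Thinking": { alias: "Kimi K2 Thinking" }
      }
    }
  }
}
```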
@@ -84,6 +84,8 @@ Tip: `--json` does **not** imply non-interactive mode. Use `--non-interactive` (
- **API key**: stores the key for you.
- **MiniMax M2.1**: config is auto-written.
  - More detail: [MiniMax](/providers/minimax)
- **Synthetic (Anthropic-compatible)**: prompts for `SYNTHETIC_API_KEY`.
  - More detail: [Synthetic](/providers/synthetic)
- **Moonshot (Kimi K2)**: config is auto-written.
  - More detail: [Moonshot AI](/providers/moonshot)
- **Skip**: no auth configured yet.
@@ -214,6 +216,17 @@ clawdbot onboard --non-interactive \
  --gateway-bind loopback
```

Synthetic example:

```bash
clawdbot onboard --non-interactive \
  --mode local \
  --auth-choice synthetic-api-key \
  --synthetic-api-key "$SYNTHETIC_API_KEY" \
  --gateway-port 18789 \
  --gateway-bind loopback
```

OpenCode Zen example:

```bash
@@ -236,4 +236,28 @@ describe("getApiKeyForModel", () => {
      }
    }
  });

  it("resolves Synthetic API key from env", async () => {
    const previousSynthetic = process.env.SYNTHETIC_API_KEY;

    try {
      process.env.SYNTHETIC_API_KEY = "synthetic-test-key";

      vi.resetModules();
      const { resolveApiKeyForProvider } = await import("./model-auth.js");

      const resolved = await resolveApiKeyForProvider({
        provider: "synthetic",
        store: { version: 1, profiles: {} },
      });
      expect(resolved.apiKey).toBe("synthetic-test-key");
      expect(resolved.source).toContain("SYNTHETIC_API_KEY");
    } finally {
      if (previousSynthetic === undefined) {
        delete process.env.SYNTHETIC_API_KEY;
      } else {
        process.env.SYNTHETIC_API_KEY = previousSynthetic;
      }
    }
  });
});

@@ -148,6 +148,7 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
    openrouter: "OPENROUTER_API_KEY",
    moonshot: "MOONSHOT_API_KEY",
    minimax: "MINIMAX_API_KEY",
    synthetic: "SYNTHETIC_API_KEY",
    mistral: "MISTRAL_API_KEY",
    opencode: "OPENCODE_API_KEY",
  };
@@ -32,6 +32,177 @@ const MOONSHOT_DEFAULT_COST = {
  cacheWrite: 0,
};

const SYNTHETIC_BASE_URL = "https://api.synthetic.new/anthropic";
const SYNTHETIC_DEFAULT_MODEL_ID = "hf:MiniMaxAI/MiniMax-M2.1";
const SYNTHETIC_DEFAULT_COST = {
  input: 0,
  output: 0,
  cacheRead: 0,
  cacheWrite: 0,
};
const SYNTHETIC_MODELS = [
  {
    id: SYNTHETIC_DEFAULT_MODEL_ID,
    name: "MiniMax M2.1",
    reasoning: false,
    input: ["text"],
    contextWindow: 192000,
    maxTokens: 65536,
  },
  {
    id: "hf:moonshotai/Kimi-K2-Thinking",
    name: "Kimi K2 Thinking",
    reasoning: true,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:zai-org/GLM-4.7",
    name: "GLM-4.7",
    reasoning: false,
    input: ["text"],
    contextWindow: 198000,
    maxTokens: 128000,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-R1-0528",
    name: "DeepSeek R1 0528",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3-0324",
    name: "DeepSeek V3 0324",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3.1",
    name: "DeepSeek V3.1",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
    name: "DeepSeek V3.1 Terminus",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3.2",
    name: "DeepSeek V3.2",
    reasoning: false,
    input: ["text"],
    contextWindow: 159000,
    maxTokens: 8192,
  },
  {
    id: "hf:meta-llama/Llama-3.3-70B-Instruct",
    name: "Llama 3.3 70B Instruct",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
    name: "Llama 4 Maverick 17B 128E Instruct FP8",
    reasoning: false,
    input: ["text"],
    contextWindow: 524000,
    maxTokens: 8192,
  },
  {
    id: "hf:MiniMaxAI/MiniMax-M2",
    name: "MiniMax M2",
    reasoning: false,
    input: ["text"],
    contextWindow: 192000,
    maxTokens: 65536,
  },
  {
    id: "hf:moonshotai/Kimi-K2-Instruct-0905",
    name: "Kimi K2 Instruct 0905",
    reasoning: false,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:openai/gpt-oss-120b",
    name: "GPT OSS 120B",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
    name: "Qwen3 235B A22B Instruct 2507",
    reasoning: false,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
    name: "Qwen3 Coder 480B A35B Instruct",
    reasoning: false,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
    name: "Qwen3 VL 235B A22B Instruct",
    reasoning: false,
    input: ["text", "image"],
    contextWindow: 250000,
    maxTokens: 8192,
  },
  {
    id: "hf:zai-org/GLM-4.5",
    name: "GLM-4.5",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 128000,
  },
  {
    id: "hf:zai-org/GLM-4.6",
    name: "GLM-4.6",
    reasoning: false,
    input: ["text"],
    contextWindow: 198000,
    maxTokens: 128000,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3",
    name: "DeepSeek V3",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
    name: "Qwen3 235B A22B Thinking 2507",
    reasoning: true,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
] as const;

function normalizeApiKeyConfig(value: string): string {
  const trimmed = value.trim();
  const match = /^\$\{([A-Z0-9_]+)\}$/.exec(trimmed);
@@ -180,6 +351,22 @@ function buildMoonshotProvider(): ProviderConfig {
  };
}

function buildSyntheticProvider(): ProviderConfig {
  return {
    baseUrl: SYNTHETIC_BASE_URL,
    api: "anthropic-messages",
    models: SYNTHETIC_MODELS.map((model) => ({
      id: model.id,
      name: model.name,
      reasoning: model.reasoning,
      input: [...model.input],
      cost: SYNTHETIC_DEFAULT_COST,
      contextWindow: model.contextWindow,
      maxTokens: model.maxTokens,
    })),
  };
}

export function resolveImplicitProviders(params: {
  agentDir: string;
}): ModelsConfig["providers"] {
@@ -202,5 +389,12 @@ export function resolveImplicitProviders(params: {
    providers.moonshot = { ...buildMoonshotProvider(), apiKey: moonshotKey };
  }

  const syntheticKey =
    resolveEnvApiKeyVarName("synthetic") ??
    resolveApiKeyFromProfiles({ provider: "synthetic", store: authStore });
  if (syntheticKey) {
    providers.synthetic = { ...buildSyntheticProvider(), apiKey: syntheticKey };
  }

  return providers;
}
@@ -348,11 +348,13 @@ describe("models config", () => {
    const previousGithub = process.env.GITHUB_TOKEN;
    const previousMinimax = process.env.MINIMAX_API_KEY;
    const previousMoonshot = process.env.MOONSHOT_API_KEY;
    const previousSynthetic = process.env.SYNTHETIC_API_KEY;
    delete process.env.COPILOT_GITHUB_TOKEN;
    delete process.env.GH_TOKEN;
    delete process.env.GITHUB_TOKEN;
    delete process.env.MINIMAX_API_KEY;
    delete process.env.MOONSHOT_API_KEY;
    delete process.env.SYNTHETIC_API_KEY;

    try {
      vi.resetModules();
@@ -381,6 +383,9 @@ describe("models config", () => {
      else process.env.MINIMAX_API_KEY = previousMinimax;
      if (previousMoonshot === undefined) delete process.env.MOONSHOT_API_KEY;
      else process.env.MOONSHOT_API_KEY = previousMoonshot;
      if (previousSynthetic === undefined)
        delete process.env.SYNTHETIC_API_KEY;
      else process.env.SYNTHETIC_API_KEY = previousSynthetic;
    }
  });
});
@@ -451,6 +456,42 @@ describe("models config", () => {
    });
  });

  it("adds synthetic provider when SYNTHETIC_API_KEY is set", async () => {
    await withTempHome(async () => {
      vi.resetModules();
      const prevKey = process.env.SYNTHETIC_API_KEY;
      process.env.SYNTHETIC_API_KEY = "sk-synthetic-test";
      try {
        const { ensureClawdbotModelsJson } = await import("./models-config.js");
        const { resolveClawdbotAgentDir } = await import("./agent-paths.js");

        await ensureClawdbotModelsJson({});

        const modelPath = path.join(resolveClawdbotAgentDir(), "models.json");
        const raw = await fs.readFile(modelPath, "utf8");
        const parsed = JSON.parse(raw) as {
          providers: Record<
            string,
            {
              baseUrl?: string;
              apiKey?: string;
              models?: Array<{ id: string }>;
            }
          >;
        };
        expect(parsed.providers.synthetic?.baseUrl).toBe(
          "https://api.synthetic.new/anthropic",
        );
        expect(parsed.providers.synthetic?.apiKey).toBe("SYNTHETIC_API_KEY");
        const ids = parsed.providers.synthetic?.models?.map((model) => model.id);
        expect(ids).toContain("hf:MiniMaxAI/MiniMax-M2.1");
      } finally {
        if (prevKey === undefined) delete process.env.SYNTHETIC_API_KEY;
        else process.env.SYNTHETIC_API_KEY = prevKey;
      }
    });
  });

  it("fills missing provider.apiKey from env var name when models exist", async () => {
    await withTempHome(async () => {
      vi.resetModules();
@@ -350,6 +350,7 @@ const MODEL_PICK_PROVIDER_PREFERENCE = [
  "openai",
  "openai-codex",
  "minimax",
  "synthetic",
  "google",
  "zai",
  "openrouter",
@@ -202,6 +202,29 @@ describe("cli program", () => {
    );
  });

  it("passes synthetic api key to onboard", async () => {
    const program = buildProgram();
    await program.parseAsync(
      [
        "onboard",
        "--non-interactive",
        "--auth-choice",
        "synthetic-api-key",
        "--synthetic-api-key",
        "sk-synthetic-test",
      ],
      { from: "user" },
    );
    expect(onboardCommand).toHaveBeenCalledWith(
      expect.objectContaining({
        nonInteractive: true,
        authChoice: "synthetic-api-key",
        syntheticApiKey: "sk-synthetic-test",
      }),
      runtime,
    );
  });

  it("passes zai api key to onboard", async () => {
    const program = buildProgram();
    await program.parseAsync(
@@ -264,7 +264,7 @@ export function buildProgram() {
    .option("--mode <mode>", "Wizard mode: local|remote")
    .option(
      "--auth-choice <choice>",
      "Auth: setup-token|claude-cli|token|openai-codex|openai-api-key|openrouter-api-key|moonshot-api-key|codex-cli|antigravity|gemini-api-key|zai-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|skip",
      "Auth: setup-token|claude-cli|token|openai-codex|openai-api-key|openrouter-api-key|moonshot-api-key|synthetic-api-key|codex-cli|antigravity|gemini-api-key|zai-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|skip",
    )
    .option(
      "--token-provider <id>",
@@ -289,6 +289,7 @@ export function buildProgram() {
    .option("--gemini-api-key <key>", "Gemini API key")
    .option("--zai-api-key <key>", "Z.AI API key")
    .option("--minimax-api-key <key>", "MiniMax API key")
    .option("--synthetic-api-key <key>", "Synthetic API key")
    .option("--opencode-zen-api-key <key>", "OpenCode Zen API key")
    .option("--gateway-port <port>", "Gateway port")
    .option("--gateway-bind <mode>", "Gateway bind: loopback|lan|tailnet|auto")
@@ -334,6 +335,7 @@ export function buildProgram() {
      | "openai-api-key"
      | "openrouter-api-key"
      | "moonshot-api-key"
      | "synthetic-api-key"
      | "codex-cli"
      | "antigravity"
      | "gemini-api-key"
@@ -357,6 +359,7 @@ export function buildProgram() {
      geminiApiKey: opts.geminiApiKey as string | undefined,
      zaiApiKey: opts.zaiApiKey as string | undefined,
      minimaxApiKey: opts.minimaxApiKey as string | undefined,
      syntheticApiKey: opts.syntheticApiKey as string | undefined,
      opencodeZenApiKey: opts.opencodeZenApiKey as string | undefined,
      gatewayPort:
        typeof opts.gatewayPort === "string"
@@ -106,4 +106,16 @@ describe("buildAuthChoiceOptions", () => {

    expect(options.some((opt) => opt.value === "moonshot-api-key")).toBe(true);
  });

  it("includes Synthetic auth choice", () => {
    const store: AuthProfileStore = { version: 1, profiles: {} };
    const options = buildAuthChoiceOptions({
      store,
      includeSkip: false,
      includeClaudeCliIfMissing: true,
      platform: "darwin",
    });

    expect(options.some((opt) => opt.value === "synthetic-api-key")).toBe(true);
  });
});
@@ -20,7 +20,8 @@ export type AuthChoiceGroupId =
  | "moonshot"
  | "zai"
  | "opencode-zen"
  | "minimax";
  | "minimax"
  | "synthetic";

export type AuthChoiceGroup = {
  value: AuthChoiceGroupId;
@@ -53,6 +54,12 @@ const AUTH_CHOICE_GROUP_DEFS: {
    hint: "M2.1 (recommended)",
    choices: ["minimax-api", "minimax-api-lightning"],
  },
  {
    value: "synthetic",
    label: "Synthetic",
    hint: "Anthropic-compatible (multi-model)",
    choices: ["synthetic-api-key"],
  },
  {
    value: "google",
    label: "Google",
@@ -167,6 +174,7 @@ export function buildAuthChoiceOptions(params: {
  options.push({ value: "openai-api-key", label: "OpenAI API key" });
  options.push({ value: "openrouter-api-key", label: "OpenRouter API key" });
  options.push({ value: "moonshot-api-key", label: "Moonshot AI API key" });
  options.push({ value: "synthetic-api-key", label: "Synthetic API key" });
  options.push({
    value: "antigravity",
    label: "Google Antigravity (Claude Opus 4.5, Gemini 3, etc.)",
@@ -102,6 +102,67 @@ describe("applyAuthChoice", () => {
    expect(parsed.profiles?.["minimax:default"]?.key).toBe("sk-minimax-test");
  });

  it("prompts and writes Synthetic API key when selecting synthetic-api-key", async () => {
    tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-auth-"));
    process.env.CLAWDBOT_STATE_DIR = tempStateDir;
    process.env.CLAWDBOT_AGENT_DIR = path.join(tempStateDir, "agent");
    process.env.PI_CODING_AGENT_DIR = process.env.CLAWDBOT_AGENT_DIR;

    const text = vi.fn().mockResolvedValue("sk-synthetic-test");
    const select: WizardPrompter["select"] = vi.fn(
      async (params) => params.options[0]?.value as never,
    );
    const multiselect: WizardPrompter["multiselect"] = vi.fn(async () => []);
    const prompter: WizardPrompter = {
      intro: vi.fn(noopAsync),
      outro: vi.fn(noopAsync),
      note: vi.fn(noopAsync),
      select,
      multiselect,
      text,
      confirm: vi.fn(async () => false),
      progress: vi.fn(() => ({ update: noop, stop: noop })),
    };
    const runtime: RuntimeEnv = {
      log: vi.fn(),
      error: vi.fn(),
      exit: vi.fn((code: number) => {
        throw new Error(`exit:${code}`);
      }),
    };

    const result = await applyAuthChoice({
      authChoice: "synthetic-api-key",
      config: {},
      prompter,
      runtime,
      setDefaultModel: true,
    });

    expect(text).toHaveBeenCalledWith(
      expect.objectContaining({ message: "Enter Synthetic API key" }),
    );
    expect(result.config.auth?.profiles?.["synthetic:default"]).toMatchObject({
      provider: "synthetic",
      mode: "api_key",
    });

    const authProfilePath = path.join(
      tempStateDir,
      "agents",
      "main",
      "agent",
      "auth-profiles.json",
    );
    const raw = await fs.readFile(authProfilePath, "utf8");
    const parsed = JSON.parse(raw) as {
      profiles?: Record<string, { key?: string }>;
    };
    expect(parsed.profiles?.["synthetic:default"]?.key).toBe(
      "sk-synthetic-test",
    );
  });

  it("sets default model when selecting github-copilot", async () => {
    tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-auth-"));
    process.env.CLAWDBOT_STATE_DIR = tempStateDir;
@@ -43,14 +43,18 @@ import {
  applyMinimaxHostedConfig,
  applyMinimaxHostedProviderConfig,
  applyMinimaxProviderConfig,
  applySyntheticConfig,
  applySyntheticProviderConfig,
  applyOpencodeZenConfig,
  applyOpencodeZenProviderConfig,
  applyZaiConfig,
  MINIMAX_HOSTED_MODEL_REF,
  SYNTHETIC_DEFAULT_MODEL_REF,
  setAnthropicApiKey,
  setGeminiApiKey,
  setMinimaxApiKey,
  setOpencodeZenApiKey,
  setSyntheticApiKey,
  setZaiApiKey,
  writeOAuthCredentials,
  ZAI_DEFAULT_MODEL_REF,
@@ -641,6 +645,28 @@ export async function applyAuthChoice(params: {
      agentModelOverride = ZAI_DEFAULT_MODEL_REF;
      await noteAgentModel(ZAI_DEFAULT_MODEL_REF);
    }
  } else if (params.authChoice === "synthetic-api-key") {
    const key = await params.prompter.text({
      message: "Enter Synthetic API key",
      validate: (value) => (value?.trim() ? undefined : "Required"),
    });
    await setSyntheticApiKey(String(key).trim(), params.agentDir);
    nextConfig = applyAuthProfileConfig(nextConfig, {
      profileId: "synthetic:default",
      provider: "synthetic",
      mode: "api_key",
    });
    if (params.setDefaultModel) {
      nextConfig = applySyntheticConfig(nextConfig);
      await params.prompter.note(
        `Default model set to ${SYNTHETIC_DEFAULT_MODEL_REF}`,
        "Model configured",
      );
    } else {
      nextConfig = applySyntheticProviderConfig(nextConfig);
      agentModelOverride = SYNTHETIC_DEFAULT_MODEL_REF;
      await noteAgentModel(SYNTHETIC_DEFAULT_MODEL_REF);
    }
  } else if (params.authChoice === "apiKey") {
    const key = await params.prompter.text({
      message: "Enter Anthropic API key",
@@ -805,6 +831,8 @@ export function resolvePreferredProviderForAuthChoice(
      return "google";
    case "antigravity":
      return "google-antigravity";
    case "synthetic-api-key":
      return "synthetic";
    case "minimax-cloud":
    case "minimax-api":
      return "minimax";
@@ -727,6 +727,7 @@ export async function modelsStatusCommand(
    "openrouter",
    "zai",
    "mistral",
    "synthetic",
  ];
  for (const provider of envProbeProviders) {
    if (resolveEnvApiKey(provider)) providersFromEnv.add(provider);
@@ -9,11 +9,15 @@ import {
  applyAuthProfileConfig,
  applyMinimaxApiConfig,
  applyMinimaxApiProviderConfig,
  applySyntheticConfig,
  applySyntheticProviderConfig,
  applyOpencodeZenConfig,
  applyOpencodeZenProviderConfig,
  applyOpenrouterConfig,
  applyOpenrouterProviderConfig,
  OPENROUTER_DEFAULT_MODEL_REF,
  SYNTHETIC_DEFAULT_MODEL_ID,
  SYNTHETIC_DEFAULT_MODEL_REF,
  writeOAuthCredentials,
} from "./onboard-auth.js";

@@ -260,6 +264,56 @@ describe("applyMinimaxApiProviderConfig", () => {
  });
});

describe("applySyntheticConfig", () => {
  it("adds synthetic provider with correct settings", () => {
    const cfg = applySyntheticConfig({});
    expect(cfg.models?.providers?.synthetic).toMatchObject({
      baseUrl: "https://api.synthetic.new/anthropic",
      api: "anthropic-messages",
    });
  });

  it("sets correct primary model", () => {
    const cfg = applySyntheticConfig({});
    expect(cfg.agents?.defaults?.model?.primary).toBe(
      SYNTHETIC_DEFAULT_MODEL_REF,
    );
  });

  it("merges existing synthetic provider models", () => {
    const cfg = applySyntheticProviderConfig({
      models: {
        providers: {
          synthetic: {
            baseUrl: "https://old.example.com",
            apiKey: "old-key",
            api: "openai-completions",
            models: [
              {
                id: "old-model",
                name: "Old",
                reasoning: false,
                input: ["text"],
                cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 },
                contextWindow: 1000,
                maxTokens: 100,
              },
            ],
          },
        },
      },
    });
    expect(cfg.models?.providers?.synthetic?.baseUrl).toBe(
      "https://api.synthetic.new/anthropic",
    );
    expect(cfg.models?.providers?.synthetic?.api).toBe("anthropic-messages");
    expect(cfg.models?.providers?.synthetic?.apiKey).toBe("old-key");
    const ids = cfg.models?.providers?.synthetic?.models.map((m) => m.id);
    expect(ids).toContain("old-model");
    expect(ids).toContain(SYNTHETIC_DEFAULT_MODEL_ID);
  });
});

describe("applyOpencodeZenProviderConfig", () => {
  it("adds allowlist entry for the default model", () => {
    const cfg = applyOpencodeZenProviderConfig({});
@@ -16,6 +16,9 @@ export const MOONSHOT_DEFAULT_MODEL_ID = "kimi-k2-0905-preview";
const MOONSHOT_DEFAULT_CONTEXT_WINDOW = 256000;
const MOONSHOT_DEFAULT_MAX_TOKENS = 8192;
export const MOONSHOT_DEFAULT_MODEL_REF = `moonshot/${MOONSHOT_DEFAULT_MODEL_ID}`;
const SYNTHETIC_BASE_URL = "https://api.synthetic.new/anthropic";
export const SYNTHETIC_DEFAULT_MODEL_ID = "hf:MiniMaxAI/MiniMax-M2.1";
export const SYNTHETIC_DEFAULT_MODEL_REF = `synthetic/${SYNTHETIC_DEFAULT_MODEL_ID}`;
// Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
const MINIMAX_API_COST = {
  input: 15,
@@ -41,6 +44,175 @@ const MOONSHOT_DEFAULT_COST = {
  cacheRead: 0,
  cacheWrite: 0,
};
const SYNTHETIC_DEFAULT_COST = {
  input: 0,
  output: 0,
  cacheRead: 0,
  cacheWrite: 0,
};

const SYNTHETIC_MODEL_CATALOG = [
  {
    id: SYNTHETIC_DEFAULT_MODEL_ID,
    name: "MiniMax M2.1",
    reasoning: false,
    input: ["text"],
    contextWindow: 192000,
    maxTokens: 65536,
  },
  {
    id: "hf:moonshotai/Kimi-K2-Thinking",
    name: "Kimi K2 Thinking",
    reasoning: true,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:zai-org/GLM-4.7",
    name: "GLM-4.7",
    reasoning: false,
    input: ["text"],
    contextWindow: 198000,
    maxTokens: 128000,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-R1-0528",
    name: "DeepSeek R1 0528",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3-0324",
    name: "DeepSeek V3 0324",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3.1",
    name: "DeepSeek V3.1",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
    name: "DeepSeek V3.1 Terminus",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3.2",
    name: "DeepSeek V3.2",
    reasoning: false,
    input: ["text"],
    contextWindow: 159000,
    maxTokens: 8192,
  },
  {
    id: "hf:meta-llama/Llama-3.3-70B-Instruct",
    name: "Llama 3.3 70B Instruct",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
    name: "Llama 4 Maverick 17B 128E Instruct FP8",
    reasoning: false,
    input: ["text"],
    contextWindow: 524000,
    maxTokens: 8192,
  },
  {
    id: "hf:MiniMaxAI/MiniMax-M2",
    name: "MiniMax M2",
    reasoning: false,
    input: ["text"],
    contextWindow: 192000,
    maxTokens: 65536,
  },
  {
    id: "hf:moonshotai/Kimi-K2-Instruct-0905",
    name: "Kimi K2 Instruct 0905",
    reasoning: false,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:openai/gpt-oss-120b",
    name: "GPT OSS 120B",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
    name: "Qwen3 235B A22B Instruct 2507",
    reasoning: false,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
    name: "Qwen3 Coder 480B A35B Instruct",
    reasoning: false,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
    name: "Qwen3 VL 235B A22B Instruct",
    reasoning: false,
    input: ["text", "image"],
    contextWindow: 250000,
    maxTokens: 8192,
  },
  {
    id: "hf:zai-org/GLM-4.5",
    name: "GLM-4.5",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 128000,
  },
  {
    id: "hf:zai-org/GLM-4.6",
    name: "GLM-4.6",
    reasoning: false,
    input: ["text"],
    contextWindow: 198000,
    maxTokens: 128000,
  },
  {
    id: "hf:deepseek-ai/DeepSeek-V3",
    name: "DeepSeek V3",
    reasoning: false,
    input: ["text"],
    contextWindow: 128000,
    maxTokens: 8192,
  },
  {
    id: "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
    name: "Qwen3 235B A22B Thinking 2507",
    reasoning: true,
    input: ["text"],
    contextWindow: 256000,
    maxTokens: 8192,
  },
] as const;

const MINIMAX_MODEL_CATALOG = {
  "MiniMax-M2.1": { name: "MiniMax M2.1", reasoning: false },
@@ -97,6 +269,22 @@ function buildMoonshotModelDefinition(): ModelDefinitionConfig {
  };
}

type SyntheticCatalogEntry = (typeof SYNTHETIC_MODEL_CATALOG)[number];

function buildSyntheticModelDefinition(
  entry: SyntheticCatalogEntry,
): ModelDefinitionConfig {
  return {
    id: entry.id,
    name: entry.name,
    reasoning: entry.reasoning,
    input: [...entry.input],
    cost: SYNTHETIC_DEFAULT_COST,
    contextWindow: entry.contextWindow,
    maxTokens: entry.maxTokens,
  };
}

export async function writeOAuthCredentials(
  provider: OAuthProvider,
  creds: OAuthCredentials,
@@ -166,6 +354,19 @@ export async function setMoonshotApiKey(key: string, agentDir?: string) {
  });
}

export async function setSyntheticApiKey(key: string, agentDir?: string) {
  // Write to the multi-agent path so gateway finds credentials on startup
  upsertAuthProfile({
    profileId: "synthetic:default",
    credential: {
      type: "api_key",
      provider: "synthetic",
      key,
    },
    agentDir: agentDir ?? resolveDefaultAgentDir(),
  });
}

export const ZAI_DEFAULT_MODEL_REF = "zai/glm-4.7";
export const OPENROUTER_DEFAULT_MODEL_REF = "openrouter/auto";

@@ -343,6 +544,83 @@ export function applyMoonshotConfig(cfg: ClawdbotConfig): ClawdbotConfig {
  };
}

export function applySyntheticProviderConfig(
  cfg: ClawdbotConfig,
): ClawdbotConfig {
  const models = { ...cfg.agents?.defaults?.models };
  models[SYNTHETIC_DEFAULT_MODEL_REF] = {
    ...models[SYNTHETIC_DEFAULT_MODEL_REF],
    alias:
      models[SYNTHETIC_DEFAULT_MODEL_REF]?.alias ?? "MiniMax M2.1",
  };

  const providers = { ...cfg.models?.providers };
  const existingProvider = providers.synthetic;
  const existingModels = Array.isArray(existingProvider?.models)
    ? existingProvider.models
    : [];
  const syntheticModels = SYNTHETIC_MODEL_CATALOG.map(
    buildSyntheticModelDefinition,
  );
  const mergedModels = [
    ...existingModels,
    ...syntheticModels.filter(
      (model) => !existingModels.some((existing) => existing.id === model.id),
    ),
  ];
  const { apiKey: existingApiKey, ...existingProviderRest } =
    (existingProvider ?? {}) as Record<string, unknown> as { apiKey?: string };
  const resolvedApiKey =
    typeof existingApiKey === "string" ? existingApiKey : undefined;
  const normalizedApiKey = resolvedApiKey?.trim();
  providers.synthetic = {
    ...existingProviderRest,
    baseUrl: SYNTHETIC_BASE_URL,
    api: "anthropic-messages",
    ...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}),
    models: mergedModels.length > 0 ? mergedModels : syntheticModels,
  };

  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        models,
      },
    },
    models: {
      mode: cfg.models?.mode ?? "merge",
      providers,
    },
  };
}

export function applySyntheticConfig(cfg: ClawdbotConfig): ClawdbotConfig {
  const next = applySyntheticProviderConfig(cfg);
  const existingModel = next.agents?.defaults?.model;
  return {
    ...next,
    agents: {
      ...next.agents,
      defaults: {
        ...next.agents?.defaults,
        model: {
          ...(existingModel &&
          "fallbacks" in (existingModel as Record<string, unknown>)
            ? {
                fallbacks: (existingModel as { fallbacks?: string[] })
                  .fallbacks,
              }
            : undefined),
          primary: SYNTHETIC_DEFAULT_MODEL_REF,
        },
      },
    },
  };
}

export function applyAuthProfileConfig(
  cfg: ClawdbotConfig,
  params: {
@@ -41,6 +41,7 @@ import {
  applyMoonshotConfig,
  applyOpencodeZenConfig,
  applyOpenrouterConfig,
  applySyntheticConfig,
  applyZaiConfig,
  setAnthropicApiKey,
  setGeminiApiKey,
@@ -48,6 +49,7 @@ import {
  setMoonshotApiKey,
  setOpencodeZenApiKey,
  setOpenrouterApiKey,
  setSyntheticApiKey,
  setZaiApiKey,
} from "./onboard-auth.js";
import {
@@ -316,6 +318,25 @@ export async function runNonInteractiveOnboarding(
      mode: "api_key",
    });
    nextConfig = applyMoonshotConfig(nextConfig);
  } else if (authChoice === "synthetic-api-key") {
    const resolved = await resolveNonInteractiveApiKey({
      provider: "synthetic",
      cfg: baseConfig,
      flagValue: opts.syntheticApiKey,
      flagName: "--synthetic-api-key",
      envVar: "SYNTHETIC_API_KEY",
      runtime,
    });
    if (!resolved) return;
    if (resolved.source !== "profile") {
      await setSyntheticApiKey(resolved.key);
    }
    nextConfig = applyAuthProfileConfig(nextConfig, {
      profileId: "synthetic:default",
      provider: "synthetic",
      mode: "api_key",
    });
    nextConfig = applySyntheticConfig(nextConfig);
  } else if (
    authChoice === "minimax-cloud" ||
    authChoice === "minimax-api" ||

@@ -12,6 +12,7 @@ export type AuthChoice =
  | "openai-api-key"
  | "openrouter-api-key"
  | "moonshot-api-key"
  | "synthetic-api-key"
  | "codex-cli"
  | "antigravity"
  | "apiKey"
@@ -53,6 +54,7 @@ export type OnboardOptions = {
  geminiApiKey?: string;
  zaiApiKey?: string;
  minimaxApiKey?: string;
  syntheticApiKey?: string;
  opencodeZenApiKey?: string;
  gatewayPort?: number;
  gatewayBind?: GatewayBind;

@@ -49,6 +49,7 @@ const SHELL_ENV_EXPECTED_KEYS = [
  "ZAI_API_KEY",
  "OPENROUTER_API_KEY",
  "MINIMAX_API_KEY",
  "SYNTHETIC_API_KEY",
  "ELEVENLABS_API_KEY",
  "TELEGRAM_BOT_TOKEN",
  "DISCORD_BOT_TOKEN",