Merge branch 'main' into fix/imessage-groupish-threads

This commit is contained in:
Peter Steinberger
2026-01-09 17:36:15 +00:00
committed by GitHub
19 changed files with 849 additions and 224 deletions

View File

@@ -6,12 +6,15 @@
- Node bridge: harden keepalive + heartbeat handling (TCP keepalive, better disconnects, and keepalive config tests). (#577) — thanks @steipete - Node bridge: harden keepalive + heartbeat handling (TCP keepalive, better disconnects, and keepalive config tests). (#577) — thanks @steipete
- Control UI: improve mobile responsiveness. (#558) — thanks @carlulsoe - Control UI: improve mobile responsiveness. (#558) — thanks @carlulsoe
- CLI: add `sandbox list` and `sandbox recreate` commands for managing Docker sandbox containers after image/config updates. (#563) — thanks @pasogott - CLI: add `sandbox list` and `sandbox recreate` commands for managing Docker sandbox containers after image/config updates. (#563) — thanks @pasogott
- Docs: add Hetzner Docker VPS guide. (#556) — thanks @Iamadig
- Docs: link Hetzner guide from install + platforms docs. (#592) — thanks @steipete
- Providers: add Microsoft Teams provider with polling, attachments, and CLI send support. (#404) — thanks @onutc - Providers: add Microsoft Teams provider with polling, attachments, and CLI send support. (#404) — thanks @onutc
- Slack: honor reply tags + replyToMode while keeping threaded replies in-thread. (#574) — thanks @bolismauro - Slack: honor reply tags + replyToMode while keeping threaded replies in-thread. (#574) — thanks @bolismauro
- Discord: avoid category parent overrides for channel allowlists and refactor thread context helpers. (#588) — thanks @steipete - Discord: avoid category parent overrides for channel allowlists and refactor thread context helpers. (#588) — thanks @steipete
- Discord: fix forum thread starters and cache channel lookups for thread context. (#585) — thanks @thewilloftheshadow - Discord: fix forum thread starters and cache channel lookups for thread context. (#585) — thanks @thewilloftheshadow
- Commands: accept /models as an alias for /model. - Commands: accept /models as an alias for /model.
- Commands: add `/usage` as an alias for `/status`. (#492) — thanks @lc0rp - Commands: add `/usage` as an alias for `/status`. (#492) — thanks @lc0rp
- Models/Auth: add MiniMax Anthropic-compatible API onboarding (minimax-api). (#590) — thanks @mneves75
- Commands: harden slash command registry and list text-only commands in `/commands`. - Commands: harden slash command registry and list text-only commands in `/commands`.
- Models/Auth: show per-agent auth candidates in `/model status`, and add `clawdbot models auth order {get,set,clear}` (per-agent auth rotation overrides). — thanks @steipete - Models/Auth: show per-agent auth candidates in `/model status`, and add `clawdbot models auth order {get,set,clear}` (per-agent auth rotation overrides). — thanks @steipete
- Debugging: add raw model stream logging flags and document gateway watch mode. - Debugging: add raw model stream logging flags and document gateway watch mode.
@@ -28,6 +31,7 @@
- Security: per-agent mention patterns and group elevated directives now require explicit mention to avoid cross-agent toggles. - Security: per-agent mention patterns and group elevated directives now require explicit mention to avoid cross-agent toggles.
- Config: support inline env vars in config (`env.*` / `env.vars`) and document env precedence. - Config: support inline env vars in config (`env.*` / `env.vars`) and document env precedence.
- Agent: enable adaptive context pruning by default for tool-result trimming. - Agent: enable adaptive context pruning by default for tool-result trimming.
- Agent: drop empty error assistant messages when sanitizing session history. (#591) — thanks @steipete
- Doctor: check config/state permissions and offer to tighten them. — thanks @steipete - Doctor: check config/state permissions and offer to tighten them. — thanks @steipete
- Doctor/Daemon: audit supervisor configs, add --repair/--force flows, surface service config audits in daemon status, and document user vs system services. — thanks @steipete - Doctor/Daemon: audit supervisor configs, add --repair/--force flows, surface service config audits in daemon status, and document user vs system services. — thanks @steipete
- Doctor: repair gateway service entrypoint when switching between npm and git installs; add Docker e2e coverage. — thanks @steipete - Doctor: repair gateway service entrypoint when switching between npm and git installs; add Docker e2e coverage. — thanks @steipete

View File

@@ -331,7 +331,11 @@ actor MacNodeBridgeSession {
let now = self.clock.now let now = self.clock.now
if now > last.advanced(by: timeout) { if now > last.advanced(by: timeout) {
let age = last.duration(to: now) let age = last.duration(to: now)
self.logger.warning("Node bridge heartbeat timed out; disconnecting (age: \(String(describing: age), privacy: .public)).") let ageDescription = String(describing: age)
let message =
"Node bridge heartbeat timed out; disconnecting " +
"(age: \(ageDescription, privacy: .public))."
self.logger.warning(message)
await self.disconnect() await self.disconnect()
return return
} }
@@ -341,7 +345,11 @@ actor MacNodeBridgeSession {
do { do {
try await self.send(BridgePing(type: "ping", id: id)) try await self.send(BridgePing(type: "ping", id: id))
} catch { } catch {
self.logger.warning("Node bridge ping send failed; disconnecting (error: \(String(describing: error), privacy: .public)).") let errorDescription = String(describing: error)
let message =
"Node bridge ping send failed; disconnecting " +
"(error: \(errorDescription, privacy: .public))."
self.logger.warning(message)
await self.disconnect() await self.disconnect()
return return
} }
@@ -356,7 +364,11 @@ actor MacNodeBridgeSession {
private func handleConnectionState(_ state: NWConnection.State) async { private func handleConnectionState(_ state: NWConnection.State) async {
switch state { switch state {
case let .failed(error): case let .failed(error):
self.logger.warning("Node bridge connection failed; disconnecting (error: \(String(describing: error), privacy: .public)).") let errorDescription = String(describing: error)
let message =
"Node bridge connection failed; disconnecting " +
"(error: \(errorDescription, privacy: .public))."
self.logger.warning(message)
await self.disconnect() await self.disconnect()
case .cancelled: case .cancelled:
self.logger.warning("Node bridge connection cancelled; disconnecting.") self.logger.warning("Node bridge connection cancelled; disconnecting.")

View File

@@ -679,6 +679,7 @@
"platforms/android", "platforms/android",
"platforms/windows", "platforms/windows",
"platforms/linux", "platforms/linux",
"platforms/hetzner",
"platforms/exe-dev" "platforms/exe-dev"
] ]
}, },

View File

@@ -1464,6 +1464,67 @@ Notes:
- Responses API enables clean reasoning/output separation; WhatsApp sees only final text. - Responses API enables clean reasoning/output separation; WhatsApp sees only final text.
- Adjust `contextWindow`/`maxTokens` if your LM Studio context length differs. - Adjust `contextWindow`/`maxTokens` if your LM Studio context length differs.
### MiniMax API (platform.minimax.io)
Use MiniMax's Anthropic-compatible API directly without LM Studio:
```json5
{
agent: {
model: { primary: "minimax/MiniMax-M2.1" },
models: {
"anthropic/claude-opus-4-5": { alias: "Opus" },
"minimax/MiniMax-M2.1": { alias: "Minimax" }
}
},
models: {
mode: "merge",
providers: {
minimax: {
baseUrl: "https://api.minimax.io/anthropic",
apiKey: "${MINIMAX_API_KEY}",
api: "anthropic-messages",
models: [
{
id: "MiniMax-M2.1",
name: "MiniMax M2.1",
reasoning: false,
input: ["text"],
// Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192
},
{
id: "MiniMax-M2.1-lightning",
name: "MiniMax M2.1 Lightning",
reasoning: false,
input: ["text"],
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192
},
{
id: "MiniMax-M2",
name: "MiniMax M2",
reasoning: true,
input: ["text"],
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000,
maxTokens: 8192
}
]
}
}
}
}
```
Notes:
- Set `MINIMAX_API_KEY` environment variable or use `clawdbot onboard --auth-choice minimax-api`
- Available models: `MiniMax-M2.1` (default), `MiniMax-M2.1-lightning` (~100 tps), `MiniMax-M2` (reasoning)
- Pricing is a placeholder; MiniMax doesn't publish public rates. Override in `models.json` for accurate cost tracking.
Notes: Notes:
- Supported APIs: `openai-completions`, `openai-responses`, `anthropic-messages`, - Supported APIs: `openai-completions`, `openai-responses`, `anthropic-messages`,
`google-generative-ai` `google-generative-ai`

View File

@@ -51,6 +51,8 @@ It writes config/workspace on the host:
- `~/.clawdbot/` - `~/.clawdbot/`
- `~/clawd` - `~/clawd`
Running on a VPS? See [Hetzner (Docker VPS)](/platforms/hetzner).
### Manual flow (compose) ### Manual flow (compose)
```bash ```bash

296
docs/platforms/hetzner.md Normal file
View File

@@ -0,0 +1,296 @@
---
summary: "Run Clawdbot Gateway on Hetzner (Docker + VPS) with durable state and baked-in binaries"
read_when:
- You want a production-grade, always-on Gateway on your own VPS
- You want full control over persistence, binaries, and restart behavior
- You are running Clawdbot in Docker on Hetzner or a similar provider
---
# Clawdbot on Hetzner (Docker, Production VPS Guide)
## Goal
Run a persistent Clawdbot Gateway on a Hetzner VPS using Docker, with durable state, baked-in binaries, and safe restart behavior.
The Gateway can be accessed via:
- SSH port forwarding from your laptop
- Direct port exposure if you manage firewalling and tokens yourself
This guide assumes Ubuntu or Debian on Hetzner.
If you are on another Linux VPS, map packages accordingly.
For the generic Docker flow, see [Docker](/install/docker).
---
## Quick path (experienced operators)
1) Provision Hetzner VPS
2) Install Docker
3) Clone Clawdbot repository
4) Create persistent host directories
5) Configure `.env` and `docker-compose.yml`
6) Bake required binaries into the image
7) `docker compose up -d`
8) Verify persistence and Gateway access
---
## What you need
- Hetzner VPS with root access
- SSH access from your laptop
- Docker and Docker Compose
- Model auth credentials
- Optional provider credentials
- WhatsApp QR
- Telegram bot token
- Gmail OAuth
---
## 1) Provision the VPS
Create an Ubuntu or Debian VPS in Hetzner.
Connect as root:
```bash
ssh root@YOUR_VPS_IP
```
This guide assumes the VPS is stateful.
Do not treat it as disposable infrastructure.
---
## 2) Install Docker (on the VPS)
```bash
apt-get update
apt-get install -y git curl ca-certificates
curl -fsSL https://get.docker.com | sh
```
Verify:
```bash
docker --version
docker compose version
```
---
## 3) Clone the Clawdbot repository
```bash
git clone https://github.com/clawdbot/clawdbot.git
cd clawdbot
```
This guide assumes you will build a custom image to guarantee binary persistence.
---
## 4) Create persistent host directories
Docker containers are ephemeral.
All long-lived state must live on the host.
```bash
mkdir -p /root/.clawdbot
mkdir -p /root/clawd
# Set ownership to the container user (uid 1000):
chown -R 1000:1000 /root/.clawdbot
chown -R 1000:1000 /root/clawd
```
---
## 5) Configure environment variables
Create `.env` in the repository root.
```bash
CLAWDBOT_IMAGE=clawdbot:latest
CLAWDBOT_GATEWAY_TOKEN=change-me-now
CLAWDBOT_GATEWAY_BIND=lan
CLAWDBOT_GATEWAY_PORT=18789
CLAWDBOT_BRIDGE_PORT=18790
CLAWDBOT_CONFIG_DIR=/root/.clawdbot
CLAWDBOT_WORKSPACE_DIR=/root/clawd
GOG_KEYRING_PASSWORD=change-me-now
XDG_CONFIG_HOME=/home/node/.clawdbot
```
**Do not commit this file.**
---
## 6) Docker Compose configuration
Create or update `docker-compose.yml`.
```yaml
services:
clawdbot-gateway:
image: ${CLAWDBOT_IMAGE}
build: .
restart: unless-stopped
env_file:
- .env
environment:
- HOME=/home/node
- NODE_ENV=production
- TERM=xterm-256color
- CLAWDBOT_GATEWAY_BIND=${CLAWDBOT_GATEWAY_BIND}
- CLAWDBOT_GATEWAY_PORT=${CLAWDBOT_GATEWAY_PORT}
- CLAWDBOT_BRIDGE_PORT=${CLAWDBOT_BRIDGE_PORT}
- CLAWDBOT_GATEWAY_TOKEN=${CLAWDBOT_GATEWAY_TOKEN}
- GOG_KEYRING_PASSWORD=${GOG_KEYRING_PASSWORD}
- XDG_CONFIG_HOME=${XDG_CONFIG_HOME}
- PATH=/home/linuxbrew/.linuxbrew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
volumes:
- ${CLAWDBOT_CONFIG_DIR}:/home/node/.clawdbot
- ${CLAWDBOT_WORKSPACE_DIR}:/home/node/clawd
ports:
- "${CLAWDBOT_GATEWAY_PORT}:${CLAWDBOT_GATEWAY_PORT}"
- "${CLAWDBOT_BRIDGE_PORT}:18790"
- "18793:18793"
command:
[
"node",
"dist/index.js",
"gateway-daemon",
"--bind",
"${CLAWDBOT_GATEWAY_BIND}",
"--port",
"${CLAWDBOT_GATEWAY_PORT}"
]
```
---
## 7) Bake required binaries into the image (critical)
Installing binaries inside a running container is a trap.
Anything installed at runtime is lost as soon as the container is recreated (for example after an image rebuild or a `docker compose up` with changed configuration).
All external binaries required by skills must be installed at image build time.
The examples below show three common binaries only:
- `gog` for Gmail access
- `goplaces` for Google Places
- `wacli` for WhatsApp
These are examples, not a complete list.
You may install as many binaries as needed using the same pattern.
If you add new skills later that depend on additional binaries, you must:
1. Update the Dockerfile
2. Rebuild the image
3. Restart the containers
**Example Dockerfile**
```dockerfile
FROM node:22-bookworm
RUN apt-get update && apt-get install -y socat && rm -rf /var/lib/apt/lists/*
# Example binary 1: Gmail CLI
RUN curl -L https://github.com/steipete/gog/releases/latest/download/gog_Linux_x86_64.tar.gz \
| tar -xz -C /usr/local/bin && chmod +x /usr/local/bin/gog
# Example binary 2: Google Places CLI
RUN curl -L https://github.com/steipete/goplaces/releases/latest/download/goplaces_Linux_x86_64.tar.gz \
| tar -xz -C /usr/local/bin && chmod +x /usr/local/bin/goplaces
# Example binary 3: WhatsApp CLI
RUN curl -L https://github.com/steipete/wacli/releases/latest/download/wacli_Linux_x86_64.tar.gz \
| tar -xz -C /usr/local/bin && chmod +x /usr/local/bin/wacli
# Add more binaries below using the same pattern
WORKDIR /app
COPY . .
RUN corepack enable
RUN pnpm install --frozen-lockfile
RUN pnpm build
RUN pnpm ui:install
RUN pnpm ui:build
CMD ["node","dist/index.js"]
```
---
## 8) Build and launch
```bash
docker compose build
docker compose up -d clawdbot-gateway
```
Verify binaries:
```bash
docker compose exec clawdbot-gateway which gog
docker compose exec clawdbot-gateway which goplaces
docker compose exec clawdbot-gateway which wacli
```
Expected output:
```
/usr/local/bin/gog
/usr/local/bin/goplaces
/usr/local/bin/wacli
```
---
## 9) Verify Gateway
```bash
docker compose logs -f clawdbot-gateway
```
Success:
```
[gateway] listening on ws://0.0.0.0:18789
```
From your laptop:
```bash
ssh -N -L 18789:127.0.0.1:18789 root@YOUR_VPS_IP
```
Open:
`http://127.0.0.1:18789/`
Paste your gateway token.
---
## What persists where (source of truth)
Clawdbot runs in Docker, but Docker is not the source of truth.
All long-lived state must survive restarts, rebuilds, and reboots.
| Component | Location | Persistence mechanism | Notes |
|---|---|---|---|
| Gateway config | `/home/node/.clawdbot/` | Host volume mount | Includes `clawdbot.json`, tokens |
| Model auth profiles | `/home/node/.clawdbot/` | Host volume mount | OAuth tokens, API keys |
| Skill configs | `/home/node/.clawdbot/skills/` | Host volume mount | Skill-level state |
| Agent workspace | `/home/node/clawd/` | Host volume mount | Code and agent artifacts |
| WhatsApp session | `/home/node/.clawdbot/` | Host volume mount | Preserves QR login |
| Gmail keyring | `/home/node/.clawdbot/` | Host volume + password | Requires `GOG_KEYRING_PASSWORD` |
| External binaries | `/usr/local/bin/` | Docker image | Must be baked at build time |
| Node runtime | Container filesystem | Docker image | Rebuilt every image build |
| OS packages | Container filesystem | Docker image | Do not install at runtime |
| Docker container | Ephemeral | Restartable | Safe to destroy |

View File

@@ -20,6 +20,11 @@ Native companion apps for Windows are also planned; the Gateway is recommended v
- Windows: [Windows](/platforms/windows) - Windows: [Windows](/platforms/windows)
- Linux: [Linux](/platforms/linux) - Linux: [Linux](/platforms/linux)
## VPS & hosting
- Hetzner (Docker): [Hetzner](/platforms/hetzner)
- exe.dev (VM + HTTPS proxy): [exe.dev](/platforms/exe-dev)
## Common links ## Common links
- Install guide: [Getting Started](/start/getting-started) - Install guide: [Getting Started](/start/getting-started)

View File

@@ -11,6 +11,7 @@ echo "Running doctor install switch E2E..."
docker run --rm -t "$IMAGE_NAME" bash -lc ' docker run --rm -t "$IMAGE_NAME" bash -lc '
set -euo pipefail set -euo pipefail
# Stub systemd/loginctl so doctor + daemon flows work in Docker.
export PATH="/tmp/clawdbot-bin:$PATH" export PATH="/tmp/clawdbot-bin:$PATH"
mkdir -p /tmp/clawdbot-bin mkdir -p /tmp/clawdbot-bin
@@ -65,6 +66,7 @@ exit 0
LOGINCTL LOGINCTL
chmod +x /tmp/clawdbot-bin/loginctl chmod +x /tmp/clawdbot-bin/loginctl
# Install the npm-global variant from the local /app source.
npm install -g --prefix /tmp/npm-prefix /app npm install -g --prefix /tmp/npm-prefix /app
npm_bin="/tmp/npm-prefix/bin/clawdbot" npm_bin="/tmp/npm-prefix/bin/clawdbot"
@@ -88,6 +90,8 @@ LOGINCTL
fi fi
} }
# Each flow: install service with one variant, run doctor from the other,
# and verify ExecStart entrypoint switches accordingly.
run_flow() { run_flow() {
local name="$1" local name="$1"
local install_cmd="$2" local install_cmd="$2"

View File

@@ -3,7 +3,7 @@ import { describe, expect, it } from "vitest";
const MINIMAX_KEY = process.env.MINIMAX_API_KEY ?? ""; const MINIMAX_KEY = process.env.MINIMAX_API_KEY ?? "";
const MINIMAX_BASE_URL = const MINIMAX_BASE_URL =
process.env.MINIMAX_BASE_URL?.trim() || "https://api.minimax.io/v1"; process.env.MINIMAX_BASE_URL?.trim() || "https://api.minimax.io/anthropic";
const MINIMAX_MODEL = process.env.MINIMAX_MODEL?.trim() || "MiniMax-M2.1"; const MINIMAX_MODEL = process.env.MINIMAX_MODEL?.trim() || "MiniMax-M2.1";
const LIVE = process.env.MINIMAX_LIVE_TEST === "1" || process.env.LIVE === "1"; const LIVE = process.env.MINIMAX_LIVE_TEST === "1" || process.env.LIVE === "1";
@@ -11,15 +11,16 @@ const describeLive = LIVE && MINIMAX_KEY ? describe : describe.skip;
describeLive("minimax live", () => { describeLive("minimax live", () => {
it("returns assistant text", async () => { it("returns assistant text", async () => {
const model: Model<"openai-completions"> = { const model: Model<"anthropic-messages"> = {
id: MINIMAX_MODEL, id: MINIMAX_MODEL,
name: `MiniMax ${MINIMAX_MODEL}`, name: `MiniMax ${MINIMAX_MODEL}`,
api: "openai-completions", api: "anthropic-messages",
provider: "minimax", provider: "minimax",
baseUrl: MINIMAX_BASE_URL, baseUrl: MINIMAX_BASE_URL,
reasoning: false, reasoning: MINIMAX_MODEL === "MiniMax-M2",
input: ["text"], input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }, // Pricing: placeholder values (per 1M tokens, multiplied by 1000 for display)
cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
contextWindow: 200000, contextWindow: 200000,
maxTokens: 8192, maxTokens: 8192,
}; };

View File

@@ -304,6 +304,19 @@ describe("sanitizeSessionMessagesImages", () => {
expect(out[0]?.role).toBe("user"); expect(out[0]?.role).toBe("user");
}); });
// An assistant message with stopReason "error" and no real content (empty
// content array, or content missing entirely) should be removed during
// sanitization, leaving only the surrounding messages.
it("drops empty assistant error messages", async () => {
  const input = [
    { role: "user", content: "hello" },
    { role: "assistant", stopReason: "error", content: [] },
    { role: "assistant", stopReason: "error" },
  ] satisfies AgentMessage[];
  const out = await sanitizeSessionMessagesImages(input, "test");
  // Both errored assistant entries are dropped; the user message survives.
  expect(out).toHaveLength(1);
  expect(out[0]?.role).toBe("user");
});
it("leaves non-assistant messages unchanged", async () => { it("leaves non-assistant messages unchanged", async () => {
const input = [ const input = [
{ role: "user", content: "hello" }, { role: "user", content: "hello" },

View File

@@ -61,6 +61,27 @@ export async function ensureSessionHeader(params: {
type ContentBlock = AgentToolResult<unknown>["content"][number]; type ContentBlock = AgentToolResult<unknown>["content"][number];
/**
 * Returns true when an assistant message carries no meaningful content.
 *
 * Rules (in order):
 * - `null`/`undefined` content counts as empty.
 * - Non-array content (e.g. a plain string) counts as non-empty.
 * - An array is empty when every entry is either a nullish/non-object value,
 *   or a text block whose text is missing or whitespace-only. Any non-text
 *   block (tool call, image, ...) makes the message non-empty.
 */
export function isEmptyAssistantMessageContent(
  message: Extract<AgentMessage, { role: "assistant" }>,
): boolean {
  const content = message.content;
  if (content == null) return true;
  if (!Array.isArray(content)) return false;
  for (const block of content) {
    // Nullish or non-object entries contribute nothing.
    if (!block || typeof block !== "object") continue;
    const candidate = block as { type?: unknown; text?: unknown };
    // Any block that is not a text block is substantive content.
    if (candidate.type !== "text") return false;
    // A text block counts only when it has non-whitespace characters.
    if (typeof candidate.text === "string" && candidate.text.trim().length > 0) {
      return false;
    }
  }
  return true;
}
/**
 * True only for assistant messages that errored (`stopReason === "error"`)
 * AND have no meaningful content; these are the entries the sanitizer drops.
 */
function isEmptyAssistantErrorMessage(
  message: Extract<AgentMessage, { role: "assistant" }>,
): boolean {
  return (
    message.stopReason === "error" && isEmptyAssistantMessageContent(message)
  );
}
export async function sanitizeSessionMessagesImages( export async function sanitizeSessionMessagesImages(
messages: AgentMessage[], messages: AgentMessage[],
label: string, label: string,
@@ -101,6 +122,9 @@ export async function sanitizeSessionMessagesImages(
if (role === "assistant") { if (role === "assistant") {
const assistantMsg = msg as Extract<AgentMessage, { role: "assistant" }>; const assistantMsg = msg as Extract<AgentMessage, { role: "assistant" }>;
if (isEmptyAssistantErrorMessage(assistantMsg)) {
continue;
}
const content = assistantMsg.content; const content = assistantMsg.content;
if (Array.isArray(content)) { if (Array.isArray(content)) {
const filteredContent = content.filter((block) => { const filteredContent = content.filter((block) => {

View File

@@ -101,6 +101,10 @@ export function buildAuthChoiceOptions(params: {
// Token flow is currently Anthropic-only; use CLI for advanced providers. // Token flow is currently Anthropic-only; use CLI for advanced providers.
options.push({ value: "minimax-cloud", label: "MiniMax M2.1 (minimax.io)" }); options.push({ value: "minimax-cloud", label: "MiniMax M2.1 (minimax.io)" });
options.push({ value: "minimax", label: "Minimax M2.1 (LM Studio)" }); options.push({ value: "minimax", label: "Minimax M2.1 (LM Studio)" });
options.push({
value: "minimax-api",
label: "MiniMax API (platform.minimax.io)",
});
if (params.includeSkip) { if (params.includeSkip) {
options.push({ value: "skip", label: "Skip for now" }); options.push({ value: "skip", label: "Skip for now" });
} }

View File

@@ -36,6 +36,8 @@ import {
} from "./google-gemini-model-default.js"; } from "./google-gemini-model-default.js";
import { import {
applyAuthProfileConfig, applyAuthProfileConfig,
applyMinimaxApiConfig,
applyMinimaxApiProviderConfig,
applyMinimaxConfig, applyMinimaxConfig,
applyMinimaxHostedConfig, applyMinimaxHostedConfig,
applyMinimaxHostedProviderConfig, applyMinimaxHostedProviderConfig,
@@ -629,6 +631,24 @@ export async function applyAuthChoice(params: {
agentModelOverride = "lmstudio/minimax-m2.1-gs32"; agentModelOverride = "lmstudio/minimax-m2.1-gs32";
await noteAgentModel("lmstudio/minimax-m2.1-gs32"); await noteAgentModel("lmstudio/minimax-m2.1-gs32");
} }
} else if (params.authChoice === "minimax-api") {
const key = await params.prompter.text({
message: "Enter MiniMax API key",
validate: (value) => (value?.trim() ? undefined : "Required"),
});
await setMinimaxApiKey(String(key).trim(), params.agentDir);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "minimax:default",
provider: "minimax",
mode: "api_key",
});
if (params.setDefaultModel) {
nextConfig = applyMinimaxApiConfig(nextConfig);
} else {
nextConfig = applyMinimaxApiProviderConfig(nextConfig);
agentModelOverride = "minimax/MiniMax-M2.1";
await noteAgentModel("minimax/MiniMax-M2.1");
}
} }
return { config: nextConfig, agentModelOverride }; return { config: nextConfig, agentModelOverride };

View File

@@ -7,6 +7,8 @@ import { afterEach, describe, expect, it } from "vitest";
import { import {
applyAuthProfileConfig, applyAuthProfileConfig,
applyMinimaxApiConfig,
applyMinimaxApiProviderConfig,
writeOAuthCredentials, writeOAuthCredentials,
} from "./onboard-auth.js"; } from "./onboard-auth.js";
@@ -105,3 +107,146 @@ describe("applyAuthProfileConfig", () => {
]); ]);
}); });
}); });
// Unit tests for applyMinimaxApiConfig: it must install the MiniMax
// Anthropic-compatible provider, set the primary model, and preserve
// unrelated config (other providers, fallbacks, model params, models.mode).
describe("applyMinimaxApiConfig", () => {
  it("adds minimax provider with correct settings", () => {
    const cfg = applyMinimaxApiConfig({});
    expect(cfg.models?.providers?.minimax).toMatchObject({
      baseUrl: "https://api.minimax.io/anthropic",
      api: "anthropic-messages",
    });
  });
  it("sets correct primary model", () => {
    const cfg = applyMinimaxApiConfig({}, "MiniMax-M2.1-lightning");
    expect(cfg.agents?.defaults?.model?.primary).toBe(
      "minimax/MiniMax-M2.1-lightning",
    );
  });
  // Only MiniMax-M2 is a reasoning model; the flag must track the model id.
  it("sets reasoning flag for MiniMax-M2 model", () => {
    const cfg = applyMinimaxApiConfig({}, "MiniMax-M2");
    expect(cfg.models?.providers?.minimax?.models[0]?.reasoning).toBe(true);
  });
  it("does not set reasoning for non-M2 models", () => {
    const cfg = applyMinimaxApiConfig({}, "MiniMax-M2.1");
    expect(cfg.models?.providers?.minimax?.models[0]?.reasoning).toBe(false);
  });
  it("preserves existing model fallbacks", () => {
    const cfg = applyMinimaxApiConfig({
      agents: {
        defaults: {
          model: { fallbacks: ["anthropic/claude-opus-4-5"] },
        },
      },
    });
    expect(cfg.agents?.defaults?.model?.fallbacks).toEqual([
      "anthropic/claude-opus-4-5",
    ]);
  });
  it("adds model alias", () => {
    const cfg = applyMinimaxApiConfig({}, "MiniMax-M2.1");
    expect(cfg.agents?.defaults?.models?.["minimax/MiniMax-M2.1"]?.alias).toBe(
      "Minimax",
    );
  });
  // The alias is overwritten ("MiniMax" -> "Minimax") but other per-model
  // settings such as params must survive the merge.
  it("preserves existing model params when adding alias", () => {
    const cfg = applyMinimaxApiConfig(
      {
        agents: {
          defaults: {
            models: {
              "minimax/MiniMax-M2.1": {
                alias: "MiniMax",
                params: { custom: "value" },
              },
            },
          },
        },
      },
      "MiniMax-M2.1",
    );
    expect(
      cfg.agents?.defaults?.models?.["minimax/MiniMax-M2.1"],
    ).toMatchObject({ alias: "Minimax", params: { custom: "value" } });
  });
  // A stale minimax provider entry (old base URL / API / models) must be
  // replaced wholesale, not merged field-by-field.
  it("replaces existing minimax provider entirely", () => {
    const cfg = applyMinimaxApiConfig({
      models: {
        providers: {
          minimax: {
            baseUrl: "https://old.example.com",
            apiKey: "old-key",
            api: "openai-completions",
            models: [
              {
                id: "old-model",
                name: "Old",
                reasoning: false,
                input: ["text"],
                cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 },
                contextWindow: 1000,
                maxTokens: 100,
              },
            ],
          },
        },
      },
    });
    expect(cfg.models?.providers?.minimax?.baseUrl).toBe(
      "https://api.minimax.io/anthropic",
    );
    expect(cfg.models?.providers?.minimax?.api).toBe("anthropic-messages");
    expect(cfg.models?.providers?.minimax?.models[0]?.id).toBe("MiniMax-M2.1");
  });
  it("preserves other providers when adding minimax", () => {
    const cfg = applyMinimaxApiConfig({
      models: {
        providers: {
          anthropic: {
            baseUrl: "https://api.anthropic.com",
            apiKey: "anthropic-key",
            api: "anthropic-messages",
            models: [
              {
                id: "claude-opus-4-5",
                name: "Claude Opus 4.5",
                reasoning: false,
                input: ["text"],
                cost: { input: 15, output: 75, cacheRead: 0, cacheWrite: 0 },
                contextWindow: 200000,
                maxTokens: 8192,
              },
            ],
          },
        },
      },
    });
    expect(cfg.models?.providers?.anthropic).toBeDefined();
    expect(cfg.models?.providers?.minimax).toBeDefined();
  });
  // An explicit models.mode ("replace") must not be reset to the "merge"
  // default when the provider is installed.
  it("preserves existing models mode", () => {
    const cfg = applyMinimaxApiConfig({
      models: { mode: "replace", providers: {} },
    });
    expect(cfg.models?.mode).toBe("replace");
  });
});
// applyMinimaxApiProviderConfig only registers the provider + alias; unlike
// applyMinimaxApiConfig it must never touch the user's chosen primary model.
describe("applyMinimaxApiProviderConfig", () => {
  it("does not overwrite existing primary model", () => {
    const cfg = applyMinimaxApiProviderConfig({
      agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } },
    });
    expect(cfg.agents?.defaults?.model?.primary).toBe(
      "anthropic/claude-opus-4-5",
    );
  });
});

View File

@@ -263,3 +263,73 @@ export function applyMinimaxHostedConfig(
}, },
}; };
} }
// MiniMax Anthropic-compatible API (platform.minimax.io/anthropic)
// MiniMax Anthropic-compatible API (platform.minimax.io/anthropic)
/**
 * Registers the MiniMax Anthropic-compatible provider in the config and adds
 * a "Minimax" alias for the chosen model, without changing the primary model.
 *
 * Behavior visible in the code below:
 * - The `minimax` provider entry is replaced wholesale (any stale entry is
 *   overwritten); other providers are preserved via the spread copy.
 * - `reasoning` is true only for the "MiniMax-M2" model id.
 * - Existing per-model settings (e.g. `params`) for `minimax/<modelId>` are
 *   kept; only the alias is (re)set to "Minimax".
 * - `models.mode` defaults to "merge" only when not already set.
 *
 * @param cfg Current config; returned value is a new object (input not mutated).
 * @param modelId MiniMax model id to register (default "MiniMax-M2.1").
 */
export function applyMinimaxApiProviderConfig(
  cfg: ClawdbotConfig,
  modelId: string = "MiniMax-M2.1",
): ClawdbotConfig {
  // Shallow-copy so we never mutate the caller's providers map.
  const providers = { ...cfg.models?.providers };
  providers.minimax = {
    baseUrl: "https://api.minimax.io/anthropic",
    apiKey: "", // Resolved via MINIMAX_API_KEY env var or auth profile
    api: "anthropic-messages",
    models: [
      {
        id: modelId,
        name: `MiniMax ${modelId}`,
        // Only the MiniMax-M2 variant is a reasoning model.
        reasoning: modelId === "MiniMax-M2",
        input: ["text"],
        // Pricing: MiniMax doesn't publish public rates. Override in models.json for accurate costs.
        cost: { input: 15, output: 60, cacheRead: 2, cacheWrite: 10 },
        contextWindow: 200000,
        maxTokens: 8192,
      },
    ],
  };
  // Merge the alias onto any existing per-model entry (spread keeps fields
  // like `params`; alias always wins with "Minimax").
  const models = { ...cfg.agents?.defaults?.models };
  models[`minimax/${modelId}`] = {
    ...models[`minimax/${modelId}`],
    alias: "Minimax",
  };
  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        models,
      },
    },
    // Preserve an explicit mode; fall back to "merge" otherwise.
    models: { mode: cfg.models?.mode ?? "merge", providers },
  };
}
/**
 * Registers the MiniMax provider (via applyMinimaxApiProviderConfig) AND makes
 * `minimax/<modelId>` the primary model.
 *
 * Note the deliberate shape of the rebuilt `model` object: it carries over
 * ONLY `fallbacks` from the previous model config (when present) and then sets
 * `primary` — any other fields on the old model object, including a previous
 * `primary`, are intentionally dropped.
 *
 * @param cfg Current config; returned value is a new object (input not mutated).
 * @param modelId MiniMax model id to promote to primary (default "MiniMax-M2.1").
 */
export function applyMinimaxApiConfig(
  cfg: ClawdbotConfig,
  modelId: string = "MiniMax-M2.1",
): ClawdbotConfig {
  const next = applyMinimaxApiProviderConfig(cfg, modelId);
  return {
    ...next,
    agents: {
      ...next.agents,
      defaults: {
        ...next.agents?.defaults,
        model: {
          // Keep existing fallbacks (and nothing else) from the old model config.
          ...(next.agents?.defaults?.model &&
          "fallbacks" in (next.agents.defaults.model as Record<string, unknown>)
            ? {
                fallbacks: (
                  next.agents.defaults.model as { fallbacks?: string[] }
                ).fallbacks,
              }
            : undefined),
          primary: `minimax/${modelId}`,
        },
      },
    },
  };
}

View File

@@ -1,14 +1,10 @@
import { spawnSync } from "node:child_process";
import path from "node:path"; import path from "node:path";
import { import {
CLAUDE_CLI_PROFILE_ID, CLAUDE_CLI_PROFILE_ID,
CODEX_CLI_PROFILE_ID, CODEX_CLI_PROFILE_ID,
ensureAuthProfileStore, ensureAuthProfileStore,
upsertAuthProfile,
} from "../agents/auth-profiles.js"; } from "../agents/auth-profiles.js";
import { resolveEnvApiKey } from "../agents/model-auth.js"; import { resolveEnvApiKey } from "../agents/model-auth.js";
import { normalizeProviderId } from "../agents/model-selection.js";
import { parseDurationMs } from "../cli/parse-duration.js";
import { import {
type ClawdbotConfig, type ClawdbotConfig,
CONFIG_PATH_CLAWDBOT, CONFIG_PATH_CLAWDBOT,
@@ -33,6 +29,7 @@ import { applyGoogleGeminiModelDefault } from "./google-gemini-model-default.js"
import { healthCommand } from "./health.js"; import { healthCommand } from "./health.js";
import { import {
applyAuthProfileConfig, applyAuthProfileConfig,
applyMinimaxApiConfig,
applyMinimaxConfig, applyMinimaxConfig,
applyMinimaxHostedConfig, applyMinimaxHostedConfig,
setAnthropicApiKey, setAnthropicApiKey,
@@ -177,6 +174,21 @@ export async function runNonInteractiveOnboarding(
mode: "api_key", mode: "api_key",
}); });
nextConfig = applyMinimaxHostedConfig(nextConfig); nextConfig = applyMinimaxHostedConfig(nextConfig);
} else if (authChoice === "minimax-api") {
const key =
opts.minimaxApiKey?.trim() || resolveEnvApiKey("minimax")?.apiKey;
if (!key) {
runtime.error("Missing --minimax-api-key (or MINIMAX_API_KEY in env).");
runtime.exit(1);
return;
}
await setMinimaxApiKey(key);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "minimax:default",
provider: "minimax",
mode: "api_key",
});
nextConfig = applyMinimaxApiConfig(nextConfig);
} else if (authChoice === "claude-cli") { } else if (authChoice === "claude-cli") {
const store = ensureAuthProfileStore(undefined, { const store = ensureAuthProfileStore(undefined, {
allowKeychainPrompt: false, allowKeychainPrompt: false,
@@ -210,82 +222,18 @@ export async function runNonInteractiveOnboarding(
nextConfig = applyOpenAICodexModelDefault(nextConfig).next; nextConfig = applyOpenAICodexModelDefault(nextConfig).next;
} else if (authChoice === "minimax") { } else if (authChoice === "minimax") {
nextConfig = applyMinimaxConfig(nextConfig); nextConfig = applyMinimaxConfig(nextConfig);
} else if (authChoice === "setup-token" || authChoice === "oauth") { } else if (
if (!process.stdin.isTTY) { authChoice === "token" ||
runtime.error("`claude setup-token` requires an interactive TTY."); authChoice === "oauth" ||
runtime.exit(1); authChoice === "openai-codex" ||
return; authChoice === "antigravity"
} ) {
const res = spawnSync("claude", ["setup-token"], { stdio: "inherit" });
if (res.error) throw res.error;
if (typeof res.status === "number" && res.status !== 0) {
runtime.error(`claude setup-token failed (exit ${res.status})`);
runtime.exit(1);
return;
}
const store = ensureAuthProfileStore(undefined, {
allowKeychainPrompt: true,
});
if (!store.profiles[CLAUDE_CLI_PROFILE_ID]) {
runtime.error(
`No Claude CLI credentials found after setup-token. Expected auth profile ${CLAUDE_CLI_PROFILE_ID}.`,
);
runtime.exit(1);
return;
}
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: CLAUDE_CLI_PROFILE_ID,
provider: "anthropic",
mode: "token",
});
} else if (authChoice === "token") {
const providerRaw = opts.tokenProvider?.trim();
const tokenRaw = opts.token?.trim();
if (!providerRaw) {
runtime.error(
"Missing --token-provider (required for --auth-choice token).",
);
runtime.exit(1);
return;
}
if (!tokenRaw) {
runtime.error("Missing --token (required for --auth-choice token).");
runtime.exit(1);
return;
}
const provider = normalizeProviderId(providerRaw);
const profileId = (
opts.tokenProfileId?.trim() || `${provider}:manual`
).trim();
const expires =
opts.tokenExpiresIn?.trim() && opts.tokenExpiresIn.trim().length > 0
? Date.now() +
parseDurationMs(String(opts.tokenExpiresIn).trim(), {
defaultUnit: "d",
})
: undefined;
upsertAuthProfile({
profileId,
credential: {
type: "token",
provider,
token: tokenRaw,
...(expires ? { expires } : {}),
},
});
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId,
provider,
mode: "token",
});
} else if (authChoice === "openai-codex" || authChoice === "antigravity") {
const label = const label =
authChoice === "antigravity" ? "Antigravity" : "OpenAI Codex OAuth"; authChoice === "antigravity"
? "Antigravity"
: authChoice === "token"
? "Token"
: "OAuth";
runtime.error(`${label} requires interactive mode.`); runtime.error(`${label} requires interactive mode.`);
runtime.exit(1); runtime.exit(1);
return; return;

View File

@@ -16,6 +16,7 @@ export type AuthChoice =
| "gemini-api-key" | "gemini-api-key"
| "minimax-cloud" | "minimax-cloud"
| "minimax" | "minimax"
| "minimax-api"
| "skip"; | "skip";
export type GatewayAuthChoice = "off" | "token" | "password"; export type GatewayAuthChoice = "off" | "token" | "password";
export type ResetScope = "config" | "config+creds+sessions" | "full"; export type ResetScope = "config" | "config+creds+sessions" | "full";

View File

@@ -2,119 +2,133 @@ import fs from "node:fs/promises";
import path from "node:path"; import path from "node:path";
import JSZip from "jszip"; import JSZip from "jszip";
import sharp from "sharp"; import sharp from "sharp";
import { afterAll, beforeAll, describe, expect, it, vi } from "vitest"; import { describe, expect, it, vi } from "vitest";
const realOs = await vi.importActual<typeof import("node:os")>("node:os"); import { withTempHome } from "../../test/helpers/temp-home.js";
const HOME = path.join(realOs.tmpdir(), "clawdbot-home-test");
vi.mock("node:os", () => ({
default: { homedir: () => HOME, tmpdir: () => realOs.tmpdir() },
homedir: () => HOME,
tmpdir: () => realOs.tmpdir(),
}));
const store = await import("./store.js");
describe("media store", () => { describe("media store", () => {
beforeAll(async () => {
await fs.rm(HOME, { recursive: true, force: true });
});
afterAll(async () => {
await fs.rm(HOME, { recursive: true, force: true });
});
it("creates and returns media directory", async () => { it("creates and returns media directory", async () => {
const dir = await store.ensureMediaDir(); await withTempHome(async () => {
expect(dir).toContain("clawdbot-home-test"); vi.resetModules();
const stat = await fs.stat(dir); const store = await import("./store.js");
expect(stat.isDirectory()).toBe(true);
const dir = await store.ensureMediaDir();
const normalized = path.normalize(dir);
expect(normalized).toContain(`${path.sep}.clawdbot${path.sep}media`);
const stat = await fs.stat(dir);
expect(stat.isDirectory()).toBe(true);
});
}); });
it("saves buffers and enforces size limit", async () => { it("saves buffers and enforces size limit", async () => {
const buf = Buffer.from("hello"); await withTempHome(async () => {
const saved = await store.saveMediaBuffer(buf, "text/plain"); vi.resetModules();
const savedStat = await fs.stat(saved.path); const store = await import("./store.js");
expect(savedStat.size).toBe(buf.length);
expect(saved.contentType).toBe("text/plain");
expect(saved.path.endsWith(".txt")).toBe(true);
const jpeg = await sharp({ const buf = Buffer.from("hello");
create: { width: 2, height: 2, channels: 3, background: "#123456" }, const saved = await store.saveMediaBuffer(buf, "text/plain");
}) const savedStat = await fs.stat(saved.path);
.jpeg({ quality: 80 }) expect(savedStat.size).toBe(buf.length);
.toBuffer(); expect(saved.contentType).toBe("text/plain");
const savedJpeg = await store.saveMediaBuffer(jpeg, "image/jpeg"); expect(saved.path.endsWith(".txt")).toBe(true);
expect(savedJpeg.contentType).toBe("image/jpeg");
expect(savedJpeg.path.endsWith(".jpg")).toBe(true);
const huge = Buffer.alloc(5 * 1024 * 1024 + 1); const jpeg = await sharp({
await expect(store.saveMediaBuffer(huge)).rejects.toThrow( create: { width: 2, height: 2, channels: 3, background: "#123456" },
"Media exceeds 5MB limit", })
); .jpeg({ quality: 80 })
.toBuffer();
const savedJpeg = await store.saveMediaBuffer(jpeg, "image/jpeg");
expect(savedJpeg.contentType).toBe("image/jpeg");
expect(savedJpeg.path.endsWith(".jpg")).toBe(true);
const huge = Buffer.alloc(5 * 1024 * 1024 + 1);
await expect(store.saveMediaBuffer(huge)).rejects.toThrow(
"Media exceeds 5MB limit",
);
});
}); });
it("copies local files and cleans old media", async () => { it("copies local files and cleans old media", async () => {
const srcFile = path.join(HOME, "tmp-src.txt"); await withTempHome(async (home) => {
await fs.mkdir(HOME, { recursive: true }); vi.resetModules();
await fs.writeFile(srcFile, "local file"); const store = await import("./store.js");
const saved = await store.saveMediaSource(srcFile);
expect(saved.size).toBe(10);
const savedStat = await fs.stat(saved.path);
expect(savedStat.isFile()).toBe(true);
expect(path.extname(saved.path)).toBe(".txt");
// make the file look old and ensure cleanOldMedia removes it const srcFile = path.join(home, "tmp-src.txt");
const past = Date.now() - 10_000; await fs.mkdir(home, { recursive: true });
await fs.utimes(saved.path, past / 1000, past / 1000); await fs.writeFile(srcFile, "local file");
await store.cleanOldMedia(1); const saved = await store.saveMediaSource(srcFile);
await expect(fs.stat(saved.path)).rejects.toThrow(); expect(saved.size).toBe(10);
const savedStat = await fs.stat(saved.path);
expect(savedStat.isFile()).toBe(true);
expect(path.extname(saved.path)).toBe(".txt");
// make the file look old and ensure cleanOldMedia removes it
const past = Date.now() - 10_000;
await fs.utimes(saved.path, past / 1000, past / 1000);
await store.cleanOldMedia(1);
await expect(fs.stat(saved.path)).rejects.toThrow();
});
}); });
it("sets correct mime for xlsx by extension", async () => { it("sets correct mime for xlsx by extension", async () => {
const xlsxPath = path.join(HOME, "sheet.xlsx"); await withTempHome(async (home) => {
await fs.mkdir(HOME, { recursive: true }); vi.resetModules();
await fs.writeFile(xlsxPath, "not really an xlsx"); const store = await import("./store.js");
const saved = await store.saveMediaSource(xlsxPath); const xlsxPath = path.join(home, "sheet.xlsx");
expect(saved.contentType).toBe( await fs.mkdir(home, { recursive: true });
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", await fs.writeFile(xlsxPath, "not really an xlsx");
);
expect(path.extname(saved.path)).toBe(".xlsx"); const saved = await store.saveMediaSource(xlsxPath);
expect(saved.contentType).toBe(
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
);
expect(path.extname(saved.path)).toBe(".xlsx");
});
}); });
it("renames media based on detected mime even when extension is wrong", async () => { it("renames media based on detected mime even when extension is wrong", async () => {
const pngBytes = await sharp({ await withTempHome(async (home) => {
create: { width: 2, height: 2, channels: 3, background: "#00ff00" }, vi.resetModules();
}) const store = await import("./store.js");
.png()
.toBuffer();
const bogusExt = path.join(HOME, "image-wrong.bin");
await fs.writeFile(bogusExt, pngBytes);
const saved = await store.saveMediaSource(bogusExt); const pngBytes = await sharp({
expect(saved.contentType).toBe("image/png"); create: { width: 2, height: 2, channels: 3, background: "#00ff00" },
expect(path.extname(saved.path)).toBe(".png"); })
.png()
.toBuffer();
const bogusExt = path.join(home, "image-wrong.bin");
await fs.writeFile(bogusExt, pngBytes);
const buf = await fs.readFile(saved.path); const saved = await store.saveMediaSource(bogusExt);
expect(buf.equals(pngBytes)).toBe(true); expect(saved.contentType).toBe("image/png");
expect(path.extname(saved.path)).toBe(".png");
const buf = await fs.readFile(saved.path);
expect(buf.equals(pngBytes)).toBe(true);
});
}); });
it("sniffs xlsx mime for zip buffers and renames extension", async () => { it("sniffs xlsx mime for zip buffers and renames extension", async () => {
const zip = new JSZip(); await withTempHome(async (home) => {
zip.file( vi.resetModules();
"[Content_Types].xml", const store = await import("./store.js");
'<Types><Override PartName="/xl/workbook.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml"/></Types>',
);
zip.file("xl/workbook.xml", "<workbook/>");
const fakeXlsx = await zip.generateAsync({ type: "nodebuffer" });
const bogusExt = path.join(HOME, "sheet.bin");
await fs.writeFile(bogusExt, fakeXlsx);
const saved = await store.saveMediaSource(bogusExt); const zip = new JSZip();
expect(saved.contentType).toBe( zip.file(
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", "[Content_Types].xml",
); '<Types><Override PartName="/xl/workbook.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml"/></Types>',
expect(path.extname(saved.path)).toBe(".xlsx"); );
zip.file("xl/workbook.xml", "<workbook/>");
const fakeXlsx = await zip.generateAsync({ type: "nodebuffer" });
const bogusExt = path.join(home, "sheet.bin");
await fs.writeFile(bogusExt, fakeXlsx);
const saved = await store.saveMediaSource(bogusExt);
expect(saved.contentType).toBe(
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
);
expect(path.extname(saved.path)).toBe(".xlsx");
});
}); });
}); });

View File

@@ -1,10 +1,10 @@
import fs from "node:fs"; import fs from "node:fs";
import fsPromises from "node:fs/promises";
import os from "node:os";
import path from "node:path"; import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { withTempHome } from "../../test/helpers/temp-home.js";
const runtime = { const runtime = {
log: vi.fn(), log: vi.fn(),
error: vi.fn(), error: vi.fn(),
@@ -12,74 +12,74 @@ const runtime = {
}; };
describe("web logout", () => { describe("web logout", () => {
const origHomedir = os.homedir;
let tmpDir: string;
beforeEach(() => { beforeEach(() => {
vi.clearAllMocks(); vi.clearAllMocks();
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "clawdbot-logout-"));
vi.spyOn(os, "homedir").mockReturnValue(tmpDir);
vi.resetModules();
vi.doMock("../utils.js", async () => {
const actual =
await vi.importActual<typeof import("../utils.js")>("../utils.js");
return {
...actual,
CONFIG_DIR: path.join(tmpDir, ".clawdbot"),
};
});
}); });
afterEach(async () => { afterEach(() => {
vi.restoreAllMocks(); vi.restoreAllMocks();
vi.doUnmock("../utils.js");
await fsPromises
.rm(tmpDir, { recursive: true, force: true })
.catch(() => {});
// restore for safety
// eslint-disable-next-line @typescript-eslint/unbound-method
(os.homedir as unknown as typeof origHomedir) = origHomedir;
}); });
it( it(
"deletes cached credentials when present", "deletes cached credentials when present",
{ timeout: 15_000 }, { timeout: 15_000 },
async () => { async () => {
const { logoutWeb, WA_WEB_AUTH_DIR } = await import("./session.js"); await withTempHome(async (home) => {
vi.resetModules();
const { logoutWeb, WA_WEB_AUTH_DIR } = await import("./session.js");
expect(WA_WEB_AUTH_DIR.startsWith(tmpDir)).toBe(true); const rel = path.relative(
fs.mkdirSync(WA_WEB_AUTH_DIR, { recursive: true }); path.resolve(home),
fs.writeFileSync(path.join(WA_WEB_AUTH_DIR, "creds.json"), "{}"); path.resolve(WA_WEB_AUTH_DIR),
const result = await logoutWeb({ runtime: runtime as never }); );
expect(rel && !rel.startsWith("..") && !path.isAbsolute(rel)).toBe(
true,
);
expect(result).toBe(true); fs.mkdirSync(WA_WEB_AUTH_DIR, { recursive: true });
expect(fs.existsSync(WA_WEB_AUTH_DIR)).toBe(false); fs.writeFileSync(path.join(WA_WEB_AUTH_DIR, "creds.json"), "{}");
const result = await logoutWeb({ runtime: runtime as never });
expect(result).toBe(true);
expect(fs.existsSync(WA_WEB_AUTH_DIR)).toBe(false);
});
}, },
); );
it("no-ops when nothing to delete", { timeout: 15_000 }, async () => { it("no-ops when nothing to delete", { timeout: 15_000 }, async () => {
const { logoutWeb } = await import("./session.js"); await withTempHome(async () => {
const result = await logoutWeb({ runtime: runtime as never }); vi.resetModules();
expect(result).toBe(false); const { logoutWeb } = await import("./session.js");
expect(runtime.log).toHaveBeenCalled(); const result = await logoutWeb({ runtime: runtime as never });
expect(result).toBe(false);
expect(runtime.log).toHaveBeenCalled();
});
}); });
it("keeps shared oauth.json when using legacy auth dir", async () => { it("keeps shared oauth.json when using legacy auth dir", async () => {
const { logoutWeb } = await import("./session.js"); await withTempHome(async () => {
const credsDir = path.join(tmpDir, ".clawdbot", "credentials"); vi.resetModules();
fs.mkdirSync(credsDir, { recursive: true }); const { logoutWeb } = await import("./session.js");
fs.writeFileSync(path.join(credsDir, "creds.json"), "{}");
fs.writeFileSync(path.join(credsDir, "oauth.json"), '{"token":true}');
fs.writeFileSync(path.join(credsDir, "session-abc.json"), "{}");
const result = await logoutWeb({ const { resolveOAuthDir } = await import("../config/paths.js");
authDir: credsDir, const credsDir = resolveOAuthDir();
isLegacyAuthDir: true,
runtime: runtime as never, fs.mkdirSync(credsDir, { recursive: true });
fs.writeFileSync(path.join(credsDir, "creds.json"), "{}");
fs.writeFileSync(path.join(credsDir, "oauth.json"), '{"token":true}');
fs.writeFileSync(path.join(credsDir, "session-abc.json"), "{}");
const result = await logoutWeb({
authDir: credsDir,
isLegacyAuthDir: true,
runtime: runtime as never,
});
expect(result).toBe(true);
expect(fs.existsSync(path.join(credsDir, "oauth.json"))).toBe(true);
expect(fs.existsSync(path.join(credsDir, "creds.json"))).toBe(false);
expect(fs.existsSync(path.join(credsDir, "session-abc.json"))).toBe(
false,
);
}); });
expect(result).toBe(true);
expect(fs.existsSync(path.join(credsDir, "oauth.json"))).toBe(true);
expect(fs.existsSync(path.join(credsDir, "creds.json"))).toBe(false);
expect(fs.existsSync(path.join(credsDir, "session-abc.json"))).toBe(false);
}); });
}); });