Thinking: gate xhigh by model

George Pickett
2026-01-07 17:17:38 -08:00
committed by Peter Steinberger
parent f50e06a1b6
commit a3641526ab
21 changed files with 503 additions and 150 deletions

View File

@@ -247,7 +247,7 @@ Send these in WhatsApp/Telegram/Slack/Microsoft Teams/WebChat (group commands ar
- `/status` — compact session status (model + tokens, cost when available)
- `/new` or `/reset` — reset the session
- `/compact` — compact session context (summary)
- `/think <level>` — off|minimal|low|medium|high
- `/think <level>` — off|minimal|low|medium|high|xhigh (GPT-5.2 + Codex models only)
- `/verbose on|off`
- `/cost on|off` — append per-response token/cost usage lines
- `/restart` — restart the gateway (owner-only in groups)

View File

@@ -105,7 +105,7 @@ Isolation options (only for `session=isolated`):
### Model and thinking overrides
Isolated jobs (`agentTurn`) can override the model and thinking level:
- `model`: Provider/model string (e.g., `anthropic/claude-sonnet-4-20250514`) or alias (e.g., `opus`)
- `thinking`: Thinking level (`off`, `minimal`, `low`, `medium`, `high`)
- `thinking`: Thinking level (`off`, `minimal`, `low`, `medium`, `high`, `xhigh`; GPT-5.2 + Codex models only)
Note: You can set `model` on main-session jobs too, but it changes the shared main
session model. We recommend model overrides only for isolated jobs to avoid

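For illustration only, a sketch of an isolated job carrying both overrides (the `session`, `model`, and `thinking` keys come from the docs above; any other job fields are omitted):

```ts
// Hypothetical agentTurn job options — only the documented override keys are shown.
const isolatedJob = {
  session: "isolated",
  model: "openai-codex/gpt-5.2-codex", // provider/model ref or an alias such as "opus"
  thinking: "xhigh",                   // valid here only because the model supports xhigh
};
```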
View File

@@ -398,7 +398,7 @@ Required:
Options:
- `--to <dest>` (for session key and optional delivery)
- `--session-id <id>`
- `--thinking <off|minimal|low|medium|high>`
- `--thinking <off|minimal|low|medium|high|xhigh>` (`xhigh`: GPT-5.2 + Codex models only)
- `--verbose <on|off>`
- `--provider <whatsapp|telegram|discord|slack|signal|imessage>`
- `--local`

View File

@@ -39,7 +39,7 @@ Add `hooks.gmail.model` config option to specify an optional cheaper model for G
| Field | Type | Default | Description |
|-------|------|---------|-------------|
| `hooks.gmail.model` | `string` | (none) | Model to use for Gmail hook processing. Accepts `provider/model` refs or aliases from `agents.defaults.models`. |
| `hooks.gmail.thinking` | `string` | (inherited) | Thinking level override (`off`, `minimal`, `low`, `medium`, `high`). If unset, inherits from `agents.defaults.thinkingDefault` or model's default. |
| `hooks.gmail.thinking` | `string` | (inherited) | Thinking level override (`off`, `minimal`, `low`, `medium`, `high`, `xhigh`; GPT-5.2 + Codex models only). If unset, inherits from `agents.defaults.thinkingDefault` or model's default. |
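For illustration, the two fields from the table might combine as follows (nesting shown as a TypeScript object; the on-disk config format is not specified here):

```ts
// Sketch of a Gmail hook override — field names from the table above, values illustrative.
const hooks = {
  gmail: {
    model: "openai/gpt-5.2", // provider/model ref or alias from agents.defaults.models
    thinking: "xhigh",       // GPT-5.2 + Codex models only; when unset, inherits thinkingDefault
  },
};
```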
### Alias Support

View File

@@ -38,7 +38,7 @@ clawdbot agent --to +15555550123 --message "Summon reply" --deliver
- `--local`: run locally (requires provider keys in your shell)
- `--deliver`: send the reply to the chosen provider (requires `--to`)
- `--provider`: `whatsapp|telegram|discord|slack|signal|imessage` (default: `whatsapp`)
- `--thinking <off|minimal|low|medium|high>`: persist thinking level
- `--thinking <off|minimal|low|medium|high|xhigh>`: persist thinking level (`xhigh`: GPT-5.2 + Codex models only)
- `--verbose <on|off>`: persist verbose level
- `--timeout <seconds>`: override agent timeout
- `--json`: output structured JSON

View File

@@ -60,7 +60,7 @@ Text + native (when enabled):
- `/activation mention|always` (groups only)
- `/send on|off|inherit` (owner-only)
- `/reset` or `/new`
- `/think <level>` (aliases: `/thinking`, `/t`)
- `/think <off|minimal|low|medium|high|xhigh>` (`xhigh`: GPT-5.2 + Codex models only; aliases: `/thinking`, `/t`)
- `/verbose on|off` (alias: `/v`)
- `/reasoning on|off|stream` (alias: `/reason`; when on, sends a separate message prefixed `Reasoning:`; `stream` = Telegram draft only)
- `/elevated on|off` (alias: `/elev`)

View File

@@ -7,11 +7,12 @@ read_when:
## What it does
- Inline directive in any inbound body: `/t <level>`, `/think:<level>`, or `/thinking <level>`.
- Levels (aliases): `off | minimal | low | medium | high`
- Levels (aliases): `off | minimal | low | medium | high | xhigh` (GPT-5.2 + Codex models only)
- minimal → “think”
- low → “think hard”
- medium → “think harder”
- high → “ultrathink” (max budget)
- xhigh → “ultrathink+” (GPT-5.2 + Codex models only)
- `highest`, `max` map to `high`.
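These aliases flow through `normalizeThinkLevel` in the `auto-reply/thinking` module (changed later in this commit); a quick sketch of the expected mapping:

```ts
import { normalizeThinkLevel } from "../auto-reply/thinking.js"; // import path as used elsewhere in this repo

normalizeThinkLevel("mid");    // "medium"
normalizeThinkLevel("max");    // "high" ("highest" behaves the same)
normalizeThinkLevel("x-high"); // "xhigh"
normalizeThinkLevel("bogus");  // undefined — callers then fall back to the resolution order below
```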
## Resolution order

View File

@@ -50,7 +50,7 @@ Use SSH tunneling or Tailscale to reach the Gateway WS.
- `/agent <id>` (or `/agents`)
- `/session <key>` (or `/sessions`)
- `/model <provider/model>` (or `/model list`, `/models`)
- `/think <off|minimal|low|medium|high>`
- `/think <off|minimal|low|medium|high|xhigh>` (`xhigh`: GPT-5.2 + Codex models only)
- `/verbose <on|off>`
- `/reasoning <on|off|stream>` (stream = Telegram draft only)
- `/cost <on|off>`

View File

@@ -1046,7 +1046,7 @@ export function resolveEmbeddedSessionLane(key: string) {
}
function mapThinkingLevel(level?: ThinkLevel): ThinkingLevel {
// pi-agent-core supports "xhigh" too; Clawdbot doesn't surface it for now.
// pi-agent-core supports "xhigh"; Clawdbot enables it for specific models.
if (!level) return "off";
return level;
}

View File

@@ -69,6 +69,91 @@ describe("directive behavior", () => {
vi.restoreAllMocks();
});
it("accepts /thinking xhigh for codex models", async () => {
await withTempHome(async (home) => {
const storePath = path.join(home, "sessions.json");
const res = await getReplyFromConfig(
{
Body: "/thinking xhigh",
From: "+1004",
To: "+2000",
},
{},
{
agent: {
model: "openai-codex/gpt-5.2-codex",
workspace: path.join(home, "clawd"),
},
whatsapp: { allowFrom: ["*"] },
session: { store: storePath },
},
);
const texts = (Array.isArray(res) ? res : [res])
.map((entry) => entry?.text)
.filter(Boolean);
expect(texts).toContain("Thinking level set to xhigh.");
});
});
it("accepts /thinking xhigh for openai gpt-5.2", async () => {
await withTempHome(async (home) => {
const storePath = path.join(home, "sessions.json");
const res = await getReplyFromConfig(
{
Body: "/thinking xhigh",
From: "+1004",
To: "+2000",
},
{},
{
agent: {
model: "openai/gpt-5.2",
workspace: path.join(home, "clawd"),
},
whatsapp: { allowFrom: ["*"] },
session: { store: storePath },
},
);
const texts = (Array.isArray(res) ? res : [res])
.map((entry) => entry?.text)
.filter(Boolean);
expect(texts).toContain("Thinking level set to xhigh.");
});
});
it("rejects /thinking xhigh for non-codex models", async () => {
await withTempHome(async (home) => {
const storePath = path.join(home, "sessions.json");
const res = await getReplyFromConfig(
{
Body: "/thinking xhigh",
From: "+1004",
To: "+2000",
},
{},
{
agent: {
model: "openai/gpt-4.1-mini",
workspace: path.join(home, "clawd"),
},
whatsapp: { allowFrom: ["*"] },
session: { store: storePath },
},
);
const texts = (Array.isArray(res) ? res : [res])
.map((entry) => entry?.text)
.filter(Boolean);
expect(texts).toContain(
'Thinking level "xhigh" is only supported for openai/gpt-5.2, openai-codex/gpt-5.2-codex or openai-codex/gpt-5.1-codex.',
);
});
});
it("keeps reserved command aliases from matching after trimming", async () => {
await withTempHome(async (home) => {
vi.mocked(runEmbeddedPiAgent).mockReset();

View File

@@ -94,8 +94,10 @@ import {
import type { MsgContext, TemplateContext } from "./templating.js";
import {
type ElevatedLevel,
formatXHighModelHint,
normalizeThinkLevel,
type ReasoningLevel,
supportsXHighThinking,
type ThinkLevel,
type VerboseLevel,
} from "./thinking.js";
@@ -1187,7 +1189,10 @@ export async function getReplyFromConfig(
if (!resolvedThinkLevel && prefixedCommandBody) {
const parts = prefixedCommandBody.split(/\s+/);
const maybeLevel = normalizeThinkLevel(parts[0]);
if (maybeLevel) {
if (
maybeLevel &&
(maybeLevel !== "xhigh" || supportsXHighThinking(provider, model))
) {
resolvedThinkLevel = maybeLevel;
prefixedCommandBody = parts.slice(1).join(" ").trim();
}
@@ -1195,6 +1200,33 @@ export async function getReplyFromConfig(
if (!resolvedThinkLevel) {
resolvedThinkLevel = await modelState.resolveDefaultThinkingLevel();
}
if (
resolvedThinkLevel === "xhigh" &&
!supportsXHighThinking(provider, model)
) {
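// An explicit /think xhigh directive on an unsupported model is rejected below;
// an inherited xhigh (e.g. restored from the session store) is silently downgraded
// to high and the stored session entry is updated.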
const explicitThink =
directives.hasThinkDirective && directives.thinkLevel !== undefined;
if (explicitThink) {
typing.cleanup();
return {
text: `Thinking level "xhigh" is only supported for ${formatXHighModelHint()}. Use /think high or switch to one of those models.`,
};
}
resolvedThinkLevel = "high";
if (
sessionEntry &&
sessionStore &&
sessionKey &&
sessionEntry.thinkingLevel === "xhigh"
) {
sessionEntry.thinkingLevel = "high";
sessionEntry.updatedAt = Date.now();
sessionStore[sessionKey] = sessionEntry;
if (storePath) {
await saveSessionStore(storePath, sessionStore);
}
}
}
const sessionIdFinal = sessionId ?? crypto.randomUUID();
const sessionFile = resolveSessionFilePath(sessionIdFinal, sessionEntry);
const queueBodyBase = transcribedText

View File

@@ -37,6 +37,11 @@ import { applyVerboseOverride } from "../../sessions/level-overrides.js";
import { shortenHomePath } from "../../utils.js";
import { extractModelDirective } from "../model.js";
import type { MsgContext } from "../templating.js";
import {
formatThinkingLevels,
formatXHighModelHint,
supportsXHighThinking,
} from "../thinking.js";
import type { ReplyPayload } from "../types.js";
import {
type ElevatedLevel,
@@ -778,6 +783,7 @@ export async function handleDirectiveOnly(params: {
allowedModelCatalog,
resetModelOverride,
provider,
model,
initialModelLabel,
formatModelSwitchEvent,
currentThinkLevel,
@@ -943,6 +949,117 @@ export async function handleDirectiveOnly(params: {
}
}
let modelSelection: ModelDirectiveSelection | undefined;
let profileOverride: string | undefined;
if (directives.hasModelDirective && directives.rawModelDirective) {
const raw = directives.rawModelDirective.trim();
if (/^[0-9]+$/.test(raw)) {
const resolvedDefault = resolveConfiguredModelRef({
cfg: params.cfg,
defaultProvider,
defaultModel,
});
const pickerCatalog: ModelPickerCatalogEntry[] = (() => {
const keys = new Set<string>();
const out: ModelPickerCatalogEntry[] = [];
const push = (entry: ModelPickerCatalogEntry) => {
const provider = normalizeProviderId(entry.provider);
const id = String(entry.id ?? "").trim();
if (!provider || !id) return;
const key = modelKey(provider, id);
if (keys.has(key)) return;
keys.add(key);
out.push({ provider, id, name: entry.name });
};
for (const entry of allowedModelCatalog) push(entry);
for (const rawKey of Object.keys(
params.cfg.agents?.defaults?.models ?? {},
)) {
const resolved = resolveModelRefFromString({
raw: String(rawKey),
defaultProvider,
aliasIndex,
});
if (!resolved) continue;
push({
provider: resolved.ref.provider,
id: resolved.ref.model,
name: resolved.ref.model,
});
}
if (resolvedDefault.model) {
push({
provider: resolvedDefault.provider,
id: resolvedDefault.model,
name: resolvedDefault.model,
});
}
return out;
})();
const items = buildModelPickerItems(pickerCatalog);
const index = Number.parseInt(raw, 10) - 1;
const item = Number.isFinite(index) ? items[index] : undefined;
if (!item) {
return {
text: `Invalid model selection "${raw}". Use /model to list.`,
};
}
const picked = pickProviderForModel({
item,
preferredProvider: params.provider,
});
if (!picked) {
return {
text: `Invalid model selection "${raw}". Use /model to list.`,
};
}
const key = `${picked.provider}/${picked.model}`;
const aliases = aliasIndex.byKey.get(key);
const alias = aliases && aliases.length > 0 ? aliases[0] : undefined;
modelSelection = {
provider: picked.provider,
model: picked.model,
isDefault:
picked.provider === defaultProvider && picked.model === defaultModel,
...(alias ? { alias } : {}),
};
} else {
const resolved = resolveModelDirectiveSelection({
raw,
defaultProvider,
defaultModel,
aliasIndex,
allowedModelKeys,
});
if (resolved.error) {
return { text: resolved.error };
}
modelSelection = resolved.selection;
}
if (modelSelection && directives.rawModelProfile) {
const profileResolved = resolveProfileOverride({
rawProfile: directives.rawModelProfile,
provider: modelSelection.provider,
cfg: params.cfg,
agentDir,
});
if (profileResolved.error) {
return { text: profileResolved.error };
}
profileOverride = profileResolved.profileId;
}
}
if (directives.rawModelProfile && !modelSelection) {
return { text: "Auth profile override requires a model selection." };
}
const resolvedProvider = modelSelection?.provider ?? provider;
const resolvedModel = modelSelection?.model ?? model;
if (directives.hasThinkDirective && !directives.thinkLevel) {
// If no argument was provided, show the current level
if (!directives.rawThinkLevel) {
@@ -950,12 +1067,12 @@ export async function handleDirectiveOnly(params: {
return {
text: withOptions(
`Current thinking level: ${level}.`,
"off, minimal, low, medium, high",
formatThinkingLevels(resolvedProvider, resolvedModel),
),
};
}
return {
text: `Unrecognized thinking level "${directives.rawThinkLevel}". Valid levels: off, minimal, low, medium, high.`,
text: `Unrecognized thinking level "${directives.rawThinkLevel}". Valid levels: ${formatThinkingLevels(resolvedProvider, resolvedModel)}.`,
};
}
if (directives.hasVerboseDirective && !directives.verboseLevel) {
@@ -1098,126 +1215,25 @@ export async function handleDirectiveOnly(params: {
return { text: errors.join(" ") };
}
let modelSelection: ModelDirectiveSelection | undefined;
let profileOverride: string | undefined;
if (directives.hasModelDirective && directives.rawModelDirective) {
const raw = directives.rawModelDirective.trim();
if (/^[0-9]+$/.test(raw)) {
const resolvedDefault = resolveConfiguredModelRef({
cfg: params.cfg,
defaultProvider,
defaultModel,
});
const pickerCatalog: ModelPickerCatalogEntry[] = (() => {
const keys = new Set<string>();
const out: ModelPickerCatalogEntry[] = [];
const push = (entry: ModelPickerCatalogEntry) => {
const provider = normalizeProviderId(entry.provider);
const id = String(entry.id ?? "").trim();
if (!provider || !id) return;
const key = modelKey(provider, id);
if (keys.has(key)) return;
keys.add(key);
out.push({ provider, id, name: entry.name });
};
for (const entry of allowedModelCatalog) push(entry);
for (const rawKey of Object.keys(
params.cfg.agents?.defaults?.models ?? {},
)) {
const resolved = resolveModelRefFromString({
raw: String(rawKey),
defaultProvider,
aliasIndex,
});
if (!resolved) continue;
push({
provider: resolved.ref.provider,
id: resolved.ref.model,
name: resolved.ref.model,
});
}
if (resolvedDefault.model) {
push({
provider: resolvedDefault.provider,
id: resolvedDefault.model,
name: resolvedDefault.model,
});
}
return out;
})();
const items = buildModelPickerItems(pickerCatalog);
const index = Number.parseInt(raw, 10) - 1;
const item = Number.isFinite(index) ? items[index] : undefined;
if (!item) {
return {
text: `Invalid model selection "${raw}". Use /model to list.`,
};
}
const picked = pickProviderForModel({
item,
preferredProvider: params.provider,
});
if (!picked) {
return {
text: `Invalid model selection "${raw}". Use /model to list.`,
};
}
const key = `${picked.provider}/${picked.model}`;
const aliases = aliasIndex.byKey.get(key);
const alias = aliases && aliases.length > 0 ? aliases[0] : undefined;
modelSelection = {
provider: picked.provider,
model: picked.model,
isDefault:
picked.provider === defaultProvider && picked.model === defaultModel,
...(alias ? { alias } : {}),
};
} else {
const resolved = resolveModelDirectiveSelection({
raw,
defaultProvider,
defaultModel,
aliasIndex,
allowedModelKeys,
});
if (resolved.error) {
return { text: resolved.error };
}
modelSelection = resolved.selection;
}
if (modelSelection) {
if (directives.rawModelProfile) {
const profileResolved = resolveProfileOverride({
rawProfile: directives.rawModelProfile,
provider: modelSelection.provider,
cfg: params.cfg,
agentDir,
});
if (profileResolved.error) {
return { text: profileResolved.error };
}
profileOverride = profileResolved.profileId;
}
const nextLabel = `${modelSelection.provider}/${modelSelection.model}`;
if (nextLabel !== initialModelLabel) {
enqueueSystemEvent(
formatModelSwitchEvent(nextLabel, modelSelection.alias),
{
sessionKey,
contextKey: `model:${nextLabel}`,
},
);
}
}
}
if (directives.rawModelProfile && !modelSelection) {
return { text: "Auth profile override requires a model selection." };
if (
directives.hasThinkDirective &&
directives.thinkLevel === "xhigh" &&
!supportsXHighThinking(resolvedProvider, resolvedModel)
) {
return {
text: `Thinking level "xhigh" is only supported for ${formatXHighModelHint()}.`,
};
}
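// Carry the session's thinking level forward; if an inherited xhigh no longer fits
// the resolved (possibly just-switched) model, it is downgraded to high further down.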
const nextThinkLevel = directives.hasThinkDirective
? directives.thinkLevel
: (sessionEntry?.thinkingLevel as ThinkLevel | undefined) ??
currentThinkLevel;
const shouldDowngradeXHigh =
!directives.hasThinkDirective &&
nextThinkLevel === "xhigh" &&
!supportsXHighThinking(resolvedProvider, resolvedModel);
if (sessionEntry && sessionStore && sessionKey) {
const prevElevatedLevel =
currentElevatedLevel ??
@@ -1239,6 +1255,9 @@ export async function handleDirectiveOnly(params: {
if (directives.thinkLevel === "off") delete sessionEntry.thinkingLevel;
else sessionEntry.thinkingLevel = directives.thinkLevel;
}
if (shouldDowngradeXHigh) {
sessionEntry.thinkingLevel = "high";
}
if (directives.hasVerboseDirective && directives.verboseLevel) {
applyVerboseOverride(sessionEntry, directives.verboseLevel);
}
@@ -1295,6 +1314,18 @@ export async function handleDirectiveOnly(params: {
if (storePath) {
await saveSessionStore(storePath, sessionStore);
}
if (modelSelection) {
const nextLabel = `${modelSelection.provider}/${modelSelection.model}`;
if (nextLabel !== initialModelLabel) {
enqueueSystemEvent(
formatModelSwitchEvent(nextLabel, modelSelection.alias),
{
sessionKey,
contextKey: `model:${nextLabel}`,
},
);
}
}
if (elevatedChanged) {
const nextElevated = (sessionEntry.elevatedLevel ??
"off") as ElevatedLevel;
@@ -1345,6 +1376,11 @@ export async function handleDirectiveOnly(params: {
);
if (shouldHintDirectRuntime) parts.push(formatElevatedRuntimeHint());
}
if (shouldDowngradeXHigh) {
parts.push(
`Thinking level set to high (xhigh not supported for ${resolvedProvider}/${resolvedModel}).`,
);
}
if (modelSelection) {
const label = `${modelSelection.provider}/${modelSelection.model}`;
const labelWithAlias = modelSelection.alias

View File

@@ -1,10 +1,34 @@
import { describe, expect, it } from "vitest";
import { normalizeReasoningLevel, normalizeThinkLevel } from "./thinking.js";
import {
listThinkingLevels,
normalizeReasoningLevel,
normalizeThinkLevel,
} from "./thinking.js";
describe("normalizeThinkLevel", () => {
it("accepts mid as medium", () => {
expect(normalizeThinkLevel("mid")).toBe("medium");
});
it("accepts xhigh", () => {
expect(normalizeThinkLevel("xhigh")).toBe("xhigh");
});
});
describe("listThinkingLevels", () => {
it("includes xhigh for codex models", () => {
expect(listThinkingLevels(undefined, "gpt-5.2-codex")).toContain("xhigh");
});
it("includes xhigh for openai gpt-5.2", () => {
expect(listThinkingLevels("openai", "gpt-5.2")).toContain("xhigh");
});
it("excludes xhigh for non-codex models", () => {
expect(listThinkingLevels(undefined, "gpt-4.1-mini")).not.toContain(
"xhigh",
);
});
});
describe("normalizeReasoningLevel", () => {

View File

@@ -1,9 +1,30 @@
export type ThinkLevel = "off" | "minimal" | "low" | "medium" | "high";
export type ThinkLevel =
| "off"
| "minimal"
| "low"
| "medium"
| "high"
| "xhigh";
export type VerboseLevel = "off" | "on";
export type ElevatedLevel = "off" | "on";
export type ReasoningLevel = "off" | "on" | "stream";
export type UsageDisplayLevel = "off" | "on";
export const XHIGH_MODEL_REFS = [
"openai/gpt-5.2",
"openai-codex/gpt-5.2-codex",
"openai-codex/gpt-5.1-codex",
] as const;
const XHIGH_MODEL_SET = new Set(
XHIGH_MODEL_REFS.map((entry) => entry.toLowerCase()),
);
const XHIGH_MODEL_IDS = new Set(
XHIGH_MODEL_REFS.map((entry) => entry.split("/")[1]?.toLowerCase()).filter(
(entry): entry is string => Boolean(entry),
),
);
// Normalize user-provided thinking level strings to the canonical enum.
export function normalizeThinkLevel(
raw?: string | null,
@@ -32,10 +53,49 @@ export function normalizeThinkLevel(
].includes(key)
)
return "high";
if (["xhigh", "x-high", "x_high"].includes(key)) return "xhigh";
if (["think"].includes(key)) return "minimal";
return undefined;
}
export function supportsXHighThinking(
provider?: string | null,
model?: string | null,
): boolean {
const modelKey = model?.trim().toLowerCase();
if (!modelKey) return false;
const providerKey = provider?.trim().toLowerCase();
if (providerKey) {
return XHIGH_MODEL_SET.has(`${providerKey}/${modelKey}`);
}
return XHIGH_MODEL_IDS.has(modelKey);
}
export function listThinkingLevels(
provider?: string | null,
model?: string | null,
): ThinkLevel[] {
const levels: ThinkLevel[] = ["off", "minimal", "low", "medium", "high"];
if (supportsXHighThinking(provider, model)) levels.push("xhigh");
return levels;
}
export function formatThinkingLevels(
provider?: string | null,
model?: string | null,
separator = ", ",
): string {
return listThinkingLevels(provider, model).join(separator);
}
export function formatXHighModelHint(): string {
if (XHIGH_MODEL_REFS.length === 1) return XHIGH_MODEL_REFS[0];
if (XHIGH_MODEL_REFS.length === 2) {
return `${XHIGH_MODEL_REFS[0]} or ${XHIGH_MODEL_REFS[1]}`;
}
return `${XHIGH_MODEL_REFS.slice(0, -1).join(", ")} or ${XHIGH_MODEL_REFS[XHIGH_MODEL_REFS.length - 1]}`;
}
// Normalize verbose flags used to toggle agent verbosity.
export function normalizeVerboseLevel(
raw?: string | null,

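Taken together, the helpers above should behave as in this sketch (not part of the diff; return values follow the code and tests in this commit):

```ts
import {
  formatThinkingLevels,
  formatXHighModelHint,
  supportsXHighThinking,
} from "../auto-reply/thinking.js";

supportsXHighThinking("openai", "gpt-5.2");        // true  — exact provider/model match
supportsXHighThinking(undefined, "gpt-5.2-codex"); // true  — no provider, model id alone matches
supportsXHighThinking("openai", "gpt-4.1-mini");   // false — so xhigh is filtered from the levels
formatThinkingLevels("openai", "gpt-4.1-mini");    // "off, minimal, low, medium, high"
formatXHighModelHint(); // "openai/gpt-5.2, openai-codex/gpt-5.2-codex or openai-codex/gpt-5.1-codex"
```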
View File

@@ -28,8 +28,11 @@ import { hasNonzeroUsage } from "../agents/usage.js";
import { ensureAgentWorkspace } from "../agents/workspace.js";
import type { MsgContext } from "../auto-reply/templating.js";
import {
formatThinkingLevels,
formatXHighModelHint,
normalizeThinkLevel,
normalizeVerboseLevel,
supportsXHighThinking,
type ThinkLevel,
type VerboseLevel,
} from "../auto-reply/thinking.js";
@@ -214,17 +217,26 @@ export async function agentCommand(
ensureBootstrapFiles: !agentCfg?.skipBootstrap,
});
const workspaceDir = workspace.dir;
const configuredModel = resolveConfiguredModelRef({
cfg,
defaultProvider: DEFAULT_PROVIDER,
defaultModel: DEFAULT_MODEL,
});
const thinkingLevelsHint = formatThinkingLevels(
configuredModel.provider,
configuredModel.model,
);
const thinkOverride = normalizeThinkLevel(opts.thinking);
const thinkOnce = normalizeThinkLevel(opts.thinkingOnce);
if (opts.thinking && !thinkOverride) {
throw new Error(
"Invalid thinking level. Use one of: off, minimal, low, medium, high.",
`Invalid thinking level. Use one of: ${thinkingLevelsHint}.`,
);
}
if (opts.thinkingOnce && !thinkOnce) {
throw new Error(
"Invalid one-shot thinking level. Use one of: off, minimal, low, medium, high.",
`Invalid one-shot thinking level. Use one of: ${thinkingLevelsHint}.`,
);
}
@@ -423,6 +435,29 @@ export async function agentCommand(
catalog: catalogForThinking,
});
}
if (
resolvedThinkLevel === "xhigh" &&
!supportsXHighThinking(provider, model)
) {
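// Mirrors the chat path: an explicit thinking override of xhigh fails fast, while an
// inherited session-level xhigh is degraded to high and the stored entry rewritten.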
const explicitThink = Boolean(thinkOnce || thinkOverride);
if (explicitThink) {
throw new Error(
`Thinking level "xhigh" is only supported for ${formatXHighModelHint()}.`,
);
}
resolvedThinkLevel = "high";
if (
sessionEntry &&
sessionStore &&
sessionKey &&
sessionEntry.thinkingLevel === "xhigh"
) {
sessionEntry.thinkingLevel = "high";
sessionEntry.updatedAt = Date.now();
sessionStore[sessionKey] = sessionEntry;
await saveSessionStore(storePath, sessionStore);
}
}
const sessionFile = resolveSessionFilePath(sessionId, sessionEntry, {
agentId: sessionAgentId,
});

View File

@@ -1620,7 +1620,7 @@ export type AgentDefaultsConfig = {
/** Vector memory search configuration (per-agent overrides supported). */
memorySearch?: MemorySearchConfig;
/** Default thinking level when no /think directive is present. */
thinkingDefault?: "off" | "minimal" | "low" | "medium" | "high";
thinkingDefault?: "off" | "minimal" | "low" | "medium" | "high" | "xhigh";
/** Default verbose level when no /verbose directive is present. */
verboseDefault?: "off" | "on";
/** Default elevated level when no /elevated directive is present. */

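For context, a hedged sketch of where the widened default lands in config (the `agents.defaults` nesting is inferred from the Gmail hook docs; only `thinkingDefault` comes from the type above):

```ts
// Assumed shape — `agents.defaults.thinkingDefault` is referenced elsewhere in this commit's docs.
const agents = {
  defaults: {
    thinkingDefault: "xhigh", // accepted by the schema, but downgraded or rejected at runtime on non-GPT-5.2/Codex models
  },
};
```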
View File

@@ -1242,6 +1242,7 @@ const AgentDefaultsSchema = z
z.literal("low"),
z.literal("medium"),
z.literal("high"),
z.literal("xhigh"),
])
.optional(),
verboseDefault: z.union([z.literal("off"), z.literal("on")]).optional(),

View File

@@ -31,7 +31,11 @@ import {
DEFAULT_HEARTBEAT_ACK_MAX_CHARS,
stripHeartbeatToken,
} from "../auto-reply/heartbeat.js";
import { normalizeThinkLevel } from "../auto-reply/thinking.js";
import {
formatXHighModelHint,
normalizeThinkLevel,
supportsXHighThinking,
} from "../auto-reply/thinking.js";
import type { CliDeps } from "../cli/deps.js";
import type { ClawdbotConfig } from "../config/config.js";
import {
@@ -366,6 +370,11 @@ export async function runCronIsolatedAgentTurn(params: {
catalog: await loadCatalog(),
});
}
if (thinkLevel === "xhigh" && !supportsXHighThinking(provider, model)) {
throw new Error(
`Thinking level "xhigh" is only supported for ${formatXHighModelHint()}.`,
);
}
const timeoutMs = resolveAgentTimeoutMs({
cfg: cfgWithAgentDefaults,

View File

@@ -8,10 +8,13 @@ import {
} from "../agents/model-selection.js";
import { normalizeGroupActivation } from "../auto-reply/group-activation.js";
import {
formatThinkingLevels,
formatXHighModelHint,
normalizeElevatedLevel,
normalizeReasoningLevel,
normalizeThinkLevel,
normalizeUsageDisplay,
supportsXHighThinking,
} from "../auto-reply/thinking.js";
import type { ClawdbotConfig } from "../config/config.js";
import type { SessionEntry } from "../config/sessions.js";
@@ -95,8 +98,17 @@ export async function applySessionsPatchToStore(params: {
} else if (raw !== undefined) {
const normalized = normalizeThinkLevel(String(raw));
if (!normalized) {
const resolvedDefault = resolveConfiguredModelRef({
cfg,
defaultProvider: DEFAULT_PROVIDER,
defaultModel: DEFAULT_MODEL,
});
const hintProvider =
existing?.providerOverride?.trim() || resolvedDefault.provider;
const hintModel =
existing?.modelOverride?.trim() || resolvedDefault.model;
return invalid(
"invalid thinkingLevel (use off|minimal|low|medium|high)",
`invalid thinkingLevel (use ${formatThinkingLevels(hintProvider, hintModel, "|")})`,
);
}
if (normalized === "off") delete next.thinkingLevel;
@@ -196,6 +208,24 @@ export async function applySessionsPatchToStore(params: {
}
}
if (next.thinkingLevel === "xhigh") {
const resolvedDefault = resolveConfiguredModelRef({
cfg,
defaultProvider: DEFAULT_PROVIDER,
defaultModel: DEFAULT_MODEL,
});
const effectiveProvider = next.providerOverride ?? resolvedDefault.provider;
const effectiveModel = next.modelOverride ?? resolvedDefault.model;
if (!supportsXHighThinking(effectiveProvider, effectiveModel)) {
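// An explicit `thinkingLevel: "xhigh"` in the patch is rejected for unsupported models;
// an xhigh already stored on the entry (e.g. when only the model was patched) quietly
// falls back to high.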
if ("thinkingLevel" in patch) {
return invalid(
`thinkingLevel "xhigh" is only supported for ${formatXHighModelHint()}`,
);
}
next.thinkingLevel = "high";
}
}
if ("sendPolicy" in patch) {
const raw = patch.sendPolicy;
if (raw === null) {

View File

@@ -1,6 +1,9 @@
import type { SlashCommand } from "@mariozechner/pi-tui";
import {
formatThinkingLevels,
listThinkingLevels,
} from "../auto-reply/thinking.js";
const THINK_LEVELS = ["off", "minimal", "low", "medium", "high"];
const VERBOSE_LEVELS = ["on", "off"];
const REASONING_LEVELS = ["on", "off"];
const ELEVATED_LEVELS = ["on", "off"];
@@ -12,6 +15,11 @@ export type ParsedCommand = {
args: string;
};
export type SlashCommandOptions = {
provider?: string;
model?: string;
};
const COMMAND_ALIASES: Record<string, string> = {
elev: "elevated",
};
@@ -27,7 +35,10 @@ export function parseCommand(input: string): ParsedCommand {
};
}
export function getSlashCommands(): SlashCommand[] {
export function getSlashCommands(
options: SlashCommandOptions = {},
): SlashCommand[] {
const thinkLevels = listThinkingLevels(options.provider, options.model);
return [
{ name: "help", description: "Show slash command help" },
{ name: "status", description: "Show gateway status summary" },
@@ -44,9 +55,9 @@ export function getSlashCommands(): SlashCommand[] {
name: "think",
description: "Set thinking level",
getArgumentCompletions: (prefix) =>
THINK_LEVELS.filter((v) => v.startsWith(prefix.toLowerCase())).map(
(value) => ({ value, label: value }),
),
thinkLevels
.filter((v) => v.startsWith(prefix.toLowerCase()))
.map((value) => ({ value, label: value })),
},
{
name: "verbose",
@@ -105,7 +116,12 @@ export function getSlashCommands(): SlashCommand[] {
];
}
export function helpText(): string {
export function helpText(options: SlashCommandOptions = {}): string {
const thinkLevels = formatThinkingLevels(
options.provider,
options.model,
"|",
);
return [
"Slash commands:",
"/help",
@@ -113,7 +129,7 @@ export function helpText(): string {
"/agent <id> (or /agents)",
"/session <key> (or /sessions)",
"/model <provider/model> (or /models)",
"/think <off|minimal|low|medium|high>",
`/think <${thinkLevels}>`,
"/verbose <on|off>",
"/reasoning <on|off>",
"/cost <on|off>",

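A sketch of how callers such as the TUI (next file) are expected to use the new options parameter (module path assumed):

```ts
import { getSlashCommands, helpText } from "./slash-commands.js"; // filename assumed

// /think completions include xhigh only when the active model supports it.
const commands = getSlashCommands({ provider: "openai", model: "gpt-5.2" });

console.log(helpText({ provider: "openai", model: "gpt-4.1-mini" }));
// → the "/think <off|minimal|low|medium|high>" line omits xhigh for this model
```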
View File

@@ -7,7 +7,10 @@ import {
TUI,
} from "@mariozechner/pi-tui";
import { resolveDefaultAgentId } from "../agents/agent-scope.js";
import { normalizeUsageDisplay } from "../auto-reply/thinking.js";
import {
formatThinkingLevels,
normalizeUsageDisplay,
} from "../auto-reply/thinking.js";
import { loadConfig } from "../config/config.js";
import { formatAge } from "../infra/provider-summary.js";
import {
@@ -239,6 +242,18 @@ export async function runTui(opts: TuiOptions) {
root.addChild(footer);
root.addChild(editor);
const updateAutocompleteProvider = () => {
editor.setAutocompleteProvider(
new CombinedAutocompleteProvider(
getSlashCommands({
provider: sessionInfo.modelProvider,
model: sessionInfo.model,
}),
process.cwd(),
),
);
};
const tui = new TUI(new ProcessTerminal());
tui.addChild(root);
tui.setFocus(editor);
@@ -524,6 +539,7 @@ export async function runTui(opts: TuiOptions) {
} catch (err) {
chatLog.addSystem(`sessions list failed: ${String(err)}`);
}
updateAutocompleteProvider();
updateFooter();
tui.requestRender();
};
@@ -861,7 +877,12 @@ export async function runTui(opts: TuiOptions) {
if (!name) return;
switch (name) {
case "help":
chatLog.addSystem(helpText());
chatLog.addSystem(
helpText({
provider: sessionInfo.modelProvider,
model: sessionInfo.model,
}),
);
break;
case "status":
try {
@@ -921,7 +942,12 @@ export async function runTui(opts: TuiOptions) {
break;
case "think":
if (!args) {
chatLog.addSystem("usage: /think <off|minimal|low|medium|high>");
const levels = formatThinkingLevels(
sessionInfo.modelProvider,
sessionInfo.model,
"|",
);
chatLog.addSystem(`usage: /think <${levels}>`);
break;
}
try {
@@ -1071,9 +1097,7 @@ export async function runTui(opts: TuiOptions) {
tui.requestRender();
};
editor.setAutocompleteProvider(
new CombinedAutocompleteProvider(getSlashCommands(), process.cwd()),
);
updateAutocompleteProvider();
editor.onSubmit = (text) => {
const value = text.trim();
editor.setText("");