fix: default low thinking for reasoning models
@@ -380,6 +380,7 @@ export const ModelChoiceSchema = Type.Object(
     name: NonEmptyString,
     provider: NonEmptyString,
     contextWindow: Type.Optional(Type.Integer({ minimum: 1 })),
+    reasoning: Type.Optional(Type.Boolean()),
   },
   { additionalProperties: false },
 );

@@ -82,6 +82,7 @@ const piSdkMock = vi.hoisted(() => ({
     name?: string;
     provider: string;
     contextWindow?: number;
+    reasoning?: boolean;
   }>,
 }));
 const cronIsolatedRun = vi.hoisted(() =>
@@ -2807,6 +2808,57 @@ describe("gateway server", () => {
     await server.close();
   });

+  test("chat.history defaults thinking to low for reasoning-capable models", async () => {
+    piSdkMock.enabled = true;
+    piSdkMock.models = [
+      {
+        id: "claude-opus-4-5",
+        name: "Opus 4.5",
+        provider: "anthropic",
+        reasoning: true,
+      },
+    ];
+    const dir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdis-gw-"));
+    testSessionStorePath = path.join(dir, "sessions.json");
+    await fs.writeFile(
+      testSessionStorePath,
+      JSON.stringify(
+        {
+          main: {
+            sessionId: "sess-main",
+            updatedAt: Date.now(),
+          },
+        },
+        null,
+        2,
+      ),
+      "utf-8",
+    );
+    await fs.writeFile(
+      path.join(dir, "sess-main.jsonl"),
+      JSON.stringify({
+        message: {
+          role: "user",
+          content: [{ type: "text", text: "hello" }],
+          timestamp: Date.now(),
+        },
+      }),
+      "utf-8",
+    );
+
+    const { server, ws } = await startServerWithClient();
+    await connectOk(ws);
+
+    const res = await rpcReq<{ thinkingLevel?: string }>(ws, "chat.history", {
+      sessionKey: "main",
+    });
+    expect(res.ok).toBe(true);
+    expect(res.payload?.thinkingLevel).toBe("low");
+
+    ws.close();
+    await server.close();
+  });
+
   test("chat.history caps payload bytes", { timeout: 15_000 }, async () => {
     const dir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdis-gw-"));
     testSessionStorePath = path.join(dir, "sessions.json");

@@ -26,6 +26,7 @@ import {
   modelKey,
   resolveConfiguredModelRef,
   resolveModelRefFromString,
+  resolveThinkingDefault,
 } from "../agents/model-selection.js";
 import { installSkill } from "../agents/skills-install.js";
 import { buildWorkspaceSkillStatus } from "../agents/skills-status.js";
@@ -925,6 +926,25 @@ function getSessionDefaults(cfg: ClawdisConfig): GatewaySessionsDefaults {
   };
 }

+function resolveSessionModelRef(
+  cfg: ClawdisConfig,
+  entry?: SessionEntry,
+): { provider: string; model: string } {
+  const resolved = resolveConfiguredModelRef({
+    cfg,
+    defaultProvider: DEFAULT_PROVIDER,
+    defaultModel: DEFAULT_MODEL,
+  });
+  let provider = resolved.provider;
+  let model = resolved.model;
+  const storedModelOverride = entry?.modelOverride?.trim();
+  if (storedModelOverride) {
+    provider = entry?.providerOverride?.trim() || provider;
+    model = storedModelOverride;
+  }
+  return { provider, model };
+}
+
 function listSessionsFromStore(params: {
   cfg: ClawdisConfig;
   storePath: string;
@@ -3283,7 +3303,7 @@ export async function startGatewayServer(
         sessionKey: string;
         limit?: number;
       };
-      const { storePath, entry } = loadSessionEntry(sessionKey);
+      const { cfg, storePath, entry } = loadSessionEntry(sessionKey);
       const sessionId = entry?.sessionId;
       const rawMessages =
         sessionId && storePath
@@ -3296,10 +3316,22 @@ export async function startGatewayServer(
        sliced,
        MAX_CHAT_HISTORY_MESSAGES_BYTES,
      ).items;
-      const thinkingLevel =
-        entry?.thinkingLevel ??
-        loadConfig().agent?.thinkingDefault ??
-        "off";
+      let thinkingLevel = entry?.thinkingLevel;
+      if (!thinkingLevel) {
+        const configured = cfg.agent?.thinkingDefault;
+        if (configured) {
+          thinkingLevel = configured;
+        } else {
+          const { provider, model } = resolveSessionModelRef(cfg, entry);
+          const catalog = await loadGatewayModelCatalog();
+          thinkingLevel = resolveThinkingDefault({
+            cfg,
+            provider,
+            model,
+            catalog,
+          });
+        }
+      }
       return {
         ok: true,
         payloadJSON: JSON.stringify({
@@ -4668,7 +4700,7 @@ export async function startGatewayServer(
         sessionKey: string;
         limit?: number;
       };
-      const { storePath, entry } = loadSessionEntry(sessionKey);
+      const { cfg, storePath, entry } = loadSessionEntry(sessionKey);
       const sessionId = entry?.sessionId;
       const rawMessages =
         sessionId && storePath
@@ -4687,10 +4719,22 @@ export async function startGatewayServer(
        sliced,
        MAX_CHAT_HISTORY_MESSAGES_BYTES,
      ).items;
-      const thinkingLevel =
-        entry?.thinkingLevel ??
-        loadConfig().agent?.thinkingDefault ??
-        "off";
+      let thinkingLevel = entry?.thinkingLevel;
+      if (!thinkingLevel) {
+        const configured = cfg.agent?.thinkingDefault;
+        if (configured) {
+          thinkingLevel = configured;
+        } else {
+          const { provider, model } = resolveSessionModelRef(cfg, entry);
+          const catalog = await loadGatewayModelCatalog();
+          thinkingLevel = resolveThinkingDefault({
+            cfg,
+            provider,
+            model,
+            catalog,
+          });
+        }
+      }
       respond(true, {
         sessionKey,
         sessionId,
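
Note: resolveThinkingDefault is imported from ../agents/model-selection.js and called in the hunks above, but its implementation is not part of this diff. The sketch below only illustrates the behaviour the gateway code relies on, assuming the loaded catalog carries entries shaped like the test mock (id, provider, reasoning) and that the reasoning flag added to ModelChoiceSchema is what marks a model as reasoning-capable. The name resolveThinkingDefaultSketch, the CatalogEntry type, and the omission of the cfg parameter are illustrative assumptions, not the real module.

// Sketch only — shapes and names are assumptions, not the actual module.
type ThinkingLevel = "off" | "low" | "medium" | "high";

interface CatalogEntry {
  id: string;
  provider: string;
  reasoning?: boolean;
}

// Reasoning-capable models default to "low" thinking; everything else stays "off".
function resolveThinkingDefaultSketch(params: {
  provider: string;
  model: string;
  catalog: CatalogEntry[];
}): ThinkingLevel {
  const { provider, model, catalog } = params;
  const match = catalog.find(
    (entry) =>
      entry.provider.toLowerCase() === provider.toLowerCase() &&
      entry.id.toLowerCase() === model.toLowerCase(),
  );
  return match?.reasoning ? "low" : "off";
}

// Example: the new test expects "low" for the reasoning-capable claude-opus-4-5 mock.
const level = resolveThinkingDefaultSketch({
  provider: "anthropic",
  model: "claude-opus-4-5",
  catalog: [{ id: "claude-opus-4-5", provider: "anthropic", reasoning: true }],
});
console.log(level); // "low"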