refactor(agents): split tools + PI subscribe
src/agents/bash-tools.exec.ts (new file, 457 lines)
@@ -0,0 +1,457 @@
import { spawn } from "node:child_process";
import { randomUUID } from "node:crypto";
import type { AgentTool, AgentToolResult } from "@mariozechner/pi-agent-core";
import { Type } from "@sinclair/typebox";

import { logInfo } from "../logger.js";
import {
  addSession,
  appendOutput,
  markBackgrounded,
  markExited,
} from "./bash-process-registry.js";
import type { BashSandboxConfig } from "./bash-tools.shared.js";
import {
  buildDockerExecArgs,
  buildSandboxEnv,
  chunkString,
  clampNumber,
  coerceEnv,
  killSession,
  readEnvInt,
  resolveSandboxWorkdir,
  resolveWorkdir,
  truncateMiddle,
} from "./bash-tools.shared.js";
import { getShellConfig, sanitizeBinaryOutput } from "./shell-utils.js";

const DEFAULT_MAX_OUTPUT = clampNumber(
  readEnvInt("PI_BASH_MAX_OUTPUT_CHARS"),
  30_000,
  1_000,
  150_000,
);
const DEFAULT_PATH =
  process.env.PATH ??
  "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin";

export type ExecToolDefaults = {
  backgroundMs?: number;
  timeoutSec?: number;
  sandbox?: BashSandboxConfig;
  elevated?: ExecElevatedDefaults;
  allowBackground?: boolean;
  scopeKey?: string;
  cwd?: string;
};

export type { BashSandboxConfig } from "./bash-tools.shared.js";

export type ExecElevatedDefaults = {
  enabled: boolean;
  allowed: boolean;
  defaultLevel: "on" | "off";
};

const execSchema = Type.Object({
  command: Type.String({ description: "Shell command to execute" }),
  workdir: Type.Optional(
    Type.String({ description: "Working directory (defaults to cwd)" }),
  ),
  env: Type.Optional(Type.Record(Type.String(), Type.String())),
  yieldMs: Type.Optional(
    Type.Number({
      description: "Milliseconds to wait before backgrounding (default 10000)",
    }),
  ),
  background: Type.Optional(
    Type.Boolean({ description: "Run in background immediately" }),
  ),
  timeout: Type.Optional(
    Type.Number({
      description: "Timeout in seconds (optional, kills process on expiry)",
    }),
  ),
  elevated: Type.Optional(
    Type.Boolean({
      description: "Run on the host with elevated permissions (if allowed)",
    }),
  ),
});

export type ExecToolDetails =
  | {
      status: "running";
      sessionId: string;
      pid?: number;
      startedAt: number;
      cwd?: string;
      tail?: string;
    }
  | {
      status: "completed" | "failed";
      exitCode: number | null;
      durationMs: number;
      aggregated: string;
      cwd?: string;
    };

export function createExecTool(
  defaults?: ExecToolDefaults,
  // biome-ignore lint/suspicious/noExplicitAny: TypeBox schema type from pi-agent-core uses a different module instance.
): AgentTool<any, ExecToolDetails> {
  const defaultBackgroundMs = clampNumber(
    defaults?.backgroundMs ?? readEnvInt("PI_BASH_YIELD_MS"),
    10_000,
    10,
    120_000,
  );
  const allowBackground = defaults?.allowBackground ?? true;
  const defaultTimeoutSec =
    typeof defaults?.timeoutSec === "number" && defaults.timeoutSec > 0
      ? defaults.timeoutSec
      : 1800;

  return {
    name: "exec",
    label: "exec",
    description:
      "Execute shell commands with background continuation. Use yieldMs/background to continue later via process tool. For real TTY mode, use the tmux skill.",
    parameters: execSchema,
    execute: async (_toolCallId, args, signal, onUpdate) => {
      const params = args as {
        command: string;
        workdir?: string;
        env?: Record<string, string>;
        yieldMs?: number;
        background?: boolean;
        timeout?: number;
        elevated?: boolean;
      };

      if (!params.command) {
        throw new Error("Provide a command to start.");
      }

      const maxOutput = DEFAULT_MAX_OUTPUT;
      const startedAt = Date.now();
      const sessionId = randomUUID();
      const warnings: string[] = [];
      const backgroundRequested = params.background === true;
      const yieldRequested = typeof params.yieldMs === "number";
      if (!allowBackground && (backgroundRequested || yieldRequested)) {
        warnings.push(
          "Warning: background execution is disabled; running synchronously.",
        );
      }
      const yieldWindow = allowBackground
        ? backgroundRequested
          ? 0
          : clampNumber(
              params.yieldMs ?? defaultBackgroundMs,
              defaultBackgroundMs,
              10,
              120_000,
            )
        : null;
      const elevatedDefaults = defaults?.elevated;
      const elevatedDefaultOn =
        elevatedDefaults?.defaultLevel === "on" &&
        elevatedDefaults.enabled &&
        elevatedDefaults.allowed;
      const elevatedRequested =
        typeof params.elevated === "boolean"
          ? params.elevated
          : elevatedDefaultOn;
      if (elevatedRequested) {
        if (!elevatedDefaults?.enabled || !elevatedDefaults.allowed) {
          const runtime = defaults?.sandbox ? "sandboxed" : "direct";
          const gates: string[] = [];
          if (!elevatedDefaults?.enabled) {
            gates.push(
              "enabled (tools.elevated.enabled / agents.list[].tools.elevated.enabled)",
            );
          } else {
            gates.push(
              "allowFrom (tools.elevated.allowFrom.<provider> / agents.list[].tools.elevated.allowFrom.<provider>)",
            );
          }
          throw new Error(
            [
              `elevated is not available right now (runtime=${runtime}).`,
              `Failing gates: ${gates.join(", ")}`,
              "Fix-it keys:",
              "- tools.elevated.enabled",
              "- tools.elevated.allowFrom.<provider>",
              "- agents.list[].tools.elevated.enabled",
              "- agents.list[].tools.elevated.allowFrom.<provider>",
            ].join("\n"),
          );
        }
        logInfo(
          `exec: elevated command (${sessionId.slice(0, 8)}) ${truncateMiddle(
            params.command,
            120,
          )}`,
        );
      }

      const sandbox = elevatedRequested ? undefined : defaults?.sandbox;
      const rawWorkdir =
        params.workdir?.trim() || defaults?.cwd || process.cwd();
      let workdir = rawWorkdir;
      let containerWorkdir = sandbox?.containerWorkdir;
      if (sandbox) {
        const resolved = await resolveSandboxWorkdir({
          workdir: rawWorkdir,
          sandbox,
          warnings,
        });
        workdir = resolved.hostWorkdir;
        containerWorkdir = resolved.containerWorkdir;
      } else {
        workdir = resolveWorkdir(rawWorkdir, warnings);
      }

      const { shell, args: shellArgs } = getShellConfig();
      const baseEnv = coerceEnv(process.env);
      const mergedEnv = params.env ? { ...baseEnv, ...params.env } : baseEnv;
      const env = sandbox
        ? buildSandboxEnv({
            defaultPath: DEFAULT_PATH,
            paramsEnv: params.env,
            sandboxEnv: sandbox.env,
            containerWorkdir: containerWorkdir ?? sandbox.containerWorkdir,
          })
        : mergedEnv;
      const child = sandbox
        ? spawn(
            "docker",
            buildDockerExecArgs({
              containerName: sandbox.containerName,
              command: params.command,
              workdir: containerWorkdir ?? sandbox.containerWorkdir,
              env,
              tty: false,
            }),
            {
              cwd: workdir,
              env: process.env,
              detached: process.platform !== "win32",
              stdio: ["pipe", "pipe", "pipe"],
              windowsHide: true,
            },
          )
        : spawn(shell, [...shellArgs, params.command], {
            cwd: workdir,
            env,
            detached: process.platform !== "win32",
            stdio: ["pipe", "pipe", "pipe"],
            windowsHide: true,
          });

      const session = {
        id: sessionId,
        command: params.command,
        scopeKey: defaults?.scopeKey,
        child,
        pid: child?.pid,
        startedAt,
        cwd: workdir,
        maxOutputChars: maxOutput,
        totalOutputChars: 0,
        pendingStdout: [],
        pendingStderr: [],
        aggregated: "",
        tail: "",
        exited: false,
        exitCode: undefined as number | null | undefined,
        exitSignal: undefined as NodeJS.Signals | number | null | undefined,
        truncated: false,
        backgrounded: false,
      };
      addSession(session);

      let settled = false;
      let yielded = false;
      let yieldTimer: NodeJS.Timeout | null = null;
      let timeoutTimer: NodeJS.Timeout | null = null;
      let timedOut = false;

      const settle = (fn: () => void) => {
        if (settled) return;
        settled = true;
        fn();
      };

      const onAbort = () => {
        killSession(session);
      };

      if (signal?.aborted) onAbort();
      else if (signal) {
        signal.addEventListener("abort", onAbort, { once: true });
      }

      const effectiveTimeout =
        typeof params.timeout === "number" ? params.timeout : defaultTimeoutSec;
      if (effectiveTimeout > 0) {
        timeoutTimer = setTimeout(() => {
          timedOut = true;
          onAbort();
        }, effectiveTimeout * 1000);
      }

      const emitUpdate = () => {
        if (!onUpdate) return;
        const tailText = session.tail || session.aggregated;
        const warningText = warnings.length ? `${warnings.join("\n")}\n\n` : "";
        onUpdate({
          content: [{ type: "text", text: warningText + (tailText || "") }],
          details: {
            status: "running",
            sessionId,
            pid: session.pid ?? undefined,
            startedAt,
            cwd: session.cwd,
            tail: session.tail,
          },
        });
      };

      child.stdout.on("data", (data) => {
        const str = sanitizeBinaryOutput(data.toString());
        for (const chunk of chunkString(str)) {
          appendOutput(session, "stdout", chunk);
          emitUpdate();
        }
      });

      child.stderr.on("data", (data) => {
        const str = sanitizeBinaryOutput(data.toString());
        for (const chunk of chunkString(str)) {
          appendOutput(session, "stderr", chunk);
          emitUpdate();
        }
      });

      return new Promise<AgentToolResult<ExecToolDetails>>(
        (resolve, reject) => {
          const resolveRunning = () => {
            settle(() =>
              resolve({
                content: [
                  {
                    type: "text",
                    text:
                      `${warnings.length ? `${warnings.join("\n")}\n\n` : ""}` +
                      `Command still running (session ${sessionId}, pid ${session.pid ?? "n/a"}). ` +
                      "Use process (list/poll/log/write/kill/clear/remove) for follow-up.",
                  },
                ],
                details: {
                  status: "running",
                  sessionId,
                  pid: session.pid ?? undefined,
                  startedAt,
                  cwd: session.cwd,
                  tail: session.tail,
                },
              }),
            );
          };

          const onYieldNow = () => {
            if (yieldTimer) clearTimeout(yieldTimer);
            if (settled) return;
            yielded = true;
            markBackgrounded(session);
            resolveRunning();
          };

          if (allowBackground && yieldWindow !== null) {
            if (yieldWindow === 0) {
              onYieldNow();
            } else {
              yieldTimer = setTimeout(() => {
                if (settled) return;
                yielded = true;
                markBackgrounded(session);
                resolveRunning();
              }, yieldWindow);
            }
          }

          const handleExit = (
            code: number | null,
            exitSignal: NodeJS.Signals | number | null,
          ) => {
            if (yieldTimer) clearTimeout(yieldTimer);
            if (timeoutTimer) clearTimeout(timeoutTimer);
            const durationMs = Date.now() - startedAt;
            const wasSignal = exitSignal != null;
            const isSuccess =
              code === 0 && !wasSignal && !signal?.aborted && !timedOut;
            const status: "completed" | "failed" = isSuccess
              ? "completed"
              : "failed";
            markExited(session, code, exitSignal, status);

            if (yielded || session.backgrounded) return;

            const aggregated = session.aggregated.trim();
            if (!isSuccess) {
              const reason = timedOut
                ? `Command timed out after ${effectiveTimeout} seconds`
                : wasSignal && exitSignal
                  ? `Command aborted by signal ${exitSignal}`
                  : code === null
                    ? "Command aborted before exit code was captured"
                    : `Command exited with code ${code}`;
              const message = aggregated
                ? `${aggregated}\n\n${reason}`
                : reason;
              settle(() => reject(new Error(message)));
              return;
            }

            settle(() =>
              resolve({
                content: [
                  {
                    type: "text",
                    text:
                      `${warnings.length ? `${warnings.join("\n")}\n\n` : ""}` +
                      (aggregated || "(no output)"),
                  },
                ],
                details: {
                  status: "completed",
                  exitCode: code ?? 0,
                  durationMs,
                  aggregated,
                  cwd: session.cwd,
                },
              }),
            );
          };

          // `exit` can fire before stdio fully flushes (notably on Windows).
          // `close` waits for streams to close, so aggregated output is complete.
          child.once("close", (code, exitSignal) => {
            handleExit(code, exitSignal);
          });

          child.once("error", (err) => {
            if (yieldTimer) clearTimeout(yieldTimer);
            if (timeoutTimer) clearTimeout(timeoutTimer);
            markExited(session, null, null, "failed");
            settle(() => reject(err));
          });
        },
      );
    },
  };
}

export const execTool = createExecTool();
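A minimal harness for the tool above, for review context only: the tool name, schema, and `execute` signature come from this diff, while the surrounding setup (top-level await, the call-id string) is assumed.

```ts
import { createExecTool } from "./bash-tools.exec.js";

// Sketch: run a command with a 5s yield window. If it outlives the window,
// the tool resolves early with status "running" and a session id for the
// process tool to pick up.
const exec = createExecTool({ timeoutSec: 60 });
const ac = new AbortController();
const result = await exec.execute(
  "tool-call-1", // any unique tool call id
  { command: "sleep 30 && echo done", yieldMs: 5_000 },
  ac.signal,
  (update) => console.log(update.details), // streamed "running" snapshots
);
if (result.details.status === "running") {
  console.log("continue via process tool, session:", result.details.sessionId);
}
```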
src/agents/bash-tools.process.ts (new file, 469 lines)
@@ -0,0 +1,469 @@
import type { AgentTool } from "@mariozechner/pi-agent-core";
import { Type } from "@sinclair/typebox";

import {
  deleteSession,
  drainSession,
  getFinishedSession,
  getSession,
  listFinishedSessions,
  listRunningSessions,
  markExited,
  setJobTtlMs,
} from "./bash-process-registry.js";
import {
  deriveSessionName,
  formatDuration,
  killSession,
  pad,
  sliceLogLines,
  truncateMiddle,
} from "./bash-tools.shared.js";

export type ProcessToolDefaults = {
  cleanupMs?: number;
  scopeKey?: string;
};

const processSchema = Type.Object({
  action: Type.String({ description: "Process action" }),
  sessionId: Type.Optional(
    Type.String({ description: "Session id for actions other than list" }),
  ),
  data: Type.Optional(Type.String({ description: "Data to write for write" })),
  eof: Type.Optional(Type.Boolean({ description: "Close stdin after write" })),
  offset: Type.Optional(Type.Number({ description: "Log offset" })),
  limit: Type.Optional(Type.Number({ description: "Log length" })),
});

export function createProcessTool(
  defaults?: ProcessToolDefaults,
  // biome-ignore lint/suspicious/noExplicitAny: TypeBox schema type from pi-agent-core uses a different module instance.
): AgentTool<any> {
  if (defaults?.cleanupMs !== undefined) {
    setJobTtlMs(defaults.cleanupMs);
  }
  const scopeKey = defaults?.scopeKey;
  const isInScope = (session?: { scopeKey?: string } | null) =>
    !scopeKey || session?.scopeKey === scopeKey;

  return {
    name: "process",
    label: "process",
    description: "Manage running exec sessions: list, poll, log, write, kill.",
    parameters: processSchema,
    execute: async (_toolCallId, args) => {
      const params = args as {
        action: "list" | "poll" | "log" | "write" | "kill" | "clear" | "remove";
        sessionId?: string;
        data?: string;
        eof?: boolean;
        offset?: number;
        limit?: number;
      };

      if (params.action === "list") {
        const running = listRunningSessions()
          .filter((s) => isInScope(s))
          .map((s) => ({
            sessionId: s.id,
            status: "running",
            pid: s.pid ?? undefined,
            startedAt: s.startedAt,
            runtimeMs: Date.now() - s.startedAt,
            cwd: s.cwd,
            command: s.command,
            name: deriveSessionName(s.command),
            tail: s.tail,
            truncated: s.truncated,
          }));
        const finished = listFinishedSessions()
          .filter((s) => isInScope(s))
          .map((s) => ({
            sessionId: s.id,
            status: s.status,
            startedAt: s.startedAt,
            endedAt: s.endedAt,
            runtimeMs: s.endedAt - s.startedAt,
            cwd: s.cwd,
            command: s.command,
            name: deriveSessionName(s.command),
            tail: s.tail,
            truncated: s.truncated,
            exitCode: s.exitCode ?? undefined,
            exitSignal: s.exitSignal ?? undefined,
          }));
        const lines = [...running, ...finished]
          .sort((a, b) => b.startedAt - a.startedAt)
          .map((s) => {
            const label = s.name
              ? truncateMiddle(s.name, 80)
              : truncateMiddle(s.command, 120);
            return `${s.sessionId.slice(0, 8)} ${pad(
              s.status,
              9,
            )} ${formatDuration(s.runtimeMs)} :: ${label}`;
          });
        return {
          content: [
            {
              type: "text",
              text: lines.join("\n") || "No running or recent sessions.",
            },
          ],
          details: { status: "completed", sessions: [...running, ...finished] },
        };
      }

      if (!params.sessionId) {
        return {
          content: [
            { type: "text", text: "sessionId is required for this action." },
          ],
          details: { status: "failed" },
        };
      }

      const session = getSession(params.sessionId);
      const finished = getFinishedSession(params.sessionId);
      const scopedSession = isInScope(session) ? session : undefined;
      const scopedFinished = isInScope(finished) ? finished : undefined;

      switch (params.action) {
        case "poll": {
          if (!scopedSession) {
            if (scopedFinished) {
              return {
                content: [
                  {
                    type: "text",
                    text:
                      (scopedFinished.tail ||
                        `(no output recorded${
                          scopedFinished.truncated ? " — truncated to cap" : ""
                        })`) +
                      `\n\nProcess exited with ${
                        scopedFinished.exitSignal
                          ? `signal ${scopedFinished.exitSignal}`
                          : `code ${scopedFinished.exitCode ?? 0}`
                      }.`,
                  },
                ],
                details: {
                  status:
                    scopedFinished.status === "completed"
                      ? "completed"
                      : "failed",
                  sessionId: params.sessionId,
                  exitCode: scopedFinished.exitCode ?? undefined,
                  aggregated: scopedFinished.aggregated,
                  name: deriveSessionName(scopedFinished.command),
                },
              };
            }
            return {
              content: [
                {
                  type: "text",
                  text: `No session found for ${params.sessionId}`,
                },
              ],
              details: { status: "failed" },
            };
          }
          if (!scopedSession.backgrounded) {
            return {
              content: [
                {
                  type: "text",
                  text: `Session ${params.sessionId} is not backgrounded.`,
                },
              ],
              details: { status: "failed" },
            };
          }
          const { stdout, stderr } = drainSession(scopedSession);
          const exited = scopedSession.exited;
          const exitCode = scopedSession.exitCode ?? 0;
          const exitSignal = scopedSession.exitSignal ?? undefined;
          if (exited) {
            const status =
              exitCode === 0 && exitSignal == null ? "completed" : "failed";
            markExited(
              scopedSession,
              scopedSession.exitCode ?? null,
              scopedSession.exitSignal ?? null,
              status,
            );
          }
          const status = exited
            ? exitCode === 0 && exitSignal == null
              ? "completed"
              : "failed"
            : "running";
          const output = [stdout.trimEnd(), stderr.trimEnd()]
            .filter(Boolean)
            .join("\n")
            .trim();
          return {
            content: [
              {
                type: "text",
                text:
                  (output || "(no new output)") +
                  (exited
                    ? `\n\nProcess exited with ${
                        exitSignal ? `signal ${exitSignal}` : `code ${exitCode}`
                      }.`
                    : "\n\nProcess still running."),
              },
            ],
            details: {
              status,
              sessionId: params.sessionId,
              exitCode: exited ? exitCode : undefined,
              aggregated: scopedSession.aggregated,
              name: deriveSessionName(scopedSession.command),
            },
          };
        }

        case "log": {
          if (scopedSession) {
            if (!scopedSession.backgrounded) {
              return {
                content: [
                  {
                    type: "text",
                    text: `Session ${params.sessionId} is not backgrounded.`,
                  },
                ],
                details: { status: "failed" },
              };
            }
            const { slice, totalLines, totalChars } = sliceLogLines(
              scopedSession.aggregated,
              params.offset,
              params.limit,
            );
            return {
              content: [{ type: "text", text: slice || "(no output yet)" }],
              details: {
                status: scopedSession.exited ? "completed" : "running",
                sessionId: params.sessionId,
                total: totalLines,
                totalLines,
                totalChars,
                truncated: scopedSession.truncated,
                name: deriveSessionName(scopedSession.command),
              },
            };
          }
          if (scopedFinished) {
            const { slice, totalLines, totalChars } = sliceLogLines(
              scopedFinished.aggregated,
              params.offset,
              params.limit,
            );
            const status =
              scopedFinished.status === "completed" ? "completed" : "failed";
            return {
              content: [
                { type: "text", text: slice || "(no output recorded)" },
              ],
              details: {
                status,
                sessionId: params.sessionId,
                total: totalLines,
                totalLines,
                totalChars,
                truncated: scopedFinished.truncated,
                exitCode: scopedFinished.exitCode ?? undefined,
                exitSignal: scopedFinished.exitSignal ?? undefined,
                name: deriveSessionName(scopedFinished.command),
              },
            };
          }
          return {
            content: [
              {
                type: "text",
                text: `No session found for ${params.sessionId}`,
              },
            ],
            details: { status: "failed" },
          };
        }

        case "write": {
          if (!scopedSession) {
            return {
              content: [
                {
                  type: "text",
                  text: `No active session found for ${params.sessionId}`,
                },
              ],
              details: { status: "failed" },
            };
          }
          if (!scopedSession.backgrounded) {
            return {
              content: [
                {
                  type: "text",
                  text: `Session ${params.sessionId} is not backgrounded.`,
                },
              ],
              details: { status: "failed" },
            };
          }
          if (
            !scopedSession.child?.stdin ||
            scopedSession.child.stdin.destroyed
          ) {
            return {
              content: [
                {
                  type: "text",
                  text: `Session ${params.sessionId} stdin is not writable.`,
                },
              ],
              details: { status: "failed" },
            };
          }
          await new Promise<void>((resolve, reject) => {
            scopedSession.child?.stdin.write(params.data ?? "", (err) => {
              if (err) reject(err);
              else resolve();
            });
          });
          if (params.eof) {
            scopedSession.child.stdin.end();
          }
          return {
            content: [
              {
                type: "text",
                text: `Wrote ${(params.data ?? "").length} bytes to session ${params.sessionId}${
                  params.eof ? " (stdin closed)" : ""
                }.`,
              },
            ],
            details: {
              status: "running",
              sessionId: params.sessionId,
              name: scopedSession
                ? deriveSessionName(scopedSession.command)
                : undefined,
            },
          };
        }

        case "kill": {
          if (!scopedSession) {
            return {
              content: [
                {
                  type: "text",
                  text: `No active session found for ${params.sessionId}`,
                },
              ],
              details: { status: "failed" },
            };
          }
          if (!scopedSession.backgrounded) {
            return {
              content: [
                {
                  type: "text",
                  text: `Session ${params.sessionId} is not backgrounded.`,
                },
              ],
              details: { status: "failed" },
            };
          }
          killSession(scopedSession);
          markExited(scopedSession, null, "SIGKILL", "failed");
          return {
            content: [
              { type: "text", text: `Killed session ${params.sessionId}.` },
            ],
            details: {
              status: "failed",
              name: scopedSession
                ? deriveSessionName(scopedSession.command)
                : undefined,
            },
          };
        }

        case "clear": {
          if (scopedFinished) {
            deleteSession(params.sessionId);
            return {
              content: [
                { type: "text", text: `Cleared session ${params.sessionId}.` },
              ],
              details: { status: "completed" },
            };
          }
          return {
            content: [
              {
                type: "text",
                text: `No finished session found for ${params.sessionId}`,
              },
            ],
            details: { status: "failed" },
          };
        }

        case "remove": {
          if (scopedSession) {
            killSession(scopedSession);
            markExited(scopedSession, null, "SIGKILL", "failed");
            return {
              content: [
                { type: "text", text: `Removed session ${params.sessionId}.` },
              ],
              details: {
                status: "failed",
                name: scopedSession
                  ? deriveSessionName(scopedSession.command)
                  : undefined,
              },
            };
          }
          if (scopedFinished) {
            deleteSession(params.sessionId);
            return {
              content: [
                { type: "text", text: `Removed session ${params.sessionId}.` },
              ],
              details: { status: "completed" },
            };
          }
          return {
            content: [
              {
                type: "text",
                text: `No session found for ${params.sessionId}`,
              },
            ],
            details: { status: "failed" },
          };
        }
      }

      return {
        content: [
          { type: "text", text: `Unknown action ${params.action as string}` },
        ],
        details: { status: "failed" },
      };
    },
  };
}

export const processTool = createProcessTool();
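A follow-up sketch pairing this with the exec tool above; the session id placeholder stands in for the id returned by a backgrounded exec call.

```ts
import { createProcessTool } from "./bash-tools.process.js";

// Sketch: drain any new stdout/stderr from a backgrounded session. Once the
// process exits, the poll reports the exit code (or signal) and final status.
const proc = createProcessTool();
const poll = await proc.execute("tool-call-2", {
  action: "poll",
  sessionId: "<session-id-from-exec>", // placeholder, not a real id
});
console.log(poll.details.status); // "running" | "completed" | "failed"
```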
src/agents/bash-tools.shared.ts (new file, 234 lines)
@@ -0,0 +1,234 @@
import type { ChildProcessWithoutNullStreams } from "node:child_process";
import { existsSync, statSync } from "node:fs";
import fs from "node:fs/promises";
import { homedir } from "node:os";
import path from "node:path";

import { sliceUtf16Safe } from "../utils.js";
import { assertSandboxPath } from "./sandbox-paths.js";
import { killProcessTree } from "./shell-utils.js";

const CHUNK_LIMIT = 8 * 1024;

export type BashSandboxConfig = {
  containerName: string;
  workspaceDir: string;
  containerWorkdir: string;
  env?: Record<string, string>;
};

export function buildSandboxEnv(params: {
  defaultPath: string;
  paramsEnv?: Record<string, string>;
  sandboxEnv?: Record<string, string>;
  containerWorkdir: string;
}) {
  const env: Record<string, string> = {
    PATH: params.defaultPath,
    HOME: params.containerWorkdir,
  };
  for (const [key, value] of Object.entries(params.sandboxEnv ?? {})) {
    env[key] = value;
  }
  for (const [key, value] of Object.entries(params.paramsEnv ?? {})) {
    env[key] = value;
  }
  return env;
}

export function coerceEnv(env?: NodeJS.ProcessEnv | Record<string, string>) {
  const record: Record<string, string> = {};
  if (!env) return record;
  for (const [key, value] of Object.entries(env)) {
    if (typeof value === "string") record[key] = value;
  }
  return record;
}

export function buildDockerExecArgs(params: {
  containerName: string;
  command: string;
  workdir?: string;
  env: Record<string, string>;
  tty: boolean;
}) {
  const args = ["exec", "-i"];
  if (params.tty) args.push("-t");
  if (params.workdir) {
    args.push("-w", params.workdir);
  }
  for (const [key, value] of Object.entries(params.env)) {
    args.push("-e", `${key}=${value}`);
  }
  args.push(params.containerName, "sh", "-lc", params.command);
  return args;
}

export async function resolveSandboxWorkdir(params: {
  workdir: string;
  sandbox: BashSandboxConfig;
  warnings: string[];
}) {
  const fallback = params.sandbox.workspaceDir;
  try {
    const resolved = await assertSandboxPath({
      filePath: params.workdir,
      cwd: process.cwd(),
      root: params.sandbox.workspaceDir,
    });
    const stats = await fs.stat(resolved.resolved);
    if (!stats.isDirectory()) {
      throw new Error("workdir is not a directory");
    }
    const relative = resolved.relative
      ? resolved.relative.split(path.sep).join(path.posix.sep)
      : "";
    const containerWorkdir = relative
      ? path.posix.join(params.sandbox.containerWorkdir, relative)
      : params.sandbox.containerWorkdir;
    return { hostWorkdir: resolved.resolved, containerWorkdir };
  } catch {
    params.warnings.push(
      `Warning: workdir "${params.workdir}" is unavailable; using "${fallback}".`,
    );
    return {
      hostWorkdir: fallback,
      containerWorkdir: params.sandbox.containerWorkdir,
    };
  }
}

export function killSession(session: {
  pid?: number;
  child?: ChildProcessWithoutNullStreams;
}) {
  const pid = session.pid ?? session.child?.pid;
  if (pid) {
    killProcessTree(pid);
  }
}

export function resolveWorkdir(workdir: string, warnings: string[]) {
  const current = safeCwd();
  const fallback = current ?? homedir();
  try {
    const stats = statSync(workdir);
    if (stats.isDirectory()) return workdir;
  } catch {
    // ignore, fallback below
  }
  warnings.push(
    `Warning: workdir "${workdir}" is unavailable; using "${fallback}".`,
  );
  return fallback;
}

function safeCwd() {
  try {
    const cwd = process.cwd();
    return existsSync(cwd) ? cwd : null;
  } catch {
    return null;
  }
}

export function clampNumber(
  value: number | undefined,
  defaultValue: number,
  min: number,
  max: number,
) {
  if (value === undefined || Number.isNaN(value)) return defaultValue;
  return Math.min(Math.max(value, min), max);
}

export function readEnvInt(key: string) {
  const raw = process.env[key];
  if (!raw) return undefined;
  const parsed = Number.parseInt(raw, 10);
  return Number.isFinite(parsed) ? parsed : undefined;
}

export function chunkString(input: string, limit = CHUNK_LIMIT) {
  const chunks: string[] = [];
  for (let i = 0; i < input.length; i += limit) {
    chunks.push(input.slice(i, i + limit));
  }
  return chunks;
}

export function truncateMiddle(str: string, max: number) {
  if (str.length <= max) return str;
  const half = Math.floor((max - 3) / 2);
  return `${sliceUtf16Safe(str, 0, half)}...${sliceUtf16Safe(str, -half)}`;
}

export function sliceLogLines(
  text: string,
  offset?: number,
  limit?: number,
): { slice: string; totalLines: number; totalChars: number } {
  if (!text) return { slice: "", totalLines: 0, totalChars: 0 };
  const normalized = text.replace(/\r\n/g, "\n");
  const lines = normalized.split("\n");
  if (lines.length > 0 && lines[lines.length - 1] === "") {
    lines.pop();
  }
  const totalLines = lines.length;
  const totalChars = text.length;
  let start =
    typeof offset === "number" && Number.isFinite(offset)
      ? Math.max(0, Math.floor(offset))
      : 0;
  if (limit !== undefined && offset === undefined) {
    const tailCount = Math.max(0, Math.floor(limit));
    start = Math.max(totalLines - tailCount, 0);
  }
  const end =
    typeof limit === "number" && Number.isFinite(limit)
      ? start + Math.max(0, Math.floor(limit))
      : undefined;
  return { slice: lines.slice(start, end).join("\n"), totalLines, totalChars };
}

export function deriveSessionName(command: string): string | undefined {
  const tokens = tokenizeCommand(command);
  if (tokens.length === 0) return undefined;
  const verb = tokens[0];
  let target = tokens.slice(1).find((t) => !t.startsWith("-"));
  if (!target) target = tokens[1];
  if (!target) return verb;
  const cleaned = truncateMiddle(stripQuotes(target), 48);
  return `${stripQuotes(verb)} ${cleaned}`;
}

function tokenizeCommand(command: string): string[] {
  const matches =
    command.match(/(?:[^\s"']+|"(?:\\.|[^"])*"|'(?:\\.|[^'])*')+/g) ?? [];
  return matches.map((token) => stripQuotes(token)).filter(Boolean);
}

function stripQuotes(value: string): string {
  const trimmed = value.trim();
  if (
    (trimmed.startsWith('"') && trimmed.endsWith('"')) ||
    (trimmed.startsWith("'") && trimmed.endsWith("'"))
  ) {
    return trimmed.slice(1, -1);
  }
  return trimmed;
}

export function formatDuration(ms: number) {
  if (ms < 1000) return `${ms}ms`;
  const seconds = Math.floor(ms / 1000);
  if (seconds < 60) return `${seconds}s`;
  const minutes = Math.floor(seconds / 60);
  const rem = seconds % 60;
  return `${minutes}m${rem.toString().padStart(2, "0")}s`;
}

export function pad(str: string, width: number) {
  if (str.length >= width) return str;
  return str + " ".repeat(width - str.length);
}
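The offset/limit semantics of `sliceLogLines` are worth pinning down: an explicit offset slices forward from that line, while a limit alone returns the tail. A worked example (values follow directly from the implementation above):

```ts
import { sliceLogLines } from "./bash-tools.shared.js";

const log = "a\nb\nc\nd\n"; // 4 logical lines (trailing empty line dropped)

// offset + limit: forward window starting at line index 1
sliceLogLines(log, 1, 2); // { slice: "b\nc", totalLines: 4, totalChars: 8 }

// limit only: last N lines (tail semantics)
sliceLogLines(log, undefined, 2); // { slice: "c\nd", totalLines: 4, totalChars: 8 }
```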
(One file diff suppressed because it is too large.)
src/agents/pi-embedded-subscribe.handlers.lifecycle.ts (new file, 84 lines)
@@ -0,0 +1,84 @@
import type { AgentEvent } from "@mariozechner/pi-agent-core";

import { emitAgentEvent } from "../infra/agent-events.js";
import type { EmbeddedPiSubscribeContext } from "./pi-embedded-subscribe.handlers.types.js";

export function handleAgentStart(ctx: EmbeddedPiSubscribeContext) {
  ctx.log.debug(`embedded run agent start: runId=${ctx.params.runId}`);
  emitAgentEvent({
    runId: ctx.params.runId,
    stream: "lifecycle",
    data: {
      phase: "start",
      startedAt: Date.now(),
    },
  });
  ctx.params.onAgentEvent?.({
    stream: "lifecycle",
    data: { phase: "start" },
  });
}

export function handleAutoCompactionStart(ctx: EmbeddedPiSubscribeContext) {
  ctx.state.compactionInFlight = true;
  ctx.ensureCompactionPromise();
  ctx.log.debug(`embedded run compaction start: runId=${ctx.params.runId}`);
  ctx.params.onAgentEvent?.({
    stream: "compaction",
    data: { phase: "start" },
  });
}

export function handleAutoCompactionEnd(
  ctx: EmbeddedPiSubscribeContext,
  evt: AgentEvent & { willRetry?: unknown },
) {
  ctx.state.compactionInFlight = false;
  const willRetry = Boolean(evt.willRetry);
  if (willRetry) {
    ctx.noteCompactionRetry();
    ctx.resetForCompactionRetry();
    ctx.log.debug(`embedded run compaction retry: runId=${ctx.params.runId}`);
  } else {
    ctx.maybeResolveCompactionWait();
  }
  ctx.params.onAgentEvent?.({
    stream: "compaction",
    data: { phase: "end", willRetry },
  });
}

export function handleAgentEnd(ctx: EmbeddedPiSubscribeContext) {
  ctx.log.debug(`embedded run agent end: runId=${ctx.params.runId}`);
  emitAgentEvent({
    runId: ctx.params.runId,
    stream: "lifecycle",
    data: {
      phase: "end",
      endedAt: Date.now(),
    },
  });
  ctx.params.onAgentEvent?.({
    stream: "lifecycle",
    data: { phase: "end" },
  });

  if (ctx.params.onBlockReply) {
    if (ctx.blockChunker?.hasBuffered()) {
      ctx.blockChunker.drain({ force: true, emit: ctx.emitBlockChunk });
      ctx.blockChunker.reset();
    } else if (ctx.state.blockBuffer.length > 0) {
      ctx.emitBlockChunk(ctx.state.blockBuffer);
      ctx.state.blockBuffer = "";
    }
  }

  ctx.state.blockState.thinking = false;
  ctx.state.blockState.final = false;

  if (ctx.state.pendingCompactionRetry > 0) {
    ctx.resolveCompactionRetry();
  } else {
    ctx.maybeResolveCompactionWait();
  }
}
src/agents/pi-embedded-subscribe.handlers.messages.ts (new file, 273 lines)
@@ -0,0 +1,273 @@
import type { AgentEvent, AgentMessage } from "@mariozechner/pi-agent-core";
import type { AssistantMessage } from "@mariozechner/pi-ai";

import { parseReplyDirectives } from "../auto-reply/reply/reply-directives.js";
import { emitAgentEvent } from "../infra/agent-events.js";
import {
  isMessagingToolDuplicateNormalized,
  normalizeTextForComparison,
} from "./pi-embedded-helpers.js";
import type { EmbeddedPiSubscribeContext } from "./pi-embedded-subscribe.handlers.types.js";
import { appendRawStream } from "./pi-embedded-subscribe.raw-stream.js";
import {
  extractAssistantText,
  extractAssistantThinking,
  extractThinkingFromTaggedStream,
  extractThinkingFromTaggedText,
  formatReasoningMessage,
  promoteThinkingTagsToBlocks,
} from "./pi-embedded-utils.js";

export function handleMessageStart(
  ctx: EmbeddedPiSubscribeContext,
  evt: AgentEvent & { message: AgentMessage },
) {
  const msg = evt.message;
  if (msg?.role !== "assistant") return;

  // KNOWN: Resetting at `text_end` is unsafe (late/duplicate end events).
  // ASSUME: `message_start` is the only reliable boundary for “new assistant message begins”.
  // Start-of-message is a safer reset point than message_end: some providers
  // may deliver late text_end updates after message_end, which would otherwise
  // re-trigger block replies.
  ctx.resetAssistantMessageState(ctx.state.assistantTexts.length);
  // Use assistant message_start as the earliest "writing" signal for typing.
  void ctx.params.onAssistantMessageStart?.();
}

export function handleMessageUpdate(
  ctx: EmbeddedPiSubscribeContext,
  evt: AgentEvent & { message: AgentMessage; assistantMessageEvent?: unknown },
) {
  const msg = evt.message;
  if (msg?.role !== "assistant") return;

  const assistantEvent = evt.assistantMessageEvent;
  const assistantRecord =
    assistantEvent && typeof assistantEvent === "object"
      ? (assistantEvent as Record<string, unknown>)
      : undefined;
  const evtType =
    typeof assistantRecord?.type === "string" ? assistantRecord.type : "";

  if (
    evtType !== "text_delta" &&
    evtType !== "text_start" &&
    evtType !== "text_end"
  ) {
    return;
  }

  const delta =
    typeof assistantRecord?.delta === "string" ? assistantRecord.delta : "";
  const content =
    typeof assistantRecord?.content === "string" ? assistantRecord.content : "";

  appendRawStream({
    ts: Date.now(),
    event: "assistant_text_stream",
    runId: ctx.params.runId,
    sessionId: (ctx.params.session as { id?: string }).id,
    evtType,
    delta,
    content,
  });

  let chunk = "";
  if (evtType === "text_delta") {
    chunk = delta;
  } else if (evtType === "text_start" || evtType === "text_end") {
    if (delta) {
      chunk = delta;
    } else if (content) {
      // KNOWN: Some providers resend full content on `text_end`.
      // We only append a suffix (or nothing) to keep output monotonic.
      if (content.startsWith(ctx.state.deltaBuffer)) {
        chunk = content.slice(ctx.state.deltaBuffer.length);
      } else if (ctx.state.deltaBuffer.startsWith(content)) {
        chunk = "";
      } else if (!ctx.state.deltaBuffer.includes(content)) {
        chunk = content;
      }
    }
  }

  if (chunk) {
    ctx.state.deltaBuffer += chunk;
    if (ctx.blockChunker) {
      ctx.blockChunker.append(chunk);
    } else {
      ctx.state.blockBuffer += chunk;
    }
  }

  if (ctx.state.streamReasoning) {
    // Handle partial <think> tags: stream whatever reasoning is visible so far.
    ctx.emitReasoningStream(
      extractThinkingFromTaggedStream(ctx.state.deltaBuffer),
    );
  }

  const next = ctx
    .stripBlockTags(ctx.state.deltaBuffer, {
      thinking: false,
      final: false,
    })
    .trim();
  if (next && next !== ctx.state.lastStreamedAssistant) {
    ctx.state.lastStreamedAssistant = next;
    const { text: cleanedText, mediaUrls } = parseReplyDirectives(next);
    emitAgentEvent({
      runId: ctx.params.runId,
      stream: "assistant",
      data: {
        text: cleanedText,
        mediaUrls: mediaUrls?.length ? mediaUrls : undefined,
      },
    });
    ctx.params.onAgentEvent?.({
      stream: "assistant",
      data: {
        text: cleanedText,
        mediaUrls: mediaUrls?.length ? mediaUrls : undefined,
      },
    });
    if (ctx.params.onPartialReply && ctx.state.shouldEmitPartialReplies) {
      void ctx.params.onPartialReply({
        text: cleanedText,
        mediaUrls: mediaUrls?.length ? mediaUrls : undefined,
      });
    }
  }

  if (
    ctx.params.onBlockReply &&
    ctx.blockChunking &&
    ctx.state.blockReplyBreak === "text_end"
  ) {
    ctx.blockChunker?.drain({ force: false, emit: ctx.emitBlockChunk });
  }

  if (evtType === "text_end" && ctx.state.blockReplyBreak === "text_end") {
    if (ctx.blockChunker?.hasBuffered()) {
      ctx.blockChunker.drain({ force: true, emit: ctx.emitBlockChunk });
      ctx.blockChunker.reset();
    } else if (ctx.state.blockBuffer.length > 0) {
      ctx.emitBlockChunk(ctx.state.blockBuffer);
      ctx.state.blockBuffer = "";
    }
  }
}

export function handleMessageEnd(
  ctx: EmbeddedPiSubscribeContext,
  evt: AgentEvent & { message: AgentMessage },
) {
  const msg = evt.message;
  if (msg?.role !== "assistant") return;

  const assistantMessage = msg as AssistantMessage;
  promoteThinkingTagsToBlocks(assistantMessage);

  const rawText = extractAssistantText(assistantMessage);
  appendRawStream({
    ts: Date.now(),
    event: "assistant_message_end",
    runId: ctx.params.runId,
    sessionId: (ctx.params.session as { id?: string }).id,
    rawText,
    rawThinking: extractAssistantThinking(assistantMessage),
  });

  const text = ctx.stripBlockTags(rawText, { thinking: false, final: false });
  const rawThinking =
    ctx.state.includeReasoning || ctx.state.streamReasoning
      ? extractAssistantThinking(assistantMessage) ||
        extractThinkingFromTaggedText(rawText)
      : "";
  const formattedReasoning = rawThinking
    ? formatReasoningMessage(rawThinking)
    : "";

  const addedDuringMessage =
    ctx.state.assistantTexts.length > ctx.state.assistantTextBaseline;
  const chunkerHasBuffered = ctx.blockChunker?.hasBuffered() ?? false;
  ctx.finalizeAssistantTexts({ text, addedDuringMessage, chunkerHasBuffered });

  const onBlockReply = ctx.params.onBlockReply;
  const shouldEmitReasoning = Boolean(
    ctx.state.includeReasoning &&
      formattedReasoning &&
      onBlockReply &&
      formattedReasoning !== ctx.state.lastReasoningSent,
  );
  const shouldEmitReasoningBeforeAnswer =
    shouldEmitReasoning &&
    ctx.state.blockReplyBreak === "message_end" &&
    !addedDuringMessage;
  const maybeEmitReasoning = () => {
    if (!shouldEmitReasoning || !formattedReasoning) return;
    ctx.state.lastReasoningSent = formattedReasoning;
    void onBlockReply?.({ text: formattedReasoning });
  };

  if (shouldEmitReasoningBeforeAnswer) maybeEmitReasoning();

  if (
    (ctx.state.blockReplyBreak === "message_end" ||
      (ctx.blockChunker
        ? ctx.blockChunker.hasBuffered()
        : ctx.state.blockBuffer.length > 0)) &&
    text &&
    onBlockReply
  ) {
    if (ctx.blockChunker?.hasBuffered()) {
      ctx.blockChunker.drain({ force: true, emit: ctx.emitBlockChunk });
      ctx.blockChunker.reset();
    } else if (text !== ctx.state.lastBlockReplyText) {
      // Check for duplicates before emitting (same logic as emitBlockChunk).
      const normalizedText = normalizeTextForComparison(text);
      if (
        isMessagingToolDuplicateNormalized(
          normalizedText,
          ctx.state.messagingToolSentTextsNormalized,
        )
      ) {
        ctx.log.debug(
          `Skipping message_end block reply - already sent via messaging tool: ${text.slice(0, 50)}...`,
        );
      } else {
        ctx.state.lastBlockReplyText = text;
        const {
          text: cleanedText,
          mediaUrls,
          audioAsVoice,
        } = parseReplyDirectives(text);
        // Emit if there's content OR audioAsVoice flag (to propagate the flag).
        if (
          cleanedText ||
          (mediaUrls && mediaUrls.length > 0) ||
          audioAsVoice
        ) {
          void onBlockReply({
            text: cleanedText,
            mediaUrls: mediaUrls?.length ? mediaUrls : undefined,
            audioAsVoice,
          });
        }
      }
    }
  }

  if (!shouldEmitReasoningBeforeAnswer) maybeEmitReasoning();
  if (ctx.state.streamReasoning && rawThinking) {
    ctx.emitReasoningStream(rawThinking);
  }

  ctx.state.deltaBuffer = "";
  ctx.state.blockBuffer = "";
  ctx.blockChunker?.reset();
  ctx.state.blockState.thinking = false;
  ctx.state.blockState.final = false;
  ctx.state.lastStreamedAssistant = undefined;
}
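The text_end reconciliation in `handleMessageUpdate` is the subtle part: providers may resend the full text, so only a suffix relative to the delta buffer is ever appended. Restated standalone for review (an illustration, not an export of this module):

```ts
// Mirrors the branch above: given what has streamed so far (buffer) and the
// content attached to a text_start/text_end event, return what to append.
function suffixChunk(buffer: string, content: string): string {
  if (content.startsWith(buffer)) return content.slice(buffer.length); // new suffix
  if (buffer.startsWith(content)) return ""; // stale prefix resend, ignore
  if (buffer.includes(content)) return ""; // duplicate fragment, ignore
  return content; // genuinely new text
}

suffixChunk("Hello, wo", "Hello, world"); // "rld"
suffixChunk("Hello, world", "Hello");     // ""
```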
src/agents/pi-embedded-subscribe.handlers.tools.ts (new file, 200 lines)
@@ -0,0 +1,200 @@
import type { AgentEvent } from "@mariozechner/pi-agent-core";

import { emitAgentEvent } from "../infra/agent-events.js";
import { normalizeTextForComparison } from "./pi-embedded-helpers.js";
import {
  isMessagingTool,
  isMessagingToolSendAction,
} from "./pi-embedded-messaging.js";
import type { EmbeddedPiSubscribeContext } from "./pi-embedded-subscribe.handlers.types.js";
import {
  extractMessagingToolSend,
  isToolResultError,
  sanitizeToolResult,
} from "./pi-embedded-subscribe.tools.js";
import { inferToolMetaFromArgs } from "./pi-embedded-utils.js";

export function handleToolExecutionStart(
  ctx: EmbeddedPiSubscribeContext,
  evt: AgentEvent & { toolName: string; toolCallId: string; args: unknown },
) {
  // Flush pending block replies to preserve message boundaries before tool execution.
  ctx.flushBlockReplyBuffer();
  if (ctx.params.onBlockReplyFlush) {
    void ctx.params.onBlockReplyFlush();
  }

  const toolName = String(evt.toolName);
  const toolCallId = String(evt.toolCallId);
  const args = evt.args;

  if (toolName === "read") {
    const record =
      args && typeof args === "object" ? (args as Record<string, unknown>) : {};
    const filePath = typeof record.path === "string" ? record.path.trim() : "";
    if (!filePath) {
      const argsPreview =
        typeof args === "string" ? args.slice(0, 200) : undefined;
      ctx.log.warn(
        `read tool called without path: toolCallId=${toolCallId} argsType=${typeof args}${argsPreview ? ` argsPreview=${argsPreview}` : ""}`,
      );
    }
  }

  const meta = inferToolMetaFromArgs(toolName, args);
  ctx.state.toolMetaById.set(toolCallId, meta);
  ctx.log.debug(
    `embedded run tool start: runId=${ctx.params.runId} tool=${toolName} toolCallId=${toolCallId}`,
  );

  const shouldEmitToolEvents = ctx.shouldEmitToolResult();
  emitAgentEvent({
    runId: ctx.params.runId,
    stream: "tool",
    data: {
      phase: "start",
      name: toolName,
      toolCallId,
      args: args as Record<string, unknown>,
    },
  });
  ctx.params.onAgentEvent?.({
    stream: "tool",
    data: { phase: "start", name: toolName, toolCallId },
  });

  if (
    ctx.params.onToolResult &&
    shouldEmitToolEvents &&
    !ctx.state.toolSummaryById.has(toolCallId)
  ) {
    ctx.state.toolSummaryById.add(toolCallId);
    ctx.emitToolSummary(toolName, meta);
  }

  // Track messaging tool sends (pending until confirmed in tool_execution_end).
  if (isMessagingTool(toolName)) {
    const argsRecord =
      args && typeof args === "object" ? (args as Record<string, unknown>) : {};
    const isMessagingSend = isMessagingToolSendAction(toolName, argsRecord);
    if (isMessagingSend) {
      const sendTarget = extractMessagingToolSend(toolName, argsRecord);
      if (sendTarget) {
        ctx.state.pendingMessagingTargets.set(toolCallId, sendTarget);
      }
      // Field names vary by tool: Discord/Slack use "content", sessions_send uses "message"
      const text =
        (argsRecord.content as string) ?? (argsRecord.message as string);
      if (text && typeof text === "string") {
        ctx.state.pendingMessagingTexts.set(toolCallId, text);
        ctx.log.debug(
          `Tracking pending messaging text: tool=${toolName} len=${text.length}`,
        );
      }
    }
  }
}

export function handleToolExecutionUpdate(
  ctx: EmbeddedPiSubscribeContext,
  evt: AgentEvent & {
    toolName: string;
    toolCallId: string;
    partialResult?: unknown;
  },
) {
  const toolName = String(evt.toolName);
  const toolCallId = String(evt.toolCallId);
  const partial = evt.partialResult;
  const sanitized = sanitizeToolResult(partial);
  emitAgentEvent({
    runId: ctx.params.runId,
    stream: "tool",
    data: {
      phase: "update",
      name: toolName,
      toolCallId,
      partialResult: sanitized,
    },
  });
  ctx.params.onAgentEvent?.({
    stream: "tool",
    data: {
      phase: "update",
      name: toolName,
      toolCallId,
    },
  });
}

export function handleToolExecutionEnd(
  ctx: EmbeddedPiSubscribeContext,
  evt: AgentEvent & {
    toolName: string;
    toolCallId: string;
    isError: boolean;
    result?: unknown;
  },
) {
  const toolName = String(evt.toolName);
  const toolCallId = String(evt.toolCallId);
  const isError = Boolean(evt.isError);
  const result = evt.result;
  const isToolError = isError || isToolResultError(result);
  const sanitizedResult = sanitizeToolResult(result);
  const meta = ctx.state.toolMetaById.get(toolCallId);
  ctx.state.toolMetas.push({ toolName, meta });
  ctx.state.toolMetaById.delete(toolCallId);
  ctx.state.toolSummaryById.delete(toolCallId);

  // Commit messaging tool text on success, discard on error.
  const pendingText = ctx.state.pendingMessagingTexts.get(toolCallId);
  const pendingTarget = ctx.state.pendingMessagingTargets.get(toolCallId);
  if (pendingText) {
    ctx.state.pendingMessagingTexts.delete(toolCallId);
    if (!isToolError) {
      ctx.state.messagingToolSentTexts.push(pendingText);
      ctx.state.messagingToolSentTextsNormalized.push(
        normalizeTextForComparison(pendingText),
      );
      ctx.log.debug(
        `Committed messaging text: tool=${toolName} len=${pendingText.length}`,
      );
      ctx.trimMessagingToolSent();
    }
  }
  if (pendingTarget) {
    ctx.state.pendingMessagingTargets.delete(toolCallId);
    if (!isToolError) {
      ctx.state.messagingToolSentTargets.push(pendingTarget);
      ctx.trimMessagingToolSent();
    }
  }

  emitAgentEvent({
    runId: ctx.params.runId,
    stream: "tool",
    data: {
      phase: "result",
      name: toolName,
      toolCallId,
      meta,
      isError: isToolError,
      result: sanitizedResult,
    },
  });
  ctx.params.onAgentEvent?.({
    stream: "tool",
    data: {
      phase: "result",
      name: toolName,
      toolCallId,
      meta,
      isError: isToolError,
    },
  });

  ctx.log.debug(
    `embedded run tool end: runId=${ctx.params.runId} tool=${toolName} toolCallId=${toolCallId}`,
  );
}
src/agents/pi-embedded-subscribe.handlers.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
import {
  handleAgentEnd,
  handleAgentStart,
  handleAutoCompactionEnd,
  handleAutoCompactionStart,
} from "./pi-embedded-subscribe.handlers.lifecycle.js";
import {
  handleMessageEnd,
  handleMessageStart,
  handleMessageUpdate,
} from "./pi-embedded-subscribe.handlers.messages.js";
import {
  handleToolExecutionEnd,
  handleToolExecutionStart,
  handleToolExecutionUpdate,
} from "./pi-embedded-subscribe.handlers.tools.js";
import type {
  EmbeddedPiSubscribeContext,
  EmbeddedPiSubscribeEvent,
} from "./pi-embedded-subscribe.handlers.types.js";

export function createEmbeddedPiSessionEventHandler(
  ctx: EmbeddedPiSubscribeContext,
) {
  return (evt: EmbeddedPiSubscribeEvent) => {
    switch (evt.type) {
      case "message_start":
        handleMessageStart(ctx, evt as never);
        return;
      case "message_update":
        handleMessageUpdate(ctx, evt as never);
        return;
      case "message_end":
        handleMessageEnd(ctx, evt as never);
        return;
      case "tool_execution_start":
        handleToolExecutionStart(ctx, evt as never);
        return;
      case "tool_execution_update":
        handleToolExecutionUpdate(ctx, evt as never);
        return;
      case "tool_execution_end":
        handleToolExecutionEnd(ctx, evt as never);
        return;
      case "agent_start":
        handleAgentStart(ctx);
        return;
      case "auto_compaction_start":
        handleAutoCompactionStart(ctx);
        return;
      case "auto_compaction_end":
        handleAutoCompactionEnd(ctx, evt as never);
        return;
      case "agent_end":
        handleAgentEnd(ctx);
        return;
      default:
        return;
    }
  };
}
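The subscribe entry point itself is not part of this diff; presumably it builds an `EmbeddedPiSubscribeContext` and forwards raw session events into this dispatcher. A hedged sketch of that wiring — the `session.subscribe` shape is an assumption, not an API shown here:

```ts
import { createEmbeddedPiSessionEventHandler } from "./pi-embedded-subscribe.handlers.js";
import type { EmbeddedPiSubscribeContext } from "./pi-embedded-subscribe.handlers.types.js";

declare const ctx: EmbeddedPiSubscribeContext; // built by the subscribe module (not in this diff)
declare const session: {
  subscribe(fn: (evt: unknown) => void): () => void; // assumed shape
};

const onEvent = createEmbeddedPiSessionEventHandler(ctx);
const unsubscribe = session.subscribe((evt) => onEvent(evt as { type: string }));
```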
src/agents/pi-embedded-subscribe.handlers.types.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import type { AgentEvent, AgentMessage } from "@mariozechner/pi-agent-core";

import type { ReasoningLevel } from "../auto-reply/thinking.js";
import type { EmbeddedBlockChunker } from "./pi-embedded-block-chunker.js";
import type { MessagingToolSend } from "./pi-embedded-messaging.js";
import type {
  BlockReplyChunking,
  SubscribeEmbeddedPiSessionParams,
} from "./pi-embedded-subscribe.types.js";

export type EmbeddedSubscribeLogger = {
  debug: (message: string) => void;
  warn: (message: string) => void;
};

export type EmbeddedPiSubscribeState = {
  assistantTexts: string[];
  toolMetas: Array<{ toolName?: string; meta?: string }>;
  toolMetaById: Map<string, string | undefined>;
  toolSummaryById: Set<string>;

  blockReplyBreak: "text_end" | "message_end";
  reasoningMode: ReasoningLevel;
  includeReasoning: boolean;
  shouldEmitPartialReplies: boolean;
  streamReasoning: boolean;

  deltaBuffer: string;
  blockBuffer: string;
  blockState: { thinking: boolean; final: boolean };
  lastStreamedAssistant?: string;
  lastStreamedReasoning?: string;
  lastBlockReplyText?: string;
  assistantTextBaseline: number;
  suppressBlockChunks: boolean;
  lastReasoningSent?: string;

  compactionInFlight: boolean;
  pendingCompactionRetry: number;
  compactionRetryResolve?: () => void;
  compactionRetryPromise: Promise<void> | null;

  messagingToolSentTexts: string[];
  messagingToolSentTextsNormalized: string[];
  messagingToolSentTargets: MessagingToolSend[];
  pendingMessagingTexts: Map<string, string>;
  pendingMessagingTargets: Map<string, MessagingToolSend>;
};

export type EmbeddedPiSubscribeContext = {
  params: SubscribeEmbeddedPiSessionParams;
  state: EmbeddedPiSubscribeState;
  log: EmbeddedSubscribeLogger;
  blockChunking?: BlockReplyChunking;
  blockChunker: EmbeddedBlockChunker | null;

  shouldEmitToolResult: () => boolean;
  emitToolSummary: (toolName?: string, meta?: string) => void;
  stripBlockTags: (
    text: string,
    state: { thinking: boolean; final: boolean },
  ) => string;
  emitBlockChunk: (text: string) => void;
  flushBlockReplyBuffer: () => void;
  emitReasoningStream: (text: string) => void;
  resetAssistantMessageState: (nextAssistantTextBaseline: number) => void;
  resetForCompactionRetry: () => void;
  finalizeAssistantTexts: (args: {
    text: string;
    addedDuringMessage: boolean;
    chunkerHasBuffered: boolean;
  }) => void;
  trimMessagingToolSent: () => void;
  ensureCompactionPromise: () => void;
  noteCompactionRetry: () => void;
  resolveCompactionRetry: () => void;
  maybeResolveCompactionWait: () => void;
};

export type EmbeddedPiSubscribeEvent =
  | AgentEvent
  | { type: string; [k: string]: unknown }
  | { type: "message_start"; message: AgentMessage };
31
src/agents/pi-embedded-subscribe.raw-stream.ts
Normal file
@@ -0,0 +1,31 @@
import fs from "node:fs";
import path from "node:path";

import { resolveStateDir } from "../config/paths.js";

const RAW_STREAM_ENABLED = process.env.CLAWDBOT_RAW_STREAM === "1";
const RAW_STREAM_PATH =
  process.env.CLAWDBOT_RAW_STREAM_PATH?.trim() ||
  path.join(resolveStateDir(), "logs", "raw-stream.jsonl");

let rawStreamReady = false;

export function appendRawStream(payload: Record<string, unknown>) {
  if (!RAW_STREAM_ENABLED) return;
  if (!rawStreamReady) {
    rawStreamReady = true;
    try {
      fs.mkdirSync(path.dirname(RAW_STREAM_PATH), { recursive: true });
    } catch {
      // ignore raw stream mkdir failures
    }
  }
  try {
    void fs.promises.appendFile(
      RAW_STREAM_PATH,
      `${JSON.stringify(payload)}\n`,
    );
  } catch {
    // ignore raw stream write failures
  }
}
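
Usage is gated entirely by the environment: with CLAWDBOT_RAW_STREAM unset the call below is a no-op; with it set to "1" each payload lands as one JSONL line (values here are illustrative):

// Sketch: run with CLAWDBOT_RAW_STREAM=1
appendRawStream({ ts: Date.now(), type: "message_update", runId: "run-1" });
// → appends {"ts":...,"type":"message_update","runId":"run-1"} to <stateDir>/logs/raw-stream.jsonl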
88
src/agents/pi-embedded-subscribe.tools.ts
Normal file
@@ -0,0 +1,88 @@
import {
  getChannelPlugin,
  normalizeChannelId,
} from "../channels/plugins/index.js";
import { truncateUtf16Safe } from "../utils.js";
import {
  type MessagingToolSend,
  normalizeTargetForProvider,
} from "./pi-embedded-messaging.js";

const TOOL_RESULT_MAX_CHARS = 8000;

function truncateToolText(text: string): string {
  if (text.length <= TOOL_RESULT_MAX_CHARS) return text;
  return `${truncateUtf16Safe(text, TOOL_RESULT_MAX_CHARS)}\n…(truncated)…`;
}

export function sanitizeToolResult(result: unknown): unknown {
  if (!result || typeof result !== "object") return result;
  const record = result as Record<string, unknown>;
  const content = Array.isArray(record.content) ? record.content : null;
  if (!content) return record;
  const sanitized = content.map((item) => {
    if (!item || typeof item !== "object") return item;
    const entry = item as Record<string, unknown>;
    const type = typeof entry.type === "string" ? entry.type : undefined;
    if (type === "text" && typeof entry.text === "string") {
      return { ...entry, text: truncateToolText(entry.text) };
    }
    if (type === "image") {
      const data = typeof entry.data === "string" ? entry.data : undefined;
      const bytes = data ? data.length : undefined;
      const cleaned = { ...entry };
      delete cleaned.data;
      return { ...cleaned, bytes, omitted: true };
    }
    return entry;
  });
  return { ...record, content: sanitized };
}

export function isToolResultError(result: unknown): boolean {
  if (!result || typeof result !== "object") return false;
  const record = result as { details?: unknown };
  const details = record.details;
  if (!details || typeof details !== "object") return false;
  const status = (details as { status?: unknown }).status;
  if (typeof status !== "string") return false;
  const normalized = status.trim().toLowerCase();
  return normalized === "error" || normalized === "timeout";
}

export function extractMessagingToolSend(
  toolName: string,
  args: Record<string, unknown>,
): MessagingToolSend | undefined {
  // Provider docking: new provider tools must implement plugin.actions.extractToolSend.
  const action = typeof args.action === "string" ? args.action.trim() : "";
  const accountIdRaw =
    typeof args.accountId === "string" ? args.accountId.trim() : undefined;
  const accountId = accountIdRaw ? accountIdRaw : undefined;
  if (toolName === "message") {
    if (action !== "send" && action !== "thread-reply") return undefined;
    const toRaw = typeof args.to === "string" ? args.to : undefined;
    if (!toRaw) return undefined;
    const providerRaw =
      typeof args.provider === "string" ? args.provider.trim() : "";
    const providerId = providerRaw ? normalizeChannelId(providerRaw) : null;
    const provider =
      providerId ?? (providerRaw ? providerRaw.toLowerCase() : "message");
    const to = normalizeTargetForProvider(provider, toRaw);
    return to ? { tool: toolName, provider, accountId, to } : undefined;
  }
  const providerId = normalizeChannelId(toolName);
  if (!providerId) return undefined;
  const plugin = getChannelPlugin(providerId);
  const extracted = plugin?.actions?.extractToolSend?.({ args });
  if (!extracted?.to) return undefined;
  const to = normalizeTargetForProvider(providerId, extracted.to);
  return to
    ? {
        tool: toolName,
        provider: providerId,
        accountId: extracted.accountId ?? accountId,
        to,
      }
    : undefined;
}
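
A quick sketch of the sanitizers above (input shape mirrors the content-block convention used here; values are illustrative):

// Text blocks longer than 8000 chars are truncated; image data is dropped but its size is kept.
const sanitized = sanitizeToolResult({
  content: [
    { type: "text", text: "x".repeat(10_000) }, // → 8000 chars + "\n…(truncated)…"
    { type: "image", data: "iVBORw0KGgo...", mimeType: "image/png" }, // → { mimeType, bytes, omitted: true }
  ],
  details: { status: "timeout" },
});
isToolResultError({ details: { status: "timeout" } }); // true
isToolResultError({ details: { status: "ok" } }); // false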
File diff suppressed because it is too large
41
src/agents/pi-embedded-subscribe.types.ts
Normal file
@@ -0,0 +1,41 @@
import type { AgentSession } from "@mariozechner/pi-coding-agent";

import type { ReasoningLevel } from "../auto-reply/thinking.js";
import type { BlockReplyChunking } from "./pi-embedded-block-chunker.js";

export type SubscribeEmbeddedPiSessionParams = {
  session: AgentSession;
  runId: string;
  verboseLevel?: "off" | "on";
  reasoningMode?: ReasoningLevel;
  shouldEmitToolResult?: () => boolean;
  onToolResult?: (payload: {
    text?: string;
    mediaUrls?: string[];
  }) => void | Promise<void>;
  onReasoningStream?: (payload: {
    text?: string;
    mediaUrls?: string[];
  }) => void | Promise<void>;
  onBlockReply?: (payload: {
    text?: string;
    mediaUrls?: string[];
    audioAsVoice?: boolean;
  }) => void | Promise<void>;
  /** Flush pending block replies (e.g., before tool execution to preserve message boundaries). */
  onBlockReplyFlush?: () => void | Promise<void>;
  blockReplyBreak?: "text_end" | "message_end";
  blockReplyChunking?: BlockReplyChunking;
  onPartialReply?: (payload: {
    text?: string;
    mediaUrls?: string[];
  }) => void | Promise<void>;
  onAssistantMessageStart?: () => void | Promise<void>;
  onAgentEvent?: (evt: {
    stream: string;
    data: Record<string, unknown>;
  }) => void;
  enforceFinalTag?: boolean;
};

export type { BlockReplyChunking } from "./pi-embedded-block-chunker.js";
43
src/agents/pi-tools.abort.ts
Normal file
@@ -0,0 +1,43 @@
import type { AnyAgentTool } from "./pi-tools.types.js";

function throwAbortError(): never {
  const err = new Error("Aborted");
  err.name = "AbortError";
  throw err;
}

function combineAbortSignals(
  a?: AbortSignal,
  b?: AbortSignal,
): AbortSignal | undefined {
  if (!a && !b) return undefined;
  if (a && !b) return a;
  if (b && !a) return b;
  if (a?.aborted) return a;
  if (b?.aborted) return b;
  if (typeof AbortSignal.any === "function") {
    return AbortSignal.any([a as AbortSignal, b as AbortSignal]);
  }
  const controller = new AbortController();
  const onAbort = () => controller.abort();
  a?.addEventListener("abort", onAbort, { once: true });
  b?.addEventListener("abort", onAbort, { once: true });
  return controller.signal;
}

export function wrapToolWithAbortSignal(
  tool: AnyAgentTool,
  abortSignal?: AbortSignal,
): AnyAgentTool {
  if (!abortSignal) return tool;
  const execute = tool.execute;
  if (!execute) return tool;
  return {
    ...tool,
    execute: async (toolCallId, params, signal, onUpdate) => {
      const combined = combineAbortSignals(signal, abortSignal);
      if (combined?.aborted) throwAbortError();
      return await execute(toolCallId, params, combined, onUpdate);
    },
  };
}
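
A sketch of the wrapper in use; the stub below only assumes the name/parameters/execute shape that the code above already relies on, and is cast rather than claimed as the full tool type:

// Sketch: cancel a long-running tool from the outside.
const controller = new AbortController();
const slowTool = {
  name: "sleep",
  description: "waits until aborted (stub shape assumed for this sketch)",
  parameters: { type: "object", properties: {} },
  execute: (_id: string, _params: unknown, signal?: AbortSignal) =>
    new Promise((_resolve, reject) => {
      signal?.addEventListener("abort", () => reject(new Error("Aborted")), { once: true });
    }),
} as AnyAgentTool;
const wrapped = wrapToolWithAbortSignal(slowTool, controller.signal);
const pending = wrapped.execute?.("call-1", {}, undefined, undefined);
pending?.catch(() => {}); // the rejection is expected in this sketch
controller.abort(); // combined signal fires; the pending execute rejects with AbortError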
85
src/agents/pi-tools.policy.ts
Normal file
@@ -0,0 +1,85 @@
import type { ClawdbotConfig } from "../config/config.js";
import {
  resolveAgentConfig,
  resolveAgentIdFromSessionKey,
} from "./agent-scope.js";
import type { AnyAgentTool } from "./pi-tools.types.js";
import type { SandboxToolPolicy } from "./sandbox.js";
import { expandToolGroups, normalizeToolName } from "./tool-policy.js";

const DEFAULT_SUBAGENT_TOOL_DENY = [
  "sessions_list",
  "sessions_history",
  "sessions_send",
  "sessions_spawn",
];

export function resolveSubagentToolPolicy(
  cfg?: ClawdbotConfig,
): SandboxToolPolicy {
  const configured = cfg?.tools?.subagents?.tools;
  const deny = [
    ...DEFAULT_SUBAGENT_TOOL_DENY,
    ...(Array.isArray(configured?.deny) ? configured.deny : []),
  ];
  const allow = Array.isArray(configured?.allow) ? configured.allow : undefined;
  return { allow, deny };
}

export function isToolAllowedByPolicyName(
  name: string,
  policy?: SandboxToolPolicy,
): boolean {
  if (!policy) return true;
  const deny = new Set(expandToolGroups(policy.deny));
  const allowRaw = expandToolGroups(policy.allow);
  const allow = allowRaw.length > 0 ? new Set(allowRaw) : null;
  const normalized = normalizeToolName(name);
  if (deny.has(normalized)) return false;
  if (allow) {
    if (allow.has(normalized)) return true;
    if (normalized === "apply_patch" && allow.has("exec")) return true;
    return false;
  }
  return true;
}

export function filterToolsByPolicy(
  tools: AnyAgentTool[],
  policy?: SandboxToolPolicy,
) {
  if (!policy) return tools;
  return tools.filter((tool) => isToolAllowedByPolicyName(tool.name, policy));
}

export function resolveEffectiveToolPolicy(params: {
  config?: ClawdbotConfig;
  sessionKey?: string;
}) {
  const agentId = params.sessionKey
    ? resolveAgentIdFromSessionKey(params.sessionKey)
    : undefined;
  const agentConfig =
    params.config && agentId
      ? resolveAgentConfig(params.config, agentId)
      : undefined;
  const agentTools = agentConfig?.tools;
  const hasAgentToolPolicy =
    Array.isArray(agentTools?.allow) ||
    Array.isArray(agentTools?.deny) ||
    typeof agentTools?.profile === "string";
  const globalTools = params.config?.tools;
  const profile = agentTools?.profile ?? globalTools?.profile;
  return {
    agentId,
    policy: hasAgentToolPolicy ? agentTools : globalTools,
    profile,
  };
}

export function isToolAllowedByPolicies(
  name: string,
  policies: Array<SandboxToolPolicy | undefined>,
) {
  return policies.every((policy) => isToolAllowedByPolicyName(name, policy));
}
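
Behavior sketch (assuming plain names pass through expandToolGroups/normalizeToolName unchanged): deny always wins, and a non-empty allow-list flips the default from permit to reject, with the one documented carve-out for apply_patch.

isToolAllowedByPolicyName("exec", { deny: ["exec"] }); // false: deny wins
isToolAllowedByPolicyName("read", { allow: ["read"] }); // true: explicitly allowed
isToolAllowedByPolicyName("write", { allow: ["read"] }); // false: allow-list is exclusive
isToolAllowedByPolicyName("apply_patch", { allow: ["exec"] }); // true: exec implies apply_patch
isToolAllowedByPolicies("read", [undefined, { deny: ["exec"] }]); // true: every policy permits it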
312
src/agents/pi-tools.read.ts
Normal file
@@ -0,0 +1,312 @@
import type { AgentToolResult } from "@mariozechner/pi-agent-core";
import {
  createEditTool,
  createReadTool,
  createWriteTool,
} from "@mariozechner/pi-coding-agent";

import { detectMime } from "../media/mime.js";
import type { AnyAgentTool } from "./pi-tools.types.js";
import { assertSandboxPath } from "./sandbox-paths.js";
import { sanitizeToolResultImages } from "./tool-images.js";

// NOTE(steipete): Upstream read now does file-magic MIME detection; we keep the wrapper
// to normalize payloads and sanitize oversized images before they hit providers.
type ToolContentBlock = AgentToolResult<unknown>["content"][number];
type ImageContentBlock = Extract<ToolContentBlock, { type: "image" }>;
type TextContentBlock = Extract<ToolContentBlock, { type: "text" }>;

async function sniffMimeFromBase64(
  base64: string,
): Promise<string | undefined> {
  const trimmed = base64.trim();
  if (!trimmed) return undefined;

  const take = Math.min(256, trimmed.length);
  const sliceLen = take - (take % 4);
  if (sliceLen < 8) return undefined;

  try {
    const head = Buffer.from(trimmed.slice(0, sliceLen), "base64");
    return await detectMime({ buffer: head });
  } catch {
    return undefined;
  }
}

function rewriteReadImageHeader(text: string, mimeType: string): string {
  // pi-coding-agent uses: "Read image file [image/png]"
  if (text.startsWith("Read image file [") && text.endsWith("]")) {
    return `Read image file [${mimeType}]`;
  }
  return text;
}

async function normalizeReadImageResult(
  result: AgentToolResult<unknown>,
  filePath: string,
): Promise<AgentToolResult<unknown>> {
  const content = Array.isArray(result.content) ? result.content : [];

  const image = content.find(
    (b): b is ImageContentBlock =>
      !!b &&
      typeof b === "object" &&
      (b as { type?: unknown }).type === "image" &&
      typeof (b as { data?: unknown }).data === "string" &&
      typeof (b as { mimeType?: unknown }).mimeType === "string",
  );
  if (!image) return result;

  if (!image.data.trim()) {
    throw new Error(`read: image payload is empty (${filePath})`);
  }

  const sniffed = await sniffMimeFromBase64(image.data);
  if (!sniffed) return result;

  if (!sniffed.startsWith("image/")) {
    throw new Error(
      `read: file looks like ${sniffed} but was treated as ${image.mimeType} (${filePath})`,
    );
  }

  if (sniffed === image.mimeType) return result;

  const nextContent = content.map((block) => {
    if (
      block &&
      typeof block === "object" &&
      (block as { type?: unknown }).type === "image"
    ) {
      const b = block as ImageContentBlock & { mimeType: string };
      return { ...b, mimeType: sniffed } satisfies ImageContentBlock;
    }
    if (
      block &&
      typeof block === "object" &&
      (block as { type?: unknown }).type === "text" &&
      typeof (block as { text?: unknown }).text === "string"
    ) {
      const b = block as TextContentBlock & { text: string };
      return {
        ...b,
        text: rewriteReadImageHeader(b.text, sniffed),
      } satisfies TextContentBlock;
    }
    return block;
  });

  return { ...result, content: nextContent };
}

type RequiredParamGroup = {
  keys: readonly string[];
  allowEmpty?: boolean;
  label?: string;
};

export const CLAUDE_PARAM_GROUPS = {
  read: [{ keys: ["path", "file_path"], label: "path (path or file_path)" }],
  write: [{ keys: ["path", "file_path"], label: "path (path or file_path)" }],
  edit: [
    { keys: ["path", "file_path"], label: "path (path or file_path)" },
    {
      keys: ["oldText", "old_string"],
      label: "oldText (oldText or old_string)",
    },
    {
      keys: ["newText", "new_string"],
      label: "newText (newText or new_string)",
    },
  ],
} as const;

// Normalize tool parameters from Claude Code conventions to pi-coding-agent conventions.
// Claude Code uses file_path/old_string/new_string while pi-coding-agent uses path/oldText/newText.
// This prevents models trained on Claude Code from getting stuck in tool-call loops.
export function normalizeToolParams(
  params: unknown,
): Record<string, unknown> | undefined {
  if (!params || typeof params !== "object") return undefined;
  const record = params as Record<string, unknown>;
  const normalized = { ...record };
  // file_path → path (read, write, edit)
  if ("file_path" in normalized && !("path" in normalized)) {
    normalized.path = normalized.file_path;
    delete normalized.file_path;
  }
  // old_string → oldText (edit)
  if ("old_string" in normalized && !("oldText" in normalized)) {
    normalized.oldText = normalized.old_string;
    delete normalized.old_string;
  }
  // new_string → newText (edit)
  if ("new_string" in normalized && !("newText" in normalized)) {
    normalized.newText = normalized.new_string;
    delete normalized.new_string;
  }
  return normalized;
}

export function patchToolSchemaForClaudeCompatibility(
  tool: AnyAgentTool,
): AnyAgentTool {
  const schema =
    tool.parameters && typeof tool.parameters === "object"
      ? (tool.parameters as Record<string, unknown>)
      : undefined;

  if (!schema || !schema.properties || typeof schema.properties !== "object") {
    return tool;
  }

  const properties = { ...(schema.properties as Record<string, unknown>) };
  const required = Array.isArray(schema.required)
    ? schema.required.filter((key): key is string => typeof key === "string")
    : [];
  let changed = false;

  const aliasPairs: Array<{ original: string; alias: string }> = [
    { original: "path", alias: "file_path" },
    { original: "oldText", alias: "old_string" },
    { original: "newText", alias: "new_string" },
  ];

  for (const { original, alias } of aliasPairs) {
    if (!(original in properties)) continue;
    if (!(alias in properties)) {
      properties[alias] = properties[original];
      changed = true;
    }
    const idx = required.indexOf(original);
    if (idx !== -1) {
      required.splice(idx, 1);
      changed = true;
    }
  }

  if (!changed) return tool;

  return {
    ...tool,
    parameters: {
      ...schema,
      properties,
      ...(required.length > 0 ? { required } : {}),
    },
  };
}

export function assertRequiredParams(
  record: Record<string, unknown> | undefined,
  groups: readonly RequiredParamGroup[],
  toolName: string,
): void {
  if (!record || typeof record !== "object") {
    throw new Error(`Missing parameters for ${toolName}`);
  }

  for (const group of groups) {
    const satisfied = group.keys.some((key) => {
      if (!(key in record)) return false;
      const value = record[key];
      if (typeof value !== "string") return false;
      if (group.allowEmpty) return true;
      return value.trim().length > 0;
    });

    if (!satisfied) {
      const label = group.label ?? group.keys.join(" or ");
      throw new Error(`Missing required parameter: ${label}`);
    }
  }
}

// Generic wrapper to normalize parameters for any tool
export function wrapToolParamNormalization(
  tool: AnyAgentTool,
  requiredParamGroups?: readonly RequiredParamGroup[],
): AnyAgentTool {
  const patched = patchToolSchemaForClaudeCompatibility(tool);
  return {
    ...patched,
    execute: async (toolCallId, params, signal, onUpdate) => {
      const normalized = normalizeToolParams(params);
      const record =
        normalized ??
        (params && typeof params === "object"
          ? (params as Record<string, unknown>)
          : undefined);
      if (requiredParamGroups?.length) {
        assertRequiredParams(record, requiredParamGroups, tool.name);
      }
      return tool.execute(toolCallId, normalized ?? params, signal, onUpdate);
    },
  };
}

function wrapSandboxPathGuard(tool: AnyAgentTool, root: string): AnyAgentTool {
  return {
    ...tool,
    execute: async (toolCallId, args, signal, onUpdate) => {
      const normalized = normalizeToolParams(args);
      const record =
        normalized ??
        (args && typeof args === "object"
          ? (args as Record<string, unknown>)
          : undefined);
      const filePath = record?.path;
      if (typeof filePath === "string" && filePath.trim()) {
        await assertSandboxPath({ filePath, cwd: root, root });
      }
      return tool.execute(toolCallId, normalized ?? args, signal, onUpdate);
    },
  };
}

export function createSandboxedReadTool(root: string) {
  const base = createReadTool(root) as unknown as AnyAgentTool;
  return wrapSandboxPathGuard(createClawdbotReadTool(base), root);
}

export function createSandboxedWriteTool(root: string) {
  const base = createWriteTool(root) as unknown as AnyAgentTool;
  return wrapSandboxPathGuard(
    wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.write),
    root,
  );
}

export function createSandboxedEditTool(root: string) {
  const base = createEditTool(root) as unknown as AnyAgentTool;
  return wrapSandboxPathGuard(
    wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.edit),
    root,
  );
}

export function createClawdbotReadTool(base: AnyAgentTool): AnyAgentTool {
  const patched = patchToolSchemaForClaudeCompatibility(base);
  return {
    ...patched,
    execute: async (toolCallId, params, signal) => {
      const normalized = normalizeToolParams(params);
      const record =
        normalized ??
        (params && typeof params === "object"
          ? (params as Record<string, unknown>)
          : undefined);
      assertRequiredParams(record, CLAUDE_PARAM_GROUPS.read, base.name);
      const result = (await base.execute(
        toolCallId,
        normalized ?? params,
        signal,
      )) as AgentToolResult<unknown>;
      const filePath =
        typeof record?.path === "string" ? String(record.path) : "<unknown>";
      const normalizedResult = await normalizeReadImageResult(result, filePath);
      return sanitizeToolResultImages(normalizedResult, `read:${filePath}`);
    },
  };
}
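
The alias mapping is easiest to see on a concrete call (values illustrative):

// Claude-style params are renamed in place; pi-style keys win if both are present.
normalizeToolParams({ file_path: "src/a.ts", old_string: "foo", new_string: "bar" });
// → { path: "src/a.ts", oldText: "foo", newText: "bar" }
normalizeToolParams({ path: "src/a.ts", file_path: "ignored.ts" });
// → both keys kept unchanged: `path` already exists, so `file_path` is not renamed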
173
src/agents/pi-tools.schema.ts
Normal file
@@ -0,0 +1,173 @@
import type { AnyAgentTool } from "./pi-tools.types.js";
import { cleanSchemaForGemini } from "./schema/clean-for-gemini.js";

function extractEnumValues(schema: unknown): unknown[] | undefined {
  if (!schema || typeof schema !== "object") return undefined;
  const record = schema as Record<string, unknown>;
  if (Array.isArray(record.enum)) return record.enum;
  if ("const" in record) return [record.const];
  const variants = Array.isArray(record.anyOf)
    ? record.anyOf
    : Array.isArray(record.oneOf)
      ? record.oneOf
      : null;
  if (variants) {
    const values = variants.flatMap((variant) => {
      const extracted = extractEnumValues(variant);
      return extracted ?? [];
    });
    return values.length > 0 ? values : undefined;
  }
  return undefined;
}

function mergePropertySchemas(existing: unknown, incoming: unknown): unknown {
  if (!existing) return incoming;
  if (!incoming) return existing;

  const existingEnum = extractEnumValues(existing);
  const incomingEnum = extractEnumValues(incoming);
  if (existingEnum || incomingEnum) {
    const values = Array.from(
      new Set([...(existingEnum ?? []), ...(incomingEnum ?? [])]),
    );
    const merged: Record<string, unknown> = {};
    for (const source of [existing, incoming]) {
      if (!source || typeof source !== "object") continue;
      const record = source as Record<string, unknown>;
      for (const key of ["title", "description", "default"]) {
        if (!(key in merged) && key in record) merged[key] = record[key];
      }
    }
    const types = new Set(values.map((value) => typeof value));
    if (types.size === 1) merged.type = Array.from(types)[0];
    merged.enum = values;
    return merged;
  }

  return existing;
}

export function normalizeToolParameters(tool: AnyAgentTool): AnyAgentTool {
  const schema =
    tool.parameters && typeof tool.parameters === "object"
      ? (tool.parameters as Record<string, unknown>)
      : undefined;
  if (!schema) return tool;

  // Provider quirks:
  // - Gemini rejects several JSON Schema keywords, so we scrub those.
  // - OpenAI rejects function tool schemas unless the *top-level* is `type: "object"`.
  //   (TypeBox root unions compile to `{ anyOf: [...] }` without `type`).
  //
  // Normalize once here so callers can always pass `tools` through unchanged.

  // If schema already has type + properties (no top-level anyOf to merge),
  // still clean it for Gemini compatibility
  if (
    "type" in schema &&
    "properties" in schema &&
    !Array.isArray(schema.anyOf)
  ) {
    return {
      ...tool,
      parameters: cleanSchemaForGemini(schema),
    };
  }

  // Some tool schemas (esp. unions) may omit `type` at the top-level. If we see
  // object-ish fields, force `type: "object"` so OpenAI accepts the schema.
  if (
    !("type" in schema) &&
    (typeof schema.properties === "object" || Array.isArray(schema.required)) &&
    !Array.isArray(schema.anyOf) &&
    !Array.isArray(schema.oneOf)
  ) {
    return {
      ...tool,
      parameters: cleanSchemaForGemini({ ...schema, type: "object" }),
    };
  }

  const variantKey = Array.isArray(schema.anyOf)
    ? "anyOf"
    : Array.isArray(schema.oneOf)
      ? "oneOf"
      : null;
  if (!variantKey) return tool;
  const variants = schema[variantKey] as unknown[];
  const mergedProperties: Record<string, unknown> = {};
  const requiredCounts = new Map<string, number>();
  let objectVariants = 0;

  for (const entry of variants) {
    if (!entry || typeof entry !== "object") continue;
    const props = (entry as { properties?: unknown }).properties;
    if (!props || typeof props !== "object") continue;
    objectVariants += 1;
    for (const [key, value] of Object.entries(
      props as Record<string, unknown>,
    )) {
      if (!(key in mergedProperties)) {
        mergedProperties[key] = value;
        continue;
      }
      mergedProperties[key] = mergePropertySchemas(
        mergedProperties[key],
        value,
      );
    }
    const required = Array.isArray((entry as { required?: unknown }).required)
      ? (entry as { required: unknown[] }).required
      : [];
    for (const key of required) {
      if (typeof key !== "string") continue;
      requiredCounts.set(key, (requiredCounts.get(key) ?? 0) + 1);
    }
  }

  const baseRequired = Array.isArray(schema.required)
    ? schema.required.filter((key) => typeof key === "string")
    : undefined;
  const mergedRequired =
    baseRequired && baseRequired.length > 0
      ? baseRequired
      : objectVariants > 0
        ? Array.from(requiredCounts.entries())
            .filter(([, count]) => count === objectVariants)
            .map(([key]) => key)
        : undefined;

  const nextSchema: Record<string, unknown> = { ...schema };
  return {
    ...tool,
    // Flatten union schemas into a single object schema:
    // - Gemini doesn't allow top-level `type` together with `anyOf`.
    // - OpenAI rejects schemas without top-level `type: "object"`.
    // Merging properties preserves useful enums like `action` while keeping schemas portable.
    parameters: cleanSchemaForGemini({
      type: "object",
      ...(typeof nextSchema.title === "string"
        ? { title: nextSchema.title }
        : {}),
      ...(typeof nextSchema.description === "string"
        ? { description: nextSchema.description }
        : {}),
      properties:
        Object.keys(mergedProperties).length > 0
          ? mergedProperties
          : (schema.properties ?? {}),
      ...(mergedRequired && mergedRequired.length > 0
        ? { required: mergedRequired }
        : {}),
      additionalProperties:
        "additionalProperties" in schema ? schema.additionalProperties : true,
    }),
  };
}

export function cleanToolSchemaForGemini(
  schema: Record<string, unknown>,
): unknown {
  return cleanSchemaForGemini(schema);
}
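
A sketch of the union flattening (output shown before the final cleanSchemaForGemini pass, which may scrub additional keywords):

// Two anyOf variants collapse into one object schema; the shared `action` consts merge into an enum.
const tool = normalizeToolParameters({
  name: "demo",
  description: "union-schema tool",
  parameters: {
    anyOf: [
      { properties: { action: { const: "send" }, to: { type: "string" } }, required: ["action", "to"] },
      { properties: { action: { const: "list" } }, required: ["action"] },
    ],
  },
  execute: async () => ({ content: [] }),
} as AnyAgentTool);
// tool.parameters ≈ {
//   type: "object",
//   properties: { action: { type: "string", enum: ["send", "list"] }, to: { type: "string" } },
//   required: ["action"], // only keys required by every variant survive
//   additionalProperties: true,
// }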
@@ -1,4 +1,3 @@
import type { AgentTool, AgentToolResult } from "@mariozechner/pi-agent-core";
import {
  codingTools,
  createEditTool,
@@ -7,13 +6,8 @@ import {
  readTool,
} from "@mariozechner/pi-coding-agent";
import type { ClawdbotConfig } from "../config/config.js";
import { detectMime } from "../media/mime.js";
import { isSubagentSessionKey } from "../routing/session-key.js";
import { resolveGatewayMessageChannel } from "../utils/message-channel.js";
import {
  resolveAgentConfig,
  resolveAgentIdFromSessionKey,
} from "./agent-scope.js";
import { createApplyPatchTool } from "./apply-patch.js";
import {
  createExecTool,
@@ -24,277 +18,31 @@ import {
import { listChannelAgentTools } from "./channel-tools.js";
import { createClawdbotTools } from "./clawdbot-tools.js";
import type { ModelAuthMode } from "./model-auth.js";
import type { SandboxContext, SandboxToolPolicy } from "./sandbox.js";
import { assertSandboxPath } from "./sandbox-paths.js";
import { cleanSchemaForGemini } from "./schema/clean-for-gemini.js";
import { sanitizeToolResultImages } from "./tool-images.js";
import { wrapToolWithAbortSignal } from "./pi-tools.abort.js";
import {
  expandToolGroups,
  normalizeToolName,
  resolveToolProfilePolicy,
} from "./tool-policy.js";

// NOTE(steipete): Upstream read now does file-magic MIME detection; we keep the wrapper
// to normalize payloads and sanitize oversized images before they hit providers.
type ToolContentBlock = AgentToolResult<unknown>["content"][number];
type ImageContentBlock = Extract<ToolContentBlock, { type: "image" }>;
type TextContentBlock = Extract<ToolContentBlock, { type: "text" }>;

async function sniffMimeFromBase64(
  base64: string,
): Promise<string | undefined> {
  const trimmed = base64.trim();
  if (!trimmed) return undefined;

  const take = Math.min(256, trimmed.length);
  const sliceLen = take - (take % 4);
  if (sliceLen < 8) return undefined;

  try {
    const head = Buffer.from(trimmed.slice(0, sliceLen), "base64");
    return await detectMime({ buffer: head });
  } catch {
    return undefined;
  }
}

function rewriteReadImageHeader(text: string, mimeType: string): string {
  // pi-coding-agent uses: "Read image file [image/png]"
  if (text.startsWith("Read image file [") && text.endsWith("]")) {
    return `Read image file [${mimeType}]`;
  }
  return text;
}

async function normalizeReadImageResult(
  result: AgentToolResult<unknown>,
  filePath: string,
): Promise<AgentToolResult<unknown>> {
  const content = Array.isArray(result.content) ? result.content : [];

  const image = content.find(
    (b): b is ImageContentBlock =>
      !!b &&
      typeof b === "object" &&
      (b as { type?: unknown }).type === "image" &&
      typeof (b as { data?: unknown }).data === "string" &&
      typeof (b as { mimeType?: unknown }).mimeType === "string",
  );
  if (!image) return result;

  if (!image.data.trim()) {
    throw new Error(`read: image payload is empty (${filePath})`);
  }

  const sniffed = await sniffMimeFromBase64(image.data);
  if (!sniffed) return result;

  if (!sniffed.startsWith("image/")) {
    throw new Error(
      `read: file looks like ${sniffed} but was treated as ${image.mimeType} (${filePath})`,
    );
  }

  if (sniffed === image.mimeType) return result;

  const nextContent = content.map((block) => {
    if (
      block &&
      typeof block === "object" &&
      (block as { type?: unknown }).type === "image"
    ) {
      const b = block as ImageContentBlock & { mimeType: string };
      return { ...b, mimeType: sniffed } satisfies ImageContentBlock;
    }
    if (
      block &&
      typeof block === "object" &&
      (block as { type?: unknown }).type === "text" &&
      typeof (block as { text?: unknown }).text === "string"
    ) {
      const b = block as TextContentBlock & { text: string };
      return {
        ...b,
        text: rewriteReadImageHeader(b.text, sniffed),
      } satisfies TextContentBlock;
    }
    return block;
  });

  return { ...result, content: nextContent };
}

// biome-ignore lint/suspicious/noExplicitAny: TypeBox schema type from pi-agent-core uses a different module instance.
type AnyAgentTool = AgentTool<any, unknown>;

function extractEnumValues(schema: unknown): unknown[] | undefined {
  if (!schema || typeof schema !== "object") return undefined;
  const record = schema as Record<string, unknown>;
  if (Array.isArray(record.enum)) return record.enum;
  if ("const" in record) return [record.const];
  const variants = Array.isArray(record.anyOf)
    ? record.anyOf
    : Array.isArray(record.oneOf)
      ? record.oneOf
      : null;
  if (variants) {
    const values = variants.flatMap((variant) => {
      const extracted = extractEnumValues(variant);
      return extracted ?? [];
    });
    return values.length > 0 ? values : undefined;
  }
  return undefined;
}

function mergePropertySchemas(existing: unknown, incoming: unknown): unknown {
  if (!existing) return incoming;
  if (!incoming) return existing;

  const existingEnum = extractEnumValues(existing);
  const incomingEnum = extractEnumValues(incoming);
  if (existingEnum || incomingEnum) {
    const values = Array.from(
      new Set([...(existingEnum ?? []), ...(incomingEnum ?? [])]),
    );
    const merged: Record<string, unknown> = {};
    for (const source of [existing, incoming]) {
      if (!source || typeof source !== "object") continue;
      const record = source as Record<string, unknown>;
      for (const key of ["title", "description", "default"]) {
        if (!(key in merged) && key in record) merged[key] = record[key];
      }
    }
    const types = new Set(values.map((value) => typeof value));
    if (types.size === 1) merged.type = Array.from(types)[0];
    merged.enum = values;
    return merged;
  }

  return existing;
}

function normalizeToolParameters(tool: AnyAgentTool): AnyAgentTool {
  const schema =
    tool.parameters && typeof tool.parameters === "object"
      ? (tool.parameters as Record<string, unknown>)
      : undefined;
  if (!schema) return tool;

  // Provider quirks:
  // - Gemini rejects several JSON Schema keywords, so we scrub those.
  // - OpenAI rejects function tool schemas unless the *top-level* is `type: "object"`.
  //   (TypeBox root unions compile to `{ anyOf: [...] }` without `type`).
  //
  // Normalize once here so callers can always pass `tools` through unchanged.

  // If schema already has type + properties (no top-level anyOf to merge),
  // still clean it for Gemini compatibility
  if (
    "type" in schema &&
    "properties" in schema &&
    !Array.isArray(schema.anyOf)
  ) {
    return {
      ...tool,
      parameters: cleanSchemaForGemini(schema),
    };
  }

  // Some tool schemas (esp. unions) may omit `type` at the top-level. If we see
  // object-ish fields, force `type: "object"` so OpenAI accepts the schema.
  if (
    !("type" in schema) &&
    (typeof schema.properties === "object" || Array.isArray(schema.required)) &&
    !Array.isArray(schema.anyOf) &&
    !Array.isArray(schema.oneOf)
  ) {
    return {
      ...tool,
      parameters: cleanSchemaForGemini({ ...schema, type: "object" }),
    };
  }

  const variantKey = Array.isArray(schema.anyOf)
    ? "anyOf"
    : Array.isArray(schema.oneOf)
      ? "oneOf"
      : null;
  if (!variantKey) return tool;
  const variants = schema[variantKey] as unknown[];
  const mergedProperties: Record<string, unknown> = {};
  const requiredCounts = new Map<string, number>();
  let objectVariants = 0;

  for (const entry of variants) {
    if (!entry || typeof entry !== "object") continue;
    const props = (entry as { properties?: unknown }).properties;
    if (!props || typeof props !== "object") continue;
    objectVariants += 1;
    for (const [key, value] of Object.entries(
      props as Record<string, unknown>,
    )) {
      if (!(key in mergedProperties)) {
        mergedProperties[key] = value;
        continue;
      }
      mergedProperties[key] = mergePropertySchemas(
        mergedProperties[key],
        value,
      );
    }
    const required = Array.isArray((entry as { required?: unknown }).required)
      ? (entry as { required: unknown[] }).required
      : [];
    for (const key of required) {
      if (typeof key !== "string") continue;
      requiredCounts.set(key, (requiredCounts.get(key) ?? 0) + 1);
    }
  }

  const baseRequired = Array.isArray(schema.required)
    ? schema.required.filter((key) => typeof key === "string")
    : undefined;
  const mergedRequired =
    baseRequired && baseRequired.length > 0
      ? baseRequired
      : objectVariants > 0
        ? Array.from(requiredCounts.entries())
            .filter(([, count]) => count === objectVariants)
            .map(([key]) => key)
        : undefined;

  const nextSchema: Record<string, unknown> = { ...schema };
  return {
    ...tool,
    // Flatten union schemas into a single object schema:
    // - Gemini doesn't allow top-level `type` together with `anyOf`.
    // - OpenAI rejects schemas without top-level `type: "object"`.
    // Merging properties preserves useful enums like `action` while keeping schemas portable.
    parameters: cleanSchemaForGemini({
      type: "object",
      ...(typeof nextSchema.title === "string"
        ? { title: nextSchema.title }
        : {}),
      ...(typeof nextSchema.description === "string"
        ? { description: nextSchema.description }
        : {}),
      properties:
        Object.keys(mergedProperties).length > 0
          ? mergedProperties
          : (schema.properties ?? {}),
      ...(mergedRequired && mergedRequired.length > 0
        ? { required: mergedRequired }
        : {}),
      additionalProperties:
        "additionalProperties" in schema ? schema.additionalProperties : true,
    }),
  };
}

function cleanToolSchemaForGemini(schema: Record<string, unknown>): unknown {
  return cleanSchemaForGemini(schema);
}
  filterToolsByPolicy,
  isToolAllowedByPolicies,
  resolveEffectiveToolPolicy,
  resolveSubagentToolPolicy,
} from "./pi-tools.policy.js";
import {
  assertRequiredParams,
  CLAUDE_PARAM_GROUPS,
  createClawdbotReadTool,
  createSandboxedEditTool,
  createSandboxedReadTool,
  createSandboxedWriteTool,
  normalizeToolParams,
  patchToolSchemaForClaudeCompatibility,
  wrapToolParamNormalization,
} from "./pi-tools.read.js";
import {
  cleanToolSchemaForGemini,
  normalizeToolParameters,
} from "./pi-tools.schema.js";
import type { AnyAgentTool } from "./pi-tools.types.js";
import type { SandboxContext } from "./sandbox.js";
import { resolveToolProfilePolicy } from "./tool-policy.js";

function isOpenAIProvider(provider?: string) {
  const normalized = provider?.trim().toLowerCase();
@@ -325,295 +73,6 @@ function isApplyPatchAllowedForModel(params: {
  });
}

const DEFAULT_SUBAGENT_TOOL_DENY = [
  "sessions_list",
  "sessions_history",
  "sessions_send",
  "sessions_spawn",
];

function resolveSubagentToolPolicy(cfg?: ClawdbotConfig): SandboxToolPolicy {
  const configured = cfg?.tools?.subagents?.tools;
  const deny = [
    ...DEFAULT_SUBAGENT_TOOL_DENY,
    ...(Array.isArray(configured?.deny) ? configured.deny : []),
  ];
  const allow = Array.isArray(configured?.allow) ? configured.allow : undefined;
  return { allow, deny };
}

function isToolAllowedByPolicyName(
  name: string,
  policy?: SandboxToolPolicy,
): boolean {
  if (!policy) return true;
  const deny = new Set(expandToolGroups(policy.deny));
  const allowRaw = expandToolGroups(policy.allow);
  const allow = allowRaw.length > 0 ? new Set(allowRaw) : null;
  const normalized = normalizeToolName(name);
  if (deny.has(normalized)) return false;
  if (allow) {
    if (allow.has(normalized)) return true;
    if (normalized === "apply_patch" && allow.has("exec")) return true;
    return false;
  }
  return true;
}

function filterToolsByPolicy(
  tools: AnyAgentTool[],
  policy?: SandboxToolPolicy,
) {
  if (!policy) return tools;
  return tools.filter((tool) => isToolAllowedByPolicyName(tool.name, policy));
}

function resolveEffectiveToolPolicy(params: {
  config?: ClawdbotConfig;
  sessionKey?: string;
}) {
  const agentId = params.sessionKey
    ? resolveAgentIdFromSessionKey(params.sessionKey)
    : undefined;
  const agentConfig =
    params.config && agentId
      ? resolveAgentConfig(params.config, agentId)
      : undefined;
  const agentTools = agentConfig?.tools;
  const hasAgentToolPolicy =
    Array.isArray(agentTools?.allow) ||
    Array.isArray(agentTools?.deny) ||
    typeof agentTools?.profile === "string";
  const globalTools = params.config?.tools;
  const profile = agentTools?.profile ?? globalTools?.profile;
  return {
    agentId,
    policy: hasAgentToolPolicy ? agentTools : globalTools,
    profile,
  };
}

function isToolAllowedByPolicy(name: string, policy?: SandboxToolPolicy) {
  return isToolAllowedByPolicyName(name, policy);
}

function isToolAllowedByPolicies(
  name: string,
  policies: Array<SandboxToolPolicy | undefined>,
) {
  return policies.every((policy) => isToolAllowedByPolicy(name, policy));
}

function wrapSandboxPathGuard(tool: AnyAgentTool, root: string): AnyAgentTool {
  return {
    ...tool,
    execute: async (toolCallId, args, signal, onUpdate) => {
      const normalized = normalizeToolParams(args);
      const record =
        normalized ??
        (args && typeof args === "object"
          ? (args as Record<string, unknown>)
          : undefined);
      const filePath = record?.path;
      if (typeof filePath === "string" && filePath.trim()) {
        await assertSandboxPath({ filePath, cwd: root, root });
      }
      return tool.execute(toolCallId, normalized ?? args, signal, onUpdate);
    },
  };
}

type RequiredParamGroup = {
  keys: readonly string[];
  allowEmpty?: boolean;
  label?: string;
};

const CLAUDE_PARAM_GROUPS = {
  read: [{ keys: ["path", "file_path"], label: "path (path or file_path)" }],
  write: [{ keys: ["path", "file_path"], label: "path (path or file_path)" }],
  edit: [
    { keys: ["path", "file_path"], label: "path (path or file_path)" },
    {
      keys: ["oldText", "old_string"],
      label: "oldText (oldText or old_string)",
    },
    {
      keys: ["newText", "new_string"],
      label: "newText (newText or new_string)",
    },
  ],
} as const;

function patchToolSchemaForClaudeCompatibility(
  tool: AnyAgentTool,
): AnyAgentTool {
  const schema =
    tool.parameters && typeof tool.parameters === "object"
      ? (tool.parameters as Record<string, unknown>)
      : undefined;

  if (!schema || !schema.properties || typeof schema.properties !== "object") {
    return tool;
  }

  const properties = { ...(schema.properties as Record<string, unknown>) };
  const required = Array.isArray(schema.required)
    ? schema.required.filter((key): key is string => typeof key === "string")
    : [];
  let changed = false;

  const aliasPairs: Array<{ original: string; alias: string }> = [
    { original: "path", alias: "file_path" },
    { original: "oldText", alias: "old_string" },
    { original: "newText", alias: "new_string" },
  ];

  for (const { original, alias } of aliasPairs) {
    if (!(original in properties)) continue;
    if (!(alias in properties)) {
      properties[alias] = properties[original];
      changed = true;
    }
    const idx = required.indexOf(original);
    if (idx !== -1) {
      required.splice(idx, 1);
      changed = true;
    }
  }

  if (!changed) return tool;

  return {
    ...tool,
    parameters: {
      ...schema,
      properties,
      ...(required.length > 0 ? { required } : {}),
    },
  };
}

function assertRequiredParams(
  record: Record<string, unknown> | undefined,
  groups: readonly RequiredParamGroup[],
  toolName: string,
): void {
  if (!record || typeof record !== "object") {
    throw new Error(`Missing parameters for ${toolName}`);
  }

  for (const group of groups) {
    const satisfied = group.keys.some((key) => {
      if (!(key in record)) return false;
      const value = record[key];
      if (typeof value !== "string") return false;
      if (group.allowEmpty) return true;
      return value.trim().length > 0;
    });

    if (!satisfied) {
      const label = group.label ?? group.keys.join(" or ");
      throw new Error(`Missing required parameter: ${label}`);
    }
  }
}

function createSandboxedReadTool(root: string) {
  const base = createReadTool(root);
  return wrapSandboxPathGuard(createClawdbotReadTool(base), root);
}

function createSandboxedWriteTool(root: string) {
  const base = createWriteTool(root);
  return wrapSandboxPathGuard(
    wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.write),
    root,
  );
}

function createSandboxedEditTool(root: string) {
  const base = createEditTool(root);
  return wrapSandboxPathGuard(
    wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.edit),
    root,
  );
}

// Normalize tool parameters from Claude Code conventions to pi-coding-agent conventions.
// Claude Code uses file_path/old_string/new_string while pi-coding-agent uses path/oldText/newText.
// This prevents models trained on Claude Code from getting stuck in tool-call loops.
function normalizeToolParams(
  params: unknown,
): Record<string, unknown> | undefined {
  if (!params || typeof params !== "object") return undefined;
  const record = params as Record<string, unknown>;
  const normalized = { ...record };
  // file_path → path (read, write, edit)
  if ("file_path" in normalized && !("path" in normalized)) {
    normalized.path = normalized.file_path;
    delete normalized.file_path;
  }
  // old_string → oldText (edit)
  if ("old_string" in normalized && !("oldText" in normalized)) {
    normalized.oldText = normalized.old_string;
    delete normalized.old_string;
  }
  // new_string → newText (edit)
  if ("new_string" in normalized && !("newText" in normalized)) {
    normalized.newText = normalized.new_string;
    delete normalized.new_string;
  }
  return normalized;
}

// Generic wrapper to normalize parameters for any tool
function wrapToolParamNormalization(
  tool: AnyAgentTool,
  requiredParamGroups?: readonly RequiredParamGroup[],
): AnyAgentTool {
  const patched = patchToolSchemaForClaudeCompatibility(tool);
  return {
    ...patched,
    execute: async (toolCallId, params, signal, onUpdate) => {
      const normalized = normalizeToolParams(params);
      const record =
        normalized ??
        (params && typeof params === "object"
          ? (params as Record<string, unknown>)
          : undefined);
      if (requiredParamGroups?.length) {
        assertRequiredParams(record, requiredParamGroups, tool.name);
      }
      return tool.execute(toolCallId, normalized ?? params, signal, onUpdate);
    },
  };
}
function createClawdbotReadTool(base: AnyAgentTool): AnyAgentTool {
  const patched = patchToolSchemaForClaudeCompatibility(base);
  return {
    ...patched,
    execute: async (toolCallId, params, signal) => {
      const normalized = normalizeToolParams(params);
      const record =
        normalized ??
        (params && typeof params === "object"
          ? (params as Record<string, unknown>)
          : undefined);
      assertRequiredParams(record, CLAUDE_PARAM_GROUPS.read, base.name);
      const result = (await base.execute(
        toolCallId,
        normalized ?? params,
        signal,
      )) as AgentToolResult<unknown>;
      const filePath =
        typeof record?.path === "string" ? String(record.path) : "<unknown>";
      const normalizedResult = await normalizeReadImageResult(result, filePath);
      return sanitizeToolResultImages(normalizedResult, `read:${filePath}`);
    },
  };
}

export const __testing = {
  cleanToolSchemaForGemini,
  normalizeToolParams,
@@ -622,48 +81,6 @@ export const __testing = {
  assertRequiredParams,
} as const;

function throwAbortError(): never {
  const err = new Error("Aborted");
  err.name = "AbortError";
  throw err;
}

function combineAbortSignals(
  a?: AbortSignal,
  b?: AbortSignal,
): AbortSignal | undefined {
  if (!a && !b) return undefined;
  if (a && !b) return a;
  if (b && !a) return b;
  if (a?.aborted) return a;
  if (b?.aborted) return b;
  if (typeof AbortSignal.any === "function") {
    return AbortSignal.any([a as AbortSignal, b as AbortSignal]);
  }
  const controller = new AbortController();
  const onAbort = () => controller.abort();
  a?.addEventListener("abort", onAbort, { once: true });
  b?.addEventListener("abort", onAbort, { once: true });
  return controller.signal;
}

function wrapToolWithAbortSignal(
  tool: AnyAgentTool,
  abortSignal?: AbortSignal,
): AnyAgentTool {
  if (!abortSignal) return tool;
  const execute = tool.execute;
  if (!execute) return tool;
  return {
    ...tool,
    execute: async (toolCallId, params, signal, onUpdate) => {
      const combined = combineAbortSignals(signal, abortSignal);
      if (combined?.aborted) throwAbortError();
      return await execute(toolCallId, params, combined, onUpdate);
    },
  };
}

export function createClawdbotCodingTools(options?: {
  exec?: ExecToolDefaults & ProcessToolDefaults;
  messageProvider?: string;
4
src/agents/pi-tools.types.ts
Normal file
@@ -0,0 +1,4 @@
import type { AgentTool } from "@mariozechner/pi-agent-core";

// biome-ignore lint/suspicious/noExplicitAny: TypeBox schema type from pi-agent-core uses a different module instance.
export type AnyAgentTool = AgentTool<any, unknown>;
@@ -264,7 +264,7 @@ export async function handleDiscordGuildAction(
   name: name ?? undefined,
   topic: topic ?? undefined,
   position: position ?? undefined,
-  parentId: parentId === undefined ? undefined : parentId,
+  parentId,
   nsfw,
   rateLimitPerUser: rateLimitPerUser ?? undefined,
 });
@@ -293,7 +293,7 @@
 await moveChannelDiscord({
   guildId,
   channelId,
-  parentId: parentId === undefined ? undefined : parentId,
+  parentId,
   position: position ?? undefined,
 });
 return jsonResult({ ok: true });