feat: add apply_patch tool (exec-gated)

This commit is contained in:
Peter Steinberger
2026-01-12 03:42:49 +00:00
parent 221c0b4cf8
commit 8b4bdaa8a4
25 changed files with 1055 additions and 41 deletions

View File

@@ -0,0 +1,74 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it } from "vitest";
import { applyPatch } from "./apply-patch.js";
async function withTempDir<T>(fn: (dir: string) => Promise<T>) {
  // Create an isolated scratch directory, hand it to `fn`, and remove it
  // again no matter how `fn` exits.
  const prefix = path.join(os.tmpdir(), "clawdbot-patch-");
  const tempDir = await fs.mkdtemp(prefix);
  try {
    return await fn(tempDir);
  } finally {
    await fs.rm(tempDir, { force: true, recursive: true });
  }
}
describe("applyPatch", () => {
  it("adds a file", async () => {
    await withTempDir(async (dir) => {
      const patch = `*** Begin Patch
*** Add File: hello.txt
+hello
*** End Patch`;
      const outcome = await applyPatch(patch, { cwd: dir });
      const written = await fs.readFile(path.join(dir, "hello.txt"), "utf8");
      expect(outcome.summary.added).toEqual(["hello.txt"]);
      expect(written).toBe("hello\n");
    });
  });
  it("updates and moves a file", async () => {
    await withTempDir(async (dir) => {
      const sourcePath = path.join(dir, "source.txt");
      const destPath = path.join(dir, "dest.txt");
      await fs.writeFile(sourcePath, "foo\nbar\n", "utf8");
      const patch = `*** Begin Patch
*** Update File: source.txt
*** Move to: dest.txt
@@
 foo
-bar
+baz
*** End Patch`;
      const outcome = await applyPatch(patch, { cwd: dir });
      const moved = await fs.readFile(destPath, "utf8");
      expect(moved).toBe("foo\nbaz\n");
      // The original file must be gone after the move.
      await expect(fs.stat(sourcePath)).rejects.toBeDefined();
      expect(outcome.summary.modified).toEqual(["dest.txt"]);
    });
  });
  it("supports end-of-file inserts", async () => {
    await withTempDir(async (dir) => {
      const targetPath = path.join(dir, "end.txt");
      await fs.writeFile(targetPath, "line1\n", "utf8");
      const patch = `*** Begin Patch
*** Update File: end.txt
@@
+line2
*** End of File
*** End Patch`;
      await applyPatch(patch, { cwd: dir });
      expect(await fs.readFile(targetPath, "utf8")).toBe("line1\nline2\n");
    });
  });
});

689
src/agents/apply-patch.ts Normal file
View File

@@ -0,0 +1,689 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import type { AgentTool } from "@mariozechner/pi-agent-core";
import { Type } from "@sinclair/typebox";
import { assertSandboxPath } from "./sandbox-paths.js";
// Envelope markers: the whole patch body is wrapped in a
// "*** Begin Patch" / "*** End Patch" pair.
const BEGIN_PATCH_MARKER = "*** Begin Patch";
const END_PATCH_MARKER = "*** End Patch";
// Hunk headers. Each introduces one file operation; the target path follows
// the marker on the same line.
const ADD_FILE_MARKER = "*** Add File: ";
const DELETE_FILE_MARKER = "*** Delete File: ";
const UPDATE_FILE_MARKER = "*** Update File: ";
// Optional rename target, valid immediately after an Update File header.
const MOVE_TO_MARKER = "*** Move to: ";
// Flags an update chunk that must anchor at the end of the file.
const EOF_MARKER = "*** End of File";
// "@@ <text>" anchors a chunk below a matching context line; a bare "@@"
// is an anchor-less chunk separator.
const CHANGE_CONTEXT_MARKER = "@@ ";
const EMPTY_CHANGE_CONTEXT_MARKER = "@@";
// Non-ASCII whitespace code points that get normalized to a plain space.
const UNICODE_SPACES = /[\u00A0\u2000-\u200A\u202F\u205F\u3000]/g;
// "*** Add File": create `path` with `contents` (newline-terminated lines).
type AddFileHunk = {
kind: "add";
path: string;
contents: string;
};
// "*** Delete File": remove `path`.
type DeleteFileHunk = {
kind: "delete";
path: string;
};
// One "@@" chunk inside an update hunk: replace `oldLines` with `newLines`,
// optionally anchored below `changeContext` or pinned to end of file.
type UpdateFileChunk = {
changeContext?: string;
oldLines: string[];
newLines: string[];
isEndOfFile: boolean;
};
// "*** Update File": apply `chunks` to `path`, optionally renaming the
// result to `movePath`.
type UpdateFileHunk = {
kind: "update";
path: string;
movePath?: string;
chunks: UpdateFileChunk[];
};
type Hunk = AddFileHunk | DeleteFileHunk | UpdateFileHunk;
// Display paths of files touched by a patch, grouped by operation.
export type ApplyPatchSummary = {
added: string[];
modified: string[];
deleted: string[];
};
export type ApplyPatchResult = {
summary: ApplyPatchSummary;
// Human-readable success message listing the touched files.
text: string;
};
export type ApplyPatchToolDetails = {
summary: ApplyPatchSummary;
};
type ApplyPatchOptions = {
// Base directory for resolving relative patch paths.
cwd: string;
// When set, every touched path must stay inside this sandbox root.
sandboxRoot?: string;
signal?: AbortSignal;
};
// Tool-call parameter schema: a single `input` string holding the patch.
const applyPatchSchema = Type.Object({
input: Type.String({
description: "Patch content using the *** Begin Patch/End Patch format.",
}),
});
/**
 * Builds the `apply_patch` agent tool. Paths resolve against `options.cwd`
 * (falling back to the process cwd) and, when given, are confined to
 * `options.sandboxRoot`.
 */
export function createApplyPatchTool(
  options: { cwd?: string; sandboxRoot?: string } = {},
  // biome-ignore lint/suspicious/noExplicitAny: TypeBox schema type from pi-agent-core uses a different module instance.
): AgentTool<any, ApplyPatchToolDetails> {
  const workingDir = options.cwd ?? process.cwd();
  const { sandboxRoot } = options;
  return {
    name: "apply_patch",
    label: "apply_patch",
    description:
      "Apply a patch to one or more files using the apply_patch format. The input should include *** Begin Patch and *** End Patch markers.",
    parameters: applyPatchSchema,
    execute: async (_toolCallId, args, signal) => {
      const { input } = args as { input?: string };
      const patchText = typeof input === "string" ? input : "";
      if (!patchText.trim()) {
        throw new Error("Provide a patch input.");
      }
      // Bail out early if the caller already cancelled.
      if (signal?.aborted) {
        const abortError = new Error("Aborted");
        abortError.name = "AbortError";
        throw abortError;
      }
      const result = await applyPatch(patchText, {
        cwd: workingDir,
        sandboxRoot,
        signal,
      });
      return {
        content: [{ type: "text", text: result.text }],
        details: { summary: result.summary },
      };
    },
  };
}
/**
 * Parses and applies an apply_patch document, returning a per-operation
 * summary plus a formatted success message.
 *
 * Hunks run in order; the abort signal is checked before each one. Note that
 * parsing happens up front, so malformed patches fail before any file I/O.
 */
export async function applyPatch(
  input: string,
  options: ApplyPatchOptions,
): Promise<ApplyPatchResult> {
  const { hunks } = parsePatchText(input);
  if (hunks.length === 0) {
    throw new Error("No files were modified.");
  }
  const summary: ApplyPatchSummary = { added: [], modified: [], deleted: [] };
  // Dedupe bookkeeping so a file touched twice is reported once per bucket.
  const seen = {
    added: new Set<string>(),
    modified: new Set<string>(),
    deleted: new Set<string>(),
  };
  for (const hunk of hunks) {
    if (options.signal?.aborted) {
      const abortError = new Error("Aborted");
      abortError.name = "AbortError";
      throw abortError;
    }
    const target = await resolvePatchPath(hunk.path, options);
    switch (hunk.kind) {
      case "add": {
        await ensureDir(target.resolved);
        await fs.writeFile(target.resolved, hunk.contents, "utf8");
        recordSummary(summary, seen, "added", target.display);
        break;
      }
      case "delete": {
        await fs.rm(target.resolved);
        recordSummary(summary, seen, "deleted", target.display);
        break;
      }
      default: {
        const updated = await applyUpdateHunk(target.resolved, hunk.chunks);
        if (hunk.movePath) {
          // Write the patched content at the new location, then drop the
          // original file to complete the move.
          const moveTarget = await resolvePatchPath(hunk.movePath, options);
          await ensureDir(moveTarget.resolved);
          await fs.writeFile(moveTarget.resolved, updated, "utf8");
          await fs.rm(target.resolved);
          recordSummary(summary, seen, "modified", moveTarget.display);
        } else {
          await fs.writeFile(target.resolved, updated, "utf8");
          recordSummary(summary, seen, "modified", target.display);
        }
      }
    }
  }
  return { summary, text: formatSummary(summary) };
}
// Adds `value` to the given summary bucket unless it was already recorded,
// keeping each bucket free of duplicates.
function recordSummary(
  summary: ApplyPatchSummary,
  seen: {
    added: Set<string>;
    modified: Set<string>;
    deleted: Set<string>;
  },
  bucket: keyof ApplyPatchSummary,
  value: string,
) {
  const recorded = seen[bucket];
  if (!recorded.has(value)) {
    recorded.add(value);
    summary[bucket].push(value);
  }
}
// Renders the summary using git-status-style prefixes:
// A(dded), M(odified), D(eleted).
function formatSummary(summary: ApplyPatchSummary): string {
  const lines = [
    "Success. Updated the following files:",
    ...summary.added.map((file) => `A ${file}`),
    ...summary.modified.map((file) => `M ${file}`),
    ...summary.deleted.map((file) => `D ${file}`),
  ];
  return lines.join("\n");
}
// Creates the parent directory chain for `filePath` when it is missing.
async function ensureDir(filePath: string) {
  const parent = path.dirname(filePath);
  const hasRealParent = Boolean(parent) && parent !== ".";
  if (hasRealParent) {
    await fs.mkdir(parent, { recursive: true });
  }
}
/**
 * Resolves a patch-relative path to an absolute filesystem path plus a
 * short display path for summaries.
 *
 * Sandboxed sessions go through the sandbox path guard, which rejects
 * escapes and yields a root-relative display path.
 */
async function resolvePatchPath(
  filePath: string,
  options: ApplyPatchOptions,
): Promise<{ resolved: string; display: string }> {
  if (options.sandboxRoot) {
    const checked = await assertSandboxPath({
      filePath,
      cwd: options.cwd,
      root: options.sandboxRoot,
    });
    return {
      resolved: checked.resolved,
      display: checked.relative || checked.resolved,
    };
  }
  const resolved = resolvePathFromCwd(filePath, options.cwd);
  return { resolved, display: toDisplayPath(resolved, options.cwd) };
}
// Replaces non-breaking and typographic space characters with ASCII spaces.
function normalizeUnicodeSpaces(value: string): string {
  const normalized = value.replace(UNICODE_SPACES, " ");
  return normalized;
}
// Expands a leading "~" / "~/" to the user's home directory. Exotic spaces
// are normalized first so the tilde detection is not fooled by them.
function expandPath(filePath: string): string {
  const normalized = normalizeUnicodeSpaces(filePath);
  if (normalized === "~") {
    return os.homedir();
  }
  return normalized.startsWith("~/")
    ? `${os.homedir()}${normalized.slice(1)}`
    : normalized;
}
// Absolute paths are normalized as-is; relative ones anchor at `cwd`.
function resolvePathFromCwd(filePath: string, cwd: string): string {
  const expanded = expandPath(filePath);
  return path.isAbsolute(expanded)
    ? path.normalize(expanded)
    : path.resolve(cwd, expanded);
}
// Prefers a cwd-relative path for display, falling back to the absolute
// path when the target lives outside of `cwd`.
function toDisplayPath(resolved: string, cwd: string): string {
  const relative = path.relative(cwd, resolved);
  if (relative === "") {
    // `resolved` equals `cwd`; show just its final segment.
    return path.basename(resolved);
  }
  if (relative.startsWith("..") || path.isAbsolute(relative)) {
    return resolved;
  }
  return relative;
}
/**
 * Splits a patch into hunks after validating its Begin/End envelope.
 * Accepts both LF and CRLF input; `lineNumber` tracks the 1-based position
 * inside the validated patch for error reporting.
 */
function parsePatchText(input: string): { hunks: Hunk[]; patch: string } {
  const trimmed = input.trim();
  if (!trimmed) {
    throw new Error("Invalid patch: input is empty.");
  }
  const lines = trimmed.split(/\r?\n/);
  const validated = checkPatchBoundariesLenient(lines);
  const hunks: Hunk[] = [];
  // Walk the body between the Begin/End markers, one hunk at a time.
  let body = validated.slice(1, validated.length - 1);
  let lineNumber = 2;
  while (body.length > 0) {
    const { hunk, consumed } = parseOneHunk(body, lineNumber);
    hunks.push(hunk);
    lineNumber += consumed;
    body = body.slice(consumed);
  }
  return { hunks, patch: validated.join("\n") };
}
/**
 * Validates the patch envelope, additionally tolerating a patch wrapped in
 * a shell heredoc (`<<EOF` … `EOF`) by stripping the heredoc delimiters and
 * re-validating the inner lines. Throws when neither form is valid.
 */
function checkPatchBoundariesLenient(lines: string[]): string[] {
  const strictError = checkPatchBoundariesStrict(lines);
  if (!strictError) return lines;
  if (lines.length >= 4) {
    const first = lines[0];
    const last = lines[lines.length - 1];
    const isHeredocStart =
      first === "<<EOF" || first === "<<'EOF'" || first === '<<"EOF"';
    if (isHeredocStart && last.endsWith("EOF")) {
      const inner = lines.slice(1, lines.length - 1);
      const innerError = checkPatchBoundariesStrict(inner);
      if (innerError) throw new Error(innerError);
      return inner;
    }
  }
  throw new Error(strictError);
}
// Returns an error message when the Begin/End envelope is malformed, or
// null when the patch boundaries look valid.
function checkPatchBoundariesStrict(lines: string[]): string | null {
  const first = lines[0]?.trim();
  const last = lines[lines.length - 1]?.trim();
  if (first !== BEGIN_PATCH_MARKER) {
    return "The first line of the patch must be '*** Begin Patch'";
  }
  if (last !== END_PATCH_MARKER) {
    return "The last line of the patch must be '*** End Patch'";
  }
  return null;
}
/**
 * Parses the next hunk from the front of `lines` (the patch body with the
 * Begin/End markers already stripped).
 *
 * Returns the hunk plus the number of lines consumed so the caller can
 * advance. `lineNumber` is the 1-based position of `lines[0]` within the
 * whole patch and is used only for error messages.
 */
function parseOneHunk(
lines: string[],
lineNumber: number,
): { hunk: Hunk; consumed: number } {
if (lines.length === 0) {
throw new Error(`Invalid patch hunk at line ${lineNumber}: empty hunk`);
}
const firstLine = lines[0].trim();
if (firstLine.startsWith(ADD_FILE_MARKER)) {
const targetPath = firstLine.slice(ADD_FILE_MARKER.length);
let contents = "";
// Starts at 1 to count the "*** Add File:" header line itself.
let consumed = 1;
for (const addLine of lines.slice(1)) {
// Added-file body lines are "+"-prefixed; the first non-"+" line
// ends the hunk.
if (addLine.startsWith("+")) {
contents += `${addLine.slice(1)}\n`;
consumed += 1;
} else {
break;
}
}
return {
hunk: { kind: "add", path: targetPath, contents },
consumed,
};
}
if (firstLine.startsWith(DELETE_FILE_MARKER)) {
const targetPath = firstLine.slice(DELETE_FILE_MARKER.length);
return {
hunk: { kind: "delete", path: targetPath },
consumed: 1,
};
}
if (firstLine.startsWith(UPDATE_FILE_MARKER)) {
const targetPath = firstLine.slice(UPDATE_FILE_MARKER.length);
let remaining = lines.slice(1);
let consumed = 1;
let movePath: string | undefined;
// An optional "*** Move to:" rename line may directly follow the header.
const moveCandidate = remaining[0]?.trim();
if (moveCandidate?.startsWith(MOVE_TO_MARKER)) {
movePath = moveCandidate.slice(MOVE_TO_MARKER.length);
remaining = remaining.slice(1);
consumed += 1;
}
const chunks: UpdateFileChunk[] = [];
while (remaining.length > 0) {
// Skip blank separator lines between chunks.
if (remaining[0].trim() === "") {
remaining = remaining.slice(1);
consumed += 1;
continue;
}
// Any "***" line starts the next hunk (or closes the patch body).
if (remaining[0].startsWith("***")) {
break;
}
// Only the first chunk may omit its "@@" context marker.
const { chunk, consumed: chunkLines } = parseUpdateFileChunk(
remaining,
lineNumber + consumed,
chunks.length === 0,
);
chunks.push(chunk);
remaining = remaining.slice(chunkLines);
consumed += chunkLines;
}
if (chunks.length === 0) {
throw new Error(
`Invalid patch hunk at line ${lineNumber}: Update file hunk for path '${targetPath}' is empty`,
);
}
return {
hunk: {
kind: "update",
path: targetPath,
movePath,
chunks,
},
consumed,
};
}
throw new Error(
`Invalid patch hunk at line ${lineNumber}: '${lines[0]}' is not a valid hunk header. Valid hunk headers: '*** Add File: {path}', '*** Delete File: {path}', '*** Update File: {path}'`,
);
}
/**
 * Parses one "@@" chunk of an update hunk from the front of `lines`.
 *
 * `allowMissingContext` lets the very first chunk of a hunk omit its "@@"
 * marker. Returns the chunk plus the number of lines consumed (context
 * marker included); `lineNumber` is used only for error messages.
 */
function parseUpdateFileChunk(
lines: string[],
lineNumber: number,
allowMissingContext: boolean,
): { chunk: UpdateFileChunk; consumed: number } {
if (lines.length === 0) {
throw new Error(
`Invalid patch hunk at line ${lineNumber}: Update hunk does not contain any lines`,
);
}
let changeContext: string | undefined;
let startIndex = 0;
if (lines[0] === EMPTY_CHANGE_CONTEXT_MARKER) {
// Bare "@@": chunk separator without an anchor line.
startIndex = 1;
} else if (lines[0].startsWith(CHANGE_CONTEXT_MARKER)) {
// "@@ <context>": anchor the chunk below this context line.
changeContext = lines[0].slice(CHANGE_CONTEXT_MARKER.length);
startIndex = 1;
} else if (!allowMissingContext) {
throw new Error(
`Invalid patch hunk at line ${lineNumber}: Expected update hunk to start with a @@ context marker, got: '${lines[0]}'`,
);
}
if (startIndex >= lines.length) {
throw new Error(
`Invalid patch hunk at line ${lineNumber + 1}: Update hunk does not contain any lines`,
);
}
const chunk: UpdateFileChunk = {
changeContext,
oldLines: [],
newLines: [],
isEndOfFile: false,
};
// Counts every line consumed after the context marker, including the EOF
// marker itself; added to startIndex to form the final consumed total.
let parsedLines = 0;
for (const line of lines.slice(startIndex)) {
if (line === EOF_MARKER) {
if (parsedLines === 0) {
throw new Error(
`Invalid patch hunk at line ${lineNumber + 1}: Update hunk does not contain any lines`,
);
}
chunk.isEndOfFile = true;
parsedLines += 1;
break;
}
const marker = line[0];
if (!marker) {
// Completely empty line: treated as an empty context line present in
// both the old and new content.
chunk.oldLines.push("");
chunk.newLines.push("");
parsedLines += 1;
continue;
}
if (marker === " ") {
// Context line: appears unchanged on both sides of the edit.
const content = line.slice(1);
chunk.oldLines.push(content);
chunk.newLines.push(content);
parsedLines += 1;
continue;
}
if (marker === "+") {
chunk.newLines.push(line.slice(1));
parsedLines += 1;
continue;
}
if (marker === "-") {
chunk.oldLines.push(line.slice(1));
parsedLines += 1;
continue;
}
// Unrecognized marker: ends the chunk — unless nothing was parsed yet,
// in which case the chunk is malformed.
if (parsedLines === 0) {
throw new Error(
`Invalid patch hunk at line ${lineNumber + 1}: Unexpected line found in update hunk: '${line}'. Every line should start with ' ' (context line), '+' (added line), or '-' (removed line)`,
);
}
break;
}
return { chunk, consumed: parsedLines + startIndex };
}
/**
 * Reads `filePath`, applies the chunk replacements, and returns the patched
 * contents terminated by exactly one trailing newline.
 */
async function applyUpdateHunk(
  filePath: string,
  chunks: UpdateFileChunk[],
): Promise<string> {
  let originalContents: string;
  try {
    originalContents = await fs.readFile(filePath, "utf8");
  } catch (err) {
    throw new Error(`Failed to read file to update ${filePath}: ${err}`);
  }
  // Split into lines, dropping the empty entry a terminating newline leaves
  // behind so matching works on real content lines only.
  const originalLines = originalContents.split("\n");
  if (originalLines[originalLines.length - 1] === "") {
    originalLines.pop();
  }
  const replacements = computeReplacements(originalLines, filePath, chunks);
  const patched = applyReplacements(originalLines, replacements);
  // Re-add the trailing newline marker if it is missing.
  if (patched.length === 0 || patched[patched.length - 1] !== "") {
    patched.push("");
  }
  return patched.join("\n");
}
/**
 * Locates each chunk in `originalLines` and produces a list of
 * [startIndex, deleteCount, replacementLines] operations.
 *
 * Chunks match in order: `lineIndex` advances past each match so later
 * chunks only search the remainder of the file. Throws when a chunk's
 * context or expected lines cannot be found.
 */
function computeReplacements(
originalLines: string[],
filePath: string,
chunks: UpdateFileChunk[],
): Array<[number, number, string[]]> {
const replacements: Array<[number, number, string[]]> = [];
let lineIndex = 0;
for (const chunk of chunks) {
if (chunk.changeContext) {
// Anchor the chunk below its "@@ <context>" line.
const ctxIndex = seekSequence(
originalLines,
[chunk.changeContext],
lineIndex,
false,
);
if (ctxIndex === null) {
throw new Error(
`Failed to find context '${chunk.changeContext}' in ${filePath}`,
);
}
lineIndex = ctxIndex + 1;
}
if (chunk.oldLines.length === 0) {
// Pure insertion: insert before a trailing blank line when present,
// otherwise append at the very end of the file.
const insertionIndex =
originalLines.length > 0 &&
originalLines[originalLines.length - 1] === ""
? originalLines.length - 1
: originalLines.length;
replacements.push([insertionIndex, 0, chunk.newLines]);
continue;
}
let pattern = chunk.oldLines;
let newSlice = chunk.newLines;
let found = seekSequence(
originalLines,
pattern,
lineIndex,
chunk.isEndOfFile,
);
if (found === null && pattern[pattern.length - 1] === "") {
// Retry without a trailing blank line (dropping the matching blank
// from the replacement too) — a common off-by-one in model patches.
pattern = pattern.slice(0, -1);
if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
newSlice = newSlice.slice(0, -1);
}
found = seekSequence(
originalLines,
pattern,
lineIndex,
chunk.isEndOfFile,
);
}
if (found === null) {
throw new Error(
`Failed to find expected lines in ${filePath}:\n${chunk.oldLines.join("\n")}`,
);
}
replacements.push([found, pattern.length, newSlice]);
lineIndex = found + pattern.length;
}
// Sort ascending so applyReplacements can process them back-to-front.
replacements.sort((a, b) => a[0] - b[0]);
return replacements;
}
/**
 * Applies [startIndex, deleteCount, newLines] replacements to a copy of
 * `lines` and returns the result; the input array is never mutated.
 *
 * Replacements arrive sorted ascending and are applied back-to-front so
 * earlier indices stay valid while later spans are spliced.
 */
function applyReplacements(
  lines: string[],
  replacements: Array<[number, number, string[]]>,
): string[] {
  const result = [...lines];
  for (const [startIndex, oldLen, newLines] of [...replacements].reverse()) {
    // splice clamps the delete count at the end of the array — the same
    // guard the previous element-by-element removal loop performed — and
    // inserts the replacement lines in place.
    result.splice(startIndex, oldLen, ...newLines);
  }
  return result;
}
/**
 * Finds the first index at or after `start` where `pattern` matches a run
 * of `lines`, or null when it never matches.
 *
 * Comparisons are tried from strictest to loosest — exact, ignoring
 * trailing whitespace, ignoring surrounding whitespace, then additionally
 * normalizing "smart" punctuation — so an exact match always wins over a
 * fuzzy one. The previous implementation spelled this out as four
 * copy-pasted scan loops; they are folded into one list of normalizers.
 */
function seekSequence(
  lines: string[],
  pattern: string[],
  start: number,
  eof: boolean,
): number | null {
  if (pattern.length === 0) return start;
  if (pattern.length > lines.length) return null;
  const maxStart = lines.length - pattern.length;
  // End-of-file chunks can only match at the one position where the pattern
  // ends on the last line; otherwise scan forward from `start`.
  const searchStart = eof ? maxStart : start;
  if (searchStart > maxStart) return null;
  const normalizers: Array<(value: string) => string> = [
    (value) => value,
    (value) => value.trimEnd(),
    (value) => value.trim(),
    (value) => normalizePunctuation(value.trim()),
  ];
  for (const normalize of normalizers) {
    for (let i = searchStart; i <= maxStart; i += 1) {
      if (linesMatch(lines, pattern, i, normalize)) return i;
    }
  }
  return null;
}
// True when the window of `lines` beginning at `start` equals `pattern`
// line-for-line after applying `normalize` to both sides.
function linesMatch(
  lines: string[],
  pattern: string[],
  start: number,
  normalize: (value: string) => string,
): boolean {
  return pattern.every(
    (expected, offset) =>
      normalize(lines[start + offset]) === normalize(expected),
  );
}
// Maps typographic dashes, curly quotes, and exotic Unicode spaces to their
// plain ASCII equivalents; all other characters pass through untouched.
function normalizePunctuation(value: string): string {
  const replacements: Record<string, string> = {
    // Hyphens, dashes, and the minus sign.
    "\u2010": "-",
    "\u2011": "-",
    "\u2012": "-",
    "\u2013": "-",
    "\u2014": "-",
    "\u2015": "-",
    "\u2212": "-",
    // Single quotation marks.
    "\u2018": "'",
    "\u2019": "'",
    "\u201A": "'",
    "\u201B": "'",
    // Double quotation marks.
    "\u201C": '"',
    "\u201D": '"',
    "\u201E": '"',
    "\u201F": '"',
    // Non-breaking and typographic spaces.
    "\u00A0": " ",
    "\u2002": " ",
    "\u2003": " ",
    "\u2004": " ",
    "\u2005": " ",
    "\u2006": " ",
    "\u2007": " ",
    "\u2008": " ",
    "\u2009": " ",
    "\u200A": " ",
    "\u202F": " ",
    "\u205F": " ",
    "\u3000": " ",
  };
  return Array.from(value)
    .map((char) => replacements[char] ?? char)
    .join("");
}

View File

@@ -1110,6 +1110,7 @@ export async function compactEmbeddedPiSession(params: {
config: params.config,
abortSignal: runAbortController.signal,
modelProvider: model.provider,
modelId,
modelAuthMode: resolveModelAuthMode(model.provider, params.config),
// No currentChannelId/currentThreadTs for compaction - not in message context
});
@@ -1524,6 +1525,7 @@ export async function runEmbeddedPiAgent(params: {
config: params.config,
abortSignal: runAbortController.signal,
modelProvider: model.provider,
modelId,
modelAuthMode: resolveModelAuthMode(model.provider, params.config),
currentChannelId: params.currentChannelId,
currentThreadTs: params.currentThreadTs,

View File

@@ -31,6 +31,7 @@ describe("Agent-specific tool filtering", () => {
expect(toolNames).toContain("read");
expect(toolNames).toContain("write");
expect(toolNames).not.toContain("exec");
expect(toolNames).not.toContain("apply_patch");
});
it("should keep global tool policy when agent only sets tools.elevated", () => {
@@ -65,6 +66,32 @@ describe("Agent-specific tool filtering", () => {
expect(toolNames).toContain("exec");
expect(toolNames).toContain("read");
expect(toolNames).not.toContain("write");
expect(toolNames).not.toContain("apply_patch");
});
it("should allow apply_patch when exec is allow-listed and applyPatch is enabled", () => {
const cfg: ClawdbotConfig = {
tools: {
allow: ["read", "exec"],
exec: {
applyPatch: { enabled: true },
},
},
};
const tools = createClawdbotCodingTools({
config: cfg,
sessionKey: "agent:main:main",
workspaceDir: "/tmp/test",
agentDir: "/tmp/agent",
modelProvider: "openai",
modelId: "gpt-5.2",
});
const toolNames = tools.map((t) => t.name);
expect(toolNames).toContain("read");
expect(toolNames).toContain("exec");
expect(toolNames).toContain("apply_patch");
});
it("should apply agent-specific tool policy", () => {
@@ -98,6 +125,7 @@ describe("Agent-specific tool filtering", () => {
expect(toolNames).toContain("read");
expect(toolNames).not.toContain("exec");
expect(toolNames).not.toContain("write");
expect(toolNames).not.toContain("apply_patch");
expect(toolNames).not.toContain("edit");
});
@@ -133,6 +161,7 @@ describe("Agent-specific tool filtering", () => {
expect(mainToolNames).toContain("exec");
expect(mainToolNames).toContain("write");
expect(mainToolNames).toContain("edit");
expect(mainToolNames).not.toContain("apply_patch");
// family agent: restricted
const familyTools = createClawdbotCodingTools({
@@ -146,6 +175,7 @@ describe("Agent-specific tool filtering", () => {
expect(familyToolNames).not.toContain("exec");
expect(familyToolNames).not.toContain("write");
expect(familyToolNames).not.toContain("edit");
expect(familyToolNames).not.toContain("apply_patch");
});
it("should prefer agent-specific tool policy over global", () => {
@@ -178,6 +208,7 @@ describe("Agent-specific tool filtering", () => {
expect(toolNames).toContain("browser");
expect(toolNames).not.toContain("exec");
expect(toolNames).not.toContain("process");
expect(toolNames).not.toContain("apply_patch");
});
it("should work with sandbox tools filtering", () => {

View File

@@ -4,6 +4,7 @@ import path from "node:path";
import sharp from "sharp";
import { describe, expect, it } from "vitest";
import type { ClawdbotConfig } from "../config/config.js";
import { __testing, createClawdbotCodingTools } from "./pi-tools.js";
import { createBrowserTool } from "./tools/browser-tool.js";
@@ -153,10 +154,59 @@ describe("createClawdbotCodingTools", () => {
}
});
it("includes exec and process tools", () => {
it("includes exec and process tools by default", () => {
const tools = createClawdbotCodingTools();
expect(tools.some((tool) => tool.name === "exec")).toBe(true);
expect(tools.some((tool) => tool.name === "process")).toBe(true);
expect(tools.some((tool) => tool.name === "apply_patch")).toBe(false);
});
it("gates apply_patch behind tools.exec.applyPatch for OpenAI models", () => {
const config: ClawdbotConfig = {
tools: {
exec: {
applyPatch: { enabled: true },
},
},
};
const openAiTools = createClawdbotCodingTools({
config,
modelProvider: "openai",
modelId: "gpt-5.2",
});
expect(openAiTools.some((tool) => tool.name === "apply_patch")).toBe(true);
const anthropicTools = createClawdbotCodingTools({
config,
modelProvider: "anthropic",
modelId: "claude-opus-4-5",
});
expect(anthropicTools.some((tool) => tool.name === "apply_patch")).toBe(
false,
);
});
it("respects apply_patch allowModels", () => {
const config: ClawdbotConfig = {
tools: {
exec: {
applyPatch: { enabled: true, allowModels: ["gpt-5.2"] },
},
},
};
const allowed = createClawdbotCodingTools({
config,
modelProvider: "openai",
modelId: "gpt-5.2",
});
expect(allowed.some((tool) => tool.name === "apply_patch")).toBe(true);
const denied = createClawdbotCodingTools({
config,
modelProvider: "openai",
modelId: "gpt-5-mini",
});
expect(denied.some((tool) => tool.name === "apply_patch")).toBe(false);
});
it("keeps canonical tool names for Anthropic OAuth (pi-ai remaps on the wire)", () => {
@@ -169,6 +219,7 @@ describe("createClawdbotCodingTools", () => {
expect(names.has("read")).toBe(true);
expect(names.has("write")).toBe(true);
expect(names.has("edit")).toBe(true);
expect(names.has("apply_patch")).toBe(false);
});
it("provides top-level object schemas for all tools", () => {
@@ -212,6 +263,7 @@ describe("createClawdbotCodingTools", () => {
expect(names.has("read")).toBe(true);
expect(names.has("exec")).toBe(true);
expect(names.has("process")).toBe(true);
expect(names.has("apply_patch")).toBe(false);
});
it("supports allow-only sub-agent tool policy", () => {

View File

@@ -14,6 +14,7 @@ import {
resolveAgentConfig,
resolveAgentIdFromSessionKey,
} from "./agent-scope.js";
import { createApplyPatchTool } from "./apply-patch.js";
import {
createExecTool,
createProcessTool,
@@ -292,6 +293,7 @@ function cleanToolSchemaForGemini(schema: Record<string, unknown>): unknown {
const TOOL_NAME_ALIASES: Record<string, string> = {
bash: "exec",
"apply-patch": "apply_patch",
};
function normalizeToolName(name: string) {
@@ -304,6 +306,35 @@ function normalizeToolNames(list?: string[]) {
return list.map(normalizeToolName).filter(Boolean);
}
function isOpenAIProvider(provider?: string) {
const normalized = provider?.trim().toLowerCase();
return normalized === "openai" || normalized === "openai-codex";
}
/**
 * Checks the apply_patch model allowlist. An empty or missing allowlist
 * permits every model; otherwise the model id must match an entry either
 * bare ("gpt-5.2") or provider-qualified ("openai/gpt-5.2").
 */
function isApplyPatchAllowedForModel(params: {
  modelProvider?: string;
  modelId?: string;
  allowModels?: string[];
}) {
  const allowlist = Array.isArray(params.allowModels)
    ? params.allowModels
    : [];
  if (allowlist.length === 0) return true;
  const rawId = params.modelId?.trim();
  if (!rawId) return false;
  const bareId = rawId.toLowerCase();
  const providerName = params.modelProvider?.trim().toLowerCase();
  // Build the "provider/model" form only when the id is not already
  // qualified, so entries in either style can match.
  const qualifiedId =
    providerName && !bareId.includes("/")
      ? `${providerName}/${bareId}`
      : bareId;
  return allowlist.some((candidate) => {
    const normalized = candidate.trim().toLowerCase();
    return (
      normalized !== "" &&
      (normalized === bareId || normalized === qualifiedId)
    );
  });
}
const DEFAULT_SUBAGENT_TOOL_DENY = [
"sessions_list",
"sessions_history",
@@ -321,20 +352,30 @@ function resolveSubagentToolPolicy(cfg?: ClawdbotConfig): SandboxToolPolicy {
return { allow, deny };
}
/**
 * Evaluates a tool name against a sandbox tool policy: deny wins, then an
 * allowlist (when non-empty) must contain the tool. apply_patch rides on
 * the exec allowance — allow-listing "exec" implicitly allows it too.
 */
function isToolAllowedByPolicyName(
  name: string,
  policy?: SandboxToolPolicy,
): boolean {
  if (!policy) return true;
  const denied = new Set(normalizeToolNames(policy.deny));
  const allowedNames = normalizeToolNames(policy.allow);
  const toolName = normalizeToolName(name);
  if (denied.has(toolName)) return false;
  // An empty allowlist means "no restriction".
  if (allowedNames.length === 0) return true;
  const allowed = new Set(allowedNames);
  if (allowed.has(toolName)) return true;
  return toolName === "apply_patch" && allowed.has("exec");
}
function filterToolsByPolicy(
tools: AnyAgentTool[],
policy?: SandboxToolPolicy,
) {
if (!policy) return tools;
const deny = new Set(normalizeToolNames(policy.deny));
const allowRaw = normalizeToolNames(policy.allow);
const allow = allowRaw.length > 0 ? new Set(allowRaw) : null;
return tools.filter((tool) => {
const name = tool.name.toLowerCase();
if (deny.has(name)) return false;
if (allow) return allow.has(name);
return true;
});
return tools.filter((tool) => isToolAllowedByPolicyName(tool.name, policy));
}
function resolveEffectiveToolPolicy(params: {
@@ -359,14 +400,7 @@ function resolveEffectiveToolPolicy(params: {
}
function isToolAllowedByPolicy(name: string, policy?: SandboxToolPolicy) {
if (!policy) return true;
const deny = new Set(normalizeToolNames(policy.deny));
const allowRaw = normalizeToolNames(policy.allow);
const allow = allowRaw.length > 0 ? new Set(allowRaw) : null;
const normalized = normalizeToolName(name);
if (deny.has(normalized)) return false;
if (allow) return allow.has(normalized);
return true;
return isToolAllowedByPolicyName(name, policy);
}
function isToolAllowedByPolicies(
@@ -490,6 +524,8 @@ export function createClawdbotCodingTools(options?: {
* Example: "anthropic", "openai", "google", "openai-codex".
*/
modelProvider?: string;
/** Model id for the current provider (used for model-specific tool gating). */
modelId?: string;
/**
* Auth mode for the current provider. We only need this for Anthropic OAuth
* tool-name blocking quirks.
@@ -524,6 +560,15 @@ export function createClawdbotCodingTools(options?: {
const sandboxRoot = sandbox?.workspaceDir;
const allowWorkspaceWrites = sandbox?.workspaceAccess !== "ro";
const workspaceRoot = options?.workspaceDir ?? process.cwd();
const applyPatchConfig = options?.config?.tools?.exec?.applyPatch;
const applyPatchEnabled =
!!applyPatchConfig?.enabled &&
isOpenAIProvider(options?.modelProvider) &&
isApplyPatchAllowedForModel({
modelProvider: options?.modelProvider,
modelId: options?.modelId,
allowModels: applyPatchConfig?.allowModels,
});
const base = (codingTools as unknown as AnyAgentTool[]).flatMap((tool) => {
if (tool.name === readTool.name) {
@@ -562,6 +607,14 @@ export function createClawdbotCodingTools(options?: {
cleanupMs: options?.exec?.cleanupMs,
scopeKey,
});
const applyPatchTool =
!applyPatchEnabled || (sandboxRoot && !allowWorkspaceWrites)
? null
: createApplyPatchTool({
cwd: sandboxRoot ?? workspaceRoot,
sandboxRoot:
sandboxRoot && allowWorkspaceWrites ? sandboxRoot : undefined,
});
const tools: AnyAgentTool[] = [
...base,
...(sandboxRoot
@@ -572,6 +625,7 @@ export function createClawdbotCodingTools(options?: {
]
: []
: []),
...(applyPatchTool ? [applyPatchTool as unknown as AnyAgentTool] : []),
execTool as unknown as AnyAgentTool,
processTool as unknown as AnyAgentTool,
// Provider docking: include provider-defined agent tools (login, etc.).

View File

@@ -171,6 +171,7 @@ const DEFAULT_TOOL_ALLOW = [
"read",
"write",
"edit",
"apply_patch",
"sessions_list",
"sessions_history",
"sessions_send",

View File

@@ -50,6 +50,7 @@ export function buildAgentSystemPrompt(params: {
read: "Read file contents",
write: "Create or overwrite files",
edit: "Make precise edits to files",
apply_patch: "Apply multi-file patches",
grep: "Search file contents for patterns",
find: "Find files by glob pattern",
ls: "List directory contents",
@@ -77,6 +78,7 @@ export function buildAgentSystemPrompt(params: {
"read",
"write",
"edit",
"apply_patch",
"grep",
"find",
"ls",
@@ -195,6 +197,7 @@ export function buildAgentSystemPrompt(params: {
"- grep: search file contents for patterns",
"- find: find files by glob pattern",
"- ls: list directory contents",
"- apply_patch: apply multi-file patches",
`- ${execToolName}: run shell commands (supports background via yieldMs/background)`,
`- ${processToolName}: manage background exec sessions`,
"- browser: control clawd's dedicated browser",

View File

@@ -50,6 +50,11 @@
"title": "Edit",
"detailKeys": ["path"]
},
"apply_patch": {
"emoji": "🩹",
"title": "Apply Patch",
"detailKeys": []
},
"attach": {
"emoji": "📎",
"title": "Attach",

View File

@@ -107,6 +107,8 @@ const FIELD_LABELS: Record<string, string> = {
"tools.audio.transcription.args": "Audio Transcription Args",
"tools.audio.transcription.timeoutSeconds":
"Audio Transcription Timeout (sec)",
"tools.exec.applyPatch.enabled": "Enable apply_patch",
"tools.exec.applyPatch.allowModels": "apply_patch Model Allowlist",
"gateway.controlUi.basePath": "Control UI Base Path",
"gateway.http.endpoints.chatCompletions.enabled":
"OpenAI Chat Completions Endpoint",
@@ -194,6 +196,10 @@ const FIELD_HELP: Record<string, string> = {
'Hot reload strategy for config changes ("hybrid" recommended).',
"gateway.reload.debounceMs":
"Debounce window (ms) before applying config changes.",
"tools.exec.applyPatch.enabled":
"Experimental. Enables apply_patch for OpenAI models when allowed by tool policy.",
"tools.exec.applyPatch.allowModels":
'Optional allowlist of model ids (e.g. "gpt-5.2" or "openai/gpt-5.2").',
"slack.allowBots":
"Allow bot-authored messages to trigger Slack replies (default: false).",
"auth.profiles": "Named auth profiles (provider + mode + optional email).",

View File

@@ -1018,6 +1018,16 @@ export type ToolsConfig = {
timeoutSec?: number;
/** How long to keep finished sessions in memory (ms). */
cleanupMs?: number;
/** apply_patch subtool configuration (experimental). */
applyPatch?: {
/** Enable apply_patch for OpenAI models (default: false). */
enabled?: boolean;
/**
* Optional allowlist of model ids that can use apply_patch.
* Accepts either raw ids (e.g. "gpt-5.2") or full ids (e.g. "openai/gpt-5.2").
*/
allowModels?: string[];
};
};
/** @deprecated Use tools.exec. */
bash?: {

View File

@@ -910,6 +910,12 @@ const ToolsSchema = z
backgroundMs: z.number().int().positive().optional(),
timeoutSec: z.number().int().positive().optional(),
cleanupMs: z.number().int().positive().optional(),
applyPatch: z
.object({
enabled: z.boolean().optional(),
allowModels: z.array(z.string()).optional(),
})
.optional(),
})
.optional(),
bash: z