690 lines
18 KiB
TypeScript
690 lines
18 KiB
TypeScript
import fs from "node:fs/promises";
|
|
import os from "node:os";
|
|
import path from "node:path";
|
|
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
|
import { Type } from "@sinclair/typebox";
|
|
|
|
import { assertSandboxPath } from "./sandbox-paths.js";
|
|
|
|
// Sentinel lines that frame an entire patch document.
const BEGIN_PATCH_MARKER = "*** Begin Patch";
const END_PATCH_MARKER = "*** End Patch";
// Hunk headers: each declares one operation on a single file path.
const ADD_FILE_MARKER = "*** Add File: ";
const DELETE_FILE_MARKER = "*** Delete File: ";
const UPDATE_FILE_MARKER = "*** Update File: ";
// Optional rename directive that may directly follow an Update File header.
const MOVE_TO_MARKER = "*** Move to: ";
// Marks that an update chunk is anchored to the end of the file.
const EOF_MARKER = "*** End of File";
// "@@ <context>" introduces an update chunk with a context line to seek to;
// a bare "@@" introduces a chunk with no context line.
const CHANGE_CONTEXT_MARKER = "@@ ";
const EMPTY_CHANGE_CONTEXT_MARKER = "@@";
// Non-ASCII space characters that get normalized to plain spaces in paths.
const UNICODE_SPACES = /[\u00A0\u2000-\u200A\u202F\u205F\u3000]/g;
|
|
|
|
// Hunk describing creation of a new file with the given contents.
type AddFileHunk = {
  kind: "add";
  path: string;
  contents: string;
};

// Hunk describing deletion of an existing file.
type DeleteFileHunk = {
  kind: "delete";
  path: string;
};

// One "@@" section of an update hunk: the lines expected in the file
// (oldLines) and the lines that replace them (newLines).
type UpdateFileChunk = {
  // Context line from the "@@ <context>" header, used to seek before matching.
  changeContext?: string;
  oldLines: string[];
  newLines: string[];
  // True when the chunk ended with the "*** End of File" marker.
  isEndOfFile: boolean;
};

// Hunk describing an in-place edit (and optional rename) of a file.
type UpdateFileHunk = {
  kind: "update";
  path: string;
  // Destination path when the hunk includes a "*** Move to:" directive.
  movePath?: string;
  chunks: UpdateFileChunk[];
};

// Discriminated union of all patch operations, tagged by `kind`.
type Hunk = AddFileHunk | DeleteFileHunk | UpdateFileHunk;

// Per-bucket lists of display paths touched by a patch.
export type ApplyPatchSummary = {
  added: string[];
  modified: string[];
  deleted: string[];
};

// Result of applying a patch: structured summary plus human-readable text.
export type ApplyPatchResult = {
  summary: ApplyPatchSummary;
  text: string;
};

// Tool-call details surfaced alongside the text result.
export type ApplyPatchToolDetails = {
  summary: ApplyPatchSummary;
};

// Options accepted by applyPatch.
type ApplyPatchOptions = {
  // Base directory for resolving relative patch paths.
  cwd: string;
  // When set, every touched path must stay inside this directory.
  sandboxRoot?: string;
  // Optional cancellation signal; checked between hunks.
  signal?: AbortSignal;
};
|
|
|
|
// TypeBox schema for the tool's single `input` parameter.
const applyPatchSchema = Type.Object({
  input: Type.String({
    description: "Patch content using the *** Begin Patch/End Patch format.",
  }),
});
|
|
|
|
export function createApplyPatchTool(
|
|
options: { cwd?: string; sandboxRoot?: string } = {},
|
|
// biome-ignore lint/suspicious/noExplicitAny: TypeBox schema type from pi-agent-core uses a different module instance.
|
|
): AgentTool<any, ApplyPatchToolDetails> {
|
|
const cwd = options.cwd ?? process.cwd();
|
|
const sandboxRoot = options.sandboxRoot;
|
|
|
|
return {
|
|
name: "apply_patch",
|
|
label: "apply_patch",
|
|
description:
|
|
"Apply a patch to one or more files using the apply_patch format. The input should include *** Begin Patch and *** End Patch markers.",
|
|
parameters: applyPatchSchema,
|
|
execute: async (_toolCallId, args, signal) => {
|
|
const params = args as { input?: string };
|
|
const input = typeof params.input === "string" ? params.input : "";
|
|
if (!input.trim()) {
|
|
throw new Error("Provide a patch input.");
|
|
}
|
|
if (signal?.aborted) {
|
|
const err = new Error("Aborted");
|
|
err.name = "AbortError";
|
|
throw err;
|
|
}
|
|
|
|
const result = await applyPatch(input, {
|
|
cwd,
|
|
sandboxRoot,
|
|
signal,
|
|
});
|
|
|
|
return {
|
|
content: [{ type: "text", text: result.text }],
|
|
details: { summary: result.summary },
|
|
};
|
|
},
|
|
};
|
|
}
|
|
|
|
export async function applyPatch(
|
|
input: string,
|
|
options: ApplyPatchOptions,
|
|
): Promise<ApplyPatchResult> {
|
|
const parsed = parsePatchText(input);
|
|
if (parsed.hunks.length === 0) {
|
|
throw new Error("No files were modified.");
|
|
}
|
|
|
|
const summary: ApplyPatchSummary = {
|
|
added: [],
|
|
modified: [],
|
|
deleted: [],
|
|
};
|
|
const seen = {
|
|
added: new Set<string>(),
|
|
modified: new Set<string>(),
|
|
deleted: new Set<string>(),
|
|
};
|
|
|
|
for (const hunk of parsed.hunks) {
|
|
if (options.signal?.aborted) {
|
|
const err = new Error("Aborted");
|
|
err.name = "AbortError";
|
|
throw err;
|
|
}
|
|
|
|
if (hunk.kind === "add") {
|
|
const target = await resolvePatchPath(hunk.path, options);
|
|
await ensureDir(target.resolved);
|
|
await fs.writeFile(target.resolved, hunk.contents, "utf8");
|
|
recordSummary(summary, seen, "added", target.display);
|
|
continue;
|
|
}
|
|
|
|
if (hunk.kind === "delete") {
|
|
const target = await resolvePatchPath(hunk.path, options);
|
|
await fs.rm(target.resolved);
|
|
recordSummary(summary, seen, "deleted", target.display);
|
|
continue;
|
|
}
|
|
|
|
const target = await resolvePatchPath(hunk.path, options);
|
|
const applied = await applyUpdateHunk(target.resolved, hunk.chunks);
|
|
|
|
if (hunk.movePath) {
|
|
const moveTarget = await resolvePatchPath(hunk.movePath, options);
|
|
await ensureDir(moveTarget.resolved);
|
|
await fs.writeFile(moveTarget.resolved, applied, "utf8");
|
|
await fs.rm(target.resolved);
|
|
recordSummary(summary, seen, "modified", moveTarget.display);
|
|
} else {
|
|
await fs.writeFile(target.resolved, applied, "utf8");
|
|
recordSummary(summary, seen, "modified", target.display);
|
|
}
|
|
}
|
|
|
|
return {
|
|
summary,
|
|
text: formatSummary(summary),
|
|
};
|
|
}
|
|
|
|
function recordSummary(
|
|
summary: ApplyPatchSummary,
|
|
seen: {
|
|
added: Set<string>;
|
|
modified: Set<string>;
|
|
deleted: Set<string>;
|
|
},
|
|
bucket: keyof ApplyPatchSummary,
|
|
value: string,
|
|
) {
|
|
if (seen[bucket].has(value)) return;
|
|
seen[bucket].add(value);
|
|
summary[bucket].push(value);
|
|
}
|
|
|
|
function formatSummary(summary: ApplyPatchSummary): string {
|
|
const lines = ["Success. Updated the following files:"];
|
|
for (const file of summary.added) lines.push(`A ${file}`);
|
|
for (const file of summary.modified) lines.push(`M ${file}`);
|
|
for (const file of summary.deleted) lines.push(`D ${file}`);
|
|
return lines.join("\n");
|
|
}
|
|
|
|
async function ensureDir(filePath: string) {
|
|
const parent = path.dirname(filePath);
|
|
if (!parent || parent === ".") return;
|
|
await fs.mkdir(parent, { recursive: true });
|
|
}
|
|
|
|
async function resolvePatchPath(
|
|
filePath: string,
|
|
options: ApplyPatchOptions,
|
|
): Promise<{ resolved: string; display: string }> {
|
|
if (options.sandboxRoot) {
|
|
const resolved = await assertSandboxPath({
|
|
filePath,
|
|
cwd: options.cwd,
|
|
root: options.sandboxRoot,
|
|
});
|
|
return {
|
|
resolved: resolved.resolved,
|
|
display: resolved.relative || resolved.resolved,
|
|
};
|
|
}
|
|
|
|
const resolved = resolvePathFromCwd(filePath, options.cwd);
|
|
return {
|
|
resolved,
|
|
display: toDisplayPath(resolved, options.cwd),
|
|
};
|
|
}
|
|
|
|
function normalizeUnicodeSpaces(value: string): string {
|
|
return value.replace(UNICODE_SPACES, " ");
|
|
}
|
|
|
|
function expandPath(filePath: string): string {
|
|
const normalized = normalizeUnicodeSpaces(filePath);
|
|
if (normalized === "~") return os.homedir();
|
|
if (normalized.startsWith("~/")) return os.homedir() + normalized.slice(1);
|
|
return normalized;
|
|
}
|
|
|
|
function resolvePathFromCwd(filePath: string, cwd: string): string {
|
|
const expanded = expandPath(filePath);
|
|
if (path.isAbsolute(expanded)) return path.normalize(expanded);
|
|
return path.resolve(cwd, expanded);
|
|
}
|
|
|
|
function toDisplayPath(resolved: string, cwd: string): string {
|
|
const relative = path.relative(cwd, resolved);
|
|
if (!relative || relative === "") return path.basename(resolved);
|
|
if (relative.startsWith("..") || path.isAbsolute(relative)) return resolved;
|
|
return relative;
|
|
}
|
|
|
|
function parsePatchText(input: string): { hunks: Hunk[]; patch: string } {
|
|
const trimmed = input.trim();
|
|
if (!trimmed) {
|
|
throw new Error("Invalid patch: input is empty.");
|
|
}
|
|
|
|
const lines = trimmed.split(/\r?\n/);
|
|
const validated = checkPatchBoundariesLenient(lines);
|
|
const hunks: Hunk[] = [];
|
|
|
|
const lastLineIndex = validated.length - 1;
|
|
let remaining = validated.slice(1, lastLineIndex);
|
|
let lineNumber = 2;
|
|
|
|
while (remaining.length > 0) {
|
|
const { hunk, consumed } = parseOneHunk(remaining, lineNumber);
|
|
hunks.push(hunk);
|
|
lineNumber += consumed;
|
|
remaining = remaining.slice(consumed);
|
|
}
|
|
|
|
return { hunks, patch: validated.join("\n") };
|
|
}
|
|
|
|
function checkPatchBoundariesLenient(lines: string[]): string[] {
|
|
const strictError = checkPatchBoundariesStrict(lines);
|
|
if (!strictError) return lines;
|
|
|
|
if (lines.length < 4) {
|
|
throw new Error(strictError);
|
|
}
|
|
const first = lines[0];
|
|
const last = lines[lines.length - 1];
|
|
if (
|
|
(first === "<<EOF" || first === "<<'EOF'" || first === '<<"EOF"') &&
|
|
last.endsWith("EOF")
|
|
) {
|
|
const inner = lines.slice(1, lines.length - 1);
|
|
const innerError = checkPatchBoundariesStrict(inner);
|
|
if (!innerError) return inner;
|
|
throw new Error(innerError);
|
|
}
|
|
|
|
throw new Error(strictError);
|
|
}
|
|
|
|
function checkPatchBoundariesStrict(lines: string[]): string | null {
|
|
const firstLine = lines[0]?.trim();
|
|
const lastLine = lines[lines.length - 1]?.trim();
|
|
|
|
if (firstLine === BEGIN_PATCH_MARKER && lastLine === END_PATCH_MARKER) {
|
|
return null;
|
|
}
|
|
if (firstLine !== BEGIN_PATCH_MARKER) {
|
|
return "The first line of the patch must be '*** Begin Patch'";
|
|
}
|
|
return "The last line of the patch must be '*** End Patch'";
|
|
}
|
|
|
|
/**
 * Parse a single hunk (Add/Delete/Update File) from the front of `lines`.
 *
 * `lineNumber` is the 1-based line number of lines[0] within the whole
 * patch, used only for error messages. Returns the parsed hunk and the
 * number of input lines it consumed so the caller can advance.
 * Throws when the header is unrecognized or an update hunk is empty.
 */
function parseOneHunk(
  lines: string[],
  lineNumber: number,
): { hunk: Hunk; consumed: number } {
  if (lines.length === 0) {
    throw new Error(`Invalid patch hunk at line ${lineNumber}: empty hunk`);
  }
  const firstLine = lines[0].trim();
  if (firstLine.startsWith(ADD_FILE_MARKER)) {
    const targetPath = firstLine.slice(ADD_FILE_MARKER.length);
    let contents = "";
    let consumed = 1;
    // Collect consecutive "+" lines; the first non-"+" line ends the hunk.
    for (const addLine of lines.slice(1)) {
      if (addLine.startsWith("+")) {
        contents += `${addLine.slice(1)}\n`;
        consumed += 1;
      } else {
        break;
      }
    }
    return {
      hunk: { kind: "add", path: targetPath, contents },
      consumed,
    };
  }

  if (firstLine.startsWith(DELETE_FILE_MARKER)) {
    // Delete hunks are a single header line; no body follows.
    const targetPath = firstLine.slice(DELETE_FILE_MARKER.length);
    return {
      hunk: { kind: "delete", path: targetPath },
      consumed: 1,
    };
  }

  if (firstLine.startsWith(UPDATE_FILE_MARKER)) {
    const targetPath = firstLine.slice(UPDATE_FILE_MARKER.length);
    let remaining = lines.slice(1);
    let consumed = 1;
    let movePath: string | undefined;

    // An optional "*** Move to:" directive may directly follow the header.
    const moveCandidate = remaining[0]?.trim();
    if (moveCandidate?.startsWith(MOVE_TO_MARKER)) {
      movePath = moveCandidate.slice(MOVE_TO_MARKER.length);
      remaining = remaining.slice(1);
      consumed += 1;
    }

    // Parse "@@" chunks until the next "***" header or end of input.
    const chunks: UpdateFileChunk[] = [];
    while (remaining.length > 0) {
      if (remaining[0].trim() === "") {
        // Skip blank separator lines between chunks.
        remaining = remaining.slice(1);
        consumed += 1;
        continue;
      }
      if (remaining[0].startsWith("***")) {
        break;
      }
      // Only the first chunk may omit its "@@" context marker.
      const { chunk, consumed: chunkLines } = parseUpdateFileChunk(
        remaining,
        lineNumber + consumed,
        chunks.length === 0,
      );
      chunks.push(chunk);
      remaining = remaining.slice(chunkLines);
      consumed += chunkLines;
    }

    if (chunks.length === 0) {
      throw new Error(
        `Invalid patch hunk at line ${lineNumber}: Update file hunk for path '${targetPath}' is empty`,
      );
    }

    return {
      hunk: {
        kind: "update",
        path: targetPath,
        movePath,
        chunks,
      },
      consumed,
    };
  }

  throw new Error(
    `Invalid patch hunk at line ${lineNumber}: '${lines[0]}' is not a valid hunk header. Valid hunk headers: '*** Add File: {path}', '*** Delete File: {path}', '*** Update File: {path}'`,
  );
}
|
|
|
|
/**
 * Parse one "@@" chunk of an Update File hunk from the front of `lines`.
 *
 * A chunk is an optional "@@"/"@@ <context>" header followed by diff lines:
 * ' ' context, '+' added, '-' removed, or a completely blank line (treated
 * as blank context). An optional trailing '*** End of File' marks the chunk
 * as anchored to the end of the file.
 *
 * `allowMissingContext` is true only for the first chunk of a hunk, which
 * may omit the "@@" header. Returns the chunk plus the number of input
 * lines consumed (header + diff lines + EOF marker, when present).
 */
function parseUpdateFileChunk(
  lines: string[],
  lineNumber: number,
  allowMissingContext: boolean,
): { chunk: UpdateFileChunk; consumed: number } {
  if (lines.length === 0) {
    throw new Error(
      `Invalid patch hunk at line ${lineNumber}: Update hunk does not contain any lines`,
    );
  }

  // Consume the optional "@@" header and capture its context, if any.
  let changeContext: string | undefined;
  let startIndex = 0;
  if (lines[0] === EMPTY_CHANGE_CONTEXT_MARKER) {
    startIndex = 1;
  } else if (lines[0].startsWith(CHANGE_CONTEXT_MARKER)) {
    changeContext = lines[0].slice(CHANGE_CONTEXT_MARKER.length);
    startIndex = 1;
  } else if (!allowMissingContext) {
    throw new Error(
      `Invalid patch hunk at line ${lineNumber}: Expected update hunk to start with a @@ context marker, got: '${lines[0]}'`,
    );
  }

  if (startIndex >= lines.length) {
    throw new Error(
      `Invalid patch hunk at line ${lineNumber + 1}: Update hunk does not contain any lines`,
    );
  }

  const chunk: UpdateFileChunk = {
    changeContext,
    oldLines: [],
    newLines: [],
    isEndOfFile: false,
  };

  let parsedLines = 0;
  for (const line of lines.slice(startIndex)) {
    if (line === EOF_MARKER) {
      if (parsedLines === 0) {
        throw new Error(
          `Invalid patch hunk at line ${lineNumber + 1}: Update hunk does not contain any lines`,
        );
      }
      chunk.isEndOfFile = true;
      // The marker itself counts as a consumed line.
      parsedLines += 1;
      break;
    }

    const marker = line[0];
    if (!marker) {
      // Fully blank line: treated as blank context on both sides.
      chunk.oldLines.push("");
      chunk.newLines.push("");
      parsedLines += 1;
      continue;
    }

    if (marker === " ") {
      // Context line: present in both the old and new file.
      const content = line.slice(1);
      chunk.oldLines.push(content);
      chunk.newLines.push(content);
      parsedLines += 1;
      continue;
    }
    if (marker === "+") {
      chunk.newLines.push(line.slice(1));
      parsedLines += 1;
      continue;
    }
    if (marker === "-") {
      chunk.oldLines.push(line.slice(1));
      parsedLines += 1;
      continue;
    }

    // Unknown marker: an error if the chunk is still empty, otherwise it
    // simply terminates this chunk (the line belongs to what follows).
    if (parsedLines === 0) {
      throw new Error(
        `Invalid patch hunk at line ${lineNumber + 1}: Unexpected line found in update hunk: '${line}'. Every line should start with ' ' (context line), '+' (added line), or '-' (removed line)`,
      );
    }
    break;
  }

  return { chunk, consumed: parsedLines + startIndex };
}
|
|
|
|
async function applyUpdateHunk(
|
|
filePath: string,
|
|
chunks: UpdateFileChunk[],
|
|
): Promise<string> {
|
|
const originalContents = await fs.readFile(filePath, "utf8").catch((err) => {
|
|
throw new Error(`Failed to read file to update ${filePath}: ${err}`);
|
|
});
|
|
|
|
const originalLines = originalContents.split("\n");
|
|
if (
|
|
originalLines.length > 0 &&
|
|
originalLines[originalLines.length - 1] === ""
|
|
) {
|
|
originalLines.pop();
|
|
}
|
|
|
|
const replacements = computeReplacements(originalLines, filePath, chunks);
|
|
let newLines = applyReplacements(originalLines, replacements);
|
|
if (newLines.length === 0 || newLines[newLines.length - 1] !== "") {
|
|
newLines = [...newLines, ""];
|
|
}
|
|
return newLines.join("\n");
|
|
}
|
|
|
|
/**
 * Translate update chunks into [startIndex, deleteCount, newLines] triples
 * against `originalLines`. Chunks are located in order, each search starting
 * where the previous match ended. Throws when a chunk's context or old
 * lines cannot be found in the file.
 */
function computeReplacements(
  originalLines: string[],
  filePath: string,
  chunks: UpdateFileChunk[],
): Array<[number, number, string[]]> {
  const replacements: Array<[number, number, string[]]> = [];
  // Search cursor: matches must occur at or after this index.
  let lineIndex = 0;

  for (const chunk of chunks) {
    // Seek past the "@@ <context>" line first, when the chunk has one.
    if (chunk.changeContext) {
      const ctxIndex = seekSequence(
        originalLines,
        [chunk.changeContext],
        lineIndex,
        false,
      );
      if (ctxIndex === null) {
        throw new Error(
          `Failed to find context '${chunk.changeContext}' in ${filePath}`,
        );
      }
      lineIndex = ctxIndex + 1;
    }

    // A chunk with no old lines is a pure insertion appended at the end of
    // the file (before the trailing blank line, if one exists).
    if (chunk.oldLines.length === 0) {
      const insertionIndex =
        originalLines.length > 0 &&
        originalLines[originalLines.length - 1] === ""
          ? originalLines.length - 1
          : originalLines.length;
      replacements.push([insertionIndex, 0, chunk.newLines]);
      continue;
    }

    let pattern = chunk.oldLines;
    let newSlice = chunk.newLines;
    let found = seekSequence(
      originalLines,
      pattern,
      lineIndex,
      chunk.isEndOfFile,
    );

    // Retry without a trailing blank line: the patch may carry one that the
    // file (whose final newline was already stripped) does not contain.
    if (found === null && pattern[pattern.length - 1] === "") {
      pattern = pattern.slice(0, -1);
      if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
        newSlice = newSlice.slice(0, -1);
      }
      found = seekSequence(
        originalLines,
        pattern,
        lineIndex,
        chunk.isEndOfFile,
      );
    }

    if (found === null) {
      throw new Error(
        `Failed to find expected lines in ${filePath}:\n${chunk.oldLines.join("\n")}`,
      );
    }

    replacements.push([found, pattern.length, newSlice]);
    lineIndex = found + pattern.length;
  }

  // applyReplacements applies right-to-left, so hand it ascending order.
  replacements.sort((a, b) => a[0] - b[0]);
  return replacements;
}
|
|
|
|
function applyReplacements(
|
|
lines: string[],
|
|
replacements: Array<[number, number, string[]]>,
|
|
): string[] {
|
|
const result = [...lines];
|
|
for (const [startIndex, oldLen, newLines] of [...replacements].reverse()) {
|
|
for (let i = 0; i < oldLen; i += 1) {
|
|
if (startIndex < result.length) {
|
|
result.splice(startIndex, 1);
|
|
}
|
|
}
|
|
for (let i = 0; i < newLines.length; i += 1) {
|
|
result.splice(startIndex + i, 0, newLines[i]);
|
|
}
|
|
}
|
|
return result;
|
|
}
|
|
|
|
function seekSequence(
|
|
lines: string[],
|
|
pattern: string[],
|
|
start: number,
|
|
eof: boolean,
|
|
): number | null {
|
|
if (pattern.length === 0) return start;
|
|
if (pattern.length > lines.length) return null;
|
|
|
|
const maxStart = lines.length - pattern.length;
|
|
const searchStart = eof && lines.length >= pattern.length ? maxStart : start;
|
|
if (searchStart > maxStart) return null;
|
|
|
|
for (let i = searchStart; i <= maxStart; i += 1) {
|
|
if (linesMatch(lines, pattern, i, (value) => value)) return i;
|
|
}
|
|
for (let i = searchStart; i <= maxStart; i += 1) {
|
|
if (linesMatch(lines, pattern, i, (value) => value.trimEnd())) return i;
|
|
}
|
|
for (let i = searchStart; i <= maxStart; i += 1) {
|
|
if (linesMatch(lines, pattern, i, (value) => value.trim())) return i;
|
|
}
|
|
for (let i = searchStart; i <= maxStart; i += 1) {
|
|
if (
|
|
linesMatch(lines, pattern, i, (value) =>
|
|
normalizePunctuation(value.trim()),
|
|
)
|
|
) {
|
|
return i;
|
|
}
|
|
}
|
|
|
|
return null;
|
|
}
|
|
|
|
function linesMatch(
|
|
lines: string[],
|
|
pattern: string[],
|
|
start: number,
|
|
normalize: (value: string) => string,
|
|
): boolean {
|
|
for (let idx = 0; idx < pattern.length; idx += 1) {
|
|
if (normalize(lines[start + idx]) !== normalize(pattern[idx])) {
|
|
return false;
|
|
}
|
|
}
|
|
return true;
|
|
}
|
|
|
|
function normalizePunctuation(value: string): string {
|
|
return Array.from(value)
|
|
.map((char) => {
|
|
switch (char) {
|
|
case "\u2010":
|
|
case "\u2011":
|
|
case "\u2012":
|
|
case "\u2013":
|
|
case "\u2014":
|
|
case "\u2015":
|
|
case "\u2212":
|
|
return "-";
|
|
case "\u2018":
|
|
case "\u2019":
|
|
case "\u201A":
|
|
case "\u201B":
|
|
return "'";
|
|
case "\u201C":
|
|
case "\u201D":
|
|
case "\u201E":
|
|
case "\u201F":
|
|
return '"';
|
|
case "\u00A0":
|
|
case "\u2002":
|
|
case "\u2003":
|
|
case "\u2004":
|
|
case "\u2005":
|
|
case "\u2006":
|
|
case "\u2007":
|
|
case "\u2008":
|
|
case "\u2009":
|
|
case "\u200A":
|
|
case "\u202F":
|
|
case "\u205F":
|
|
case "\u3000":
|
|
return " ";
|
|
default:
|
|
return char;
|
|
}
|
|
})
|
|
.join("");
|
|
}
|