fix: harden memory indexing and embedded session locks

Peter Steinberger
2026-01-18 04:29:42 +00:00
parent b7575a889e
commit cf8b3ed988
2 changed files with 66 additions and 69 deletions

View File

@@ -146,83 +146,78 @@ const readSessionMessages = async (sessionFile: string) => {
};
describe("runEmbeddedPiAgent", () => {
-  it(
-    "appends new user + assistant after existing transcript entries",
-    { timeout: 90_000 },
-    async () => {
+  it("appends new user + assistant after existing transcript entries", { timeout: 90_000 }, async () => {
    const { SessionManager } = await import("@mariozechner/pi-coding-agent");
    const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-agent-"));
    const workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-workspace-"));
    const sessionFile = path.join(workspaceDir, "session.jsonl");
    const sessionManager = SessionManager.open(sessionFile);
    sessionManager.appendMessage({
      role: "user",
      content: [{ type: "text", text: "seed user" }],
    });
    sessionManager.appendMessage({
      role: "assistant",
      content: [{ type: "text", text: "seed assistant" }],
      stopReason: "stop",
      api: "openai-responses",
      provider: "openai",
      model: "mock-1",
      usage: {
        input: 1,
        output: 1,
        cacheRead: 0,
        cacheWrite: 0,
        totalTokens: 2,
        cost: {
          input: 0,
          output: 0,
          cacheRead: 0,
          cacheWrite: 0,
          total: 0,
        },
      },
      timestamp: Date.now(),
    });
    const cfg = makeOpenAiConfig(["mock-1"]);
    await ensureModels(cfg, agentDir);
    await runEmbeddedPiAgent({
      sessionId: "session:test",
      sessionKey: testSessionKey,
      sessionFile,
      workspaceDir,
      config: cfg,
      prompt: "hello",
      provider: "openai",
      model: "mock-1",
      timeoutMs: 5_000,
      agentDir,
      enqueue: immediateEnqueue,
    });
    const messages = await readSessionMessages(sessionFile);
    const seedUserIndex = messages.findIndex(
      (message) => message?.role === "user" && textFromContent(message.content) === "seed user",
    );
    const seedAssistantIndex = messages.findIndex(
      (message) =>
        message?.role === "assistant" && textFromContent(message.content) === "seed assistant",
    );
    const newUserIndex = messages.findIndex(
      (message) => message?.role === "user" && textFromContent(message.content) === "hello",
    );
    const newAssistantIndex = messages.findIndex(
      (message, index) => index > newUserIndex && message?.role === "assistant",
    );
    expect(seedUserIndex).toBeGreaterThanOrEqual(0);
    expect(seedAssistantIndex).toBeGreaterThan(seedUserIndex);
    expect(newUserIndex).toBeGreaterThan(seedAssistantIndex);
    expect(newAssistantIndex).toBeGreaterThan(newUserIndex);
-    },
-    45_000,
-  );
+  });
  it("persists multi-turn user/assistant ordering across runs", async () => {
    const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-agent-"));
    const workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-workspace-"));
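The assertions above depend only on readSessionMessages returning the transcript's messages in on-disk order; its body sits outside this hunk. A minimal sketch of such a JSONL reader, assuming each line of the session file is a JSON record that either is a message or wraps one in a message field (the record shape and the name readJsonlMessages are assumptions, not taken from this commit):

import fs from "node:fs/promises";

type TranscriptMessage = { role?: string; content?: unknown };

// Hypothetical reader: parses a JSONL transcript and returns message-like
// records in file order. The record shape (bare message vs. an envelope
// with a `message` field) is an assumption, not this repository's helper.
async function readJsonlMessages(sessionFile: string): Promise<TranscriptMessage[]> {
  const raw = await fs.readFile(sessionFile, "utf8");
  return raw
    .split("\n")
    .filter((line) => line.trim().length > 0)
    .map((line) => JSON.parse(line) as Record<string, unknown>)
    // Accept either a bare message record or an envelope with a `message` field.
    .map((record) => (record.message ?? record) as TranscriptMessage)
    .filter((message) => typeof message.role === "string");
}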

View File

@@ -1,4 +1,5 @@
import fs from "node:fs/promises";
+import path from "node:path";
type LockFilePayload = {
  pid: number;
@@ -46,6 +47,7 @@ export async function acquireSessionWriteLock(params: {
  const staleMs = params.staleMs ?? 30 * 60 * 1000;
  const sessionFile = params.sessionFile;
  const lockPath = `${sessionFile}.lock`;
+  await fs.mkdir(path.dirname(lockPath), { recursive: true });
  const held = HELD_LOCKS.get(sessionFile);
  if (held) {
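The added fs.mkdir(path.dirname(lockPath), { recursive: true }) ensures the lock file's parent directory exists before the lock is written, so acquiring a session write lock in a workspace directory that has not been created yet no longer fails with ENOENT. A minimal sketch of the surrounding lock-file pattern, assuming the lock is claimed by writing a pid/timestamp payload with an exclusive create and that locks older than staleMs are treated as abandoned; the acquiredAt field, the retry policy, and the name acquireLockSketch are assumptions, and only pid, staleMs, lockPath, and the new mkdir come from this diff:

import fs from "node:fs/promises";
import path from "node:path";

type LockFilePayload = { pid: number; acquiredAt: number };

// Sketch only: claim `${sessionFile}.lock` with an exclusive write, taking
// over an existing lock only if it looks stale. Not the repository's code.
async function acquireLockSketch(sessionFile: string, staleMs = 30 * 60 * 1000): Promise<string> {
  const lockPath = `${sessionFile}.lock`;
  // The hardening from this commit: make sure the parent directory exists first.
  await fs.mkdir(path.dirname(lockPath), { recursive: true });
  const payload: LockFilePayload = { pid: process.pid, acquiredAt: Date.now() };
  try {
    // "wx" fails if the lock file already exists, so only one writer wins the race.
    await fs.writeFile(lockPath, JSON.stringify(payload), { flag: "wx" });
  } catch {
    const existing = JSON.parse(await fs.readFile(lockPath, "utf8")) as LockFilePayload;
    if (Date.now() - existing.acquiredAt < staleMs) {
      throw new Error(`session lock held by pid ${existing.pid}`);
    }
    // Stale lock: take it over.
    await fs.writeFile(lockPath, JSON.stringify(payload));
  }
  return lockPath;
}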