perf: speed up memory batch polling

This commit is contained in:
Peter Steinberger
2026-01-18 03:55:09 +00:00
parent 0d9172d761
commit afb877a96b
10 changed files with 62 additions and 10 deletions

View File

@@ -5,6 +5,7 @@ Docs: https://docs.clawd.bot
## 2026.1.17-6 ## 2026.1.17-6
### Changes ### Changes
- Memory: render progress immediately and poll OpenAI batch status more frequently (default 500ms).
- Plugins: add exclusive plugin slots with a dedicated memory slot selector. - Plugins: add exclusive plugin slots with a dedicated memory slot selector.
- Memory: ship core memory tools + CLI as the bundled `memory-core` plugin. - Memory: ship core memory tools + CLI as the bundled `memory-core` plugin.
- Docs: document plugin slots and memory plugin behavior. - Docs: document plugin slots and memory plugin behavior.

View File

@@ -82,7 +82,7 @@ describe("memory search config", () => {
enabled: true, enabled: true,
wait: true, wait: true,
concurrency: 2, concurrency: 2,
pollIntervalMs: 5000, pollIntervalMs: 500,
timeoutMinutes: 60, timeoutMinutes: 60,
}); });
}); });
@@ -135,7 +135,7 @@ describe("memory search config", () => {
enabled: true, enabled: true,
wait: true, wait: true,
concurrency: 2, concurrency: 2,
pollIntervalMs: 5000, pollIntervalMs: 500,
timeoutMinutes: 60, timeoutMinutes: 60,
}, },
}); });

View File

@@ -120,7 +120,7 @@ function mergeConfig(
overrides?.remote?.batch?.concurrency ?? defaults?.remote?.batch?.concurrency ?? 2, overrides?.remote?.batch?.concurrency ?? defaults?.remote?.batch?.concurrency ?? 2,
), ),
pollIntervalMs: pollIntervalMs:
overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 5000, overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 500,
timeoutMinutes: timeoutMinutes:
overrides?.remote?.batch?.timeoutMinutes ?? defaults?.remote?.batch?.timeoutMinutes ?? 60, overrides?.remote?.batch?.timeoutMinutes ?? defaults?.remote?.batch?.timeoutMinutes ?? 60,
}; };

View File

@@ -71,7 +71,11 @@ export function registerMemoryCli(program: Command) {
}); });
if (opts.index) { if (opts.index) {
await withProgressTotals( await withProgressTotals(
{ label: "Indexing memory…", total: 0 }, {
label: "Indexing memory…",
total: 0,
fallback: opts.verbose ? "line" : undefined,
},
async (update, progress) => { async (update, progress) => {
try { try {
await manager.sync({ await manager.sync({

View File

@@ -1,8 +1,13 @@
import { spinner } from "@clack/prompts"; import { spinner } from "@clack/prompts";
import { createOscProgressController, supportsOscProgress } from "osc-progress"; import { createOscProgressController, supportsOscProgress } from "osc-progress";
import { theme } from "../terminal/theme.js"; import { theme } from "../terminal/theme.js";
import {
clearActiveProgressLine,
registerActiveProgressLine,
unregisterActiveProgressLine,
} from "../terminal/progress-line.js";
const DEFAULT_DELAY_MS = 300; const DEFAULT_DELAY_MS = 0;
let activeProgress = 0; let activeProgress = 0;
type ProgressOptions = { type ProgressOptions = {
@@ -12,7 +17,7 @@ type ProgressOptions = {
enabled?: boolean; enabled?: boolean;
delayMs?: number; delayMs?: number;
stream?: NodeJS.WriteStream; stream?: NodeJS.WriteStream;
fallback?: "spinner" | "none"; fallback?: "spinner" | "line" | "none";
}; };
export type ProgressReporter = { export type ProgressReporter = {
@@ -45,6 +50,7 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
const delayMs = typeof options.delayMs === "number" ? options.delayMs : DEFAULT_DELAY_MS; const delayMs = typeof options.delayMs === "number" ? options.delayMs : DEFAULT_DELAY_MS;
const canOsc = supportsOscProgress(process.env, stream.isTTY); const canOsc = supportsOscProgress(process.env, stream.isTTY);
const allowSpinner = options.fallback === undefined || options.fallback === "spinner"; const allowSpinner = options.fallback === undefined || options.fallback === "spinner";
const allowLine = options.fallback === "line";
let started = false; let started = false;
let label = options.label; let label = options.label;
@@ -55,6 +61,7 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
options.indeterminate ?? (options.total === undefined || options.total === null); options.indeterminate ?? (options.total === undefined || options.total === null);
activeProgress += 1; activeProgress += 1;
registerActiveProgressLine(stream);
const controller = canOsc const controller = canOsc
? createOscProgressController({ ? createOscProgressController({
@@ -65,6 +72,14 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
: null; : null;
const spin = allowSpinner ? spinner() : null; const spin = allowSpinner ? spinner() : null;
const renderLine = allowLine
? () => {
if (!started) return;
const suffix = indeterminate ? "" : ` ${percent}%`;
clearActiveProgressLine();
stream.write(`${theme.accent(label)}${suffix}`);
}
: null;
let timer: NodeJS.Timeout | null = null; let timer: NodeJS.Timeout | null = null;
const applyState = () => { const applyState = () => {
@@ -76,6 +91,9 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
if (spin) { if (spin) {
spin.message(theme.accent(label)); spin.message(theme.accent(label));
} }
if (renderLine) {
renderLine();
}
}; };
const start = () => { const start = () => {
@@ -122,6 +140,8 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
} }
if (controller) controller.clear(); if (controller) controller.clear();
if (spin) spin.stop(); if (spin) spin.stop();
clearActiveProgressLine();
unregisterActiveProgressLine(stream);
activeProgress = Math.max(0, activeProgress - 1); activeProgress = Math.max(0, activeProgress - 1);
}; };

View File

@@ -383,7 +383,7 @@ const FIELD_HELP: Record<string, string> = {
"agents.defaults.memorySearch.remote.batch.concurrency": "agents.defaults.memorySearch.remote.batch.concurrency":
"Max concurrent OpenAI batch jobs for memory indexing (default: 2).", "Max concurrent OpenAI batch jobs for memory indexing (default: 2).",
"agents.defaults.memorySearch.remote.batch.pollIntervalMs": "agents.defaults.memorySearch.remote.batch.pollIntervalMs":
"Polling interval in ms for OpenAI batch status (default: 5000).", "Polling interval in ms for OpenAI batch status (default: 500).",
"agents.defaults.memorySearch.remote.batch.timeoutMinutes": "agents.defaults.memorySearch.remote.batch.timeoutMinutes":
"Timeout in minutes for OpenAI batch indexing (default: 60).", "Timeout in minutes for OpenAI batch indexing (default: 60).",
"agents.defaults.memorySearch.local.modelPath": "agents.defaults.memorySearch.local.modelPath":

View File

@@ -7,6 +7,7 @@ import { getConsoleSettings, shouldLogSubsystemToConsole } from "./console.js";
import { type LogLevel, levelToMinLevel } from "./levels.js"; import { type LogLevel, levelToMinLevel } from "./levels.js";
import { getChildLogger } from "./logger.js"; import { getChildLogger } from "./logger.js";
import { loggingState } from "./state.js"; import { loggingState } from "./state.js";
import { clearActiveProgressLine } from "../terminal/progress-line.js";
type LogObj = { date?: Date } & Record<string, unknown>; type LogObj = { date?: Date } & Record<string, unknown>;
@@ -163,6 +164,7 @@ function formatConsoleLine(opts: {
} }
function writeConsoleLine(level: LogLevel, line: string) { function writeConsoleLine(level: LogLevel, line: string) {
clearActiveProgressLine();
const sanitized = const sanitized =
process.platform === "win32" && process.env.GITHUB_ACTIONS === "true" process.platform === "win32" && process.env.GITHUB_ACTIONS === "true"
? line.replace(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g, "?").replace(/[\uD800-\uDFFF]/g, "?") ? line.replace(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g, "?").replace(/[\uD800-\uDFFF]/g, "?")

View File

@@ -252,7 +252,7 @@ export class MemoryIndexManager {
enabled: Boolean(batch?.enabled && this.openAi && this.provider.id === "openai"), enabled: Boolean(batch?.enabled && this.openAi && this.provider.id === "openai"),
wait: batch?.wait ?? true, wait: batch?.wait ?? true,
concurrency: Math.max(1, batch?.concurrency ?? 2), concurrency: Math.max(1, batch?.concurrency ?? 2),
pollIntervalMs: batch?.pollIntervalMs ?? 5000, pollIntervalMs: batch?.pollIntervalMs ?? 500,
timeoutMs: (batch?.timeoutMinutes ?? 60) * 60 * 1000, timeoutMs: (batch?.timeoutMinutes ?? 60) * 60 * 1000,
}; };
} }

View File

@@ -1,3 +1,5 @@
import { clearActiveProgressLine } from "./terminal/progress-line.js";
export type RuntimeEnv = { export type RuntimeEnv = {
log: typeof console.log; log: typeof console.log;
error: typeof console.error; error: typeof console.error;
@@ -5,8 +7,14 @@ export type RuntimeEnv = {
}; };
export const defaultRuntime: RuntimeEnv = { export const defaultRuntime: RuntimeEnv = {
log: console.log, log: (...args: Parameters<typeof console.log>) => {
error: console.error, clearActiveProgressLine();
console.log(...args);
},
error: (...args: Parameters<typeof console.error>) => {
clearActiveProgressLine();
console.error(...args);
},
exit: (code) => { exit: (code) => {
process.exit(code); process.exit(code);
throw new Error("unreachable"); // satisfies tests when mocked throw new Error("unreachable"); // satisfies tests when mocked

View File

@@ -0,0 +1,17 @@
// Tracks the single terminal stream that currently owns a transient
// progress line, so log writers can wipe that line before printing.
let activeStream: NodeJS.WriteStream | null = null;

/**
 * Mark `stream` as the owner of the active progress line.
 * Non-TTY streams are ignored — there is no cursor line to manage.
 */
export function registerActiveProgressLine(stream: NodeJS.WriteStream): void {
  if (stream.isTTY) {
    activeStream = stream;
  }
}

/**
 * Erase the current progress line: carriage return plus the ANSI
 * "erase entire line" sequence (CSI 2K), leaving the cursor at column 0.
 * No-op when no TTY stream is registered.
 */
export function clearActiveProgressLine(): void {
  const target = activeStream;
  if (target?.isTTY) {
    target.write("\r\x1b[2K");
  }
}

/**
 * Release ownership of the progress line. When `stream` is provided, the
 * release only happens if it matches the currently registered stream —
 * this keeps a stale unregister from clobbering a newer owner.
 */
export function unregisterActiveProgressLine(stream?: NodeJS.WriteStream): void {
  if (activeStream === null) return;
  if (stream !== undefined && stream !== activeStream) return;
  activeStream = null;
}