perf: speed up memory batch polling
This commit is contained in:
@@ -5,6 +5,7 @@ Docs: https://docs.clawd.bot
|
||||
## 2026.1.17-6

### Changes

- Memory: render progress immediately and poll OpenAI batch status more frequently (default 500ms).
- Plugins: add exclusive plugin slots with a dedicated memory slot selector.
- Memory: ship core memory tools + CLI as the bundled `memory-core` plugin.
- Docs: document plugin slots and memory plugin behavior.
||||
@@ -82,7 +82,7 @@ describe("memory search config", () => {
|
||||
enabled: true,
|
||||
wait: true,
|
||||
concurrency: 2,
|
||||
pollIntervalMs: 5000,
|
||||
pollIntervalMs: 500,
|
||||
timeoutMinutes: 60,
|
||||
});
|
||||
});
|
||||
@@ -135,7 +135,7 @@ describe("memory search config", () => {
|
||||
enabled: true,
|
||||
wait: true,
|
||||
concurrency: 2,
|
||||
pollIntervalMs: 5000,
|
||||
pollIntervalMs: 500,
|
||||
timeoutMinutes: 60,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -120,7 +120,7 @@ function mergeConfig(
|
||||
overrides?.remote?.batch?.concurrency ?? defaults?.remote?.batch?.concurrency ?? 2,
|
||||
),
|
||||
pollIntervalMs:
|
||||
overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 5000,
|
||||
overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 500,
|
||||
timeoutMinutes:
|
||||
overrides?.remote?.batch?.timeoutMinutes ?? defaults?.remote?.batch?.timeoutMinutes ?? 60,
|
||||
};
|
||||
|
||||
@@ -71,7 +71,11 @@ export function registerMemoryCli(program: Command) {
|
||||
});
|
||||
if (opts.index) {
|
||||
await withProgressTotals(
|
||||
{ label: "Indexing memory…", total: 0 },
|
||||
{
|
||||
label: "Indexing memory…",
|
||||
total: 0,
|
||||
fallback: opts.verbose ? "line" : undefined,
|
||||
},
|
||||
async (update, progress) => {
|
||||
try {
|
||||
await manager.sync({
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
import { spinner } from "@clack/prompts";
|
||||
import { createOscProgressController, supportsOscProgress } from "osc-progress";
|
||||
import { theme } from "../terminal/theme.js";
|
||||
import {
|
||||
clearActiveProgressLine,
|
||||
registerActiveProgressLine,
|
||||
unregisterActiveProgressLine,
|
||||
} from "../terminal/progress-line.js";
|
||||
|
||||
const DEFAULT_DELAY_MS = 300;
|
||||
const DEFAULT_DELAY_MS = 0;
|
||||
let activeProgress = 0;
|
||||
|
||||
type ProgressOptions = {
|
||||
@@ -12,7 +17,7 @@ type ProgressOptions = {
|
||||
enabled?: boolean;
|
||||
delayMs?: number;
|
||||
stream?: NodeJS.WriteStream;
|
||||
fallback?: "spinner" | "none";
|
||||
fallback?: "spinner" | "line" | "none";
|
||||
};
|
||||
|
||||
export type ProgressReporter = {
|
||||
@@ -45,6 +50,7 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
|
||||
const delayMs = typeof options.delayMs === "number" ? options.delayMs : DEFAULT_DELAY_MS;
|
||||
const canOsc = supportsOscProgress(process.env, stream.isTTY);
|
||||
const allowSpinner = options.fallback === undefined || options.fallback === "spinner";
|
||||
const allowLine = options.fallback === "line";
|
||||
|
||||
let started = false;
|
||||
let label = options.label;
|
||||
@@ -55,6 +61,7 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
|
||||
options.indeterminate ?? (options.total === undefined || options.total === null);
|
||||
|
||||
activeProgress += 1;
|
||||
registerActiveProgressLine(stream);
|
||||
|
||||
const controller = canOsc
|
||||
? createOscProgressController({
|
||||
@@ -65,6 +72,14 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
|
||||
: null;
|
||||
|
||||
const spin = allowSpinner ? spinner() : null;
|
||||
const renderLine = allowLine
|
||||
? () => {
|
||||
if (!started) return;
|
||||
const suffix = indeterminate ? "" : ` ${percent}%`;
|
||||
clearActiveProgressLine();
|
||||
stream.write(`${theme.accent(label)}${suffix}`);
|
||||
}
|
||||
: null;
|
||||
let timer: NodeJS.Timeout | null = null;
|
||||
|
||||
const applyState = () => {
|
||||
@@ -76,6 +91,9 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
|
||||
if (spin) {
|
||||
spin.message(theme.accent(label));
|
||||
}
|
||||
if (renderLine) {
|
||||
renderLine();
|
||||
}
|
||||
};
|
||||
|
||||
const start = () => {
|
||||
@@ -122,6 +140,8 @@ export function createCliProgress(options: ProgressOptions): ProgressReporter {
|
||||
}
|
||||
if (controller) controller.clear();
|
||||
if (spin) spin.stop();
|
||||
clearActiveProgressLine();
|
||||
unregisterActiveProgressLine(stream);
|
||||
activeProgress = Math.max(0, activeProgress - 1);
|
||||
};
|
||||
|
||||
|
||||
@@ -383,7 +383,7 @@ const FIELD_HELP: Record<string, string> = {
|
||||
"agents.defaults.memorySearch.remote.batch.concurrency":
|
||||
"Max concurrent OpenAI batch jobs for memory indexing (default: 2).",
|
||||
"agents.defaults.memorySearch.remote.batch.pollIntervalMs":
|
||||
"Polling interval in ms for OpenAI batch status (default: 5000).",
|
||||
"Polling interval in ms for OpenAI batch status (default: 500).",
|
||||
"agents.defaults.memorySearch.remote.batch.timeoutMinutes":
|
||||
"Timeout in minutes for OpenAI batch indexing (default: 60).",
|
||||
"agents.defaults.memorySearch.local.modelPath":
|
||||
|
||||
@@ -7,6 +7,7 @@ import { getConsoleSettings, shouldLogSubsystemToConsole } from "./console.js";
|
||||
import { type LogLevel, levelToMinLevel } from "./levels.js";
|
||||
import { getChildLogger } from "./logger.js";
|
||||
import { loggingState } from "./state.js";
|
||||
import { clearActiveProgressLine } from "../terminal/progress-line.js";
|
||||
|
||||
type LogObj = { date?: Date } & Record<string, unknown>;
|
||||
|
||||
@@ -163,6 +164,7 @@ function formatConsoleLine(opts: {
|
||||
}
|
||||
|
||||
function writeConsoleLine(level: LogLevel, line: string) {
|
||||
clearActiveProgressLine();
|
||||
const sanitized =
|
||||
process.platform === "win32" && process.env.GITHUB_ACTIONS === "true"
|
||||
? line.replace(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g, "?").replace(/[\uD800-\uDFFF]/g, "?")
|
||||
|
||||
@@ -252,7 +252,7 @@ export class MemoryIndexManager {
|
||||
enabled: Boolean(batch?.enabled && this.openAi && this.provider.id === "openai"),
|
||||
wait: batch?.wait ?? true,
|
||||
concurrency: Math.max(1, batch?.concurrency ?? 2),
|
||||
pollIntervalMs: batch?.pollIntervalMs ?? 5000,
|
||||
pollIntervalMs: batch?.pollIntervalMs ?? 500,
|
||||
timeoutMs: (batch?.timeoutMinutes ?? 60) * 60 * 1000,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import { clearActiveProgressLine } from "./terminal/progress-line.js";
|
||||
|
||||
export type RuntimeEnv = {
|
||||
log: typeof console.log;
|
||||
error: typeof console.error;
|
||||
@@ -5,8 +7,14 @@ export type RuntimeEnv = {
|
||||
};
|
||||
|
||||
export const defaultRuntime: RuntimeEnv = {
|
||||
log: console.log,
|
||||
error: console.error,
|
||||
log: (...args: Parameters<typeof console.log>) => {
|
||||
clearActiveProgressLine();
|
||||
console.log(...args);
|
||||
},
|
||||
error: (...args: Parameters<typeof console.error>) => {
|
||||
clearActiveProgressLine();
|
||||
console.error(...args);
|
||||
},
|
||||
exit: (code) => {
|
||||
process.exit(code);
|
||||
throw new Error("unreachable"); // satisfies tests when mocked
|
||||
|
||||
17
src/terminal/progress-line.ts
Normal file
17
src/terminal/progress-line.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
let activeStream: NodeJS.WriteStream | null = null;
|
||||
|
||||
export function registerActiveProgressLine(stream: NodeJS.WriteStream): void {
|
||||
if (!stream.isTTY) return;
|
||||
activeStream = stream;
|
||||
}
|
||||
|
||||
export function clearActiveProgressLine(): void {
|
||||
if (!activeStream?.isTTY) return;
|
||||
activeStream.write("\r\x1b[2K");
|
||||
}
|
||||
|
||||
export function unregisterActiveProgressLine(stream?: NodeJS.WriteStream): void {
|
||||
if (!activeStream) return;
|
||||
if (stream && activeStream !== stream) return;
|
||||
activeStream = null;
|
||||
}
|
||||
Reference in New Issue
Block a user