perf: improve batch status logging

Peter Steinberger
2026-01-18 04:27:58 +00:00
parent 331b8157b0
commit e4e1396a98
5 changed files with 36 additions and 6 deletions

View File

@@ -19,6 +19,7 @@ Docs: https://docs.clawd.bot
- Memory: add hybrid BM25 + vector search (FTS5) with weighted merging and fallback.
- Memory: add SQLite embedding cache to speed up reindexing and frequent updates.
- CLI: surface FTS + embedding cache state in `clawdbot memory status`.
+ - Memory: render progress immediately, color batch statuses in verbose logs, and poll OpenAI batch status every 2s by default.
- Plugins: allow optional agent tools with explicit allowlists and add plugin tool authoring guide. https://docs.clawd.bot/plugins/agent-tools
- Tools: centralize plugin tool policy helpers.
- Docs: clarify plugin agent tool configuration. https://docs.clawd.bot/plugins/agent-tools

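The 2s default noted above can still be tuned through the same config path that the field help further down documents. A minimal sketch of an override, written as a TypeScript object literal; only the `agents.defaults.memorySearch.remote.batch.pollIntervalMs` path is confirmed by this commit, the surrounding shape is assumed from the dotted field-help keys:

// Hypothetical config fragment; the nesting mirrors the dotted
// FIELD_HELP keys below, the rest of the shape is an assumption.
const config = {
  agents: {
    defaults: {
      memorySearch: {
        remote: {
          batch: {
            pollIntervalMs: 5000, // poll every 5s instead of the new 2s default
          },
        },
      },
    },
  },
};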
View File

@@ -82,7 +82,7 @@ describe("memory search config", () => {
enabled: true,
wait: true,
concurrency: 2,
- pollIntervalMs: 500,
+ pollIntervalMs: 2000,
timeoutMinutes: 60,
});
});
@@ -135,7 +135,7 @@ describe("memory search config", () => {
enabled: true,
wait: true,
concurrency: 2,
- pollIntervalMs: 500,
+ pollIntervalMs: 2000,
timeoutMinutes: 60,
},
});

View File

@@ -120,7 +120,7 @@ function mergeConfig(
overrides?.remote?.batch?.concurrency ?? defaults?.remote?.batch?.concurrency ?? 2,
),
pollIntervalMs:
- overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 500,
+ overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 2000,
timeoutMinutes:
overrides?.remote?.batch?.timeoutMinutes ?? defaults?.remote?.batch?.timeoutMinutes ?? 60,
};

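Every batch field in `mergeConfig` follows the same precedence: explicit overrides win, then configured defaults, then a hardcoded fallback. A minimal sketch of that chain (the helper name is hypothetical, not part of the commit):

// Generic override -> default -> fallback resolution via nullish coalescing.
function resolveSetting<T>(override: T | undefined, configured: T | undefined, fallback: T): T {
  return override ?? configured ?? fallback;
}

// pollIntervalMs now bottoms out at 2000 instead of 500:
const pollIntervalMs = resolveSetting(
  overrides?.remote?.batch?.pollIntervalMs,
  defaults?.remote?.batch?.pollIntervalMs,
  2000,
);

Note that `??` only falls through on `null`/`undefined`, so an explicit `0` in the overrides would be kept rather than silently replaced by the default.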
View File

@@ -387,7 +387,7 @@ const FIELD_HELP: Record<string, string> = {
"agents.defaults.memorySearch.remote.batch.concurrency":
"Max concurrent OpenAI batch jobs for memory indexing (default: 2).",
"agents.defaults.memorySearch.remote.batch.pollIntervalMs":
"Polling interval in ms for OpenAI batch status (default: 500).",
"Polling interval in ms for OpenAI batch status (default: 2000).",
"agents.defaults.memorySearch.remote.batch.timeoutMinutes":
"Timeout in minutes for OpenAI batch indexing (default: 60).",
"agents.defaults.memorySearch.local.modelPath":

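`FIELD_HELP` is a flat `Record<string, string>` keyed by the dotted config path, so surfacing the updated help text is a plain lookup:

// Prints: "Polling interval in ms for OpenAI batch status (default: 2000)."
console.log(FIELD_HELP["agents.defaults.memorySearch.remote.batch.pollIntervalMs"]);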
View File

@@ -12,6 +12,7 @@ import { resolveSessionTranscriptsDirForAgent } from "../config/sessions/paths.j
import { createSubsystemLogger } from "../logging.js";
import { onSessionTranscriptUpdate } from "../sessions/transcript-events.js";
import { resolveUserPath, truncateUtf16Safe } from "../utils.js";
+ import { colorize, isRich, theme } from "../terminal/theme.js";
import {
createEmbeddingProvider,
type EmbeddingProvider,
@@ -252,7 +253,7 @@ export class MemoryIndexManager {
enabled: Boolean(batch?.enabled && this.openAi && this.provider.id === "openai"),
wait: batch?.wait ?? true,
concurrency: Math.max(1, batch?.concurrency ?? 2),
- pollIntervalMs: batch?.pollIntervalMs ?? 500,
+ pollIntervalMs: batch?.pollIntervalMs ?? 2000,
timeoutMs: (batch?.timeoutMinutes ?? 60) * 60 * 1000,
};
}
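For reference, the object literal above resolves to roughly this shape (the type name is hypothetical; the fields and defaults come from the commit):

// Hypothetical type describing the resolved batch settings.
type ResolvedBatchSettings = {
  enabled: boolean; // true only when batch is enabled and the provider is OpenAI
  wait: boolean; // default true
  concurrency: number; // clamped to >= 1, default 2
  pollIntervalMs: number; // default now 2000 (was 500)
  timeoutMs: number; // timeoutMinutes * 60 * 1000, i.e. 3_600_000 by default
};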
@@ -1647,6 +1648,9 @@ export class MemoryIndexManager {
const status = current ?? (await this.fetchOpenAiBatchStatus(batchId));
const state = status.status ?? "unknown";
if (state === "completed") {
+ log.debug(`openai batch ${batchId} ${state}`, {
+   consoleMessage: this.formatOpenAiBatchConsoleMessage({ batchId, state }),
+ });
if (!status.output_file_id) {
throw new Error(`openai batch ${batchId} completed without output file`);
}
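The hunk below changes the wait branch of the polling loop: timeout check, debug log, sleep, retry. Stripped of the OpenAI specifics, the control flow reduces to this sketch (names hypothetical, not the actual implementation):

// Minimal poll-with-timeout sketch illustrating the loop's control flow.
async function pollUntil<T>(
  fetchStatus: () => Promise<T>,
  isTerminal: (status: T) => boolean,
  intervalMs: number,
  timeoutMs: number,
): Promise<T> {
  const start = Date.now();
  for (;;) {
    const status = await fetchStatus();
    if (isTerminal(status)) return status;
    if (Date.now() - start > timeoutMs) {
      throw new Error(`timed out after ${timeoutMs}ms`);
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
}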
@@ -1668,12 +1672,37 @@ export class MemoryIndexManager {
if (Date.now() - start > this.batch.timeoutMs) {
throw new Error(`openai batch ${batchId} timed out after ${this.batch.timeoutMs}ms`);
}
- log.debug(`openai batch ${batchId} ${state}; waiting ${this.batch.pollIntervalMs}ms`);
+ log.debug(`openai batch ${batchId} ${state}; waiting ${this.batch.pollIntervalMs}ms`, {
+   consoleMessage: this.formatOpenAiBatchConsoleMessage({
+     batchId,
+     state,
+     waitMs: this.batch.pollIntervalMs,
+   }),
+ });
await new Promise((resolve) => setTimeout(resolve, this.batch.pollIntervalMs));
current = undefined;
}
}
+
+ private formatOpenAiBatchConsoleMessage(params: {
+   batchId: string;
+   state: string;
+   waitMs?: number;
+ }): string {
+   const rich = isRich();
+   const normalized = params.state.toLowerCase();
+   const successStates = new Set(["completed", "succeeded"]);
+   const errorStates = new Set(["failed", "expired", "cancelled", "canceled"]);
+   const warnStates = new Set(["finalizing", "validating"]);
+   let color = theme.info;
+   if (successStates.has(normalized)) color = theme.success;
+   else if (errorStates.has(normalized)) color = theme.error;
+   else if (warnStates.has(normalized)) color = theme.warn;
+   const status = colorize(rich, color, params.state);
+   const suffix = typeof params.waitMs === "number" ? `; waiting ${params.waitMs}ms` : "";
+   return `openai batch ${params.batchId} ${status}${suffix}`;
+ }
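With the default 2s interval, the console lines this helper produces look like the following (the batch id is a placeholder; the color annotations stand in for the theme styling):

openai batch batch_abc123 in_progress; waiting 2000ms   <- info
openai batch batch_abc123 finalizing; waiting 2000ms    <- warn
openai batch batch_abc123 completed                     <- success
openai batch batch_abc123 failed                        <- error

When `isRich()` is false, `colorize` presumably falls back to the plain state string, keeping piped or logged output free of ANSI codes.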
private async embedChunksWithBatch(
chunks: MemoryChunk[],
entry: MemoryFileEntry | SessionFileEntry,