diff --git a/CHANGELOG.md b/CHANGELOG.md index 842c02545..9ab88baba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ Docs: https://docs.clawd.bot - Memory: add hybrid BM25 + vector search (FTS5) with weighted merging and fallback. - Memory: add SQLite embedding cache to speed up reindexing and frequent updates. - CLI: surface FTS + embedding cache state in `clawdbot memory status`. +- Memory: render progress immediately, color batch statuses in verbose logs, and poll OpenAI batch status every 2s by default. - Plugins: allow optional agent tools with explicit allowlists and add plugin tool authoring guide. https://docs.clawd.bot/plugins/agent-tools - Tools: centralize plugin tool policy helpers. - Docs: clarify plugin agent tool configuration. https://docs.clawd.bot/plugins/agent-tools diff --git a/src/agents/memory-search.test.ts b/src/agents/memory-search.test.ts index 31e7f5c04..ef58a9116 100644 --- a/src/agents/memory-search.test.ts +++ b/src/agents/memory-search.test.ts @@ -82,7 +82,7 @@ describe("memory search config", () => { enabled: true, wait: true, concurrency: 2, - pollIntervalMs: 500, + pollIntervalMs: 2000, timeoutMinutes: 60, }); }); @@ -135,7 +135,7 @@ describe("memory search config", () => { enabled: true, wait: true, concurrency: 2, - pollIntervalMs: 500, + pollIntervalMs: 2000, timeoutMinutes: 60, }, }); diff --git a/src/agents/memory-search.ts b/src/agents/memory-search.ts index dfd7ac5d5..2f4a5db60 100644 --- a/src/agents/memory-search.ts +++ b/src/agents/memory-search.ts @@ -120,7 +120,7 @@ function mergeConfig( overrides?.remote?.batch?.concurrency ?? defaults?.remote?.batch?.concurrency ?? 2, ), pollIntervalMs: - overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 500, + overrides?.remote?.batch?.pollIntervalMs ?? defaults?.remote?.batch?.pollIntervalMs ?? 2000, timeoutMinutes: overrides?.remote?.batch?.timeoutMinutes ?? defaults?.remote?.batch?.timeoutMinutes ?? 
60, }; diff --git a/src/config/schema.ts b/src/config/schema.ts index 0c860d086..721b3ab39 100644 --- a/src/config/schema.ts +++ b/src/config/schema.ts @@ -387,7 +387,7 @@ const FIELD_HELP: Record<string, string> = { "agents.defaults.memorySearch.remote.batch.concurrency": "Max concurrent OpenAI batch jobs for memory indexing (default: 2).", "agents.defaults.memorySearch.remote.batch.pollIntervalMs": - "Polling interval in ms for OpenAI batch status (default: 500).", + "Polling interval in ms for OpenAI batch status (default: 2000).", "agents.defaults.memorySearch.remote.batch.timeoutMinutes": "Timeout in minutes for OpenAI batch indexing (default: 60).", "agents.defaults.memorySearch.local.modelPath": diff --git a/src/memory/manager.ts b/src/memory/manager.ts index 1ee58cfa5..0df8b8805 100644 --- a/src/memory/manager.ts +++ b/src/memory/manager.ts @@ -12,6 +12,7 @@ import { resolveSessionTranscriptsDirForAgent } from "../config/sessions/paths.j import { createSubsystemLogger } from "../logging.js"; import { onSessionTranscriptUpdate } from "../sessions/transcript-events.js"; import { resolveUserPath, truncateUtf16Safe } from "../utils.js"; +import { colorize, isRich, theme } from "../terminal/theme.js"; import { createEmbeddingProvider, type EmbeddingProvider, @@ -252,7 +253,7 @@ export class MemoryIndexManager { enabled: Boolean(batch?.enabled && this.openAi && this.provider.id === "openai"), wait: batch?.wait ?? true, concurrency: Math.max(1, batch?.concurrency ?? 2), - pollIntervalMs: batch?.pollIntervalMs ?? 500, + pollIntervalMs: batch?.pollIntervalMs ?? 2000, timeoutMs: (batch?.timeoutMinutes ?? 60) * 60 * 1000, }; } @@ -1647,6 +1648,9 @@ export class MemoryIndexManager { const status = current ?? (await this.fetchOpenAiBatchStatus(batchId)); const state = status.status ?? 
"unknown"; if (state === "completed") { + log.debug(`openai batch ${batchId} ${state}`, { + consoleMessage: this.formatOpenAiBatchConsoleMessage({ batchId, state }), + }); if (!status.output_file_id) { throw new Error(`openai batch ${batchId} completed without output file`); } @@ -1668,12 +1672,37 @@ export class MemoryIndexManager { if (Date.now() - start > this.batch.timeoutMs) { throw new Error(`openai batch ${batchId} timed out after ${this.batch.timeoutMs}ms`); } - log.debug(`openai batch ${batchId} ${state}; waiting ${this.batch.pollIntervalMs}ms`); + log.debug(`openai batch ${batchId} ${state}; waiting ${this.batch.pollIntervalMs}ms`, { + consoleMessage: this.formatOpenAiBatchConsoleMessage({ + batchId, + state, + waitMs: this.batch.pollIntervalMs, + }), + }); await new Promise((resolve) => setTimeout(resolve, this.batch.pollIntervalMs)); current = undefined; } } + private formatOpenAiBatchConsoleMessage(params: { + batchId: string; + state: string; + waitMs?: number; + }): string { + const rich = isRich(); + const normalized = params.state.toLowerCase(); + const successStates = new Set(["completed", "succeeded"]); + const errorStates = new Set(["failed", "expired", "cancelled", "canceled"]); + const warnStates = new Set(["finalizing", "validating"]); + let color = theme.info; + if (successStates.has(normalized)) color = theme.success; + else if (errorStates.has(normalized)) color = theme.error; + else if (warnStates.has(normalized)) color = theme.warn; + const status = colorize(rich, color, params.state); + const suffix = typeof params.waitMs === "number" ? `; waiting ${params.waitMs}ms` : ""; + return `openai batch ${params.batchId} ${status}${suffix}`; + } + private async embedChunksWithBatch( chunks: MemoryChunk[], entry: MemoryFileEntry | SessionFileEntry,