Handle 413 context overflow errors gracefully

When the conversation context exceeds the model's limit, instead of
throwing an opaque error or returning raw JSON, we now:

1. Detect context overflow errors (413, request_too_large, etc.)
2. Return a user-friendly message explaining the issue
3. Suggest using /new or /reset to start fresh

This prevents the assistant from becoming completely unresponsive
when context grows too large (e.g., from many screenshots or long
tool outputs).

Addresses issue #394
This commit is contained in:
alejandro maza
2026-01-07 07:51:04 -06:00
committed by Peter Steinberger
parent 42b637bbc8
commit 579828b2d5
3 changed files with 80 additions and 1 deletions

View File

@@ -126,6 +126,18 @@ export function buildBootstrapContextFiles(
return result;
}
/**
 * Returns true when an LLM provider error message indicates the request or
 * conversation context exceeded the model's size limit (HTTP 413,
 * `request_too_large`, context-length errors, etc.).
 *
 * Matching is case-insensitive substring search; an empty or missing
 * message is never treated as an overflow.
 */
export function isContextOverflowError(errorMessage?: string): boolean {
  if (!errorMessage) return false;
  const text = errorMessage.toLowerCase();
  const overflowMarkers = [
    "request_too_large",
    "request exceeds the maximum size",
    "context length exceeded",
    "maximum context length",
  ];
  if (overflowMarkers.some((marker) => text.includes(marker))) {
    return true;
  }
  // HTTP 413 responses pair the status code with a "too large" phrase;
  // requiring both avoids false positives on unrelated "413" substrings.
  return text.includes("413") && text.includes("too large");
}
export function formatAssistantErrorText(
msg: AssistantMessage,
): string | undefined {
@@ -133,6 +145,14 @@ export function formatAssistantErrorText(
const raw = (msg.errorMessage ?? "").trim();
if (!raw) return "LLM request failed with an unknown error.";
// Check for context overflow (413) errors
if (isContextOverflowError(raw)) {
return (
"Context overflow: the conversation history is too large. " +
"Use /new or /reset to start a fresh session."
);
}
const invalidRequest = raw.match(
/"type":"invalid_request_error".*?"message":"([^"]+)"/,
);