fix(status): account for cached prompt tokens

This commit is contained in:
Peter Steinberger
2025-12-12 23:22:05 +00:00
parent e502ad13f9
commit c3aed2543e
6 changed files with 171 additions and 27 deletions

View File

@@ -822,12 +822,15 @@ export async function getReplyFromConfig(
if (entry) {
const input = usage.input ?? 0;
const output = usage.output ?? 0;
const total = usage.total ?? input + output;
const promptTokens =
input + (usage.cacheRead ?? 0) + (usage.cacheWrite ?? 0);
sessionEntry = {
...entry,
inputTokens: (entry.inputTokens ?? 0) + input,
outputTokens: (entry.outputTokens ?? 0) + output,
totalTokens: (entry.totalTokens ?? 0) + total,
inputTokens: input,
outputTokens: output,
// Track the effective prompt/context size (cached + uncached input).
totalTokens:
promptTokens > 0 ? promptTokens : (usage.total ?? input),
model,
contextTokens: contextTokens ?? entry.contextTokens,
updatedAt: Date.now(),