fix: handle MiniMax coding plan usage payloads

This commit is contained in:
Peter Steinberger
2026-01-16 07:28:29 +00:00
parent f2db894685
commit e479c870fd
3 changed files with 194 additions and 4 deletions

View File

@@ -26,6 +26,7 @@
- Fix: guard model fallback against undefined provider/model values. (#954) — thanks @roshanasingh4.
- Fix: refactor session store updates, add chat.inject, and harden subagent cleanup flow. (#944) — thanks @tyler6204.
- Fix: clean up suspended CLI processes across backends. (#978) — thanks @Nachx639.
- Fix: support MiniMax coding plan usage responses with `model_remains`/`current_interval_*` payloads.
- CLI: add `--json` output for `clawdbot daemon` lifecycle/install commands.
- Memory: make `node-llama-cpp` an optional dependency (avoid Node 25 install failures) and improve local-embeddings fallback/errors.
- Browser: add `snapshot refs=aria` (Playwright aria-ref ids) for self-resolving refs across `snapshot` → `act`.

View File

@@ -18,6 +18,10 @@ const RESET_KEYS = [
"resetAt",
"reset_time",
"resetTime",
"next_reset_at",
"nextResetAt",
"next_reset_time",
"nextResetTime",
"expires_at",
"expiresAt",
"expire_at",
@@ -52,6 +56,14 @@ const USED_KEYS = [
"usedQuota",
"used_times",
"usedTimes",
"prompt_used",
"promptUsed",
"used_prompt",
"usedPrompt",
"prompts_used",
"promptsUsed",
"current_interval_usage_count",
"currentIntervalUsageCount",
"consumed",
] as const;
@@ -65,6 +77,20 @@ const TOTAL_KEYS = [
"totalQuota",
"total_times",
"totalTimes",
"prompt_total",
"promptTotal",
"total_prompt",
"totalPrompt",
"prompt_limit",
"promptLimit",
"limit_prompt",
"limitPrompt",
"prompts_total",
"promptsTotal",
"total_prompts",
"totalPrompts",
"current_interval_total_count",
"currentIntervalTotalCount",
"limit",
"quota",
"quota_limit",
@@ -87,6 +113,20 @@ const REMAINING_KEYS = [
"remain_times",
"remainingTimes",
"remaining_times",
"prompt_remain",
"promptRemain",
"remain_prompt",
"remainPrompt",
"prompt_remaining",
"promptRemaining",
"remaining_prompt",
"remainingPrompt",
"prompts_remaining",
"promptsRemaining",
"prompt_left",
"promptLeft",
"prompts_left",
"promptsLeft",
"left",
] as const;
@@ -144,6 +184,62 @@ function parseEpoch(value: unknown): number | undefined {
return undefined;
}
// True when at least one of `keys` is a property of `record` (presence check
// via `in`, so keys mapped to undefined still count).
function hasAny(record: Record<string, unknown>, keys: readonly string[]): boolean {
  for (const key of keys) {
    if (key in record) return true;
  }
  return false;
}
// Heuristic relevance score for a candidate usage record: explicit percent
// fields weigh most, then totals, then used/remaining counters, with reset
// timestamps and plan names acting as weak tie-breakers. Zero means "not a
// usage record".
function scoreUsageRecord(record: Record<string, unknown>): number {
  const percentWeight = hasAny(record, PERCENT_KEYS) ? 4 : 0;
  const totalWeight = hasAny(record, TOTAL_KEYS) ? 3 : 0;
  const counterWeight = hasAny(record, USED_KEYS) || hasAny(record, REMAINING_KEYS) ? 2 : 0;
  const resetWeight = hasAny(record, RESET_KEYS) ? 1 : 0;
  const planWeight = hasAny(record, PLAN_KEYS) ? 1 : 0;
  return percentWeight + totalWeight + counterWeight + resetWeight + planWeight;
}
// Breadth-first scan of `root` for nested objects that look like usage
// records, bounded by depth and node count so arbitrary provider payloads
// cannot blow up the walk. Returns candidate records best-first (highest
// score, then shallowest depth; Array.prototype.sort is stable, so equal
// entries keep discovery order).
function collectUsageCandidates(root: Record<string, unknown>): Record<string, unknown>[] {
  const MAX_SCAN_DEPTH = 4;
  const MAX_SCAN_NODES = 60;
  type Entry = { value: unknown; depth: number };
  const pending: Entry[] = [{ value: root, depth: 0 }];
  const visited = new Set<object>();
  const scored: Array<{ record: Record<string, unknown>; score: number; depth: number }> = [];
  const enqueueChildren = (children: Iterable<unknown>, depth: number): void => {
    for (const child of children) {
      // Only containers are worth descending into.
      if (isRecord(child) || Array.isArray(child)) pending.push({ value: child, depth });
    }
  };
  // Every dequeued node — including duplicates and arrays — spends scan budget,
  // matching the defensive upper bound on total work.
  for (let scanned = 0; pending.length > 0 && scanned < MAX_SCAN_NODES; scanned += 1) {
    const entry = pending.shift();
    if (!entry) break;
    const { value, depth } = entry;
    if (Array.isArray(value)) {
      if (depth < MAX_SCAN_DEPTH) enqueueChildren(value, depth + 1);
      continue;
    }
    if (!isRecord(value) || visited.has(value)) continue;
    visited.add(value);
    const score = scoreUsageRecord(value);
    if (score > 0) scored.push({ record: value, score, depth });
    if (depth < MAX_SCAN_DEPTH) enqueueChildren(Object.values(value), depth + 1);
  }
  scored.sort((a, b) => b.score - a.score || a.depth - b.depth);
  return scored.map((entry) => entry.record);
}
function deriveWindowLabel(payload: Record<string, unknown>): string {
const hours = pickNumber(payload, WINDOW_HOUR_KEYS);
if (hours && Number.isFinite(hours)) return `${hours}h`;
@@ -219,7 +315,20 @@ export async function fetchMinimaxUsage(
}
const payload = isRecord(data.data) ? data.data : data;
const usedPercent = deriveUsedPercent(payload);
const candidates = collectUsageCandidates(payload);
let usageRecord: Record<string, unknown> = payload;
let usedPercent: number | null = null;
for (const candidate of candidates) {
const candidatePercent = deriveUsedPercent(candidate);
if (candidatePercent !== null) {
usageRecord = candidate;
usedPercent = candidatePercent;
break;
}
}
if (usedPercent === null) {
usedPercent = deriveUsedPercent(payload);
}
if (usedPercent === null) {
return {
provider: "minimax",
@@ -230,10 +339,13 @@ export async function fetchMinimaxUsage(
}
const resetAt =
parseEpoch(pickString(payload, RESET_KEYS)) ?? parseEpoch(pickNumber(payload, RESET_KEYS));
parseEpoch(pickString(usageRecord, RESET_KEYS)) ??
parseEpoch(pickNumber(usageRecord, RESET_KEYS)) ??
parseEpoch(pickString(payload, RESET_KEYS)) ??
parseEpoch(pickNumber(payload, RESET_KEYS));
const windows: UsageWindow[] = [
{
label: deriveWindowLabel(payload),
label: deriveWindowLabel(usageRecord),
usedPercent,
resetAt,
},
@@ -243,6 +355,6 @@ export async function fetchMinimaxUsage(
provider: "minimax",
displayName: PROVIDER_LABELS.minimax,
windows,
plan: pickString(payload, PLAN_KEYS),
plan: pickString(usageRecord, PLAN_KEYS) ?? pickString(payload, PLAN_KEYS),
};
}

View File

@@ -137,6 +137,83 @@ describe("provider usage loading", () => {
expect(mockFetch).toHaveBeenCalled();
});
// Verifies that usage data wrapped inside data.usage (rather than at the top
// level) is discovered: 200 limit with 50 remaining → 75% used, and the plan
// name is picked up from the surrounding record.
it("handles nested MiniMax usage payloads", async () => {
  const jsonResponse = (status: number, body: unknown): Response => {
    if (typeof body === "string") return new Response(body, { status });
    return new Response(JSON.stringify(body), {
      status,
      headers: { "Content-Type": "application/json" },
    });
  };
  const mockFetch = vi.fn<Parameters<typeof fetch>, ReturnType<typeof fetch>>(async (input) => {
    let url: string;
    if (typeof input === "string") url = input;
    else if (input instanceof URL) url = input.toString();
    else url = input.url;
    if (!url.includes("api.minimax.io/v1/coding_plan/remains")) {
      return jsonResponse(404, "not found");
    }
    return jsonResponse(200, {
      base_resp: { status_code: 0, status_msg: "ok" },
      data: {
        plan_name: "Coding Plan",
        usage: {
          prompt_limit: 200,
          prompt_remain: 50,
          next_reset_time: "2026-01-07T05:00:00Z",
        },
      },
    });
  });
  const summary = await loadProviderUsageSummary({
    now: Date.UTC(2026, 0, 7, 0, 0, 0),
    auth: [{ provider: "minimax", token: "token-1b" }],
    fetch: mockFetch,
  });
  const minimax = summary.providers.find((entry) => entry.provider === "minimax");
  expect(minimax?.windows[0]?.usedPercent).toBe(75);
  expect(minimax?.plan).toBe("Coding Plan");
  expect(mockFetch).toHaveBeenCalled();
});
// Verifies the coding-plan `model_remains` array shape: the entry's
// current_interval_usage_count / current_interval_total_count (30 of 120)
// must surface as 25% used.
it("handles MiniMax model_remains usage payloads", async () => {
  const jsonResponse = (status: number, body: unknown): Response => {
    if (typeof body === "string") return new Response(body, { status });
    return new Response(JSON.stringify(body), {
      status,
      headers: { "Content-Type": "application/json" },
    });
  };
  const mockFetch = vi.fn<Parameters<typeof fetch>, ReturnType<typeof fetch>>(async (input) => {
    let url: string;
    if (typeof input === "string") url = input;
    else if (input instanceof URL) url = input.toString();
    else url = input.url;
    if (!url.includes("api.minimax.io/v1/coding_plan/remains")) {
      return jsonResponse(404, "not found");
    }
    return jsonResponse(200, {
      base_resp: { status_code: 0, status_msg: "ok" },
      model_remains: [
        {
          start_time: 1736217600,
          end_time: 1736235600,
          remains_time: 600,
          current_interval_total_count: 120,
          current_interval_usage_count: 30,
          model_name: "MiniMax-M2.1",
        },
      ],
    });
  });
  const summary = await loadProviderUsageSummary({
    now: Date.UTC(2026, 0, 7, 0, 0, 0),
    auth: [{ provider: "minimax", token: "token-1b" }],
    fetch: mockFetch,
  });
  const minimax = summary.providers.find((entry) => entry.provider === "minimax");
  expect(minimax?.windows[0]?.usedPercent).toBe(25);
  expect(mockFetch).toHaveBeenCalled();
});
it("discovers Claude usage from token auth profiles", async () => {
await withTempHome(
async (tempHome) => {