From 44e1f271c86cd60a7658275beedb500257a9f6fc Mon Sep 17 00:00:00 2001
From: Peter Steinberger
Date: Mon, 12 Jan 2026 17:55:33 +0000
Subject: [PATCH] fix: keep image sanitizer scoped

---
 src/agents/pi-embedded-helpers.ts |  5 +----
 src/types/node-llama-cpp.d.ts     | 25 +++++++++++++++++++++++++
 2 files changed, 26 insertions(+), 4 deletions(-)
 create mode 100644 src/types/node-llama-cpp.d.ts

diff --git a/src/agents/pi-embedded-helpers.ts b/src/agents/pi-embedded-helpers.ts
index 384286e1f..4a2368696 100644
--- a/src/agents/pi-embedded-helpers.ts
+++ b/src/agents/pi-embedded-helpers.ts
@@ -12,7 +12,6 @@ import {
 } from "../auto-reply/thinking.js";
 import type { ClawdbotConfig } from "../config/config.js";
 import { formatSandboxToolPolicyBlockedMessage } from "./sandbox.js";
-import { repairToolUseResultPairing } from "./session-transcript-repair.js";
 import {
   isValidCloudCodeAssistToolId,
   sanitizeToolCallId,
@@ -96,11 +95,9 @@ export async function sanitizeSessionMessagesImages(
 ): Promise<AgentMessage[]> {
   // We sanitize historical session messages because Anthropic can reject a request
   // if the transcript contains oversized base64 images (see MAX_IMAGE_DIMENSION_PX).
-  const sanitizedIds = options?.sanitizeToolCallIds
+  const base = options?.sanitizeToolCallIds
     ? sanitizeToolCallIdsForCloudCodeAssist(messages)
     : messages;
-  const repaired = repairToolUseResultPairing(sanitizedIds);
-  const base = repaired.messages;
   const out: AgentMessage[] = [];
   for (const msg of base) {
     if (!msg || typeof msg !== "object") {
diff --git a/src/types/node-llama-cpp.d.ts b/src/types/node-llama-cpp.d.ts
new file mode 100644
index 000000000..23f361c1e
--- /dev/null
+++ b/src/types/node-llama-cpp.d.ts
@@ -0,0 +1,25 @@
+declare module "node-llama-cpp" {
+  export enum LlamaLogLevel {
+    error = 0,
+  }
+
+  export type LlamaEmbedding = { vector: Float32Array | number[] };
+
+  export type LlamaEmbeddingContext = {
+    getEmbeddingFor: (text: string) => Promise<LlamaEmbedding>;
+  };
+
+  export type LlamaModel = {
+    createEmbeddingContext: () => Promise<LlamaEmbeddingContext>;
+  };
+
+  export type Llama = {
+    loadModel: (params: { modelPath: string }) => Promise<LlamaModel>;
+  };
+
+  export function getLlama(params: { logLevel: LlamaLogLevel }): Promise<Llama>;
+  export function resolveModelFile(
+    modelPath: string,
+    cacheDir?: string,
+  ): Promise<string>;
+}