Merge pull request #567 from erikpr1994/fix/gemini-schema-sanitization
fix(agents): remove unsupported JSON Schema keywords for Cloud Code Assist API
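At a glance, the sanitizer now strips every keyword Cloud Code Assist rejects and inlines local $ref targets before tool schemas are sent. A rough before/after sketch of the intended transformation (illustrative schema only, not a fixture from this PR; the entry point is the cleanToolSchemaForGemini helper exposed via __testing in the diff below):

// Illustrative input, not a fixture from this PR.
const input = {
  type: "object",
  properties: {
    mode: { $ref: "#/$defs/Mode" },
  },
  additionalProperties: false,
  $defs: {
    Mode: { type: "string", enum: ["fast", "safe"] },
  },
};

// Expected cleaned shape: $ref is replaced by the referenced schema,
// and $defs / additionalProperties are dropped entirely.
// {
//   type: "object",
//   properties: {
//     mode: { type: "string", enum: ["fast", "safe"] },
//   },
// }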
@@ -50,35 +50,40 @@ beforeEach(() => {
});

describe("bash tool backgrounding", () => {
  it("backgrounds after yield and can be polled", async () => {
    const result = await bashTool.execute("call1", {
      command: joinCommands([yieldDelayCmd, "echo done"]),
      yieldMs: 10,
    });

    expect(result.details.status).toBe("running");
    const sessionId = (result.details as { sessionId: string }).sessionId;

    let status = "running";
    let output = "";
    const deadline = Date.now() + (process.platform === "win32" ? 8000 : 2000);

    while (Date.now() < deadline && status === "running") {
      const poll = await processTool.execute("call2", {
        action: "poll",
        sessionId,
  it(
    "backgrounds after yield and can be polled",
    async () => {
      const result = await bashTool.execute("call1", {
        command: joinCommands([yieldDelayCmd, "echo done"]),
        yieldMs: 10,
      });
      status = (poll.details as { status: string }).status;
      const textBlock = poll.content.find((c) => c.type === "text");
      output = textBlock?.text ?? "";
      if (status === "running") {
        await sleep(20);
      }
    }

    expect(status).toBe("completed");
    expect(output).toContain("done");
  });
      expect(result.details.status).toBe("running");
      const sessionId = (result.details as { sessionId: string }).sessionId;

      let status = "running";
      let output = "";
      const deadline =
        Date.now() + (process.platform === "win32" ? 8000 : 2000);

      while (Date.now() < deadline && status === "running") {
        const poll = await processTool.execute("call2", {
          action: "poll",
          sessionId,
        });
        status = (poll.details as { status: string }).status;
        const textBlock = poll.content.find((c) => c.type === "text");
        output = textBlock?.text ?? "";
        if (status === "running") {
          await sleep(20);
        }
      }

      expect(status).toBe("completed");
      expect(output).toContain("done");
    },
    isWin ? 15_000 : 5_000,
  );

  it("supports explicit background", async () => {
    const result = await bashTool.execute("call1", {
@@ -8,6 +8,7 @@ import type { AgentTool, AgentToolResult } from "@mariozechner/pi-agent-core";
import { Type } from "@sinclair/typebox";

import { logInfo } from "../logger.js";
import { sliceUtf16Safe } from "../utils.js";
import {
  addSession,
  appendOutput,

@@ -1041,7 +1042,7 @@ function chunkString(input: string, limit = CHUNK_LIMIT) {
function truncateMiddle(str: string, max: number) {
  if (str.length <= max) return str;
  const half = Math.floor((max - 3) / 2);
  return `${str.slice(0, half)}...${str.slice(str.length - half)}`;
  return `${sliceUtf16Safe(str, 0, half)}...${sliceUtf16Safe(str, -half)}`;
}

function sliceLogLines(
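Context for the truncateMiddle change above: a plain String.prototype.slice can cut a UTF-16 surrogate pair in half when the boundary lands inside an emoji or other astral-plane character, leaving an invalid lone surrogate in the output. A minimal sketch of the idea behind a UTF-16-safe slice (hypothetical helper for illustration only; the real sliceUtf16Safe lives in ../utils.js and, per the call above, also accepts a negative start):

// Hypothetical sketch, not the repo's implementation.
function sliceUtf16SafeSketch(str: string, start: number, end?: number): string {
  let from = start < 0 ? Math.max(str.length + start, 0) : start;
  let to = end === undefined ? str.length : end;
  // If the last included unit is a high surrogate, the pair would be split: back off.
  if (to > from && to < str.length) {
    const last = str.charCodeAt(to - 1);
    if (last >= 0xd800 && last <= 0xdbff) to -= 1;
  }
  // If the first included unit is a low surrogate, it would be orphaned: skip it.
  if (from > 0 && from < str.length) {
    const first = str.charCodeAt(from);
    if (first >= 0xdc00 && first <= 0xdfff) from += 1;
  }
  return str.slice(from, to);
}

// "🙂" is two UTF-16 code units, so a naive slice(0, 1) yields a lone high surrogate;
// the safe variant drops the half pair instead: sliceUtf16SafeSketch("🙂x", 0, 1) === "".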
@@ -9,6 +9,7 @@ import { resolveStateDir } from "../config/paths.js";
import { emitAgentEvent } from "../infra/agent-events.js";
import { createSubsystemLogger } from "../logging.js";
import { splitMediaFromOutput } from "../media/parse.js";
import { truncateUtf16Safe } from "../utils.js";
import type { BlockReplyChunking } from "./pi-embedded-block-chunker.js";
import { EmbeddedBlockChunker } from "./pi-embedded-block-chunker.js";
import { isMessagingToolDuplicate } from "./pi-embedded-helpers.js";

@@ -64,7 +65,7 @@ type MessagingToolSend = {

function truncateToolText(text: string): string {
  if (text.length <= TOOL_RESULT_MAX_CHARS) return text;
  return `${text.slice(0, TOOL_RESULT_MAX_CHARS)}\n…(truncated)…`;
  return `${truncateUtf16Safe(text, TOOL_RESULT_MAX_CHARS)}\n…(truncated)…`;
}

function sanitizeToolResult(result: unknown): unknown {
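The truncateToolText change is the same idea applied to a plain end-truncation: truncateUtf16Safe presumably cuts at TOOL_RESULT_MAX_CHARS without leaving half a surrogate pair at the cut point (hypothetical example, assuming the (text, max) signature used above):

// Hypothetical behaviour, not a fixture from this PR:
// naive:  "ab🙂".slice(0, 3)            -> "ab\uD83D"  (lone high surrogate)
// safe:   truncateUtf16Safe("ab🙂", 3)  -> "ab"        (whole pair dropped)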
@@ -4,7 +4,7 @@ import path from "node:path";

import sharp from "sharp";
import { describe, expect, it } from "vitest";
import { createClawdbotCodingTools } from "./pi-tools.js";
import { __testing, createClawdbotCodingTools } from "./pi-tools.js";
import { createBrowserTool } from "./tools/browser-tool.js";

describe("createClawdbotCodingTools", () => {

@@ -64,6 +64,28 @@ describe("createClawdbotCodingTools", () => {
    expect(format?.enum).toEqual(["aria", "ai"]);
  });

  it("inlines local $ref before removing unsupported keywords", () => {
    const cleaned = __testing.cleanToolSchemaForGemini({
      type: "object",
      properties: {
        foo: { $ref: "#/$defs/Foo" },
      },
      $defs: {
        Foo: { type: "string", enum: ["a", "b"] },
      },
    }) as {
      $defs?: unknown;
      properties?: Record<string, unknown>;
    };

    expect(cleaned.$defs).toBeUndefined();
    expect(cleaned.properties).toBeDefined();
    expect(cleaned.properties?.foo).toMatchObject({
      type: "string",
      enum: ["a", "b"],
    });
  });

  it("preserves action enums in normalized schemas", () => {
    const tools = createClawdbotCodingTools();
    const toolNames = [

@@ -331,4 +353,52 @@ describe("createClawdbotCodingTools", () => {
    expect(tools.some((tool) => tool.name === "Bash")).toBe(true);
    expect(tools.some((tool) => tool.name === "browser")).toBe(false);
  });

  it("removes unsupported JSON Schema keywords for Cloud Code Assist API compatibility", () => {
    const tools = createClawdbotCodingTools();

    // Helper to recursively check schema for unsupported keywords
    const unsupportedKeywords = new Set([
      "patternProperties",
      "additionalProperties",
      "$schema",
      "$id",
      "$ref",
      "$defs",
      "definitions",
    ]);

    const findUnsupportedKeywords = (
      schema: unknown,
      path: string,
    ): string[] => {
      const found: string[] = [];
      if (!schema || typeof schema !== "object") return found;
      if (Array.isArray(schema)) {
        schema.forEach((item, i) => {
          found.push(...findUnsupportedKeywords(item, `${path}[${i}]`));
        });
        return found;
      }
      for (const [key, value] of Object.entries(
        schema as Record<string, unknown>,
      )) {
        if (unsupportedKeywords.has(key)) {
          found.push(`${path}.${key}`);
        }
        if (value && typeof value === "object") {
          found.push(...findUnsupportedKeywords(value, `${path}.${key}`));
        }
      }
      return found;
    };

    for (const tool of tools) {
      const violations = findUnsupportedKeywords(
        tool.parameters,
        `${tool.name}.parameters`,
      );
      expect(violations).toEqual([]);
    }
  });
});
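The resolver added in the next hunk only follows local refs of the form #/$defs/<name> or #/definitions/<name>, unescapes JSON Pointer segments (~1 becomes /, ~0 becomes ~), and breaks reference cycles by emitting an empty schema. A hedged sketch of the expected outcomes, using illustrative schemas rather than fixtures from this PR:

// Legacy "definitions" and escaped pointer segments resolve the same way as $defs:
__testing.cleanToolSchemaForGemini({
  type: "object",
  properties: { p: { $ref: "#/definitions/a~1b" } },
  definitions: { "a/b": { type: "number" } },
});
// -> { type: "object", properties: { p: { type: "number" } } }

// A self-referential schema cannot be inlined forever; the nested occurrence
// of the ref collapses to an empty (unconstrained) schema:
__testing.cleanToolSchemaForGemini({
  $ref: "#/$defs/Node",
  $defs: {
    Node: { type: "object", properties: { next: { $ref: "#/$defs/Node" } } },
  },
});
// -> { type: "object", properties: { next: {} } }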
@@ -195,12 +195,122 @@ function tryFlattenLiteralAnyOf(
  return null;
}

function cleanSchemaForGemini(schema: unknown): unknown {
// Keywords that Cloud Code Assist API rejects (not compliant with their JSON Schema subset)
const UNSUPPORTED_SCHEMA_KEYWORDS = new Set([
  "patternProperties",
  "additionalProperties",
  "$schema",
  "$id",
  "$ref",
  "$defs",
  "definitions",
]);

type SchemaDefs = Map<string, unknown>;

function extendSchemaDefs(
  defs: SchemaDefs | undefined,
  schema: Record<string, unknown>,
): SchemaDefs | undefined {
  const defsEntry =
    schema.$defs &&
    typeof schema.$defs === "object" &&
    !Array.isArray(schema.$defs)
      ? (schema.$defs as Record<string, unknown>)
      : undefined;
  const legacyDefsEntry =
    schema.definitions &&
    typeof schema.definitions === "object" &&
    !Array.isArray(schema.definitions)
      ? (schema.definitions as Record<string, unknown>)
      : undefined;

  if (!defsEntry && !legacyDefsEntry) return defs;

  const next = defs ? new Map(defs) : new Map<string, unknown>();
  if (defsEntry) {
    for (const [key, value] of Object.entries(defsEntry)) next.set(key, value);
  }
  if (legacyDefsEntry) {
    for (const [key, value] of Object.entries(legacyDefsEntry))
      next.set(key, value);
  }
  return next;
}

function decodeJsonPointerSegment(segment: string): string {
  return segment.replaceAll("~1", "/").replaceAll("~0", "~");
}

function tryResolveLocalRef(
  ref: string,
  defs: SchemaDefs | undefined,
): unknown | undefined {
  if (!defs) return undefined;
  const match = ref.match(/^#\/(?:\$defs|definitions)\/(.+)$/);
  if (!match) return undefined;
  const name = decodeJsonPointerSegment(match[1] ?? "");
  if (!name) return undefined;
  return defs.get(name);
}
function cleanSchemaForGeminiWithDefs(
  schema: unknown,
  defs: SchemaDefs | undefined,
  refStack: Set<string> | undefined,
): unknown {
  if (!schema || typeof schema !== "object") return schema;
  if (Array.isArray(schema)) return schema.map(cleanSchemaForGemini);
  if (Array.isArray(schema)) {
    return schema.map((item) =>
      cleanSchemaForGeminiWithDefs(item, defs, refStack),
    );
  }

  const obj = schema as Record<string, unknown>;
  const nextDefs = extendSchemaDefs(defs, obj);

  const refValue = typeof obj.$ref === "string" ? obj.$ref : undefined;
  if (refValue) {
    if (refStack?.has(refValue)) {
      return {};
    }

    const resolved = tryResolveLocalRef(refValue, nextDefs);
    if (resolved) {
      const nextRefStack = refStack ? new Set(refStack) : new Set<string>();
      nextRefStack.add(refValue);

      const cleaned = cleanSchemaForGeminiWithDefs(
        resolved,
        nextDefs,
        nextRefStack,
      );
      if (!cleaned || typeof cleaned !== "object" || Array.isArray(cleaned)) {
        return cleaned;
      }

      const result: Record<string, unknown> = {
        ...(cleaned as Record<string, unknown>),
      };
      for (const key of ["description", "title", "default", "examples"]) {
        if (key in obj && obj[key] !== undefined) {
          result[key] = obj[key];
        }
      }
      return result;
    }

    const result: Record<string, unknown> = {};
    for (const key of ["description", "title", "default", "examples"]) {
      if (key in obj && obj[key] !== undefined) {
        result[key] = obj[key];
      }
    }
    return result;
  }
  const hasAnyOf = "anyOf" in obj && Array.isArray(obj.anyOf);
  const hasOneOf = "oneOf" in obj && Array.isArray(obj.oneOf);

  // Try to flatten anyOf of literals to a single enum BEFORE processing
  // This handles Type.Union([Type.Literal("a"), Type.Literal("b")]) patterns

@@ -221,14 +331,28 @@ function cleanSchemaForGemini(schema: unknown): unknown {
    }
  }

  // Try to flatten oneOf of literals similarly
  if (hasOneOf) {
    const flattened = tryFlattenLiteralAnyOf(obj.oneOf as unknown[]);
    if (flattened) {
      const result: Record<string, unknown> = {
        type: flattened.type,
        enum: flattened.enum,
      };
      for (const key of ["description", "title", "default", "examples"]) {
        if (key in obj && obj[key] !== undefined) {
          result[key] = obj[key];
        }
      }
      return result;
    }
  }
  const cleaned: Record<string, unknown> = {};

  for (const [key, value] of Object.entries(obj)) {
    // Skip unsupported schema features for Gemini:
    // - patternProperties: not in OpenAPI 3.0 subset
    // - const: convert to enum with single value instead
    if (key === "patternProperties") {
      // Gemini doesn't support patternProperties - skip it
    // Skip keywords that Cloud Code Assist API doesn't support
    if (UNSUPPORTED_SCHEMA_KEYWORDS.has(key)) {
      continue;
    }

@@ -238,8 +362,8 @@ function cleanSchemaForGemini(schema: unknown): unknown {
      continue;
    }

    // Skip 'type' if we have 'anyOf' — Gemini doesn't allow both
    if (key === "type" && hasAnyOf) {
    // Skip 'type' if we have 'anyOf' or 'oneOf' — Gemini doesn't allow both
    if (key === "type" && (hasAnyOf || hasOneOf)) {
      continue;
    }

@@ -247,27 +371,29 @@ function cleanSchemaForGemini(schema: unknown): unknown {
      // Recursively clean nested properties
      const props = value as Record<string, unknown>;
      cleaned[key] = Object.fromEntries(
        Object.entries(props).map(([k, v]) => [k, cleanSchemaForGemini(v)]),
        Object.entries(props).map(([k, v]) => [
          k,
          cleanSchemaForGeminiWithDefs(v, nextDefs, refStack),
        ]),
      );
    } else if (key === "items" && value && typeof value === "object") {
      // Recursively clean array items schema
      cleaned[key] = cleanSchemaForGemini(value);
      cleaned[key] = cleanSchemaForGeminiWithDefs(value, nextDefs, refStack);
    } else if (key === "anyOf" && Array.isArray(value)) {
      // Clean each anyOf variant
      cleaned[key] = value.map((variant) => cleanSchemaForGemini(variant));
      cleaned[key] = value.map((variant) =>
        cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
      );
    } else if (key === "oneOf" && Array.isArray(value)) {
      // Clean each oneOf variant
      cleaned[key] = value.map((variant) => cleanSchemaForGemini(variant));
      cleaned[key] = value.map((variant) =>
        cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
      );
    } else if (key === "allOf" && Array.isArray(value)) {
      // Clean each allOf variant
      cleaned[key] = value.map((variant) => cleanSchemaForGemini(variant));
    } else if (
      key === "additionalProperties" &&
      value &&
      typeof value === "object"
    ) {
      // Recursively clean additionalProperties schema
      cleaned[key] = cleanSchemaForGemini(value);
      cleaned[key] = value.map((variant) =>
        cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
      );
    } else {
      cleaned[key] = value;
    }

@@ -276,6 +402,18 @@ function cleanSchemaForGemini(schema: unknown): unknown {
  return cleaned;
}

function cleanSchemaForGemini(schema: unknown): unknown {
  if (!schema || typeof schema !== "object") return schema;
  if (Array.isArray(schema)) return schema.map(cleanSchemaForGemini);

  const defs = extendSchemaDefs(undefined, schema as Record<string, unknown>);
  return cleanSchemaForGeminiWithDefs(schema, defs, undefined);
}

function cleanToolSchemaForGemini(schema: Record<string, unknown>): unknown {
  return cleanSchemaForGemini(schema);
}

function normalizeToolParameters(tool: AnyAgentTool): AnyAgentTool {
  const schema =
    tool.parameters && typeof tool.parameters === "object"

@@ -613,6 +751,10 @@ function createClawdbotReadTool(base: AnyAgentTool): AnyAgentTool {
  };
}

export const __testing = {
  cleanToolSchemaForGemini,
} as const;

export function createClawdbotCodingTools(options?: {
  bash?: BashToolDefaults & ProcessToolDefaults;
  messageProvider?: string;
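For context on the literal-flattening path the last hunks extend from anyOf to oneOf: per the comments in the diff, unions of literals collapse to a single enum, and a sibling type is dropped whenever an anyOf/oneOf survives, since Gemini rejects both on one node. A hedged illustration (TypeBox emits const-based literal members; tryFlattenLiteralAnyOf itself is outside this diff):

import { Type } from "@sinclair/typebox";

// The pattern named in the diff's comment:
const format = Type.Union([Type.Literal("aria"), Type.Literal("ai")]);
// TypeBox emits roughly:
//   { anyOf: [{ const: "aria", type: "string" }, { const: "ai", type: "string" }] }
// Expected cleaned form, matching the existing enum expectation in the test file above:
//   { type: "string", enum: ["aria", "ai"] }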