refactor: consolidate schema scrub + test harness
This commit is contained in:
@@ -24,6 +24,7 @@ import {
|
||||
import { createClawdbotTools } from "./clawdbot-tools.js";
|
||||
import type { SandboxContext, SandboxToolPolicy } from "./sandbox.js";
|
||||
import { assertSandboxPath } from "./sandbox-paths.js";
|
||||
import { cleanSchemaForGemini } from "./schema/clean-for-gemini.js";
|
||||
import { sanitizeToolResultImages } from "./tool-images.js";
|
||||
|
||||
// NOTE(steipete): Upstream read now does file-magic MIME detection; we keep the wrapper
|
||||
@@ -154,266 +155,6 @@ function mergePropertySchemas(existing: unknown, incoming: unknown): unknown {
|
||||
return existing;
|
||||
}
|
||||
|
||||
// Check if an anyOf array contains only literal values that can be flattened
|
||||
// TypeBox Type.Literal generates { const: "value", type: "string" }
|
||||
// Some schemas may use { enum: ["value"], type: "string" }
|
||||
// Both patterns are flattened to { type: "string", enum: ["a", "b", ...] }
|
||||
function tryFlattenLiteralAnyOf(
|
||||
anyOf: unknown[],
|
||||
): { type: string; enum: unknown[] } | null {
|
||||
if (anyOf.length === 0) return null;
|
||||
|
||||
const allValues: unknown[] = [];
|
||||
let commonType: string | null = null;
|
||||
|
||||
for (const variant of anyOf) {
|
||||
if (!variant || typeof variant !== "object") return null;
|
||||
const v = variant as Record<string, unknown>;
|
||||
|
||||
// Extract the literal value - either from const or single-element enum
|
||||
let literalValue: unknown;
|
||||
if ("const" in v) {
|
||||
literalValue = v.const;
|
||||
} else if (Array.isArray(v.enum) && v.enum.length === 1) {
|
||||
literalValue = v.enum[0];
|
||||
} else {
|
||||
return null; // Not a literal pattern
|
||||
}
|
||||
|
||||
// Must have consistent type (usually "string")
|
||||
const variantType = typeof v.type === "string" ? v.type : null;
|
||||
if (!variantType) return null;
|
||||
if (commonType === null) commonType = variantType;
|
||||
else if (commonType !== variantType) return null;
|
||||
|
||||
allValues.push(literalValue);
|
||||
}
|
||||
|
||||
if (commonType && allValues.length > 0) {
|
||||
return { type: commonType, enum: allValues };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Keywords that Cloud Code Assist API rejects (not compliant with their JSON Schema subset)
|
||||
const UNSUPPORTED_SCHEMA_KEYWORDS = new Set([
|
||||
"patternProperties",
|
||||
"additionalProperties",
|
||||
"$schema",
|
||||
"$id",
|
||||
"$ref",
|
||||
"$defs",
|
||||
"definitions",
|
||||
]);
|
||||
|
||||
type SchemaDefs = Map<string, unknown>;
|
||||
|
||||
function extendSchemaDefs(
|
||||
defs: SchemaDefs | undefined,
|
||||
schema: Record<string, unknown>,
|
||||
): SchemaDefs | undefined {
|
||||
const defsEntry =
|
||||
schema.$defs &&
|
||||
typeof schema.$defs === "object" &&
|
||||
!Array.isArray(schema.$defs)
|
||||
? (schema.$defs as Record<string, unknown>)
|
||||
: undefined;
|
||||
const legacyDefsEntry =
|
||||
schema.definitions &&
|
||||
typeof schema.definitions === "object" &&
|
||||
!Array.isArray(schema.definitions)
|
||||
? (schema.definitions as Record<string, unknown>)
|
||||
: undefined;
|
||||
|
||||
if (!defsEntry && !legacyDefsEntry) return defs;
|
||||
|
||||
const next = defs ? new Map(defs) : new Map<string, unknown>();
|
||||
if (defsEntry) {
|
||||
for (const [key, value] of Object.entries(defsEntry)) next.set(key, value);
|
||||
}
|
||||
if (legacyDefsEntry) {
|
||||
for (const [key, value] of Object.entries(legacyDefsEntry))
|
||||
next.set(key, value);
|
||||
}
|
||||
return next;
|
||||
}
|
||||
|
||||
function decodeJsonPointerSegment(segment: string): string {
|
||||
return segment.replaceAll("~1", "/").replaceAll("~0", "~");
|
||||
}
|
||||
|
||||
function tryResolveLocalRef(
|
||||
ref: string,
|
||||
defs: SchemaDefs | undefined,
|
||||
): unknown | undefined {
|
||||
if (!defs) return undefined;
|
||||
const match = ref.match(/^#\/(?:\$defs|definitions)\/(.+)$/);
|
||||
if (!match) return undefined;
|
||||
const name = decodeJsonPointerSegment(match[1] ?? "");
|
||||
if (!name) return undefined;
|
||||
return defs.get(name);
|
||||
}
|
||||
|
||||
function cleanSchemaForGeminiWithDefs(
|
||||
schema: unknown,
|
||||
defs: SchemaDefs | undefined,
|
||||
refStack: Set<string> | undefined,
|
||||
): unknown {
|
||||
if (!schema || typeof schema !== "object") return schema;
|
||||
if (Array.isArray(schema)) {
|
||||
return schema.map((item) =>
|
||||
cleanSchemaForGeminiWithDefs(item, defs, refStack),
|
||||
);
|
||||
}
|
||||
|
||||
const obj = schema as Record<string, unknown>;
|
||||
const nextDefs = extendSchemaDefs(defs, obj);
|
||||
|
||||
const refValue = typeof obj.$ref === "string" ? obj.$ref : undefined;
|
||||
if (refValue) {
|
||||
if (refStack?.has(refValue)) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const resolved = tryResolveLocalRef(refValue, nextDefs);
|
||||
if (resolved) {
|
||||
const nextRefStack = refStack ? new Set(refStack) : new Set<string>();
|
||||
nextRefStack.add(refValue);
|
||||
|
||||
const cleaned = cleanSchemaForGeminiWithDefs(
|
||||
resolved,
|
||||
nextDefs,
|
||||
nextRefStack,
|
||||
);
|
||||
if (!cleaned || typeof cleaned !== "object" || Array.isArray(cleaned)) {
|
||||
return cleaned;
|
||||
}
|
||||
|
||||
const result: Record<string, unknown> = {
|
||||
...(cleaned as Record<string, unknown>),
|
||||
};
|
||||
for (const key of ["description", "title", "default", "examples"]) {
|
||||
if (key in obj && obj[key] !== undefined) {
|
||||
result[key] = obj[key];
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of ["description", "title", "default", "examples"]) {
|
||||
if (key in obj && obj[key] !== undefined) {
|
||||
result[key] = obj[key];
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
const hasAnyOf = "anyOf" in obj && Array.isArray(obj.anyOf);
|
||||
const hasOneOf = "oneOf" in obj && Array.isArray(obj.oneOf);
|
||||
|
||||
// Try to flatten anyOf of literals to a single enum BEFORE processing
|
||||
// This handles Type.Union([Type.Literal("a"), Type.Literal("b")]) patterns
|
||||
if (hasAnyOf) {
|
||||
const flattened = tryFlattenLiteralAnyOf(obj.anyOf as unknown[]);
|
||||
if (flattened) {
|
||||
// Return flattened enum, preserving metadata (description, title, default, examples)
|
||||
const result: Record<string, unknown> = {
|
||||
type: flattened.type,
|
||||
enum: flattened.enum,
|
||||
};
|
||||
for (const key of ["description", "title", "default", "examples"]) {
|
||||
if (key in obj && obj[key] !== undefined) {
|
||||
result[key] = obj[key];
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// Try to flatten oneOf of literals similarly
|
||||
if (hasOneOf) {
|
||||
const flattened = tryFlattenLiteralAnyOf(obj.oneOf as unknown[]);
|
||||
if (flattened) {
|
||||
const result: Record<string, unknown> = {
|
||||
type: flattened.type,
|
||||
enum: flattened.enum,
|
||||
};
|
||||
for (const key of ["description", "title", "default", "examples"]) {
|
||||
if (key in obj && obj[key] !== undefined) {
|
||||
result[key] = obj[key];
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
const cleaned: Record<string, unknown> = {};
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
// Skip keywords that Cloud Code Assist API doesn't support
|
||||
if (UNSUPPORTED_SCHEMA_KEYWORDS.has(key)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Convert const to enum (Gemini doesn't support const)
|
||||
if (key === "const") {
|
||||
cleaned.enum = [value];
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip 'type' if we have 'anyOf' or 'oneOf' — Gemini doesn't allow both
|
||||
if (key === "type" && (hasAnyOf || hasOneOf)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (key === "properties" && value && typeof value === "object") {
|
||||
// Recursively clean nested properties
|
||||
const props = value as Record<string, unknown>;
|
||||
cleaned[key] = Object.fromEntries(
|
||||
Object.entries(props).map(([k, v]) => [
|
||||
k,
|
||||
cleanSchemaForGeminiWithDefs(v, nextDefs, refStack),
|
||||
]),
|
||||
);
|
||||
} else if (key === "items" && value && typeof value === "object") {
|
||||
// Recursively clean array items schema
|
||||
cleaned[key] = cleanSchemaForGeminiWithDefs(value, nextDefs, refStack);
|
||||
} else if (key === "anyOf" && Array.isArray(value)) {
|
||||
// Clean each anyOf variant
|
||||
cleaned[key] = value.map((variant) =>
|
||||
cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
|
||||
);
|
||||
} else if (key === "oneOf" && Array.isArray(value)) {
|
||||
// Clean each oneOf variant
|
||||
cleaned[key] = value.map((variant) =>
|
||||
cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
|
||||
);
|
||||
} else if (key === "allOf" && Array.isArray(value)) {
|
||||
// Clean each allOf variant
|
||||
cleaned[key] = value.map((variant) =>
|
||||
cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
|
||||
);
|
||||
} else {
|
||||
cleaned[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return cleaned;
|
||||
}
|
||||
|
||||
function cleanSchemaForGemini(schema: unknown): unknown {
|
||||
if (!schema || typeof schema !== "object") return schema;
|
||||
if (Array.isArray(schema)) return schema.map(cleanSchemaForGemini);
|
||||
|
||||
const defs = extendSchemaDefs(undefined, schema as Record<string, unknown>);
|
||||
return cleanSchemaForGeminiWithDefs(schema, defs, undefined);
|
||||
}
|
||||
|
||||
function cleanToolSchemaForGemini(schema: Record<string, unknown>): unknown {
|
||||
return cleanSchemaForGemini(schema);
|
||||
}
|
||||
|
||||
function normalizeToolParameters(tool: AnyAgentTool): AnyAgentTool {
|
||||
const schema =
|
||||
tool.parameters && typeof tool.parameters === "object"
|
||||
@@ -532,6 +273,10 @@ function normalizeToolParameters(tool: AnyAgentTool): AnyAgentTool {
|
||||
};
|
||||
}
|
||||
|
||||
function cleanToolSchemaForGemini(schema: Record<string, unknown>): unknown {
|
||||
return cleanSchemaForGemini(schema);
|
||||
}
|
||||
|
||||
function normalizeToolNames(list?: string[]) {
|
||||
if (!list) return [];
|
||||
return list.map((entry) => entry.trim().toLowerCase()).filter(Boolean);
|
||||
|
||||
@@ -52,11 +52,11 @@ describe("Agent-specific sandbox config", () => {
|
||||
spawnCalls.length = 0;
|
||||
});
|
||||
|
||||
it(
|
||||
"should use global sandbox config when no agent-specific config exists",
|
||||
{ timeout: 15_000 },
|
||||
async () => {
|
||||
const { resolveSandboxContext } = await import("./sandbox.js");
|
||||
it(
|
||||
"should use global sandbox config when no agent-specific config exists",
|
||||
{ timeout: 15_000 },
|
||||
async () => {
|
||||
const { resolveSandboxContext } = await import("./sandbox.js");
|
||||
|
||||
const cfg: ClawdbotConfig = {
|
||||
agents: {
|
||||
@@ -75,19 +75,19 @@ describe("Agent-specific sandbox config", () => {
|
||||
},
|
||||
};
|
||||
|
||||
const context = await resolveSandboxContext({
|
||||
config: cfg,
|
||||
sessionKey: "agent:main:main",
|
||||
workspaceDir: "/tmp/test",
|
||||
});
|
||||
const context = await resolveSandboxContext({
|
||||
config: cfg,
|
||||
sessionKey: "agent:main:main",
|
||||
workspaceDir: "/tmp/test",
|
||||
});
|
||||
|
||||
expect(context).toBeDefined();
|
||||
expect(context?.enabled).toBe(true);
|
||||
},
|
||||
);
|
||||
expect(context).toBeDefined();
|
||||
expect(context?.enabled).toBe(true);
|
||||
},
|
||||
);
|
||||
|
||||
it("should allow agent-specific docker setupCommand overrides", async () => {
|
||||
const { resolveSandboxContext } = await import("./sandbox.js");
|
||||
it("should allow agent-specific docker setupCommand overrides", async () => {
|
||||
const { resolveSandboxContext } = await import("./sandbox.js");
|
||||
|
||||
const cfg: ClawdbotConfig = {
|
||||
agents: {
|
||||
|
||||
229
src/agents/schema/clean-for-gemini.ts
Normal file
229
src/agents/schema/clean-for-gemini.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
// Cloud Code Assist API rejects a subset of JSON Schema keywords.
|
||||
// This module scrubs/normalizes tool schemas to keep Gemini happy.
|
||||
|
||||
// Keywords that Cloud Code Assist API rejects (not compliant with their JSON Schema subset)
const UNSUPPORTED_SCHEMA_KEYWORDS = new Set([
  // Structural-validation keywords outside the supported subset:
  "patternProperties",
  "additionalProperties",
  // Draft metadata and reference machinery (local $refs are inlined by the
  // cleaner before this strip list is applied):
  "$schema",
  "$id",
  "$ref",
  "$defs",
  "definitions",
]);
|
||||
|
||||
// Check if an anyOf/oneOf array contains only literal values that can be flattened.
|
||||
// TypeBox Type.Literal generates { const: "value", type: "string" }.
|
||||
// Some schemas may use { enum: ["value"], type: "string" }.
|
||||
// Both patterns are flattened to { type: "string", enum: ["a", "b", ...] }.
|
||||
function tryFlattenLiteralAnyOf(
|
||||
variants: unknown[],
|
||||
): { type: string; enum: unknown[] } | null {
|
||||
if (variants.length === 0) return null;
|
||||
|
||||
const allValues: unknown[] = [];
|
||||
let commonType: string | null = null;
|
||||
|
||||
for (const variant of variants) {
|
||||
if (!variant || typeof variant !== "object") return null;
|
||||
const v = variant as Record<string, unknown>;
|
||||
|
||||
let literalValue: unknown;
|
||||
if ("const" in v) {
|
||||
literalValue = v.const;
|
||||
} else if (Array.isArray(v.enum) && v.enum.length === 1) {
|
||||
literalValue = v.enum[0];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
const variantType = typeof v.type === "string" ? v.type : null;
|
||||
if (!variantType) return null;
|
||||
if (commonType === null) commonType = variantType;
|
||||
else if (commonType !== variantType) return null;
|
||||
|
||||
allValues.push(literalValue);
|
||||
}
|
||||
|
||||
if (commonType && allValues.length > 0)
|
||||
return { type: commonType, enum: allValues };
|
||||
return null;
|
||||
}
|
||||
|
||||
// Accumulated $defs/definitions tables, keyed by definition name.
type SchemaDefs = Map<string, unknown>;
|
||||
|
||||
function extendSchemaDefs(
|
||||
defs: SchemaDefs | undefined,
|
||||
schema: Record<string, unknown>,
|
||||
): SchemaDefs | undefined {
|
||||
const defsEntry =
|
||||
schema.$defs &&
|
||||
typeof schema.$defs === "object" &&
|
||||
!Array.isArray(schema.$defs)
|
||||
? (schema.$defs as Record<string, unknown>)
|
||||
: undefined;
|
||||
const legacyDefsEntry =
|
||||
schema.definitions &&
|
||||
typeof schema.definitions === "object" &&
|
||||
!Array.isArray(schema.definitions)
|
||||
? (schema.definitions as Record<string, unknown>)
|
||||
: undefined;
|
||||
|
||||
if (!defsEntry && !legacyDefsEntry) return defs;
|
||||
|
||||
const next = defs ? new Map(defs) : new Map<string, unknown>();
|
||||
if (defsEntry) {
|
||||
for (const [key, value] of Object.entries(defsEntry)) next.set(key, value);
|
||||
}
|
||||
if (legacyDefsEntry) {
|
||||
for (const [key, value] of Object.entries(legacyDefsEntry))
|
||||
next.set(key, value);
|
||||
}
|
||||
return next;
|
||||
}
|
||||
|
||||
function decodeJsonPointerSegment(segment: string): string {
|
||||
return segment.replaceAll("~1", "/").replaceAll("~0", "~");
|
||||
}
|
||||
|
||||
function tryResolveLocalRef(
|
||||
ref: string,
|
||||
defs: SchemaDefs | undefined,
|
||||
): unknown {
|
||||
if (!defs) return undefined;
|
||||
const match = ref.match(/^#\/(?:\$defs|definitions)\/(.+)$/);
|
||||
if (!match) return undefined;
|
||||
const name = decodeJsonPointerSegment(match[1] ?? "");
|
||||
if (!name) return undefined;
|
||||
return defs.get(name);
|
||||
}
|
||||
|
||||
// Recursive worker for cleanSchemaForGemini. `defs` carries every
// $defs/definitions table seen on the path from the root (inner tables shadow
// outer ones); `refStack` holds the $ref strings currently being inlined so
// cyclic references terminate instead of recursing forever.
function cleanSchemaForGeminiWithDefs(
  schema: unknown,
  defs: SchemaDefs | undefined,
  refStack: Set<string> | undefined,
): unknown {
  // Primitives and null pass through; arrays are cleaned element-wise.
  if (!schema || typeof schema !== "object") return schema;
  if (Array.isArray(schema)) {
    return schema.map((item) =>
      cleanSchemaForGeminiWithDefs(item, defs, refStack),
    );
  }

  const obj = schema as Record<string, unknown>;
  // Fold this node's own $defs/definitions into the inherited table first so
  // refs on this node and its children can resolve against them.
  const nextDefs = extendSchemaDefs(defs, obj);

  // $ref handling must run BEFORE the keyword-strip loop below, which would
  // otherwise discard "$ref" wholesale.
  const refValue = typeof obj.$ref === "string" ? obj.$ref : undefined;
  if (refValue) {
    // Cycle: this exact ref is already being inlined higher up — emit the
    // permissive empty schema to break the loop.
    if (refStack?.has(refValue)) return {};

    const resolved = tryResolveLocalRef(refValue, nextDefs);
    // NOTE(review): a boolean schema stored under $defs (e.g. `false`) is
    // falsy, so it takes the unresolved branch below — confirm intended.
    if (resolved) {
      const nextRefStack = refStack ? new Set(refStack) : new Set<string>();
      nextRefStack.add(refValue);

      // Inline the referenced schema (cleaned), since $ref is unsupported.
      const cleaned = cleanSchemaForGeminiWithDefs(
        resolved,
        nextDefs,
        nextRefStack,
      );
      if (!cleaned || typeof cleaned !== "object" || Array.isArray(cleaned)) {
        return cleaned;
      }

      // Metadata declared at the referencing site wins over the target's.
      const result: Record<string, unknown> = {
        ...(cleaned as Record<string, unknown>),
      };
      for (const key of ["description", "title", "default", "examples"]) {
        if (key in obj && obj[key] !== undefined) result[key] = obj[key];
      }
      return result;
    }

    // Unresolvable ref: drop the constraint entirely, keep only metadata.
    const result: Record<string, unknown> = {};
    for (const key of ["description", "title", "default", "examples"]) {
      if (key in obj && obj[key] !== undefined) result[key] = obj[key];
    }
    return result;
  }

  // Computed once here; also consulted inside the loop to drop "type" when a
  // union keyword is present (the API rejects type + anyOf/oneOf together).
  const hasAnyOf = "anyOf" in obj && Array.isArray(obj.anyOf);
  const hasOneOf = "oneOf" in obj && Array.isArray(obj.oneOf);

  // A literal-only anyOf collapses to a single { type, enum } schema,
  // preserving description/title/default/examples from this node.
  if (hasAnyOf) {
    const flattened = tryFlattenLiteralAnyOf(obj.anyOf as unknown[]);
    if (flattened) {
      const result: Record<string, unknown> = {
        type: flattened.type,
        enum: flattened.enum,
      };
      for (const key of ["description", "title", "default", "examples"]) {
        if (key in obj && obj[key] !== undefined) result[key] = obj[key];
      }
      return result;
    }
  }

  // Same flattening for literal-only oneOf.
  if (hasOneOf) {
    const flattened = tryFlattenLiteralAnyOf(obj.oneOf as unknown[]);
    if (flattened) {
      const result: Record<string, unknown> = {
        type: flattened.type,
        enum: flattened.enum,
      };
      for (const key of ["description", "title", "default", "examples"]) {
        if (key in obj && obj[key] !== undefined) result[key] = obj[key];
      }
      return result;
    }
  }

  const cleaned: Record<string, unknown> = {};

  for (const [key, value] of Object.entries(obj)) {
    // Strip keywords the Cloud Code Assist API rejects outright.
    if (UNSUPPORTED_SCHEMA_KEYWORDS.has(key)) continue;

    // `const` is unsupported; a one-element `enum` is the equivalent.
    if (key === "const") {
      cleaned.enum = [value];
      continue;
    }

    // `type` alongside anyOf/oneOf is rejected — the union wins.
    if (key === "type" && (hasAnyOf || hasOneOf)) continue;

    if (key === "properties" && value && typeof value === "object") {
      // Recurse into each property's schema.
      const props = value as Record<string, unknown>;
      cleaned[key] = Object.fromEntries(
        Object.entries(props).map(([k, v]) => [
          k,
          cleanSchemaForGeminiWithDefs(v, nextDefs, refStack),
        ]),
      );
    } else if (key === "items" && value && typeof value === "object") {
      // Recurse into the array-items schema.
      cleaned[key] = cleanSchemaForGeminiWithDefs(value, nextDefs, refStack);
    } else if (key === "anyOf" && Array.isArray(value)) {
      // Non-literal union: clean each variant in place.
      cleaned[key] = value.map((variant) =>
        cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
      );
    } else if (key === "oneOf" && Array.isArray(value)) {
      cleaned[key] = value.map((variant) =>
        cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
      );
    } else if (key === "allOf" && Array.isArray(value)) {
      cleaned[key] = value.map((variant) =>
        cleanSchemaForGeminiWithDefs(variant, nextDefs, refStack),
      );
    } else {
      // Anything else (description, required, minimum, …) passes through.
      cleaned[key] = value;
    }
  }

  return cleaned;
}
|
||||
|
||||
export function cleanSchemaForGemini(schema: unknown): unknown {
|
||||
if (!schema || typeof schema !== "object") return schema;
|
||||
if (Array.isArray(schema)) return schema.map(cleanSchemaForGemini);
|
||||
|
||||
const defs = extendSchemaDefs(undefined, schema as Record<string, unknown>);
|
||||
return cleanSchemaForGeminiWithDefs(schema, defs, undefined);
|
||||
}
|
||||
@@ -86,7 +86,10 @@ const DEV_TEMPLATE_DIR = path.resolve(
|
||||
"../../docs/reference/templates",
|
||||
);
|
||||
|
||||
async function loadDevTemplate(name: string, fallback: string): Promise<string> {
|
||||
async function loadDevTemplate(
|
||||
name: string,
|
||||
fallback: string,
|
||||
): Promise<string> {
|
||||
try {
|
||||
const raw = await fs.promises.readFile(
|
||||
path.join(DEV_TEMPLATE_DIR, name),
|
||||
@@ -525,8 +528,8 @@ async function runGatewayCommand(
|
||||
opts: GatewayRunOpts,
|
||||
params: GatewayRunParams = {},
|
||||
) {
|
||||
const isDevProfile = process.env.CLAWDBOT_PROFILE?.trim().toLowerCase() ===
|
||||
"dev";
|
||||
const isDevProfile =
|
||||
process.env.CLAWDBOT_PROFILE?.trim().toLowerCase() === "dev";
|
||||
const devMode = Boolean(opts.dev) || isDevProfile;
|
||||
if (opts.reset && !devMode) {
|
||||
defaultRuntime.error("Use --reset with --dev.");
|
||||
|
||||
@@ -276,28 +276,28 @@ describe("doctor", () => {
|
||||
exit: vi.fn(),
|
||||
};
|
||||
|
||||
migrateLegacyConfig.mockReturnValue({
|
||||
config: { whatsapp: { allowFrom: ["+15555550123"] } },
|
||||
changes: ["Moved routing.allowFrom → whatsapp.allowFrom."],
|
||||
});
|
||||
migrateLegacyConfig.mockReturnValue({
|
||||
config: { whatsapp: { allowFrom: ["+15555550123"] } },
|
||||
changes: ["Moved routing.allowFrom → whatsapp.allowFrom."],
|
||||
});
|
||||
|
||||
await doctorCommand(runtime, { nonInteractive: true });
|
||||
await doctorCommand(runtime, { nonInteractive: true });
|
||||
|
||||
expect(writeConfigFile).toHaveBeenCalledTimes(1);
|
||||
const written = writeConfigFile.mock.calls[0]?.[0] as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
expect((written.whatsapp as Record<string, unknown>)?.allowFrom).toEqual([
|
||||
"+15555550123",
|
||||
]);
|
||||
expect(written.routing).toBeUndefined();
|
||||
},
|
||||
);
|
||||
expect(writeConfigFile).toHaveBeenCalledTimes(1);
|
||||
const written = writeConfigFile.mock.calls[0]?.[0] as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
expect((written.whatsapp as Record<string, unknown>)?.allowFrom).toEqual([
|
||||
"+15555550123",
|
||||
]);
|
||||
expect(written.routing).toBeUndefined();
|
||||
},
|
||||
);
|
||||
|
||||
it("migrates legacy Clawdis services", async () => {
|
||||
readConfigFileSnapshot.mockResolvedValue({
|
||||
path: "/tmp/clawdbot.json",
|
||||
it("migrates legacy Clawdis services", async () => {
|
||||
readConfigFileSnapshot.mockResolvedValue({
|
||||
path: "/tmp/clawdbot.json",
|
||||
exists: true,
|
||||
raw: "{}",
|
||||
parsed: {},
|
||||
|
||||
@@ -1,49 +1,4 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { installTestEnv } from "./test-env";
|
||||
|
||||
import { installWindowsCIOutputSanitizer } from "./windows-ci-output-sanitizer";
|
||||
|
||||
installWindowsCIOutputSanitizer();
|
||||
|
||||
const originalHome = process.env.HOME;
|
||||
const originalUserProfile = process.env.USERPROFILE;
|
||||
const originalXdgConfigHome = process.env.XDG_CONFIG_HOME;
|
||||
const originalXdgDataHome = process.env.XDG_DATA_HOME;
|
||||
const originalXdgStateHome = process.env.XDG_STATE_HOME;
|
||||
const originalXdgCacheHome = process.env.XDG_CACHE_HOME;
|
||||
const originalStateDir = process.env.CLAWDBOT_STATE_DIR;
|
||||
const originalTestHome = process.env.CLAWDBOT_TEST_HOME;
|
||||
|
||||
const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), "clawdbot-test-home-"));
|
||||
process.env.HOME = tempHome;
|
||||
process.env.USERPROFILE = tempHome;
|
||||
process.env.CLAWDBOT_TEST_HOME = tempHome;
|
||||
if (process.platform === "win32") {
|
||||
process.env.CLAWDBOT_STATE_DIR = path.join(tempHome, ".clawdbot");
|
||||
}
|
||||
process.env.XDG_CONFIG_HOME = path.join(tempHome, ".config");
|
||||
process.env.XDG_DATA_HOME = path.join(tempHome, ".local", "share");
|
||||
process.env.XDG_STATE_HOME = path.join(tempHome, ".local", "state");
|
||||
process.env.XDG_CACHE_HOME = path.join(tempHome, ".cache");
|
||||
|
||||
const restoreEnv = (key: string, value: string | undefined) => {
|
||||
if (value === undefined) delete process.env[key];
|
||||
else process.env[key] = value;
|
||||
};
|
||||
|
||||
process.on("exit", () => {
|
||||
restoreEnv("HOME", originalHome);
|
||||
restoreEnv("USERPROFILE", originalUserProfile);
|
||||
restoreEnv("XDG_CONFIG_HOME", originalXdgConfigHome);
|
||||
restoreEnv("XDG_DATA_HOME", originalXdgDataHome);
|
||||
restoreEnv("XDG_STATE_HOME", originalXdgStateHome);
|
||||
restoreEnv("XDG_CACHE_HOME", originalXdgCacheHome);
|
||||
restoreEnv("CLAWDBOT_STATE_DIR", originalStateDir);
|
||||
restoreEnv("CLAWDBOT_TEST_HOME", originalTestHome);
|
||||
try {
|
||||
fs.rmSync(tempHome, { recursive: true, force: true });
|
||||
} catch {
|
||||
// ignore cleanup errors
|
||||
}
|
||||
});
|
||||
const { cleanup } = installTestEnv();
|
||||
process.on("exit", cleanup);
|
||||
|
||||
54
test/test-env.ts
Normal file
54
test/test-env.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
|
||||
// One saved environment variable; `value: undefined` means "was unset".
type RestoreEntry = { key: string; value: string | undefined };
|
||||
|
||||
function restoreEnv(entries: RestoreEntry[]): void {
|
||||
for (const { key, value } of entries) {
|
||||
if (value === undefined) delete process.env[key];
|
||||
else process.env[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
export function installTestEnv(): { cleanup: () => void; tempHome: string } {
|
||||
const restore: RestoreEntry[] = [
|
||||
{ key: "HOME", value: process.env.HOME },
|
||||
{ key: "USERPROFILE", value: process.env.USERPROFILE },
|
||||
{ key: "XDG_CONFIG_HOME", value: process.env.XDG_CONFIG_HOME },
|
||||
{ key: "XDG_DATA_HOME", value: process.env.XDG_DATA_HOME },
|
||||
{ key: "XDG_STATE_HOME", value: process.env.XDG_STATE_HOME },
|
||||
{ key: "XDG_CACHE_HOME", value: process.env.XDG_CACHE_HOME },
|
||||
{ key: "CLAWDBOT_STATE_DIR", value: process.env.CLAWDBOT_STATE_DIR },
|
||||
{ key: "CLAWDBOT_TEST_HOME", value: process.env.CLAWDBOT_TEST_HOME },
|
||||
];
|
||||
|
||||
const tempHome = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), "clawdbot-test-home-"),
|
||||
);
|
||||
|
||||
process.env.HOME = tempHome;
|
||||
process.env.USERPROFILE = tempHome;
|
||||
process.env.CLAWDBOT_TEST_HOME = tempHome;
|
||||
|
||||
// Windows: prefer the legacy default state dir so auth/profile tests match real paths.
|
||||
if (process.platform === "win32") {
|
||||
process.env.CLAWDBOT_STATE_DIR = path.join(tempHome, ".clawdbot");
|
||||
}
|
||||
|
||||
process.env.XDG_CONFIG_HOME = path.join(tempHome, ".config");
|
||||
process.env.XDG_DATA_HOME = path.join(tempHome, ".local", "share");
|
||||
process.env.XDG_STATE_HOME = path.join(tempHome, ".local", "state");
|
||||
process.env.XDG_CACHE_HOME = path.join(tempHome, ".cache");
|
||||
|
||||
const cleanup = () => {
|
||||
restoreEnv(restore);
|
||||
try {
|
||||
fs.rmSync(tempHome, { recursive: true, force: true });
|
||||
} catch {
|
||||
// ignore cleanup errors
|
||||
}
|
||||
};
|
||||
|
||||
return { cleanup, tempHome };
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
import { installWindowsCIOutputSanitizer } from "./windows-ci-output-sanitizer";
|
||||
|
||||
export default function globalSetup() {
|
||||
installWindowsCIOutputSanitizer();
|
||||
}
|
||||
@@ -1,59 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
|
||||
function sanitizeWindowsCIOutput(text: string): string {
|
||||
return text
|
||||
.replace(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g, "?")
|
||||
.replace(/[\uD800-\uDFFF]/g, "?");
|
||||
}
|
||||
|
||||
function decodeUtf8Text(chunk: unknown): string | null {
|
||||
if (typeof chunk === "string") return chunk;
|
||||
if (Buffer.isBuffer(chunk)) return chunk.toString("utf-8");
|
||||
if (chunk instanceof Uint8Array) return Buffer.from(chunk).toString("utf-8");
|
||||
if (chunk instanceof ArrayBuffer) return Buffer.from(chunk).toString("utf-8");
|
||||
if (ArrayBuffer.isView(chunk)) {
|
||||
return Buffer.from(
|
||||
chunk.buffer,
|
||||
chunk.byteOffset,
|
||||
chunk.byteLength,
|
||||
).toString("utf-8");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Wrap process.stdout/stderr writes (and fs.writeSync to fds 1/2) so that
// surrogate-bearing output is replaced with "?" before it reaches the
// GitHub Actions console on Windows. Non-text chunks pass through untouched.
export function installWindowsCIOutputSanitizer(): void {
  // Only active on Windows runners inside GitHub Actions.
  if (process.platform !== "win32") return;
  if (process.env.GITHUB_ACTIONS !== "true") return;

  // Install-once guard on globalThis: multiple entry points (setup file,
  // global setup) may call this in the same process.
  const globalKey = "__clawdbotWindowsCIOutputSanitizerInstalled";
  if ((globalThis as Record<string, unknown>)[globalKey] === true) return;
  (globalThis as Record<string, unknown>)[globalKey] = true;

  // Bind the originals before replacing them so the wrappers delegate to the
  // untouched implementations.
  const originalStdoutWrite = process.stdout.write.bind(process.stdout);
  const originalStderrWrite = process.stderr.write.bind(process.stderr);

  process.stdout.write = ((chunk: unknown, ...args: unknown[]) => {
    const text = decodeUtf8Text(chunk);
    if (text !== null)
      return originalStdoutWrite(sanitizeWindowsCIOutput(text), ...args);
    return originalStdoutWrite(chunk as never, ...args); // passthrough
  }) as typeof process.stdout.write;

  process.stderr.write = ((chunk: unknown, ...args: unknown[]) => {
    const text = decodeUtf8Text(chunk);
    if (text !== null)
      return originalStderrWrite(sanitizeWindowsCIOutput(text), ...args);
    return originalStderrWrite(chunk as never, ...args); // passthrough
  }) as typeof process.stderr.write;

  // Some code writes directly via fs.writeSync(1|2, ...); sanitize that path
  // too, but only for the stdout/stderr file descriptors.
  const originalWriteSync = fs.writeSync.bind(fs);
  fs.writeSync = ((fd: number, data: unknown, ...args: unknown[]) => {
    if (fd === 1 || fd === 2) {
      const text = decodeUtf8Text(data);
      if (text !== null) {
        return originalWriteSync(fd, sanitizeWindowsCIOutput(text), ...args);
      }
    }
    return originalWriteSync(fd, data as never, ...(args as never[]));
  }) as typeof fs.writeSync;
}
|
||||
@@ -4,7 +4,6 @@ export default defineConfig({
|
||||
test: {
|
||||
include: ["src/**/*.test.ts", "test/format-error.test.ts"],
|
||||
setupFiles: ["test/setup.ts"],
|
||||
globalSetup: ["test/vitest-global-setup.ts"],
|
||||
exclude: [
|
||||
"dist/**",
|
||||
"apps/macos/**",
|
||||
|
||||
Reference in New Issue
Block a user