feat(memory): add lifecycle hooks and vector memory plugin
Add plugin lifecycle hooks infrastructure: - before_agent_start: inject context before agent loop - agent_end: analyze conversation after completion - 13 hook types total (message, tool, session, gateway hooks) Memory plugin implementation: - LanceDB vector storage with OpenAI embeddings - kind: "memory" to integrate with upstream slot system - Auto-recall: injects <relevant-memories> when context found - Auto-capture: stores preferences, decisions, entities - Rule-based capture filtering with 0.95 similarity dedup - Tools: memory_recall, memory_store, memory_forget - CLI: clawdbot ltm list|search|stats Plugin infrastructure: - api.on() method for hook registration - Global hook runner singleton for cross-module access - Priority ordering and error catching Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
282
extensions/memory/index.test.ts
Normal file
282
extensions/memory/index.test.ts
Normal file
@@ -0,0 +1,282 @@
|
||||
/**
|
||||
* Memory Plugin E2E Tests
|
||||
*
|
||||
* Tests the memory plugin functionality including:
|
||||
* - Plugin registration and configuration
|
||||
* - Memory storage and retrieval
|
||||
* - Auto-recall via hooks
|
||||
* - Auto-capture filtering
|
||||
*/
|
||||
|
||||
import { describe, test, expect, beforeEach, afterEach } from "vitest";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import os from "node:os";
|
||||
|
||||
// Skip if no OpenAI API key
|
||||
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
|
||||
const describeWithKey = OPENAI_API_KEY ? describe : describe.skip;
|
||||
|
||||
describeWithKey("memory plugin e2e", () => {
|
||||
let tmpDir: string;
|
||||
let dbPath: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-memory-test-"));
|
||||
dbPath = path.join(tmpDir, "lancedb");
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
if (tmpDir) {
|
||||
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("memory plugin registers and initializes correctly", async () => {
|
||||
// Dynamic import to avoid loading LanceDB when not testing
|
||||
const { default: memoryPlugin } = await import("./index.js");
|
||||
|
||||
expect(memoryPlugin.id).toBe("memory");
|
||||
expect(memoryPlugin.name).toBe("Memory (Vector)");
|
||||
expect(memoryPlugin.kind).toBe("memory");
|
||||
expect(memoryPlugin.configSchema).toBeDefined();
|
||||
expect(memoryPlugin.register).toBeInstanceOf(Function);
|
||||
});
|
||||
|
||||
test("config schema parses valid config", async () => {
|
||||
const { default: memoryPlugin } = await import("./index.js");
|
||||
|
||||
const config = memoryPlugin.configSchema?.parse?.({
|
||||
embedding: {
|
||||
apiKey: OPENAI_API_KEY,
|
||||
model: "text-embedding-3-small",
|
||||
},
|
||||
dbPath,
|
||||
autoCapture: true,
|
||||
autoRecall: true,
|
||||
});
|
||||
|
||||
expect(config).toBeDefined();
|
||||
expect(config?.embedding?.apiKey).toBe(OPENAI_API_KEY);
|
||||
expect(config?.dbPath).toBe(dbPath);
|
||||
});
|
||||
|
||||
test("config schema resolves env vars", async () => {
|
||||
const { default: memoryPlugin } = await import("./index.js");
|
||||
|
||||
// Set a test env var
|
||||
process.env.TEST_MEMORY_API_KEY = "test-key-123";
|
||||
|
||||
const config = memoryPlugin.configSchema?.parse?.({
|
||||
embedding: {
|
||||
apiKey: "${TEST_MEMORY_API_KEY}",
|
||||
},
|
||||
dbPath,
|
||||
});
|
||||
|
||||
expect(config?.embedding?.apiKey).toBe("test-key-123");
|
||||
|
||||
delete process.env.TEST_MEMORY_API_KEY;
|
||||
});
|
||||
|
||||
test("config schema rejects missing apiKey", async () => {
|
||||
const { default: memoryPlugin } = await import("./index.js");
|
||||
|
||||
expect(() => {
|
||||
memoryPlugin.configSchema?.parse?.({
|
||||
embedding: {},
|
||||
dbPath,
|
||||
});
|
||||
}).toThrow("embedding.apiKey is required");
|
||||
});
|
||||
|
||||
test("shouldCapture filters correctly", async () => {
|
||||
// Test the capture filtering logic by checking the rules
|
||||
const triggers = [
|
||||
{ text: "I prefer dark mode", shouldMatch: true },
|
||||
{ text: "Remember that my name is John", shouldMatch: true },
|
||||
{ text: "My email is test@example.com", shouldMatch: true },
|
||||
{ text: "Call me at +1234567890123", shouldMatch: true },
|
||||
{ text: "We decided to use TypeScript", shouldMatch: true },
|
||||
{ text: "I always want verbose output", shouldMatch: true },
|
||||
{ text: "Just a random short message", shouldMatch: false },
|
||||
{ text: "x", shouldMatch: false }, // Too short
|
||||
{ text: "<relevant-memories>injected</relevant-memories>", shouldMatch: false }, // Skip injected
|
||||
];
|
||||
|
||||
// The shouldCapture function is internal, but we can test via the capture behavior
|
||||
// For now, just verify the patterns we expect to match
|
||||
for (const { text, shouldMatch } of triggers) {
|
||||
const hasPreference = /prefer|radši|like|love|hate|want/i.test(text);
|
||||
const hasRemember = /zapamatuj|pamatuj|remember/i.test(text);
|
||||
const hasEmail = /[\w.-]+@[\w.-]+\.\w+/.test(text);
|
||||
const hasPhone = /\+\d{10,}/.test(text);
|
||||
const hasDecision = /rozhodli|decided|will use|budeme/i.test(text);
|
||||
const hasAlways = /always|never|important/i.test(text);
|
||||
const isInjected = text.includes("<relevant-memories>");
|
||||
const isTooShort = text.length < 10;
|
||||
|
||||
const wouldCapture =
|
||||
!isTooShort &&
|
||||
!isInjected &&
|
||||
(hasPreference || hasRemember || hasEmail || hasPhone || hasDecision || hasAlways);
|
||||
|
||||
if (shouldMatch) {
|
||||
expect(wouldCapture).toBe(true);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test("detectCategory classifies correctly", async () => {
|
||||
// Test category detection patterns
|
||||
const cases = [
|
||||
{ text: "I prefer dark mode", expected: "preference" },
|
||||
{ text: "We decided to use React", expected: "decision" },
|
||||
{ text: "My email is test@example.com", expected: "entity" },
|
||||
{ text: "The server is running on port 3000", expected: "fact" },
|
||||
];
|
||||
|
||||
for (const { text, expected } of cases) {
|
||||
const lower = text.toLowerCase();
|
||||
let category: string;
|
||||
|
||||
if (/prefer|radši|like|love|hate|want/i.test(lower)) {
|
||||
category = "preference";
|
||||
} else if (/rozhodli|decided|will use|budeme/i.test(lower)) {
|
||||
category = "decision";
|
||||
} else if (/\+\d{10,}|@[\w.-]+\.\w+|is called|jmenuje se/i.test(lower)) {
|
||||
category = "entity";
|
||||
} else if (/is|are|has|have|je|má|jsou/i.test(lower)) {
|
||||
category = "fact";
|
||||
} else {
|
||||
category = "other";
|
||||
}
|
||||
|
||||
expect(category).toBe(expected);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Live tests that require OpenAI API key and actually use LanceDB
|
||||
describeWithKey("memory plugin live tests", () => {
|
||||
let tmpDir: string;
|
||||
let dbPath: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawdbot-memory-live-"));
|
||||
dbPath = path.join(tmpDir, "lancedb");
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
if (tmpDir) {
|
||||
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("memory tools work end-to-end", async () => {
|
||||
const { default: memoryPlugin } = await import("./index.js");
|
||||
|
||||
// Mock plugin API
|
||||
const registeredTools: any[] = [];
|
||||
const registeredClis: any[] = [];
|
||||
const registeredServices: any[] = [];
|
||||
const registeredHooks: Record<string, any[]> = {};
|
||||
const logs: string[] = [];
|
||||
|
||||
const mockApi = {
|
||||
id: "memory",
|
||||
name: "Memory (Vector)",
|
||||
source: "test",
|
||||
config: {},
|
||||
pluginConfig: {
|
||||
embedding: {
|
||||
apiKey: OPENAI_API_KEY,
|
||||
model: "text-embedding-3-small",
|
||||
},
|
||||
dbPath,
|
||||
autoCapture: false,
|
||||
autoRecall: false,
|
||||
},
|
||||
runtime: {},
|
||||
logger: {
|
||||
info: (msg: string) => logs.push(`[info] ${msg}`),
|
||||
warn: (msg: string) => logs.push(`[warn] ${msg}`),
|
||||
error: (msg: string) => logs.push(`[error] ${msg}`),
|
||||
debug: (msg: string) => logs.push(`[debug] ${msg}`),
|
||||
},
|
||||
registerTool: (tool: any, opts: any) => {
|
||||
registeredTools.push({ tool, opts });
|
||||
},
|
||||
registerCli: (registrar: any, opts: any) => {
|
||||
registeredClis.push({ registrar, opts });
|
||||
},
|
||||
registerService: (service: any) => {
|
||||
registeredServices.push(service);
|
||||
},
|
||||
on: (hookName: string, handler: any) => {
|
||||
if (!registeredHooks[hookName]) registeredHooks[hookName] = [];
|
||||
registeredHooks[hookName].push(handler);
|
||||
},
|
||||
resolvePath: (p: string) => p,
|
||||
};
|
||||
|
||||
// Register plugin
|
||||
await memoryPlugin.register(mockApi as any);
|
||||
|
||||
// Check registration
|
||||
expect(registeredTools.length).toBe(3);
|
||||
expect(registeredTools.map((t) => t.opts?.name)).toContain("memory_recall");
|
||||
expect(registeredTools.map((t) => t.opts?.name)).toContain("memory_store");
|
||||
expect(registeredTools.map((t) => t.opts?.name)).toContain("memory_forget");
|
||||
expect(registeredClis.length).toBe(1);
|
||||
expect(registeredServices.length).toBe(1);
|
||||
|
||||
// Get tool functions
|
||||
const storeTool = registeredTools.find((t) => t.opts?.name === "memory_store")?.tool;
|
||||
const recallTool = registeredTools.find((t) => t.opts?.name === "memory_recall")?.tool;
|
||||
const forgetTool = registeredTools.find((t) => t.opts?.name === "memory_forget")?.tool;
|
||||
|
||||
// Test store
|
||||
const storeResult = await storeTool.execute("test-call-1", {
|
||||
text: "The user prefers dark mode for all applications",
|
||||
importance: 0.8,
|
||||
category: "preference",
|
||||
});
|
||||
|
||||
expect(storeResult.details?.action).toBe("created");
|
||||
expect(storeResult.details?.id).toBeDefined();
|
||||
const storedId = storeResult.details?.id;
|
||||
|
||||
// Test recall
|
||||
const recallResult = await recallTool.execute("test-call-2", {
|
||||
query: "dark mode preference",
|
||||
limit: 5,
|
||||
});
|
||||
|
||||
expect(recallResult.details?.count).toBeGreaterThan(0);
|
||||
expect(recallResult.details?.memories?.[0]?.text).toContain("dark mode");
|
||||
|
||||
// Test duplicate detection
|
||||
const duplicateResult = await storeTool.execute("test-call-3", {
|
||||
text: "The user prefers dark mode for all applications",
|
||||
});
|
||||
|
||||
expect(duplicateResult.details?.action).toBe("duplicate");
|
||||
|
||||
// Test forget
|
||||
const forgetResult = await forgetTool.execute("test-call-4", {
|
||||
memoryId: storedId,
|
||||
});
|
||||
|
||||
expect(forgetResult.details?.action).toBe("deleted");
|
||||
|
||||
// Verify it's gone
|
||||
const recallAfterForget = await recallTool.execute("test-call-5", {
|
||||
query: "dark mode preference",
|
||||
limit: 5,
|
||||
});
|
||||
|
||||
expect(recallAfterForget.details?.count).toBe(0);
|
||||
}, 60000); // 60s timeout for live API calls
|
||||
});
|
||||
671
extensions/memory/index.ts
Normal file
671
extensions/memory/index.ts
Normal file
@@ -0,0 +1,671 @@
|
||||
/**
|
||||
* Clawdbot Memory Plugin
|
||||
*
|
||||
* Long-term memory with vector search for AI conversations.
|
||||
* Uses LanceDB for storage and OpenAI for embeddings.
|
||||
* Provides seamless auto-recall and auto-capture via lifecycle hooks.
|
||||
*/
|
||||
|
||||
import { Type } from "@sinclair/typebox";
|
||||
import * as lancedb from "@lancedb/lancedb";
|
||||
import OpenAI from "openai";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { homedir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import type { ClawdbotPluginApi } from "clawdbot/plugin-sdk";
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
// ============================================================================
|
||||
|
||||
type MemoryConfig = {
|
||||
embedding: {
|
||||
provider: "openai";
|
||||
model?: string;
|
||||
apiKey: string;
|
||||
};
|
||||
dbPath?: string;
|
||||
autoCapture?: boolean;
|
||||
autoRecall?: boolean;
|
||||
};
|
||||
|
||||
type MemoryEntry = {
|
||||
id: string;
|
||||
text: string;
|
||||
vector: number[];
|
||||
importance: number;
|
||||
category: "preference" | "fact" | "decision" | "entity" | "other";
|
||||
createdAt: number;
|
||||
};
|
||||
|
||||
type MemorySearchResult = {
|
||||
entry: MemoryEntry;
|
||||
score: number;
|
||||
};
|
||||
|
||||
// ============================================================================
|
||||
// Config Schema
|
||||
// ============================================================================
|
||||
|
||||
const memoryConfigSchema = {
|
||||
parse(value: unknown): MemoryConfig {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
||||
throw new Error("memory config required");
|
||||
}
|
||||
const cfg = value as Record<string, unknown>;
|
||||
|
||||
// Embedding config is required
|
||||
const embedding = cfg.embedding as Record<string, unknown> | undefined;
|
||||
if (!embedding || typeof embedding.apiKey !== "string") {
|
||||
throw new Error("embedding.apiKey is required");
|
||||
}
|
||||
|
||||
return {
|
||||
embedding: {
|
||||
provider: "openai",
|
||||
model:
|
||||
typeof embedding.model === "string"
|
||||
? embedding.model
|
||||
: "text-embedding-3-small",
|
||||
apiKey: resolveEnvVars(embedding.apiKey),
|
||||
},
|
||||
dbPath:
|
||||
typeof cfg.dbPath === "string"
|
||||
? cfg.dbPath
|
||||
: join(homedir(), ".clawdbot", "memory", "lancedb"),
|
||||
autoCapture: cfg.autoCapture !== false,
|
||||
autoRecall: cfg.autoRecall !== false,
|
||||
};
|
||||
},
|
||||
uiHints: {
|
||||
"embedding.apiKey": {
|
||||
label: "OpenAI API Key",
|
||||
sensitive: true,
|
||||
placeholder: "sk-proj-...",
|
||||
help: "API key for OpenAI embeddings (or use ${OPENAI_API_KEY})",
|
||||
},
|
||||
"embedding.model": {
|
||||
label: "Embedding Model",
|
||||
placeholder: "text-embedding-3-small",
|
||||
help: "OpenAI embedding model to use",
|
||||
},
|
||||
dbPath: {
|
||||
label: "Database Path",
|
||||
placeholder: "~/.clawdbot/memory/lancedb",
|
||||
advanced: true,
|
||||
},
|
||||
autoCapture: {
|
||||
label: "Auto-Capture",
|
||||
help: "Automatically capture important information from conversations",
|
||||
},
|
||||
autoRecall: {
|
||||
label: "Auto-Recall",
|
||||
help: "Automatically inject relevant memories into context",
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
function resolveEnvVars(value: string): string {
|
||||
return value.replace(/\$\{([^}]+)\}/g, (_, envVar) => {
|
||||
const envValue = process.env[envVar];
|
||||
if (!envValue) {
|
||||
throw new Error(`Environment variable ${envVar} is not set`);
|
||||
}
|
||||
return envValue;
|
||||
});
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// LanceDB Provider
|
||||
// ============================================================================
|
||||
|
||||
const TABLE_NAME = "memories";
|
||||
const VECTOR_DIM = 1536; // OpenAI text-embedding-3-small
|
||||
|
||||
class MemoryDB {
|
||||
private db: lancedb.Connection | null = null;
|
||||
private table: lancedb.Table | null = null;
|
||||
private initPromise: Promise<void> | null = null;
|
||||
|
||||
constructor(private readonly dbPath: string) {}
|
||||
|
||||
private async ensureInitialized(): Promise<void> {
|
||||
if (this.table) return;
|
||||
if (this.initPromise) return this.initPromise;
|
||||
|
||||
this.initPromise = this.doInitialize();
|
||||
return this.initPromise;
|
||||
}
|
||||
|
||||
private async doInitialize(): Promise<void> {
|
||||
this.db = await lancedb.connect(this.dbPath);
|
||||
const tables = await this.db.tableNames();
|
||||
|
||||
if (tables.includes(TABLE_NAME)) {
|
||||
this.table = await this.db.openTable(TABLE_NAME);
|
||||
} else {
|
||||
this.table = await this.db.createTable(TABLE_NAME, [
|
||||
{
|
||||
id: "__schema__",
|
||||
text: "",
|
||||
vector: new Array(VECTOR_DIM).fill(0),
|
||||
importance: 0,
|
||||
category: "other",
|
||||
createdAt: 0,
|
||||
},
|
||||
]);
|
||||
await this.table.delete('id = "__schema__"');
|
||||
}
|
||||
}
|
||||
|
||||
async store(
|
||||
entry: Omit<MemoryEntry, "id" | "createdAt">,
|
||||
): Promise<MemoryEntry> {
|
||||
await this.ensureInitialized();
|
||||
|
||||
const fullEntry: MemoryEntry = {
|
||||
...entry,
|
||||
id: randomUUID(),
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
|
||||
await this.table!.add([fullEntry]);
|
||||
return fullEntry;
|
||||
}
|
||||
|
||||
async search(
|
||||
vector: number[],
|
||||
limit = 5,
|
||||
minScore = 0.5,
|
||||
): Promise<MemorySearchResult[]> {
|
||||
await this.ensureInitialized();
|
||||
|
||||
const results = await this.table!.vectorSearch(vector).limit(limit).toArray();
|
||||
|
||||
// LanceDB uses L2 distance by default; convert to similarity score
|
||||
const mapped = results.map((row) => {
|
||||
const distance = row._distance ?? 0;
|
||||
// Use inverse for a 0-1 range: sim = 1 / (1 + d)
|
||||
const score = 1 / (1 + distance);
|
||||
return {
|
||||
entry: {
|
||||
id: row.id as string,
|
||||
text: row.text as string,
|
||||
vector: row.vector as number[],
|
||||
importance: row.importance as number,
|
||||
category: row.category as MemoryEntry["category"],
|
||||
createdAt: row.createdAt as number,
|
||||
},
|
||||
score,
|
||||
};
|
||||
});
|
||||
|
||||
return mapped.filter((r) => r.score >= minScore);
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<boolean> {
|
||||
await this.ensureInitialized();
|
||||
// Validate UUID format to prevent injection
|
||||
const uuidRegex =
|
||||
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
|
||||
if (!uuidRegex.test(id)) {
|
||||
throw new Error(`Invalid memory ID format: ${id}`);
|
||||
}
|
||||
await this.table!.delete(`id = '${id}'`);
|
||||
return true;
|
||||
}
|
||||
|
||||
async count(): Promise<number> {
|
||||
await this.ensureInitialized();
|
||||
return this.table!.countRows();
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// OpenAI Embeddings
|
||||
// ============================================================================
|
||||
|
||||
class Embeddings {
|
||||
private client: OpenAI;
|
||||
|
||||
constructor(
|
||||
apiKey: string,
|
||||
private model: string,
|
||||
) {
|
||||
this.client = new OpenAI({ apiKey });
|
||||
}
|
||||
|
||||
async embed(text: string): Promise<number[]> {
|
||||
const response = await this.client.embeddings.create({
|
||||
model: this.model,
|
||||
input: text,
|
||||
});
|
||||
return response.data[0].embedding;
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Rule-based capture filter
|
||||
// ============================================================================
|
||||
|
||||
const MEMORY_TRIGGERS = [
|
||||
/zapamatuj si|pamatuj|remember/i,
|
||||
/preferuji|radši|nechci|prefer/i,
|
||||
/rozhodli jsme|budeme používat/i,
|
||||
/\+\d{10,}/,
|
||||
/[\w.-]+@[\w.-]+\.\w+/,
|
||||
/můj\s+\w+\s+je|je\s+můj/i,
|
||||
/my\s+\w+\s+is|is\s+my/i,
|
||||
/i (like|prefer|hate|love|want|need)/i,
|
||||
/always|never|important/i,
|
||||
];
|
||||
|
||||
function shouldCapture(text: string): boolean {
|
||||
if (text.length < 10 || text.length > 500) return false;
|
||||
// Skip injected context from memory recall
|
||||
if (text.includes("<relevant-memories>")) return false;
|
||||
// Skip system-generated content
|
||||
if (text.startsWith("<") && text.includes("</")) return false;
|
||||
// Skip agent summary responses (contain markdown formatting)
|
||||
if (text.includes("**") && text.includes("\n-")) return false;
|
||||
// Skip emoji-heavy responses (likely agent output)
|
||||
const emojiCount = (text.match(/[\u{1F300}-\u{1F9FF}]/gu) || []).length;
|
||||
if (emojiCount > 3) return false;
|
||||
return MEMORY_TRIGGERS.some((r) => r.test(text));
|
||||
}
|
||||
|
||||
function detectCategory(
|
||||
text: string,
|
||||
): "preference" | "fact" | "decision" | "entity" | "other" {
|
||||
const lower = text.toLowerCase();
|
||||
if (/prefer|radši|like|love|hate|want/i.test(lower)) return "preference";
|
||||
if (/rozhodli|decided|will use|budeme/i.test(lower)) return "decision";
|
||||
if (/\+\d{10,}|@[\w.-]+\.\w+|is called|jmenuje se/i.test(lower))
|
||||
return "entity";
|
||||
if (/is|are|has|have|je|má|jsou/i.test(lower)) return "fact";
|
||||
return "other";
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Plugin Definition
|
||||
// ============================================================================
|
||||
|
||||
const memoryPlugin = {
|
||||
id: "memory",
|
||||
name: "Memory (Vector)",
|
||||
description: "Long-term memory with vector search and seamless auto-recall/capture",
|
||||
kind: "memory" as const,
|
||||
configSchema: memoryConfigSchema,
|
||||
|
||||
register(api: ClawdbotPluginApi) {
|
||||
const cfg = memoryConfigSchema.parse(api.pluginConfig);
|
||||
const db = new MemoryDB(cfg.dbPath!);
|
||||
const embeddings = new Embeddings(cfg.embedding.apiKey, cfg.embedding.model!);
|
||||
|
||||
api.logger.info(`memory: plugin registered (db: ${cfg.dbPath}, lazy init)`);
|
||||
|
||||
// ========================================================================
|
||||
// Tools
|
||||
// ========================================================================
|
||||
|
||||
api.registerTool(
|
||||
{
|
||||
name: "memory_recall",
|
||||
label: "Memory Recall",
|
||||
description:
|
||||
"Search through long-term memories. Use when you need context about user preferences, past decisions, or previously discussed topics.",
|
||||
parameters: Type.Object({
|
||||
query: Type.String({ description: "Search query" }),
|
||||
limit: Type.Optional(Type.Number({ description: "Max results (default: 5)" })),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
const { query, limit = 5 } = params as { query: string; limit?: number };
|
||||
|
||||
const vector = await embeddings.embed(query);
|
||||
const results = await db.search(vector, limit, 0.1);
|
||||
|
||||
if (results.length === 0) {
|
||||
return {
|
||||
content: [{ type: "text", text: "No relevant memories found." }],
|
||||
details: { count: 0 },
|
||||
};
|
||||
}
|
||||
|
||||
const text = results
|
||||
.map(
|
||||
(r, i) =>
|
||||
`${i + 1}. [${r.entry.category}] ${r.entry.text} (${(r.score * 100).toFixed(0)}%)`,
|
||||
)
|
||||
.join("\n");
|
||||
|
||||
// Strip vector data for serialization (typed arrays can't be cloned)
|
||||
const sanitizedResults = results.map((r) => ({
|
||||
id: r.entry.id,
|
||||
text: r.entry.text,
|
||||
category: r.entry.category,
|
||||
importance: r.entry.importance,
|
||||
score: r.score,
|
||||
}));
|
||||
|
||||
return {
|
||||
content: [
|
||||
{ type: "text", text: `Found ${results.length} memories:\n\n${text}` },
|
||||
],
|
||||
details: { count: results.length, memories: sanitizedResults },
|
||||
};
|
||||
},
|
||||
},
|
||||
{ name: "memory_recall" },
|
||||
);
|
||||
|
||||
api.registerTool(
|
||||
{
|
||||
name: "memory_store",
|
||||
label: "Memory Store",
|
||||
description:
|
||||
"Save important information in long-term memory. Use for preferences, facts, decisions.",
|
||||
parameters: Type.Object({
|
||||
text: Type.String({ description: "Information to remember" }),
|
||||
importance: Type.Optional(
|
||||
Type.Number({ description: "Importance 0-1 (default: 0.7)" }),
|
||||
),
|
||||
category: Type.Optional(
|
||||
Type.Union([
|
||||
Type.Literal("preference"),
|
||||
Type.Literal("fact"),
|
||||
Type.Literal("decision"),
|
||||
Type.Literal("entity"),
|
||||
Type.Literal("other"),
|
||||
]),
|
||||
),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
const {
|
||||
text,
|
||||
importance = 0.7,
|
||||
category = "other",
|
||||
} = params as {
|
||||
text: string;
|
||||
importance?: number;
|
||||
category?: MemoryEntry["category"];
|
||||
};
|
||||
|
||||
const vector = await embeddings.embed(text);
|
||||
|
||||
// Check for duplicates
|
||||
const existing = await db.search(vector, 1, 0.95);
|
||||
if (existing.length > 0) {
|
||||
return {
|
||||
content: [
|
||||
{ type: "text", text: `Similar memory already exists: "${existing[0].entry.text}"` },
|
||||
],
|
||||
details: { action: "duplicate", existingId: existing[0].entry.id, existingText: existing[0].entry.text },
|
||||
};
|
||||
}
|
||||
|
||||
const entry = await db.store({
|
||||
text,
|
||||
vector,
|
||||
importance,
|
||||
category,
|
||||
});
|
||||
|
||||
return {
|
||||
content: [{ type: "text", text: `Stored: "${text.slice(0, 100)}..."` }],
|
||||
details: { action: "created", id: entry.id },
|
||||
};
|
||||
},
|
||||
},
|
||||
{ name: "memory_store" },
|
||||
);
|
||||
|
||||
api.registerTool(
|
||||
{
|
||||
name: "memory_forget",
|
||||
label: "Memory Forget",
|
||||
description: "Delete specific memories. GDPR-compliant.",
|
||||
parameters: Type.Object({
|
||||
query: Type.Optional(Type.String({ description: "Search to find memory" })),
|
||||
memoryId: Type.Optional(Type.String({ description: "Specific memory ID" })),
|
||||
}),
|
||||
async execute(_toolCallId, params) {
|
||||
const { query, memoryId } = params as { query?: string; memoryId?: string };
|
||||
|
||||
if (memoryId) {
|
||||
await db.delete(memoryId);
|
||||
return {
|
||||
content: [{ type: "text", text: `Memory ${memoryId} forgotten.` }],
|
||||
details: { action: "deleted", id: memoryId },
|
||||
};
|
||||
}
|
||||
|
||||
if (query) {
|
||||
const vector = await embeddings.embed(query);
|
||||
const results = await db.search(vector, 5, 0.7);
|
||||
|
||||
if (results.length === 0) {
|
||||
return {
|
||||
content: [{ type: "text", text: "No matching memories found." }],
|
||||
details: { found: 0 },
|
||||
};
|
||||
}
|
||||
|
||||
if (results.length === 1 && results[0].score > 0.9) {
|
||||
await db.delete(results[0].entry.id);
|
||||
return {
|
||||
content: [
|
||||
{ type: "text", text: `Forgotten: "${results[0].entry.text}"` },
|
||||
],
|
||||
details: { action: "deleted", id: results[0].entry.id },
|
||||
};
|
||||
}
|
||||
|
||||
const list = results
|
||||
.map((r) => `- [${r.entry.id.slice(0, 8)}] ${r.entry.text.slice(0, 60)}...`)
|
||||
.join("\n");
|
||||
|
||||
// Strip vector data for serialization
|
||||
const sanitizedCandidates = results.map((r) => ({
|
||||
id: r.entry.id,
|
||||
text: r.entry.text,
|
||||
category: r.entry.category,
|
||||
score: r.score,
|
||||
}));
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Found ${results.length} candidates. Specify memoryId:\n${list}`,
|
||||
},
|
||||
],
|
||||
details: { action: "candidates", candidates: sanitizedCandidates },
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{ type: "text", text: "Provide query or memoryId." }],
|
||||
details: { error: "missing_param" },
|
||||
};
|
||||
},
|
||||
},
|
||||
{ name: "memory_forget" },
|
||||
);
|
||||
|
||||
// ========================================================================
|
||||
// CLI Commands
|
||||
// ========================================================================
|
||||
|
||||
api.registerCli(
|
||||
({ program }) => {
|
||||
const memory = program
|
||||
.command("ltm")
|
||||
.description("Long-term memory plugin commands");
|
||||
|
||||
memory
|
||||
.command("list")
|
||||
.description("List memories")
|
||||
.action(async () => {
|
||||
const count = await db.count();
|
||||
console.log(`Total memories: ${count}`);
|
||||
});
|
||||
|
||||
memory
|
||||
.command("search")
|
||||
.description("Search memories")
|
||||
.argument("<query>", "Search query")
|
||||
.option("--limit <n>", "Max results", "5")
|
||||
.action(async (query, opts) => {
|
||||
const vector = await embeddings.embed(query);
|
||||
const results = await db.search(vector, parseInt(opts.limit), 0.3);
|
||||
// Strip vectors for output
|
||||
const output = results.map((r) => ({
|
||||
id: r.entry.id,
|
||||
text: r.entry.text,
|
||||
category: r.entry.category,
|
||||
importance: r.entry.importance,
|
||||
score: r.score,
|
||||
}));
|
||||
console.log(JSON.stringify(output, null, 2));
|
||||
});
|
||||
|
||||
memory
|
||||
.command("stats")
|
||||
.description("Show memory statistics")
|
||||
.action(async () => {
|
||||
const count = await db.count();
|
||||
console.log(`Total memories: ${count}`);
|
||||
});
|
||||
},
|
||||
{ commands: ["ltm"] },
|
||||
);
|
||||
|
||||
// ========================================================================
|
||||
// Lifecycle Hooks
|
||||
// ========================================================================
|
||||
|
||||
// Auto-recall: inject relevant memories before agent starts
|
||||
if (cfg.autoRecall) {
|
||||
api.on("before_agent_start", async (event) => {
|
||||
if (!event.prompt || event.prompt.length < 5) return;
|
||||
|
||||
try {
|
||||
const vector = await embeddings.embed(event.prompt);
|
||||
const results = await db.search(vector, 3, 0.3);
|
||||
|
||||
if (results.length === 0) return;
|
||||
|
||||
const memoryContext = results
|
||||
.map((r) => `- [${r.entry.category}] ${r.entry.text}`)
|
||||
.join("\n");
|
||||
|
||||
api.logger.info?.(
|
||||
`memory: injecting ${results.length} memories into context`,
|
||||
);
|
||||
|
||||
return {
|
||||
prependContext: `<relevant-memories>\nThe following memories may be relevant to this conversation:\n${memoryContext}\n</relevant-memories>`,
|
||||
};
|
||||
} catch (err) {
|
||||
api.logger.warn(`memory: recall failed: ${String(err)}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Auto-capture: analyze and store important information after agent ends
|
||||
if (cfg.autoCapture) {
|
||||
api.on("agent_end", async (event) => {
|
||||
if (!event.success || !event.messages || event.messages.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Extract text content from messages (handling unknown[] type)
|
||||
const texts: string[] = [];
|
||||
for (const msg of event.messages) {
|
||||
// Type guard for message object
|
||||
if (!msg || typeof msg !== "object") continue;
|
||||
const msgObj = msg as Record<string, unknown>;
|
||||
|
||||
// Only process user and assistant messages
|
||||
const role = msgObj.role;
|
||||
if (role !== "user" && role !== "assistant") continue;
|
||||
|
||||
const content = msgObj.content;
|
||||
|
||||
// Handle string content directly
|
||||
if (typeof content === "string") {
|
||||
texts.push(content);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Handle array content (content blocks)
|
||||
if (Array.isArray(content)) {
|
||||
for (const block of content) {
|
||||
if (
|
||||
block &&
|
||||
typeof block === "object" &&
|
||||
"type" in block &&
|
||||
(block as Record<string, unknown>).type === "text" &&
|
||||
"text" in block &&
|
||||
typeof (block as Record<string, unknown>).text === "string"
|
||||
) {
|
||||
texts.push((block as Record<string, unknown>).text as string);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Filter for capturable content
|
||||
const toCapture = texts.filter(
|
||||
(text) => text && shouldCapture(text),
|
||||
);
|
||||
if (toCapture.length === 0) return;
|
||||
|
||||
// Store each capturable piece (limit to 3 per conversation)
|
||||
let stored = 0;
|
||||
for (const text of toCapture.slice(0, 3)) {
|
||||
const category = detectCategory(text);
|
||||
const vector = await embeddings.embed(text);
|
||||
|
||||
// Check for duplicates (high similarity threshold)
|
||||
const existing = await db.search(vector, 1, 0.95);
|
||||
if (existing.length > 0) continue;
|
||||
|
||||
await db.store({
|
||||
text,
|
||||
vector,
|
||||
importance: 0.7,
|
||||
category,
|
||||
});
|
||||
stored++;
|
||||
}
|
||||
|
||||
if (stored > 0) {
|
||||
api.logger.info(`memory: auto-captured ${stored} memories`);
|
||||
}
|
||||
} catch (err) {
|
||||
api.logger.warn(`memory: capture failed: ${String(err)}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// ========================================================================
|
||||
// Service
|
||||
// ========================================================================
|
||||
|
||||
api.registerService({
|
||||
id: "memory",
|
||||
start: () => {
|
||||
api.logger.info(
|
||||
`memory: initialized (db: ${cfg.dbPath}, model: ${cfg.embedding.model})`,
|
||||
);
|
||||
},
|
||||
stop: () => {
|
||||
api.logger.info("memory: stopped");
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
export default memoryPlugin;
|
||||
14
extensions/memory/package.json
Normal file
14
extensions/memory/package.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "@clawdbot/memory",
|
||||
"version": "0.0.1",
|
||||
"type": "module",
|
||||
"description": "Clawdbot long-term memory plugin with vector search and seamless auto-recall/capture",
|
||||
"dependencies": {
|
||||
"@sinclair/typebox": "0.34.47",
|
||||
"@lancedb/lancedb": "^0.15.0",
|
||||
"openai": "^4.77.0"
|
||||
},
|
||||
"clawdbot": {
|
||||
"extensions": ["./index.ts"]
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user