chore: fix formatting
@@ -101,7 +101,14 @@ export function loadAgentIdentity(workspace: string): AgentIdentity | null {
   try {
     const content = fs.readFileSync(identityPath, "utf-8");
     const parsed = parseIdentityMarkdown(content);
-    if (!parsed.name && !parsed.emoji && !parsed.theme && !parsed.creature && !parsed.vibe && !parsed.avatar) {
+    if (
+      !parsed.name &&
+      !parsed.emoji &&
+      !parsed.theme &&
+      !parsed.creature &&
+      !parsed.vibe &&
+      !parsed.avatar
+    ) {
       return null;
     }
     return parsed;
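Not part of the diff: a minimal sketch of how a caller might use loadAgentIdentity, which returns null when every identity field is empty; `workspacePath` and the fallback object are hypothetical.

// Illustrative only, not part of this commit; the fallback shape is assumed.
const identity = loadAgentIdentity(workspacePath) ?? { name: "Agent", emoji: "🤖" };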
@@ -3,92 +3,92 @@ import { Type } from "@sinclair/typebox";
 import { NonEmptyString, SessionLabelString } from "./primitives.js";
 
 export const SessionsListParamsSchema = Type.Object(
   {
     limit: Type.Optional(Type.Integer({ minimum: 1 })),
     activeMinutes: Type.Optional(Type.Integer({ minimum: 1 })),
     includeGlobal: Type.Optional(Type.Boolean()),
     includeUnknown: Type.Optional(Type.Boolean()),
     /**
      * Read first 8KB of each session transcript to derive title from first user message.
      * Performs a file read per session - use `limit` to bound result set on large stores.
      */
     includeDerivedTitles: Type.Optional(Type.Boolean()),
     /**
      * Read last 16KB of each session transcript to extract most recent message preview.
      * Performs a file read per session - use `limit` to bound result set on large stores.
      */
     includeLastMessage: Type.Optional(Type.Boolean()),
     label: Type.Optional(SessionLabelString),
     spawnedBy: Type.Optional(NonEmptyString),
     agentId: Type.Optional(NonEmptyString),
     search: Type.Optional(Type.String()),
   },
   { additionalProperties: false },
 );
 
 export const SessionsResolveParamsSchema = Type.Object(
   {
     key: Type.Optional(NonEmptyString),
     label: Type.Optional(SessionLabelString),
     agentId: Type.Optional(NonEmptyString),
     spawnedBy: Type.Optional(NonEmptyString),
     includeGlobal: Type.Optional(Type.Boolean()),
     includeUnknown: Type.Optional(Type.Boolean()),
   },
   { additionalProperties: false },
 );
 
 export const SessionsPatchParamsSchema = Type.Object(
   {
     key: NonEmptyString,
     label: Type.Optional(Type.Union([SessionLabelString, Type.Null()])),
     thinkingLevel: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     verboseLevel: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     reasoningLevel: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     responseUsage: Type.Optional(
       Type.Union([
         Type.Literal("off"),
         Type.Literal("tokens"),
         Type.Literal("full"),
         // Backward compat with older clients/stores.
         Type.Literal("on"),
         Type.Null(),
       ]),
     ),
     elevatedLevel: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     execHost: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     execSecurity: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     execAsk: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     execNode: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     model: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     spawnedBy: Type.Optional(Type.Union([NonEmptyString, Type.Null()])),
     sendPolicy: Type.Optional(
       Type.Union([Type.Literal("allow"), Type.Literal("deny"), Type.Null()]),
     ),
     groupActivation: Type.Optional(
       Type.Union([Type.Literal("mention"), Type.Literal("always"), Type.Null()]),
     ),
   },
   { additionalProperties: false },
 );
 
 export const SessionsResetParamsSchema = Type.Object(
   { key: NonEmptyString },
   { additionalProperties: false },
 );
 
 export const SessionsDeleteParamsSchema = Type.Object(
   {
     key: NonEmptyString,
     deleteTranscript: Type.Optional(Type.Boolean()),
   },
   { additionalProperties: false },
 );
 
 export const SessionsCompactParamsSchema = Type.Object(
   {
     key: NonEmptyString,
     maxLines: Type.Optional(Type.Integer({ minimum: 1 })),
   },
   { additionalProperties: false },
 );
@@ -11,7 +11,7 @@ const WORD_BOUNDARY_CHARS = /[\s\-_./:#@]/;
  * Check if position is at a word boundary.
  */
 export function isWordBoundary(text: string, index: number): boolean {
   return index === 0 || WORD_BOUNDARY_CHARS.test(text[index - 1] ?? "");
 }
 
 /**
@@ -19,17 +19,17 @@ export function isWordBoundary(text: string, index: number): boolean {
  * Returns null if no match.
  */
 export function findWordBoundaryIndex(text: string, query: string): number | null {
   if (!query) return null;
   const textLower = text.toLowerCase();
   const queryLower = query.toLowerCase();
   const maxIndex = textLower.length - queryLower.length;
   if (maxIndex < 0) return null;
   for (let i = 0; i <= maxIndex; i++) {
     if (textLower.startsWith(queryLower, i) && isWordBoundary(textLower, i)) {
       return i;
     }
   }
   return null;
 }
 
 /**
@@ -37,31 +37,31 @@ export function findWordBoundaryIndex(text: string, query: string): number | nul
  * Returns score (lower = better) or null if no match.
  */
 export function fuzzyMatchLower(queryLower: string, textLower: string): number | null {
   if (queryLower.length === 0) return 0;
   if (queryLower.length > textLower.length) return null;
 
   let queryIndex = 0;
   let score = 0;
   let lastMatchIndex = -1;
   let consecutiveMatches = 0;
 
   for (let i = 0; i < textLower.length && queryIndex < queryLower.length; i++) {
     if (textLower[i] === queryLower[queryIndex]) {
       const isAtWordBoundary = isWordBoundary(textLower, i);
       if (lastMatchIndex === i - 1) {
         consecutiveMatches++;
         score -= consecutiveMatches * 5; // Reward consecutive matches
       } else {
         consecutiveMatches = 0;
         if (lastMatchIndex >= 0) score += (i - lastMatchIndex - 1) * 2; // Penalize gaps
       }
       if (isAtWordBoundary) score -= 10; // Reward word boundary matches
       score += i * 0.1; // Slight penalty for later matches
       lastMatchIndex = i;
       queryIndex++;
     }
   }
   return queryIndex < queryLower.length ? null : score;
 }
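Not part of the diff: a qualitative sketch of how the scoring above ranks matches (lower is better); exact values depend on the weights in the loop.

// Illustrative only, not part of this commit (comments are qualitative).
fuzzyMatchLower("fil", "filterable-select-list"); // prefix with consecutive hits: strongly negative (best)
fuzzyMatchLower("fsl", "filterable-select-list"); // word-boundary initials, but gap-penalized: closer to zero
fuzzyMatchLower("xyz", "filterable-select-list"); // not a subsequence of the text: null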
 
 /**
@@ -69,46 +69,46 @@ export function fuzzyMatchLower(queryLower: string, textLower: string): number |
  * Supports space-separated tokens (all must match).
  */
 export function fuzzyFilterLower<T extends { searchTextLower?: string }>(
   items: T[],
   queryLower: string,
 ): T[] {
   const trimmed = queryLower.trim();
   if (!trimmed) return items;
 
   const tokens = trimmed.split(/\s+/).filter((t) => t.length > 0);
   if (tokens.length === 0) return items;
 
   const results: { item: T; score: number }[] = [];
   for (const item of items) {
     const text = item.searchTextLower ?? "";
     let totalScore = 0;
     let allMatch = true;
     for (const token of tokens) {
       const score = fuzzyMatchLower(token, text);
       if (score !== null) {
         totalScore += score;
       } else {
         allMatch = false;
         break;
       }
     }
     if (allMatch) results.push({ item, score: totalScore });
   }
   results.sort((a, b) => a.score - b.score);
   return results.map((r) => r.item);
 }
 
 /**
  * Prepare items for fuzzy filtering by pre-computing lowercase search text.
  */
-export function prepareSearchItems<T extends { label?: string; description?: string; searchText?: string }>(
-  items: T[],
-): (T & { searchTextLower: string })[] {
+export function prepareSearchItems<
+  T extends { label?: string; description?: string; searchText?: string },
+>(items: T[]): (T & { searchTextLower: string })[] {
   return items.map((item) => {
     const parts: string[] = [];
     if (item.label) parts.push(item.label);
     if (item.description) parts.push(item.description);
     if (item.searchText) parts.push(item.searchText);
     return { ...item, searchTextLower: parts.join(" ").toLowerCase() };
   });
 }
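Not part of the diff: a short sketch of how prepareSearchItems and fuzzyFilterLower compose; the item values are made up.

// Illustrative only, not part of this commit.
const items = prepareSearchItems([
  { label: "Session list", description: "browse active sessions" },
  { label: "Settings", description: "theme and keybindings" },
]);
// The query is already lowercase; every whitespace-separated token must match.
const hits = fuzzyFilterLower(items, "sess list");
// -> [{ label: "Session list", ... }]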
@@ -5,10 +5,7 @@ import {
   selectListTheme,
   settingsListTheme,
 } from "../theme/theme.js";
-import {
-  FilterableSelectList,
-  type FilterableSelectItem,
-} from "./filterable-select-list.js";
+import { FilterableSelectList, type FilterableSelectItem } from "./filterable-select-list.js";
 import { SearchableSelectList } from "./searchable-select-list.js";
 
 export function createSelectList(items: SelectItem[], maxVisible = 7) {
@@ -1,15 +1,15 @@
 export function formatRelativeTime(timestamp: number): string {
   const now = Date.now();
   const diff = now - timestamp;
   const seconds = Math.floor(diff / 1000);
   const minutes = Math.floor(seconds / 60);
   const hours = Math.floor(minutes / 60);
   const days = Math.floor(hours / 24);
 
   if (seconds < 60) return "just now";
   if (minutes < 60) return `${minutes}m ago`;
   if (hours < 24) return `${hours}h ago`;
   if (days === 1) return "Yesterday";
   if (days < 7) return `${days}d ago`;
   return new Date(timestamp).toLocaleDateString(undefined, { month: "short", day: "numeric" });
 }
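Not part of the diff: sample outputs of formatRelativeTime for a few made-up offsets from the current time.

// Illustrative only, not part of this commit.
formatRelativeTime(Date.now() - 30 * 1000);                // "just now"
formatRelativeTime(Date.now() - 5 * 60 * 1000);            // "5m ago"
formatRelativeTime(Date.now() - 3 * 60 * 60 * 1000);       // "3h ago"
formatRelativeTime(Date.now() - 2 * 24 * 60 * 60 * 1000);  // "2d ago"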