chore: format to 2-space and bump changelog
@@ -6,26 +6,26 @@ export const MAX_DOCUMENT_BYTES = 100 * 1024 * 1024; // 100MB

export type MediaKind = "image" | "audio" | "video" | "document" | "unknown";

export function mediaKindFromMime(mime?: string | null): MediaKind {
  if (!mime) return "unknown";
  if (mime.startsWith("image/")) return "image";
  if (mime.startsWith("audio/")) return "audio";
  if (mime.startsWith("video/")) return "video";
  if (mime === "application/pdf") return "document";
  if (mime.startsWith("application/")) return "document";
  return "unknown";
}

export function maxBytesForKind(kind: MediaKind): number {
  switch (kind) {
    case "image":
      return MAX_IMAGE_BYTES;
    case "audio":
      return MAX_AUDIO_BYTES;
    case "video":
      return MAX_VIDEO_BYTES;
    case "document":
      return MAX_DOCUMENT_BYTES;
    default:
      return MAX_DOCUMENT_BYTES;
  }
}

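A minimal usage sketch of the two helpers above; the import path is assumed, since the file name is not shown in this hunk:

import { mediaKindFromMime, maxBytesForKind } from "./kind.js"; // path assumed

// Classify an attachment by MIME type, then apply the per-kind size cap.
const kind = mediaKindFromMime("application/pdf"); // "document"
const limit = maxBytesForKind(kind); // MAX_DOCUMENT_BYTES (100MB)

const sizeBytes = 42 * 1024 * 1024;
if (sizeBytes > limit) {
  throw new Error(`Attachment too large for kind "${kind}"`);
}
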
@@ -12,11 +12,11 @@ const logInfo = vi.fn();
vi.mock("./store.js", () => ({ saveMediaSource }));
vi.mock("../infra/tailscale.js", () => ({ getTailnetHostname }));
vi.mock("../infra/ports.js", async () => {
  const actual =
    await vi.importActual<typeof import("../infra/ports.js")>(
      "../infra/ports.js",
    );
  return { ensurePortAvailable, PortInUseError: actual.PortInUseError };
});
vi.mock("./server.js", () => ({ startMediaServer }));
vi.mock("../logger.js", () => ({ logInfo }));

@@ -25,69 +25,69 @@ const { ensureMediaHosted } = await import("./host.js");
const { PortInUseError } = await import("../infra/ports.js");

describe("ensureMediaHosted", () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it("throws and cleans up when server not allowed to start", async () => {
    saveMediaSource.mockResolvedValue({
      id: "id1",
      path: "/tmp/file1",
      size: 5,
    });
    getTailnetHostname.mockResolvedValue("tailnet-host");
    ensurePortAvailable.mockResolvedValue(undefined);
    const rmSpy = vi.spyOn(fs, "rm").mockResolvedValue(undefined);

    await expect(
      ensureMediaHosted("/tmp/file1", { startServer: false }),
    ).rejects.toThrow("requires the webhook/Funnel server");
    expect(rmSpy).toHaveBeenCalledWith("/tmp/file1");
    rmSpy.mockRestore();
  });

  it("starts media server when allowed", async () => {
    saveMediaSource.mockResolvedValue({
      id: "id2",
      path: "/tmp/file2",
      size: 9,
    });
    getTailnetHostname.mockResolvedValue("tail.net");
    ensurePortAvailable.mockResolvedValue(undefined);
    const fakeServer = { unref: vi.fn() } as unknown as Server;
    startMediaServer.mockResolvedValue(fakeServer);

    const result = await ensureMediaHosted("/tmp/file2", {
      startServer: true,
      port: 1234,
    });
    expect(startMediaServer).toHaveBeenCalledWith(
      1234,
      expect.any(Number),
      expect.anything(),
    );
    expect(logInfo).toHaveBeenCalled();
    expect(result).toEqual({
      url: "https://tail.net/media/id2",
      id: "id2",
      size: 9,
    });
  });

  it("skips server start when port already in use", async () => {
    saveMediaSource.mockResolvedValue({
      id: "id3",
      path: "/tmp/file3",
      size: 7,
    });
    getTailnetHostname.mockResolvedValue("tail.net");
    ensurePortAvailable.mockRejectedValue(new PortInUseError(3000, "proc"));

    const result = await ensureMediaHosted("/tmp/file3", {
      startServer: false,
      port: 3000,
    });
    expect(startMediaServer).not.toHaveBeenCalled();
    expect(result.url).toBe("https://tail.net/media/id3");
  });
});

@@ -12,54 +12,54 @@ const TTL_MS = 2 * 60 * 1000;
let mediaServer: import("http").Server | null = null;

export type HostedMedia = {
  url: string;
  id: string;
  size: number;
};

export async function ensureMediaHosted(
  source: string,
  opts: {
    port?: number;
    startServer?: boolean;
    runtime?: RuntimeEnv;
  } = {},
): Promise<HostedMedia> {
  const port = opts.port ?? DEFAULT_PORT;
  const runtime = opts.runtime ?? defaultRuntime;

  const saved = await saveMediaSource(source);
  const hostname = await getTailnetHostname();

  // Decide whether we must start a media server.
  const needsServerStart = await isPortFree(port);
  if (needsServerStart && !opts.startServer) {
    await fs.rm(saved.path).catch(() => {});
    throw new Error(
      "Media hosting requires the webhook/Funnel server. Start `warelay webhook`/`warelay up` or re-run with --serve-media.",
    );
  }
  if (needsServerStart && opts.startServer) {
    if (!mediaServer) {
      mediaServer = await startMediaServer(port, TTL_MS, runtime);
      logInfo(
        `📡 Started temporary media host on http://localhost:${port}/media/:id (TTL ${TTL_MS / 1000}s)`,
        runtime,
      );
      mediaServer.unref?.();
    }
  }

  const url = `https://${hostname}/media/${saved.id}`;
  return { url, id: saved.id, size: saved.size };
}

async function isPortFree(port: number) {
  try {
    await ensurePortAvailable(port);
    return true;
  } catch (err) {
    if (err instanceof PortInUseError) return false;
    throw err;
  }
}

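A rough usage sketch of `ensureMediaHosted`; the import path and file name are assumptions based on the tests above:

import { ensureMediaHosted } from "./host.js"; // module path assumed

// Host a local file over the tailnet; start a temporary media server on
// port 1234 if nothing is already listening there.
const hosted = await ensureMediaHosted("/tmp/photo.jpg", {
  startServer: true,
  port: 1234,
});
console.log(hosted.url); // e.g. https://<tailnet-host>/media/<id>
console.log(hosted.size); // bytes copied into the media store

// With startServer: false the call throws if no webhook/Funnel server is
// running, and the staged copy is removed again.
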
@@ -1,53 +1,101 @@
 // Shared helpers for parsing MEDIA tokens from command/stdout text.

 // Allow optional wrapping backticks and punctuation after the token; capture the core token.
-export const MEDIA_TOKEN_RE = /\bMEDIA:\s*`?([^\s`]+)`?/i;
+export const MEDIA_TOKEN_RE = /\bMEDIA:\s*`?([^\n]+)`?/gi;

 export function normalizeMediaSource(src: string) {
   return src.startsWith("file://") ? src.replace("file://", "") : src;
 }

 function cleanCandidate(raw: string) {
   return raw.replace(/^[`"'[{(]+/, "").replace(/[`"'\\})\],]+$/, "");
 }

 function isValidMedia(candidate: string) {
   if (!candidate) return false;
   if (candidate.length > 1024) return false;
   if (/\s/.test(candidate)) return false;
   return (
     /^https?:\/\//i.test(candidate) ||
     candidate.startsWith("/") ||
     candidate.startsWith("./")
   );
 }

 export function splitMediaFromOutput(raw: string): {
   text: string;
-  mediaUrl?: string;
+  mediaUrls?: string[];
+  mediaUrl?: string; // legacy first item for backward compatibility
 } {
   const trimmedRaw = raw.trim();
-  const match = MEDIA_TOKEN_RE.exec(trimmedRaw);
-  if (!match?.[1]) return { text: trimmedRaw };
-
-  const candidate = normalizeMediaSource(cleanCandidate(match[1]));
-  const mediaUrl = isValidMedia(candidate) ? candidate : undefined;
-
-  const cleanedText = mediaUrl
-    ? trimmedRaw
-        .replace(match[0], "")
-        .replace(/[ \t]+\n/g, "\n")
-        .replace(/[ \t]{2,}/g, " ")
-        .replace(/\n{2,}/g, "\n")
-        .trim()
-    : trimmedRaw
-        .split("\n")
-        .filter((line) => !MEDIA_TOKEN_RE.test(line))
-        .join("\n")
-        .replace(/[ \t]+\n/g, "\n")
-        .replace(/[ \t]{2,}/g, " ")
-        .replace(/\n{2,}/g, "\n")
-        .trim();
-
-  return mediaUrl ? { text: cleanedText, mediaUrl } : { text: cleanedText };
+  if (!trimmedRaw) return { text: "" };
+
+  const media: string[] = [];
+  let foundMediaToken = false;
+
+  // Collect tokens line by line so we can strip them cleanly.
+  const lines = trimmedRaw.split("\n");
+  const keptLines: string[] = [];
+
+  for (const line of lines) {
+    const matches = Array.from(line.matchAll(MEDIA_TOKEN_RE));
+    if (matches.length === 0) {
+      keptLines.push(line);
+      continue;
+    }
+
+    foundMediaToken = true;
+    const pieces: string[] = [];
+    let cursor = 0;
+    let hasValidMedia = false;
+
+    for (const match of matches) {
+      const start = match.index ?? 0;
+      pieces.push(line.slice(cursor, start));
+
+      const payload = match[1];
+      const parts = payload.split(/\s+/).filter(Boolean);
+      const invalidParts: string[] = [];
+      for (const part of parts) {
+        const candidate = normalizeMediaSource(cleanCandidate(part));
+        if (isValidMedia(candidate)) {
+          media.push(candidate);
+          hasValidMedia = true;
+        } else {
+          invalidParts.push(part);
+        }
+      }
+
+      if (hasValidMedia && invalidParts.length > 0) {
+        pieces.push(invalidParts.join(" "));
+      }
+
+      cursor = start + match[0].length;
+    }
+
+    pieces.push(line.slice(cursor));
+
+    const cleanedLine = pieces
+      .join("")
+      .replace(/[ \t]{2,}/g, " ")
+      .trim();
+
+    // If the line becomes empty, drop it.
+    if (cleanedLine) {
+      keptLines.push(cleanedLine);
+    }
+  }
+
+  const cleanedText = keptLines
+    .join("\n")
+    .replace(/[ \t]+\n/g, "\n")
+    .replace(/[ \t]{2,}/g, " ")
+    .replace(/\n{2,}/g, "\n")
+    .trim();
+
+  if (media.length === 0) {
+    return { text: foundMediaToken ? cleanedText : trimmedRaw };
+  }
+
+  return { text: cleanedText, mediaUrls: media, mediaUrl: media[0] };
 }

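A small sketch of how the reworked parser behaves on output carrying more than one MEDIA token; the module path is assumed and the expected values are inferred from the code above:

import { splitMediaFromOutput } from "./parse.js"; // module path assumed

const out = splitMediaFromOutput(
  [
    "Here are the renders:",
    "MEDIA: /tmp/render-1.png",
    "MEDIA: https://example.com/render-2.png",
  ].join("\n"),
);

// out.mediaUrls -> ["/tmp/render-1.png", "https://example.com/render-2.png"]
// out.mediaUrl  -> "/tmp/render-1.png" (legacy first item)
// out.text      -> "Here are the renders:" (token lines stripped)
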
@@ -8,45 +8,45 @@ const MEDIA_DIR = path.join(process.cwd(), "tmp-media-test");
const cleanOldMedia = vi.fn().mockResolvedValue(undefined);

vi.mock("./store.js", () => ({
  getMediaDir: () => MEDIA_DIR,
  cleanOldMedia,
}));

const { startMediaServer } = await import("./server.js");

describe("media server", () => {
  beforeAll(async () => {
    await fs.rm(MEDIA_DIR, { recursive: true, force: true });
    await fs.mkdir(MEDIA_DIR, { recursive: true });
  });

  afterAll(async () => {
    await fs.rm(MEDIA_DIR, { recursive: true, force: true });
  });

  it("serves media and cleans up after send", async () => {
    const file = path.join(MEDIA_DIR, "file1");
    await fs.writeFile(file, "hello");
    const server = await startMediaServer(0, 5_000);
    const port = (server.address() as AddressInfo).port;
    const res = await fetch(`http://localhost:${port}/media/file1`);
    expect(res.status).toBe(200);
    expect(await res.text()).toBe("hello");
    await new Promise((r) => setTimeout(r, 600));
    await expect(fs.stat(file)).rejects.toThrow();
    await new Promise((r) => server.close(r));
  });

  it("expires old media", async () => {
    const file = path.join(MEDIA_DIR, "old");
    await fs.writeFile(file, "stale");
    const past = Date.now() - 10_000;
    await fs.utimes(file, past / 1000, past / 1000);
    const server = await startMediaServer(0, 1_000);
    const port = (server.address() as AddressInfo).port;
    const res = await fetch(`http://localhost:${port}/media/old`);
    expect(res.status).toBe(410);
    await expect(fs.stat(file)).rejects.toThrow();
    await new Promise((r) => server.close(r));
  });
});

@@ -9,53 +9,53 @@ import { cleanOldMedia, getMediaDir } from "./store.js";
const DEFAULT_TTL_MS = 2 * 60 * 1000;

export function attachMediaRoutes(
  app: Express,
  ttlMs = DEFAULT_TTL_MS,
  _runtime: RuntimeEnv = defaultRuntime,
) {
  const mediaDir = getMediaDir();

  app.get("/media/:id", async (req, res) => {
    const id = req.params.id;
    const file = path.join(mediaDir, id);
    try {
      const stat = await fs.stat(file);
      if (Date.now() - stat.mtimeMs > ttlMs) {
        await fs.rm(file).catch(() => {});
        res.status(410).send("expired");
        return;
      }
      res.sendFile(file);
      // best-effort single-use cleanup after response ends
      res.on("finish", () => {
        setTimeout(() => {
          fs.rm(file).catch(() => {});
        }, 500);
      });
    } catch {
      res.status(404).send("not found");
    }
  });

  // periodic cleanup
  setInterval(() => {
    void cleanOldMedia(ttlMs);
  }, ttlMs).unref();
}

export async function startMediaServer(
  port: number,
  ttlMs = DEFAULT_TTL_MS,
  runtime: RuntimeEnv = defaultRuntime,
): Promise<Server> {
  const app = express();
  attachMediaRoutes(app, ttlMs, runtime);
  return await new Promise((resolve, reject) => {
    const server = app.listen(port);
    server.once("listening", () => resolve(server));
    server.once("error", (err) => {
      runtime.error(danger(`Media server failed: ${String(err)}`));
      reject(err);
    });
  });
}

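A hedged sketch of standing the server up directly, mirroring what the tests above do (port 0 asks the OS for a free port); the import path is assumed:

import type { AddressInfo } from "node:net";
import { startMediaServer } from "./server.js"; // module path assumed

// Listen on an ephemeral port with a 2-minute TTL for served files.
const server = await startMediaServer(0, 2 * 60 * 1000);
const { port } = server.address() as AddressInfo;
console.log(`media server on http://localhost:${port}/media/:id`);

// Files older than the TTL answer 410 and are deleted; served files are
// removed shortly after the response finishes.
await new Promise<void>((resolve) => {
  server.close(() => resolve());
});
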
@@ -7,54 +7,54 @@ const realOs = await vi.importActual<typeof import("node:os")>("node:os");
const HOME = path.join(realOs.tmpdir(), "warelay-home-test");

vi.mock("node:os", () => ({
  default: { homedir: () => HOME },
  homedir: () => HOME,
}));

const store = await import("./store.js");

describe("media store", () => {
  beforeAll(async () => {
    await fs.rm(HOME, { recursive: true, force: true });
  });

  afterAll(async () => {
    await fs.rm(HOME, { recursive: true, force: true });
  });

  it("creates and returns media directory", async () => {
    const dir = await store.ensureMediaDir();
    expect(dir).toContain("warelay-home-test");
    const stat = await fs.stat(dir);
    expect(stat.isDirectory()).toBe(true);
  });

  it("saves buffers and enforces size limit", async () => {
    const buf = Buffer.from("hello");
    const saved = await store.saveMediaBuffer(buf, "text/plain");
    const savedStat = await fs.stat(saved.path);
    expect(savedStat.size).toBe(buf.length);
    expect(saved.contentType).toBe("text/plain");

    const huge = Buffer.alloc(5 * 1024 * 1024 + 1);
    await expect(store.saveMediaBuffer(huge)).rejects.toThrow(
      "Media exceeds 5MB limit",
    );
  });

  it("copies local files and cleans old media", async () => {
    const srcFile = path.join(HOME, "tmp-src.txt");
    await fs.mkdir(HOME, { recursive: true });
    await fs.writeFile(srcFile, "local file");
    const saved = await store.saveMediaSource(srcFile);
    expect(saved.size).toBe(10);
    const savedStat = await fs.stat(saved.path);
    expect(savedStat.isFile()).toBe(true);

    // make the file look old and ensure cleanOldMedia removes it
    const past = Date.now() - 10_000;
    await fs.utimes(saved.path, past / 1000, past / 1000);
    await store.cleanOldMedia(1);
    await expect(fs.stat(saved.path)).rejects.toThrow();
  });
});

@@ -11,108 +11,108 @@ const MAX_BYTES = 5 * 1024 * 1024; // 5MB
const DEFAULT_TTL_MS = 2 * 60 * 1000; // 2 minutes

export function getMediaDir() {
  return MEDIA_DIR;
}

export async function ensureMediaDir() {
  await fs.mkdir(MEDIA_DIR, { recursive: true });
  return MEDIA_DIR;
}

export async function cleanOldMedia(ttlMs = DEFAULT_TTL_MS) {
  await ensureMediaDir();
  const entries = await fs.readdir(MEDIA_DIR).catch(() => []);
  const now = Date.now();
  await Promise.all(
    entries.map(async (file) => {
      const full = path.join(MEDIA_DIR, file);
      const stat = await fs.stat(full).catch(() => null);
      if (!stat) return;
      if (now - stat.mtimeMs > ttlMs) {
        await fs.rm(full).catch(() => {});
      }
    }),
  );
}

function looksLikeUrl(src: string) {
  return /^https?:\/\//i.test(src);
}

async function downloadToFile(
  url: string,
  dest: string,
  headers?: Record<string, string>,
) {
  await new Promise<void>((resolve, reject) => {
    const req = request(url, { headers }, (res) => {
      if (!res.statusCode || res.statusCode >= 400) {
        reject(new Error(`HTTP ${res.statusCode ?? "?"} downloading media`));
        return;
      }
      let total = 0;
      const out = createWriteStream(dest);
      res.on("data", (chunk) => {
        total += chunk.length;
        if (total > MAX_BYTES) {
          req.destroy(new Error("Media exceeds 5MB limit"));
        }
      });
      pipeline(res, out)
        .then(() => resolve())
        .catch(reject);
    });
    req.on("error", reject);
    req.end();
  });
}

export type SavedMedia = {
  id: string;
  path: string;
  size: number;
  contentType?: string;
};

export async function saveMediaSource(
  source: string,
  headers?: Record<string, string>,
  subdir = "",
): Promise<SavedMedia> {
  const dir = subdir ? path.join(MEDIA_DIR, subdir) : MEDIA_DIR;
  await fs.mkdir(dir, { recursive: true });
  await cleanOldMedia();
  const id = crypto.randomUUID();
  const dest = path.join(dir, id);
  if (looksLikeUrl(source)) {
    await downloadToFile(source, dest, headers);
    const stat = await fs.stat(dest);
    return { id, path: dest, size: stat.size };
  }
  // local path
  const stat = await fs.stat(source);
  if (!stat.isFile()) {
    throw new Error("Media path is not a file");
  }
  if (stat.size > MAX_BYTES) {
    throw new Error("Media exceeds 5MB limit");
  }
  await fs.copyFile(source, dest);
  return { id, path: dest, size: stat.size };
}

export async function saveMediaBuffer(
  buffer: Buffer,
  contentType?: string,
  subdir = "inbound",
): Promise<SavedMedia> {
  if (buffer.byteLength > MAX_BYTES) {
    throw new Error("Media exceeds 5MB limit");
  }
  const dir = path.join(MEDIA_DIR, subdir);
  await fs.mkdir(dir, { recursive: true });
  const id = crypto.randomUUID();
  const dest = path.join(dir, id);
  await fs.writeFile(dest, buffer);
  return { id, path: dest, size: buffer.byteLength, contentType };
}

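A short usage sketch for the store helpers, assuming they are imported from `./store.js` as in the tests; the URL and buffer contents are placeholders:

import { cleanOldMedia, saveMediaBuffer, saveMediaSource } from "./store.js"; // path assumed

// Stage a remote file (downloaded into the media dir, capped at 5MB).
const fromUrl = await saveMediaSource("https://example.com/pic.jpg");

// Stage an in-memory buffer under the default "inbound" subdirectory.
const fromBuffer = await saveMediaBuffer(Buffer.from("hello"), "text/plain");

console.log(fromUrl.id, fromUrl.size);
console.log(fromBuffer.path, fromBuffer.contentType);

// Entries older than the TTL are pruned on each saveMediaSource call, or explicitly:
await cleanOldMedia();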