fix(media): preserve alpha channel for transparent PNGs (#1473)

Authored by Robby on 2026-01-23 07:50:50 +00:00
Committed by Peter Steinberger
Parent 78071f8ec4
Commit e634791585

2 changed files with 139 additions and 1 deletion


@@ -339,6 +339,49 @@ export async function convertHeicToJpeg(buffer: Buffer): Promise<Buffer> {
  return await sharp(buffer).jpeg({ quality: 90, mozjpeg: true }).toBuffer();
}

/**
 * Checks if an image has an alpha channel (transparency).
 * Returns true if the image has alpha, false otherwise.
 */
export async function hasAlphaChannel(buffer: Buffer): Promise<boolean> {
  try {
    const sharp = await loadSharp();
    const meta = await sharp(buffer).metadata();
    // Check whether the image has an alpha channel.
    // PNG color types with alpha: 4 (grayscale+alpha), 6 (RGBA).
    // Sharp reports this via 'hasAlpha' or via 'channels' (4 = RGBA).
    return meta.hasAlpha === true || meta.channels === 4;
  } catch {
    return false;
  }
}
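For orientation, a minimal usage sketch of this helper from ESM code; the file path is hypothetical and not part of this commit:

import { readFile } from "node:fs/promises";

// Hypothetical input path; any image sharp can decode works here.
const buffer = await readFile("logo.png");
if (await hasAlphaChannel(buffer)) {
  console.log("image has transparency; keep it as PNG");
} else {
  console.log("no alpha channel; safe to re-encode as JPEG");
}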

/**
 * Resizes an image to PNG format, preserving the alpha channel (transparency).
 * Falls back to sharp only (no sips fallback for PNG with alpha).
 */
export async function resizeToPng(params: {
  buffer: Buffer;
  maxSide: number;
  compressionLevel?: number;
  withoutEnlargement?: boolean;
}): Promise<Buffer> {
  const sharp = await loadSharp();
  // Compression level 6 is a good balance (0 = fastest, 9 = smallest).
  const compressionLevel = params.compressionLevel ?? 6;
  return await sharp(params.buffer)
    .rotate() // Auto-rotate based on EXIF orientation if present
    .resize({
      width: params.maxSide,
      height: params.maxSide,
      fit: "inside",
      withoutEnlargement: params.withoutEnlargement !== false,
    })
    .png({ compressionLevel })
    .toBuffer();
}
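A hedged usage sketch, reusing the buffer from the sketch above; the 1024px bound and level 9 are example values, not defaults from this commit:

// Downscale a transparent PNG so its longer side is at most 1024px.
const resized = await resizeToPng({
  buffer,
  maxSide: 1024,
  compressionLevel: 9, // smallest output, slowest encode
});
console.log(`resized PNG: ${resized.length} bytes`);

Because fit: "inside" preserves aspect ratio and withoutEnlargement defaults to true, an image already smaller than maxSide passes through at its original dimensions.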

/**
 * Internal sips-only EXIF normalization (no sharp fallback).
 * Used by resizeToJpeg to normalize before sips resize.

@@ -6,7 +6,12 @@ import { logVerbose, shouldLogVerbose } from "../globals.js";
import { type MediaKind, maxBytesForKind, mediaKindFromMime } from "../media/constants.js";
import { resolveUserPath } from "../utils.js";
import { fetchRemoteMedia } from "../media/fetch.js";
import { convertHeicToJpeg, resizeToJpeg } from "../media/image-ops.js";
import {
  convertHeicToJpeg,
  hasAlphaChannel,
  resizeToJpeg,
  resizeToPng,
} from "../media/image-ops.js";
import { detectMime, extensionForMime } from "../media/mime.js";
export type WebMediaResult = {
@@ -61,6 +66,37 @@ async function loadWebMediaInternal(
    meta?: { contentType?: string; fileName?: string },
  ) => {
    const originalSize = buffer.length;
    // Check if this is a PNG with an alpha channel, and preserve transparency.
    const isPng =
      meta?.contentType === "image/png" || meta?.fileName?.toLowerCase().endsWith(".png");
    const hasAlpha = isPng && (await hasAlphaChannel(buffer));
    if (hasAlpha) {
      // Use PNG optimization to preserve transparency.
      const optimized = await optimizeImageToPng(buffer, cap);
      if (optimized.optimizedSize < originalSize && shouldLogVerbose()) {
        logVerbose(
          `Optimized PNG (preserving alpha) from ${(originalSize / (1024 * 1024)).toFixed(2)}MB to ${(optimized.optimizedSize / (1024 * 1024)).toFixed(2)}MB (side≤${optimized.resizeSide}px)`,
        );
      }
      if (optimized.buffer.length > cap) {
        throw new Error(
          `Media could not be reduced below ${(cap / (1024 * 1024)).toFixed(0)}MB (got ${(
            optimized.buffer.length /
            (1024 * 1024)
          ).toFixed(2)}MB)`,
        );
      }
      return {
        buffer: optimized.buffer,
        contentType: "image/png",
        kind: "image" as const,
        fileName: meta?.fileName,
      };
    }
    // Default: optimize to JPEG (no alpha channel).
    const optimized = await optimizeImageToJpeg(buffer, cap, meta);
    const fileName = meta && isHeicSource(meta) ? toJpegFileName(meta.fileName) : meta?.fileName;
    if (optimized.optimizedSize < originalSize && shouldLogVerbose()) {
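The branch above gates the pixel-level probe behind a cheap container check: only inputs that claim to be PNG by MIME type or file extension are inspected for alpha. Condensed into a standalone predicate (the name shouldPreserveAlpha is hypothetical, written only to illustrate the same logic):

async function shouldPreserveAlpha(
  buffer: Buffer,
  meta?: { contentType?: string; fileName?: string },
): Promise<boolean> {
  const isPng =
    meta?.contentType === "image/png" ||
    meta?.fileName?.toLowerCase().endsWith(".png") === true;
  // Non-PNG inputs (JPEG, HEIC) skip the metadata probe entirely
  // and fall through to the JPEG pipeline.
  return isPng && (await hasAlphaChannel(buffer));
}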
@@ -246,3 +282,62 @@ export async function optimizeImageToJpeg(
  throw new Error("Failed to optimize image");
}

export async function optimizeImageToPng(
  buffer: Buffer,
  maxBytes: number,
): Promise<{
  buffer: Buffer;
  optimizedSize: number;
  resizeSide: number;
  compressionLevel: number;
}> {
  // Try a grid of sizes/compression levels until the output fits the limit.
  // PNG uses compression levels 0-9 (higher = smaller but slower).
  const sides = [2048, 1536, 1280, 1024, 800];
  const compressionLevels = [6, 7, 8, 9];
  let smallest: {
    buffer: Buffer;
    size: number;
    resizeSide: number;
    compressionLevel: number;
  } | null = null;
  for (const side of sides) {
    for (const compressionLevel of compressionLevels) {
      try {
        const out = await resizeToPng({
          buffer,
          maxSide: side,
          compressionLevel,
          withoutEnlargement: true,
        });
        const size = out.length;
        if (!smallest || size < smallest.size) {
          smallest = { buffer: out, size, resizeSide: side, compressionLevel };
        }
        if (size <= maxBytes) {
          return {
            buffer: out,
            optimizedSize: size,
            resizeSide: side,
            compressionLevel,
          };
        }
      } catch {
        // Continue trying other size/compression combinations.
      }
    }
  }
  if (smallest) {
    return {
      buffer: smallest.buffer,
      optimizedSize: smallest.size,
      resizeSide: smallest.resizeSide,
      compressionLevel: smallest.compressionLevel,
    };
  }
  throw new Error("Failed to optimize PNG image");
}
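A sketch of driving the grid search directly; the 5MB cap and the buffer are example values, not from this commit:

const cap = 5 * 1024 * 1024; // hypothetical 5MB budget
const result = await optimizeImageToPng(buffer, cap);
console.log(
  `PNG is ${(result.optimizedSize / (1024 * 1024)).toFixed(2)}MB ` +
    `at side<=${result.resizeSide}px, zlib level ${result.compressionLevel}`,
);

Since sides is the outer loop, the search keeps the largest dimensions that fit the budget, trying progressively higher compression levels first; only when even level 9 overshoots does it step down to the next smaller size.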