feat(server): JXL previews from DNG 1.7+ (#17861)

* feat(server): JXL previews from RAW

* refactor(server): use var name assumedExtractedFormat for clarity

* test(server): fix existing media.extract() returning JPEG

* chore(openapi): regen

* style(server): lint

* fix(server): ignore undefined decode orientation

* fix(server): correct orientation assignment in media decode options

* test(server): unit tests of JXL-encoded DNG

* refactor(server): return buffer and format from mediaRepository.extract()

* chore(open-api): regen

* refactor

---------

Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
Eli Gao 2025-04-29 06:18:46 +08:00 committed by GitHub
parent f621f8ef2c
commit 48bcbee6ed
7 changed files with 198 additions and 148 deletions

View File

@@ -90,7 +90,7 @@ export class StorageCore {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, person.ownerId, `${person.id}.jpeg`);
}
static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: ImageFormat) {
static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: 'jpeg' | 'webp') {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, asset.ownerId, `${asset.id}-${type}.${format}`);
}
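
As a point of reference for the paths asserted in the spec changes further down, generated images are nested under the owner id and the first characters of the asset id. A minimal illustration (the entity literal, the import paths, and the `AssetPathType.PREVIEW` value are assumptions, not part of this commit):

```typescript
// Illustration only — not part of the commit.
import { StorageCore } from 'src/cores/storage.core'; // path assumed
import { AssetPathType } from 'src/enum'; // path assumed

const previewPath = StorageCore.getImagePath({ id: 'asset-id', ownerId: 'user-id' }, AssetPathType.PREVIEW, 'jpeg');
// expected shape, matching the spec fixtures below:
// 'upload/thumbs/user-id/as/se/asset-id-preview.jpeg'
```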

View File

@@ -337,6 +337,11 @@ export enum ImageFormat {
WEBP = 'webp',
}
export enum RawExtractedFormat {
JPEG = 'jpeg',
JXL = 'jxl',
}
export enum LogLevel {
VERBOSE = 'verbose',
DEBUG = 'debug',

View File

@@ -7,7 +7,7 @@ import { Writable } from 'node:stream';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Exif } from 'src/database';
import { Colorspace, LogLevel } from 'src/enum';
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import {
DecodeToBufferOptions,
@@ -36,34 +36,51 @@ type ProgressEvent = {
percent?: number;
};
export type ExtractResult = {
buffer: Buffer;
format: RawExtractedFormat;
};
@Injectable()
export class MediaRepository {
constructor(private logger: LoggingRepository) {
this.logger.setContext(MediaRepository.name);
}
async extract(input: string, output: string): Promise<boolean> {
/**
 * Extracts an embedded preview image from a RAW file, trying the
 * JpgFromRaw2, JpgFromRaw, PreviewJXL and PreviewImage tags in that order.
 * @param input file path to the input image
 * @returns the extracted image buffer and its format, or null if no preview could be extracted
 */
async extract(input: string): Promise<ExtractResult | null> {
try {
// remove existing output file if it exists
// as exiftool-vendored does not support overwriting via "-w!" flag
// and throws "1 files could not be read" error when the output file exists
await fs.unlink(output).catch(() => null);
await exiftool.extractBinaryTag('JpgFromRaw2', input, output);
} catch {
try {
this.logger.debug('Extracting JPEG from RAW image:', input);
await exiftool.extractJpgFromRaw(input, output);
} catch (error: any) {
this.logger.debug('Could not extract JPEG from image, trying preview', error.message);
try {
await exiftool.extractPreview(input, output);
} catch (error: any) {
this.logger.debug('Could not extract preview from image', error.message);
return false;
}
}
const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw2', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract JpgFromRaw2 buffer from image, trying JPEG from RAW next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract JPEG buffer from image, trying PreviewJXL next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('PreviewJXL', input);
return { buffer, format: RawExtractedFormat.JXL };
} catch (error: any) {
this.logger.debug('Could not extract PreviewJXL buffer from image, trying PreviewImage next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('PreviewImage', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract preview buffer from image', error.message);
return null;
}
return true;
}
async writeExif(tags: Partial<Exif>, output: string): Promise<boolean> {
@@ -104,7 +121,7 @@ export class MediaRepository {
}
}
decodeImage(input: string, options: DecodeToBufferOptions) {
decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
return this.getImageDecodingPipeline(input, options).raw().toBuffer({ resolveWithObject: true });
}
@@ -235,7 +252,7 @@ export class MediaRepository {
});
}
async getImageDimensions(input: string): Promise<ImageDimensions> {
async getImageDimensions(input: string | Buffer): Promise<ImageDimensions> {
const { width = 0, height = 0 } = await sharp(input).metadata();
return { width, height };
}
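
For context, a minimal sketch of how a caller can consume the new buffer-based contract: the returned buffer feeds straight into `decodeImage`, and the format tells the caller whether the preview is already web-friendly. The repository, enum, and option names come from the diff above; the helper function itself and the import paths are assumptions for illustration.

```typescript
// Sketch only — not part of the commit; import paths assumed.
import { Colorspace, RawExtractedFormat } from 'src/enum';
import { MediaRepository } from 'src/repositories/media.repository';

async function decodeRawPreview(media: MediaRepository, rawPath: string) {
  // Try the embedded preview first (JpgFromRaw2 -> JpgFromRaw -> PreviewJXL -> PreviewImage);
  // fall back to decoding the RAW file itself when nothing could be extracted.
  const extracted = await media.extract(rawPath);
  const source = extracted ? extracted.buffer : rawPath;

  // A JPEG preview can later be written out verbatim; a JXL preview is assumed to
  // still need re-encoding for browsers without JPEG XL support.
  const reusableAsIs = extracted?.format === RawExtractedFormat.JPEG;

  const { data, info } = await media.decodeImage(source, {
    colorspace: Colorspace.SRGB,
    processInvalidImages: false,
    size: 1440,
  });
  return { data, info, reusableAsIs };
}
```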

View File

@@ -1,7 +1,6 @@
import { OutputInfo } from 'sharp';
import { SystemConfig } from 'src/config';
import { Exif } from 'src/database';
import { AssetMediaSize } from 'src/dtos/asset-media.dto';
import {
AssetFileType,
AssetPathType,
@@ -11,6 +10,7 @@ import {
ImageFormat,
JobName,
JobStatus,
RawExtractedFormat,
TranscodeHWAccel,
TranscodePolicy,
VideoCodec,
@@ -231,17 +231,19 @@ describe(MediaService.name, () => {
describe('handleGenerateThumbnails', () => {
let rawBuffer: Buffer;
let fullsizeBuffer: Buffer;
let extractedBuffer: Buffer;
let rawInfo: RawImageInfo;
beforeEach(() => {
fullsizeBuffer = Buffer.from('embedded image data');
rawBuffer = Buffer.from('image data');
rawBuffer = Buffer.from('raw image data');
extractedBuffer = Buffer.from('embedded image file');
rawInfo = { width: 100, height: 100, channels: 3 };
mocks.media.decodeImage.mockImplementation((path) =>
mocks.media.decodeImage.mockImplementation((input) =>
Promise.resolve(
path.includes(AssetMediaSize.FULLSIZE)
? { data: fullsizeBuffer, info: rawInfo as OutputInfo }
: { data: rawBuffer, info: rawInfo as OutputInfo },
typeof input === 'string'
? { data: rawBuffer, info: rawInfo as OutputInfo } // string implies original file
: { data: fullsizeBuffer, info: rawInfo as OutputInfo }, // buffer implies embedded image extracted
),
);
});
@@ -584,16 +586,15 @@ describe(MediaService.name, () => {
});
it('should extract embedded image if enabled and available', async () => {
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const convertedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(convertedPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
@@ -601,16 +602,13 @@ describe(MediaService.name, () => {
});
it('should resize original image if embedded image is too small', async () => {
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 1000, height: 1000 });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const extractedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(extractedPath).toMatch(/-fullsize\.jpeg$/);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.imageDng.originalPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
@@ -665,38 +663,40 @@ describe(MediaService.name, () => {
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(2);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: true }),
expect.objectContaining({ processInvalidImages: false }),
'upload/thumbs/user-id/as/se/asset-id-preview.jpeg',
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: true }),
expect.objectContaining({ processInvalidImages: false }),
'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp',
);
expect(mocks.media.generateThumbhash).toHaveBeenCalledOnce();
expect(mocks.media.generateThumbhash).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: true }),
expect.objectContaining({ processInvalidImages: false }),
);
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
vi.unstubAllEnvs();
});
it('should generate full-size preview using embedded JPEG from RAW images when extractEmbedded is true', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: true } });
mocks.media.extract.mockResolvedValue(true);
it('should extract full-size JPEG preview from RAW', async () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const extractedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440, // capped to preview size as fullsize conversion is skipped
});
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(2);
@@ -714,9 +714,51 @@ describe(MediaService.name, () => {
);
});
it('should convert full-size WEBP preview from JXL preview of RAW', async () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JXL });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
colorspace: Colorspace.P3,
processInvalidImages: false,
});
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
fullsizeBuffer,
{
colorspace: Colorspace.P3,
format: ImageFormat.WEBP,
quality: 80,
processInvalidImages: false,
raw: rawInfo,
},
'upload/thumbs/user-id/as/se/asset-id-fullsize.webp',
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
fullsizeBuffer,
{
colorspace: Colorspace.P3,
format: ImageFormat.JPEG,
size: 1440,
quality: 80,
processInvalidImages: false,
raw: rawInfo,
},
'upload/thumbs/user-id/as/se/asset-id-preview.jpeg',
);
});
it('should generate full-size preview directly from RAW images when extractEmbedded is false', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: false } });
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
@@ -756,7 +798,7 @@ describe(MediaService.name, () => {
it('should generate full-size preview from non-web-friendly images', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF images taken by cameras are not web-friendly and have only limited support in Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
@@ -785,7 +827,7 @@ describe(MediaService.name, () => {
it('should skip generating full-size preview for web-friendly images', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image);
@@ -810,7 +852,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP, quality: 90 } },
});
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF images taken by cameras are not web-friendly and have only limited support in Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
@@ -2481,48 +2523,39 @@ describe(MediaService.name, () => {
describe('isSRGB', () => {
it('should return true for srgb colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB' } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ colorspace: 'sRGB' } as Exif)).toEqual(true);
});
it('should return true for srgb profile description', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB v1.31' } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ profileDescription: 'sRGB v1.31' } as Exif)).toEqual(true);
});
it('should return true for 8-bit image with no colorspace metadata', () => {
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 8 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ bitsPerSample: 8 } as Exif)).toEqual(true);
});
it('should return true for image with no colorspace or bit depth metadata', () => {
const asset = { ...assetStub.image, exifInfo: {} as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({} as Exif)).toEqual(true);
});
it('should return false for non-srgb colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'Adobe RGB' } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
expect(sut.isSRGB({ colorspace: 'Adobe RGB' } as Exif)).toEqual(false);
});
it('should return false for non-srgb profile description', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sP3C' } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
expect(sut.isSRGB({ profileDescription: 'sP3C' } as Exif)).toEqual(false);
});
it('should return false for 16-bit image with no colorspace metadata', () => {
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
expect(sut.isSRGB({ bitsPerSample: 16 } as Exif)).toEqual(false);
});
it('should return true for 16-bit image with sRGB colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB', bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ colorspace: 'sRGB', bitsPerSample: 16 } as Exif)).toEqual(true);
});
it('should return true for 16-bit image with sRGB profile', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB', bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ profileDescription: 'sRGB', bitsPerSample: 16 } as Exif)).toEqual(true);
});
});
});

View File

@@ -10,11 +10,11 @@ import {
AssetType,
AudioCodec,
Colorspace,
ImageFormat,
JobName,
JobStatus,
LogLevel,
QueueName,
RawExtractedFormat,
StorageFolder,
TranscodeHWAccel,
TranscodePolicy,
@@ -27,7 +27,6 @@ import { BaseService } from 'src/services/base.service';
import {
AudioStreamInfo,
DecodeToBufferOptions,
GenerateThumbnailOptions,
JobItem,
JobOf,
VideoFormat,
@@ -213,6 +212,29 @@ export class MediaService extends BaseService {
return JobStatus.SUCCESS;
}
private async extractImage(originalPath: string, minSize: number) {
let extracted = await this.mediaRepository.extract(originalPath);
if (extracted && !(await this.shouldUseExtractedImage(extracted.buffer, minSize))) {
extracted = null;
}
return extracted;
}
private async decodeImage(thumbSource: string | Buffer, exifInfo: Exif, targetSize?: number) {
const { image } = await this.getConfig({ withCache: true });
const colorspace = this.isSRGB(exifInfo) ? Colorspace.SRGB : image.colorspace;
const decodeOptions: DecodeToBufferOptions = {
colorspace,
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
size: targetSize,
orientation: exifInfo.orientation ? Number(exifInfo.orientation) : undefined,
};
const { info, data } = await this.mediaRepository.decodeImage(thumbSource, decodeOptions);
return { info, data, colorspace };
}
private async generateImageThumbnails(asset: {
id: string;
ownerId: string;
@@ -225,68 +247,48 @@ export class MediaService extends BaseService {
const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format);
this.storageCore.ensureFolders(previewPath);
const processInvalidImages = process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true';
const colorspace = this.isSRGB(asset) ? Colorspace.SRGB : image.colorspace;
// Handle embedded preview extraction for RAW files
const extractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
const extracted = extractEmbedded ? await this.extractImage(asset.originalPath, image.preview.size) : null;
const generateFullsize = image.fullsize.enabled && !mimeTypes.isWebSupportedImage(asset.originalPath);
const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(`.${extracted.format}`));
// prevents this extra "enabled" from leaking into fullsizeOptions later
const { enabled: imageFullsizeEnabled, ...imageFullsizeConfig } = image.fullsize;
const { info, data, colorspace } = await this.decodeImage(
extracted ? extracted.buffer : asset.originalPath,
asset.exifInfo,
convertFullsize ? undefined : image.preview.size,
);
const shouldConvertFullsize = imageFullsizeEnabled && !mimeTypes.isWebSupportedImage(asset.originalFileName);
const shouldExtractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
const decodeOptions: DecodeToBufferOptions = { colorspace, processInvalidImages, size: image.preview.size };
let useExtracted = false;
let decodeInputPath: string = asset.originalPath;
// Converted or extracted image from non-web-supported formats (e.g. RAW)
let fullsizePath: string | undefined;
if (shouldConvertFullsize) {
// unset size to decode fullsize image
decodeOptions.size = undefined;
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, image.fullsize.format);
}
if (shouldExtractEmbedded) {
// For RAW files, try extracting embedded preview first
// Assume extracted image from RAW always in JPEG format, as implied from the `jpgFromRaw` tag name
const extractedPath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, ImageFormat.JPEG);
const didExtract = await this.mediaRepository.extract(asset.originalPath, extractedPath);
useExtracted = didExtract && (await this.shouldUseExtractedImage(extractedPath, image.preview.size));
if (useExtracted) {
if (shouldConvertFullsize) {
// skip re-encoding and directly use extracted as fullsize preview
// as usually the extracted image is already heavily compressed, no point doing lossy conversion again
fullsizePath = extractedPath;
}
// use this as origin of preview and thumbnail
decodeInputPath = extractedPath;
if (asset.exifInfo) {
// write essential orientation and colorspace EXIF for correct fullsize preview and subsequent processing
const exif = { orientation: asset.exifInfo.orientation, colorspace: asset.exifInfo.colorspace };
await this.mediaRepository.writeExif(exif, extractedPath);
}
}
}
const { info, data } = await this.mediaRepository.decodeImage(decodeInputPath, decodeOptions);
const thumbnailOptions = { colorspace, processInvalidImages, raw: info };
// generate final images
const thumbnailOptions = { colorspace, processInvalidImages: false, raw: info };
const promises = [
this.mediaRepository.generateThumbhash(data, thumbnailOptions),
this.mediaRepository.generateThumbnail(data, { ...image.thumbnail, ...thumbnailOptions }, thumbnailPath),
this.mediaRepository.generateThumbnail(data, { ...image.preview, ...thumbnailOptions }, previewPath),
];
// did not extract a usable image from RAW
if (fullsizePath && !useExtracted) {
const fullsizeOptions: GenerateThumbnailOptions = {
...imageFullsizeConfig,
...thumbnailOptions,
size: undefined,
};
let fullsizePath: string | undefined;
if (convertFullsize) {
// convert a new fullsize image from the same source as the thumbnail
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, image.fullsize.format);
const fullsizeOptions = { format: image.fullsize.format, quality: image.fullsize.quality, ...thumbnailOptions };
promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath));
} else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.JPEG) {
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, extracted.format);
this.storageCore.ensureFolders(fullsizePath);
// Write the buffer to disk with essential EXIF data
await this.storageRepository.createOrOverwriteFile(fullsizePath, extracted.buffer);
await this.mediaRepository.writeExif(
{
orientation: asset.exifInfo.orientation,
colorspace: asset.exifInfo.colorspace,
},
fullsizePath,
);
}
const outputs = await Promise.all(promises);
return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
@@ -521,8 +523,7 @@ export class MediaService extends BaseService {
return name !== VideoContainer.MP4 && !ffmpegConfig.acceptedContainers.includes(name);
}
isSRGB(asset: { exifInfo: Exif }): boolean {
const { colorspace, profileDescription, bitsPerSample } = asset.exifInfo;
isSRGB({ colorspace, profileDescription, bitsPerSample }: Exif): boolean {
if (colorspace || profileDescription) {
return [colorspace, profileDescription].some((s) => s?.toLowerCase().includes('srgb'));
} else if (bitsPerSample) {
@@ -550,10 +551,9 @@ export class MediaService extends BaseService {
}
}
private async shouldUseExtractedImage(extractedPath: string, targetSize: number) {
const { width, height } = await this.mediaRepository.getImageDimensions(extractedPath);
private async shouldUseExtractedImage(extractedPathOrBuffer: string | Buffer, targetSize: number) {
const { width, height } = await this.mediaRepository.getImageDimensions(extractedPathOrBuffer);
const extractedSize = Math.min(width, height);
return extractedSize >= targetSize;
}
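
Because removed and added lines are interleaved above, the net behaviour of the new `generateImageThumbnails` is easier to follow condensed. The sketch below paraphrases only the new decision flow; the standalone-function shape, the local type aliases, and the import paths are invented here for illustration, and option plumbing, EXIF writing, and error handling are omitted.

```typescript
// Paraphrase of the new flow — a sketch, not the literal implementation.
import { RawExtractedFormat } from 'src/enum';
import { mimeTypes } from 'src/utils/mime-types'; // path assumed

type ExtractedPreview = { buffer: Buffer; format: RawExtractedFormat } | null;
type ImageConfig = { preview: { size: number }; fullsize: { enabled: boolean } };

function planThumbnailSources(
  originalPath: string,
  image: ImageConfig,
  extracted: ExtractedPreview, // result of extractImage(), already size-checked
) {
  // A full-size copy is only wanted for originals that browsers cannot display (RAW, HEIF, ...).
  const generateFullsize = image.fullsize.enabled && !mimeTypes.isWebSupportedImage(originalPath);
  // ...and only re-encoded when the extracted preview (if any) is not itself web-friendly:
  // a JXL preview triggers a conversion, while an extracted JPEG is reused verbatim.
  const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(`.${extracted.format}`));

  return {
    // Preview and thumbnail always decode from the extracted buffer when one is available.
    decodeSource: extracted ? extracted.buffer : originalPath,
    // Decode at full resolution only when a full-size re-encode follows; otherwise cap at preview size.
    decodeSize: convertFullsize ? undefined : image.preview.size,
    convertFullsize,
    writeExtractedAsFullsize: generateFullsize && !convertFullsize && extracted?.format === RawExtractedFormat.JPEG,
  };
}
```

With this split, the new JXL spec above exercises the convertFullsize branch (a third generateThumbnail call for the WEBP full-size file), while the "extract full-size JPEG preview from RAW" spec exercises the verbatim-copy branch where decoding stays capped at the preview size.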

View File

@@ -34,45 +34,40 @@ const raw: Record<string, string[]> = {
'.x3f': ['image/x3f', 'image/x-sigma-x3f'],
};
/**
* list of supported image extensions from https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types excluding svg
* @TODO share with the client
* @see {@link web/src/lib/utils/asset-utils.ts#L329}
**/
const webSupportedImage = {
'.avif': ['image/avif'],
'.gif': ['image/gif'],
'.jpeg': ['image/jpeg'],
'.jpg': ['image/jpeg'],
'.png': ['image/png', 'image/apng'],
'.webp': ['image/webp'],
};
const image: Record<string, string[]> = {
...raw,
'.avif': ['image/avif'],
...webSupportedImage,
'.bmp': ['image/bmp'],
'.gif': ['image/gif'],
'.heic': ['image/heic'],
'.heif': ['image/heif'],
'.hif': ['image/hif'],
'.insp': ['image/jpeg'],
'.jp2': ['image/jp2'],
'.jpe': ['image/jpeg'],
'.jpeg': ['image/jpeg'],
'.jpg': ['image/jpeg'],
'.jxl': ['image/jxl'],
'.png': ['image/png'],
'.svg': ['image/svg'],
'.tif': ['image/tiff'],
'.tiff': ['image/tiff'],
'.webp': ['image/webp'],
};
const extensionOverrides: Record<string, string> = {
'image/jpeg': '.jpg',
};
/**
* list of supported image extensions from https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types excluding svg
* @TODO share with the client
* @see {@link web/src/lib/utils/asset-utils.ts#L329}
**/
const webSupportedImageMimeTypes = new Set([
'image/apng',
'image/avif',
'image/gif',
'image/jpeg',
'image/png',
'image/webp',
]);
const profileExtensions = new Set(['.avif', '.dng', '.heic', '.heif', '.jpeg', '.jpg', '.png', '.webp', '.svg']);
const profile: Record<string, string[]> = Object.fromEntries(
Object.entries(image).filter(([key]) => profileExtensions.has(key)),
@@ -123,7 +118,7 @@ export const mimeTypes = {
isAsset: (filename: string) => isType(filename, image) || isType(filename, video),
isImage: (filename: string) => isType(filename, image),
isWebSupportedImage: (filename: string) => webSupportedImageMimeTypes.has(lookup(filename)),
isWebSupportedImage: (filename: string) => isType(filename, webSupportedImage),
isProfile: (filename: string) => isType(filename, profile),
isSidecar: (filename: string) => isType(filename, sidecar),
isVideo: (filename: string) => isType(filename, video),
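
The practical effect of this refactor: the web-friendly check is now keyed on extensions rather than a MIME-type set, and JPEG XL is deliberately absent from `webSupportedImage`, which is what routes JXL previews through the conversion branch in the media service. A quick illustration (the calls mirror how the service uses the helper; the import path is assumed):

```typescript
import { mimeTypes } from 'src/utils/mime-types'; // path assumed

mimeTypes.isWebSupportedImage('IMG_0001.jpg'); // true  -> an extracted JPEG preview can be reused verbatim
mimeTypes.isWebSupportedImage('.jxl');         // false -> a JXL preview is re-encoded (e.g. to WEBP) for the full-size file
mimeTypes.isWebSupportedImage('photo.dng');    // false -> RAW originals always get derived previews
```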

View File

@@ -8,7 +8,7 @@ export const newMediaRepositoryMock = (): Mocked<RepositoryInterface<MediaReposi
writeExif: vitest.fn().mockImplementation(() => Promise.resolve()),
generateThumbhash: vitest.fn().mockResolvedValue(Buffer.from('')),
decodeImage: vitest.fn().mockResolvedValue({ data: Buffer.from(''), info: {} }),
extract: vitest.fn().mockResolvedValue(false),
extract: vitest.fn().mockResolvedValue(null),
probe: vitest.fn(),
transcode: vitest.fn(),
getImageDimensions: vitest.fn(),