fix(server): more robust person thumbnail generation (#17974)
* more robust person thumbnail generation
* clamp bounding boxes
* update sql
* no need to process invalid images after decoding
* cursed knowledge
* new line

Parent: d33ce13561
Commit: 2a80251dc3
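In short, the job now decodes the image first and clamps the face bounding box to the source dimensions before deriving the square crop, instead of trusting EXIF dimensions and raw detector coordinates. As an illustrative sketch only (not the service code itself, which appears in the diff below), here is the clamping idea applied to the negativeCoordinate fixture values used in the new tests; clampBox is a hypothetical helper, only clamp matches the added utility:

type Box = { x1: number; y1: number; x2: number; y2: number };

// Same behaviour as the clamp helper added to src/utils/misc.ts.
const clamp = (value: number, min: number, max: number) => Math.max(min, Math.min(max, value));

// Pull a detector box back inside the image before any crop math.
const clampBox = (box: Box, width: number, height: number): Box => ({
  x1: clamp(box.x1, 0, width),
  y1: clamp(box.y1, 0, height),
  x2: clamp(box.x2, 0, width),
  y2: clamp(box.y2, 0, height),
});

// Detectors can return coordinates outside the image, e.g. x1 = -176 on a 2162x1440 photo.
console.log(clampBox({ x1: -176, y1: -230, x2: 193, y2: 251 }, 2162, 1440));
// => { x1: 0, y1: 0, x2: 193, y2: 251 }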
@@ -2,6 +2,7 @@ import {
   mdiBug,
   mdiCalendarToday,
   mdiCrosshairsOff,
+  mdiCrop,
   mdiDatabase,
   mdiLeadPencil,
   mdiLockOff,
@@ -22,6 +23,18 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
 type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };

 const items: Item[] = [
+  {
+    icon: mdiCrop,
+    iconColor: 'tomato',
+    title: 'Image dimensions in EXIF metadata are cursed',
+    description:
+      'The dimensions in EXIF metadata can be different from the actual dimensions of the image, causing issues with cropping and resizing.',
+    link: {
+      url: 'https://github.com/immich-app/immich/pull/17974',
+      text: '#17974',
+    },
+    date: new Date(2025, 5, 5),
+  },
   {
     icon: mdiMicrosoftWindows,
     iconColor: '#357EC7',
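Note: the cursed-knowledge entry above is the motivation for the rest of this diff. Because the width and height recorded in EXIF can disagree with the pixels that actually get decoded (rotated, edited, or re-encoded files), any crop computed from EXIF values can land in the wrong place. A minimal sketch of the safer approach, assuming the sharp library the server already uses for image processing; the function name is illustrative:

import sharp from 'sharp';

// Decode first, then trust the decoder's reported dimensions for any crop math.
const getDecodedDimensions = async (path: string) => {
  const { info } = await sharp(path).raw().toBuffer({ resolveWithObject: true });
  return { width: info.width, height: info.height };
};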
@@ -143,23 +143,20 @@ select
   "asset_faces"."boundingBoxY2" as "y2",
   "asset_faces"."imageWidth" as "oldWidth",
   "asset_faces"."imageHeight" as "oldHeight",
-  "exif"."exifImageWidth",
-  "exif"."exifImageHeight",
   "assets"."type",
   "assets"."originalPath",
-  "asset_files"."path" as "previewPath"
+  "asset_files"."path" as "previewPath",
+  "exif"."orientation" as "exifOrientation"
 from
   "person"
   inner join "asset_faces" on "asset_faces"."id" = "person"."faceAssetId"
   inner join "assets" on "asset_faces"."assetId" = "assets"."id"
-  inner join "exif" on "exif"."assetId" = "assets"."id"
-  inner join "asset_files" on "asset_files"."assetId" = "assets"."id"
+  left join "exif" on "exif"."assetId" = "assets"."id"
+  left join "asset_files" on "asset_files"."assetId" = "assets"."id"
 where
   "person"."id" = $1
   and "asset_faces"."deletedAt" is null
   and "asset_files"."type" = $2
-  and "exif"."exifImageWidth" > $3
-  and "exif"."exifImageHeight" > $4

 -- PersonRepository.reassignFace
 update "asset_faces"
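Note: switching the exif and asset_files joins from inner to left means a person is still returned when its asset has no exif row or no generated preview yet, and the exifImageWidth/exifImageHeight filters disappear because dimensions now come from the decoded image. The caller therefore has to treat those columns as possibly missing. A hedged sketch of that defensive handling (the type and function here are illustrative, only the column aliases match the query):

// With left joins, previewPath and exifOrientation can come back null.
type ThumbnailJobData = {
  originalPath: string;
  previewPath: string | null;
  exifOrientation: string | null;
};

const pickThumbnailInput = (data: ThumbnailJobData, isVideo: boolean): string | null => {
  // Videos must be cropped from their generated preview; images can fall back to the original file.
  return isVideo ? data.previewPath : data.originalPath;
};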
@@ -264,8 +264,8 @@ export class PersonRepository {
      .selectFrom('person')
      .innerJoin('asset_faces', 'asset_faces.id', 'person.faceAssetId')
      .innerJoin('assets', 'asset_faces.assetId', 'assets.id')
-     .innerJoin('exif', 'exif.assetId', 'assets.id')
-     .innerJoin('asset_files', 'asset_files.assetId', 'assets.id')
+     .leftJoin('exif', 'exif.assetId', 'assets.id')
+     .leftJoin('asset_files', 'asset_files.assetId', 'assets.id')
      .select([
        'person.ownerId',
        'asset_faces.boundingBoxX1 as x1',
@@ -274,17 +274,14 @@ export class PersonRepository {
        'asset_faces.boundingBoxY2 as y2',
        'asset_faces.imageWidth as oldWidth',
        'asset_faces.imageHeight as oldHeight',
-       'exif.exifImageWidth',
-       'exif.exifImageHeight',
        'assets.type',
        'assets.originalPath',
        'asset_files.path as previewPath',
+       'exif.orientation as exifOrientation',
      ])
      .where('person.id', '=', id)
      .where('asset_faces.deletedAt', 'is', null)
      .where('asset_files.type', '=', AssetFileType.PREVIEW)
-     .where('exif.exifImageWidth', '>', 0)
-     .where('exif.exifImageHeight', '>', 0)
      .$narrowType<{ exifImageWidth: NotNull; exifImageHeight: NotNull }>()
      .executeTakeFirst();
  }
@@ -7,6 +7,7 @@ import {
   AssetType,
   AudioCodec,
   Colorspace,
+  ExifOrientation,
   ImageFormat,
   JobName,
   JobStatus,
@@ -20,7 +21,8 @@ import { JobCounts, RawImageInfo } from 'src/types';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { faceStub } from 'test/fixtures/face.stub';
 import { probeStub } from 'test/fixtures/media.stub';
-import { personStub } from 'test/fixtures/person.stub';
+import { personStub, personThumbnailStub } from 'test/fixtures/person.stub';
+import { systemConfigStub } from 'test/fixtures/system-config.stub';
 import { makeStream, newTestService, ServiceMocks } from 'test/utils';

 describe(MediaService.name, () => {
@@ -872,6 +874,323 @@ describe(MediaService.name, () => {
     });
   });

+  describe('handleGeneratePersonThumbnail', () => {
+    it('should skip if machine learning is disabled', async () => {
+      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(JobStatus.SKIPPED);
+      expect(mocks.asset.getByIds).not.toHaveBeenCalled();
+      expect(mocks.systemMetadata.get).toHaveBeenCalled();
+    });
+
+    it('should skip a person not found', async () => {
+      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
+      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
+    });
+
+    it('should skip a person without a face asset id', async () => {
+      mocks.person.getById.mockResolvedValue(personStub.noThumbnail);
+      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
+      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
+    });
+
+    it('should skip a person with face not found', async () => {
+      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
+      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
+    });
+
+    it('should generate a thumbnail', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 1000, height: 1000 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id);
+      expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/admin_id/pe/rs');
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailMiddle.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 238,
+            top: 163,
+            width: 274,
+            height: 274,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+      expect(mocks.person.update).toHaveBeenCalledWith({
+        id: 'person-1',
+        thumbnailPath: 'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      });
+    });
+
+    it('should generate a thumbnail without going negative', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailStart);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailStart.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 0,
+            top: 85,
+            width: 510,
+            height: 510,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should generate a thumbnail without overflowing', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailEnd);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 1000, height: 1000 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailEnd.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 591,
+            top: 591,
+            width: 408,
+            height: 408,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should handle negative coordinates', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.negativeCoordinate);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 4624, height: 3080 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.negativeCoordinate.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 0,
+            top: 62,
+            width: 412,
+            height: 412,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should handle overflowing coordinate', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.overflowingCoordinate);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 4624, height: 3080 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.overflowingCoordinate.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 4485,
+            top: 94,
+            width: 138,
+            height: 138,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should use embedded preview if enabled and raw image', async () => {
+      mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const extracted = Buffer.from('');
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.JPEG });
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+      mocks.media.getImageDimensions.mockResolvedValue(info);
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(extracted, {
+        colorspace: Colorspace.P3,
+        orientation: ExifOrientation.Horizontal,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            height: 844,
+            left: 388,
+            top: 730,
+            width: 844,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should not use embedded preview if enabled and not raw image', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).not.toHaveBeenCalled();
+      expect(mocks.media.generateThumbnail).toHaveBeenCalled();
+    });
+
+    it('should not use embedded preview if enabled and raw image if not exists', async () => {
+      mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalled();
+    });
+
+    it('should not use embedded preview if enabled and raw image if low resolution', async () => {
+      mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const extracted = Buffer.from('');
+      const data = Buffer.from('');
+      const info = { width: 1000, height: 1000 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+      mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.JPEG });
+      mocks.media.getImageDimensions.mockResolvedValue(info);
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalled();
+    });
+  });
+
   describe('handleQueueVideoConversion', () => {
     it('should queue all video assets', async () => {
       mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
@@ -1,5 +1,5 @@
 import { Injectable } from '@nestjs/common';
-import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
+import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core';
 import { Exif } from 'src/database';
 import { OnEvent, OnJob } from 'src/decorators';
@@ -11,6 +11,7 @@ import {
   AssetVisibility,
   AudioCodec,
   Colorspace,
+  ImageFormat,
   JobName,
   JobStatus,
   LogLevel,
@@ -24,10 +25,13 @@ import {
   VideoContainer,
 } from 'src/enum';
 import { UpsertFileOptions } from 'src/repositories/asset.repository';
+import { BoundingBox } from 'src/repositories/machine-learning.repository';
 import { BaseService } from 'src/services/base.service';
 import {
   AudioStreamInfo,
+  CropOptions,
   DecodeToBufferOptions,
+  ImageDimensions,
   JobItem,
   JobOf,
   VideoFormat,
@@ -37,6 +41,7 @@ import {
 import { getAssetFiles } from 'src/utils/asset.util';
 import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
 import { mimeTypes } from 'src/utils/mime-types';
+import { clamp, isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';

 @Injectable()
 export class MediaService extends BaseService {
@@ -308,6 +313,100 @@ export class MediaService extends BaseService {
     return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
   }

+  @OnJob({ name: JobName.GENERATE_PERSON_THUMBNAIL, queue: QueueName.THUMBNAIL_GENERATION })
+  async handleGeneratePersonThumbnail({ id }: JobOf<JobName.GENERATE_PERSON_THUMBNAIL>): Promise<JobStatus> {
+    const { machineLearning, metadata, image } = await this.getConfig({ withCache: true });
+    if (!isFacialRecognitionEnabled(machineLearning) && !isFaceImportEnabled(metadata)) {
+      return JobStatus.SKIPPED;
+    }
+
+    const data = await this.personRepository.getDataForThumbnailGenerationJob(id);
+    if (!data) {
+      this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
+      return JobStatus.FAILED;
+    }
+
+    const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight, exifOrientation, previewPath, originalPath } = data;
+    let inputImage: string | Buffer;
+    if (mimeTypes.isVideo(originalPath)) {
+      if (!previewPath) {
+        this.logger.error(`Could not generate person thumbnail for video ${id}: missing preview path`);
+        return JobStatus.FAILED;
+      }
+      inputImage = previewPath;
+    }
+
+    if (image.extractEmbedded && mimeTypes.isRaw(originalPath)) {
+      const extracted = await this.extractImage(originalPath, image.preview.size);
+      inputImage = extracted ? extracted.buffer : originalPath;
+    } else {
+      inputImage = originalPath;
+    }
+
+    const { data: decodedImage, info } = await this.mediaRepository.decodeImage(inputImage, {
+      colorspace: image.colorspace,
+      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
+      // if this is an extracted image, it may not have orientation metadata
+      orientation: Buffer.isBuffer(inputImage) && exifOrientation ? Number(exifOrientation) : undefined,
+    });
+
+    const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
+    this.storageCore.ensureFolders(thumbnailPath);
+
+    const thumbnailOptions = {
+      colorspace: image.colorspace,
+      format: ImageFormat.JPEG,
+      raw: info,
+      quality: image.thumbnail.quality,
+      crop: this.getCrop(
+        { old: { width: oldWidth, height: oldHeight }, new: { width: info.width, height: info.height } },
+        { x1, y1, x2, y2 },
+      ),
+      processInvalidImages: false,
+      size: FACE_THUMBNAIL_SIZE,
+    };
+
+    await this.mediaRepository.generateThumbnail(decodedImage, thumbnailOptions, thumbnailPath);
+    await this.personRepository.update({ id, thumbnailPath });
+
+    return JobStatus.SUCCESS;
+  }
+
+  private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
+    // face bounding boxes can spill outside the image dimensions
+    const clampedX1 = clamp(x1, 0, dims.old.width);
+    const clampedY1 = clamp(y1, 0, dims.old.height);
+    const clampedX2 = clamp(x2, 0, dims.old.width);
+    const clampedY2 = clamp(y2, 0, dims.old.height);
+
+    const widthScale = dims.new.width / dims.old.width;
+    const heightScale = dims.new.height / dims.old.height;
+
+    const halfWidth = (widthScale * (clampedX2 - clampedX1)) / 2;
+    const halfHeight = (heightScale * (clampedY2 - clampedY1)) / 2;
+
+    const middleX = Math.round(widthScale * clampedX1 + halfWidth);
+    const middleY = Math.round(heightScale * clampedY1 + halfHeight);
+
+    // zoom out 10%
+    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
+
+    // get the longest distance from the center of the image without overflowing
+    const newHalfSize = Math.min(
+      middleX - Math.max(0, middleX - targetHalfSize),
+      middleY - Math.max(0, middleY - targetHalfSize),
+      Math.min(dims.new.width - 1, middleX + targetHalfSize) - middleX,
+      Math.min(dims.new.height - 1, middleY + targetHalfSize) - middleY,
+    );
+
+    return {
+      left: middleX - newHalfSize,
+      top: middleY - newHalfSize,
+      width: newHalfSize * 2,
+      height: newHalfSize * 2,
+    };
+  }
+
   private async generateVideoThumbnails(asset: ThumbnailPathEntity & { originalPath: string }) {
     const { image, ffmpeg } = await this.getConfig({ withCache: true });
     const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format);
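Note: to see how the clamped crop math plays out, here is a worked trace of the getCrop logic above for the negativeCoordinate fixture used in the tests (box from -176,-230 to 193,251 on a 2162x1440 image, decoded at 4624x3080). The numbers reproduce the expected crop of left 0, top 62, 412x412; the standalone script is illustrative, not the service code:

// Worked numbers for the negativeCoordinate fixture; mirrors the getCrop math above.
const clamp = (v: number, min: number, max: number) => Math.max(min, Math.min(max, v));

const oldDims = { width: 2162, height: 1440 };   // asset_faces.imageWidth / imageHeight
const newDims = { width: 4624, height: 3080 };   // dimensions of the decoded image
const box = { x1: -176, y1: -230, x2: 193, y2: 251 };

const x1 = clamp(box.x1, 0, oldDims.width);      // 0
const y1 = clamp(box.y1, 0, oldDims.height);     // 0
const x2 = clamp(box.x2, 0, oldDims.width);      // 193
const y2 = clamp(box.y2, 0, oldDims.height);     // 251

const widthScale = newDims.width / oldDims.width;    // ~2.139
const heightScale = newDims.height / oldDims.height; // ~2.139

const halfWidth = (widthScale * (x2 - x1)) / 2;   // ~206.4
const halfHeight = (heightScale * (y2 - y1)) / 2; // ~268.4

const middleX = Math.round(widthScale * x1 + halfWidth);   // 206
const middleY = Math.round(heightScale * y1 + halfHeight); // 268

const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1); // 295

const newHalfSize = Math.min(
  middleX - Math.max(0, middleX - targetHalfSize),                  // 206 (limited by the left edge)
  middleY - Math.max(0, middleY - targetHalfSize),                  // 268
  Math.min(newDims.width - 1, middleX + targetHalfSize) - middleX,  // 295
  Math.min(newDims.height - 1, middleY + targetHalfSize) - middleY, // 295
); // 206

console.log({ left: middleX - newHalfSize, top: middleY - newHalfSize, width: newHalfSize * 2, height: newHalfSize * 2 });
// => { left: 0, top: 62, width: 412, height: 412 }  (matches the negativeCoordinate test)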
@@ -1,7 +1,7 @@
 import { BadRequestException, NotFoundException } from '@nestjs/common';
 import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
 import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
-import { CacheControl, Colorspace, ImageFormat, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
+import { CacheControl, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
 import { DetectedFaces } from 'src/repositories/machine-learning.repository';
 import { FaceSearchResult } from 'src/repositories/search.repository';
 import { PersonService } from 'src/services/person.service';
@@ -9,7 +9,7 @@ import { ImmichFileResponse } from 'src/utils/file';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
 import { faceStub } from 'test/fixtures/face.stub';
-import { personStub, personThumbnailStub } from 'test/fixtures/person.stub';
+import { personStub } from 'test/fixtures/person.stub';
 import { systemConfigStub } from 'test/fixtures/system-config.stub';
 import { factory } from 'test/small.factory';
 import { makeStream, newTestService, ServiceMocks } from 'test/utils';
@@ -1024,114 +1024,6 @@ describe(PersonService.name, () => {
     });
   });

-  describe('handleGeneratePersonThumbnail', () => {
-    it('should skip if machine learning is disabled', async () => {
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
-
-      await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(JobStatus.SKIPPED);
-      expect(mocks.asset.getByIds).not.toHaveBeenCalled();
-      expect(mocks.systemMetadata.get).toHaveBeenCalled();
-    });
-
-    it('should skip a person not found', async () => {
-      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
-      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
-    });
-
-    it('should skip a person without a face asset id', async () => {
-      mocks.person.getById.mockResolvedValue(personStub.noThumbnail);
-      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
-      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
-    });
-
-    it('should skip a person with face not found', async () => {
-      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
-      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
-    });
-
-    it('should generate a thumbnail', async () => {
-      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
-      mocks.media.generateThumbnail.mockResolvedValue();
-
-      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });
-
-      expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id);
-      expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/admin_id/pe/rs');
-      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
-        assetStub.primaryImage.originalPath,
-        {
-          colorspace: Colorspace.P3,
-          format: ImageFormat.JPEG,
-          size: 250,
-          quality: 80,
-          crop: {
-            left: 238,
-            top: 163,
-            width: 274,
-            height: 274,
-          },
-          processInvalidImages: false,
-        },
-        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      );
-      expect(mocks.person.update).toHaveBeenCalledWith({
-        id: 'person-1',
-        thumbnailPath: 'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      });
-    });
-
-    it('should generate a thumbnail without going negative', async () => {
-      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailStart);
-      mocks.media.generateThumbnail.mockResolvedValue();
-
-      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });
-
-      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
-        assetStub.primaryImage.originalPath,
-        {
-          colorspace: Colorspace.P3,
-          format: ImageFormat.JPEG,
-          size: 250,
-          quality: 80,
-          crop: {
-            left: 0,
-            top: 85,
-            width: 510,
-            height: 510,
-          },
-          processInvalidImages: false,
-        },
-        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      );
-    });
-
-    it('should generate a thumbnail without overflowing', async () => {
-      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailEnd);
-      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
-      mocks.media.generateThumbnail.mockResolvedValue();
-
-      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });
-
-      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
-        assetStub.primaryImage.originalPath,
-        {
-          colorspace: Colorspace.P3,
-          format: ImageFormat.JPEG,
-          size: 250,
-          quality: 80,
-          crop: {
-            left: 591,
-            top: 591,
-            width: 408,
-            height: 408,
-          },
-          processInvalidImages: false,
-        },
-        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      );
-    });
-  });
-
   describe('mergePerson', () => {
     it('should require person.write and person.merge permission', async () => {
       mocks.person.getById.mockResolvedValueOnce(personStub.primaryPerson);
@@ -1,7 +1,6 @@
 import { BadRequestException, Injectable, NotFoundException } from '@nestjs/common';
 import { Insertable, Updateable } from 'kysely';
-import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
-import { StorageCore } from 'src/cores/storage.core';
+import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { Person } from 'src/database';
 import { AssetFaces, FaceSearch } from 'src/db';
 import { Chunked, OnJob } from 'src/decorators';
@@ -25,10 +24,8 @@ import {
   PersonUpdateDto,
 } from 'src/dtos/person.dto';
 import {
-  AssetType,
   AssetVisibility,
   CacheControl,
-  ImageFormat,
   JobName,
   JobStatus,
   Permission,
@@ -40,10 +37,10 @@ import {
 import { BoundingBox } from 'src/repositories/machine-learning.repository';
 import { UpdateFacesData } from 'src/repositories/person.repository';
 import { BaseService } from 'src/services/base.service';
-import { CropOptions, ImageDimensions, InputDimensions, JobItem, JobOf } from 'src/types';
+import { JobItem, JobOf } from 'src/types';
 import { ImmichFileResponse } from 'src/utils/file';
 import { mimeTypes } from 'src/utils/mime-types';
-import { isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
+import { isFacialRecognitionEnabled } from 'src/utils/misc';

 @Injectable()
 export class PersonService extends BaseService {
@@ -537,41 +534,6 @@ export class PersonService extends BaseService {
     return JobStatus.SUCCESS;
   }

-  @OnJob({ name: JobName.GENERATE_PERSON_THUMBNAIL, queue: QueueName.THUMBNAIL_GENERATION })
-  async handleGeneratePersonThumbnail({ id }: JobOf<JobName.GENERATE_PERSON_THUMBNAIL>): Promise<JobStatus> {
-    const { machineLearning, metadata, image } = await this.getConfig({ withCache: true });
-    if (!isFacialRecognitionEnabled(machineLearning) && !isFaceImportEnabled(metadata)) {
-      return JobStatus.SKIPPED;
-    }
-
-    const data = await this.personRepository.getDataForThumbnailGenerationJob(id);
-    if (!data) {
-      this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
-      return JobStatus.FAILED;
-    }
-
-    const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight } = data;
-
-    const { width, height, inputPath } = await this.getInputDimensions(data);
-
-    const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
-    this.storageCore.ensureFolders(thumbnailPath);
-
-    const thumbnailOptions = {
-      colorspace: image.colorspace,
-      format: ImageFormat.JPEG,
-      size: FACE_THUMBNAIL_SIZE,
-      quality: image.thumbnail.quality,
-      crop: this.getCrop({ old: { width: oldWidth, height: oldHeight }, new: { width, height } }, { x1, y1, x2, y2 }),
-      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
-    };
-
-    await this.mediaRepository.generateThumbnail(inputPath, thumbnailOptions, thumbnailPath);
-    await this.personRepository.update({ id, thumbnailPath });
-
-    return JobStatus.SUCCESS;
-  }
-
   async mergePerson(auth: AuthDto, id: string, dto: MergePersonDto): Promise<BulkIdResponseDto[]> {
     const mergeIds = dto.ids;
     if (mergeIds.includes(id)) {
@@ -642,57 +604,6 @@ export class PersonService extends BaseService {
     return person;
   }

-  private async getInputDimensions(asset: {
-    type: AssetType;
-    exifImageWidth: number;
-    exifImageHeight: number;
-    previewPath: string;
-    originalPath: string;
-    oldWidth: number;
-    oldHeight: number;
-  }): Promise<InputDimensions> {
-    if (asset.type === AssetType.IMAGE) {
-      let { exifImageWidth: width, exifImageHeight: height } = asset;
-      if (asset.oldHeight > asset.oldWidth !== height > width) {
-        [width, height] = [height, width];
-      }
-
-      return { width, height, inputPath: asset.originalPath };
-    }
-
-    const { width, height } = await this.mediaRepository.getImageDimensions(asset.previewPath);
-    return { width, height, inputPath: asset.previewPath };
-  }
-
-  private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
-    const widthScale = dims.new.width / dims.old.width;
-    const heightScale = dims.new.height / dims.old.height;
-
-    const halfWidth = (widthScale * (x2 - x1)) / 2;
-    const halfHeight = (heightScale * (y2 - y1)) / 2;
-
-    const middleX = Math.round(widthScale * x1 + halfWidth);
-    const middleY = Math.round(heightScale * y1 + halfHeight);
-
-    // zoom out 10%
-    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
-
-    // get the longest distance from the center of the image without overflowing
-    const newHalfSize = Math.min(
-      middleX - Math.max(0, middleX - targetHalfSize),
-      middleY - Math.max(0, middleY - targetHalfSize),
-      Math.min(dims.new.width - 1, middleX + targetHalfSize) - middleX,
-      Math.min(dims.new.height - 1, middleY + targetHalfSize) - middleY,
-    );
-
-    return {
-      left: middleX - newHalfSize,
-      top: middleY - newHalfSize,
-      width: newHalfSize * 2,
-      height: newHalfSize * 2,
-    };
-  }
-
   // TODO return a asset face response
   async createFace(auth: AuthDto, dto: AssetFaceCreateDto): Promise<void> {
     await Promise.all([
@@ -301,3 +301,7 @@ export const globToSqlPattern = (glob: string) => {
   const tokens = picomatch.parse(glob).tokens;
   return tokens.map((token) => convertTokenToSqlPattern(token)).join('');
 };
+
+export function clamp(value: number, min: number, max: number) {
+  return Math.max(min, Math.min(max, value));
+}
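Note: the only utility change is the new clamp helper above; a quick usage sketch with values taken from the fixtures below:

import { clamp } from 'src/utils/misc';

clamp(-176, 0, 2162); // 0    - a negative detector coordinate is pulled back to the edge
clamp(251, 0, 1440);  // 251  - in-range values pass through unchanged
clamp(2171, 0, 2162); // 2162 - overflow is capped at the image dimension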
server/test/fixtures/person.stub.ts (vendored, 48 changes)
@@ -178,8 +178,7 @@ export const personThumbnailStub = {
     oldWidth: 2160,
     type: AssetType.IMAGE,
     originalPath: '/original/path.jpg',
-    exifImageHeight: 3840,
-    exifImageWidth: 2160,
+    exifOrientation: '1',
     previewPath: previewFile.path,
   }),
   newThumbnailMiddle: Object.freeze({
@@ -192,8 +191,7 @@ export const personThumbnailStub = {
     oldWidth: 400,
     type: AssetType.IMAGE,
     originalPath: '/original/path.jpg',
-    exifImageHeight: 1000,
-    exifImageWidth: 1000,
+    exifOrientation: '1',
     previewPath: previewFile.path,
   }),
   newThumbnailEnd: Object.freeze({
@@ -206,8 +204,46 @@ export const personThumbnailStub = {
     oldWidth: 500,
     type: AssetType.IMAGE,
     originalPath: '/original/path.jpg',
-    exifImageHeight: 1000,
-    exifImageWidth: 1000,
+    exifOrientation: '1',
     previewPath: previewFile.path,
   }),
+  rawEmbeddedThumbnail: Object.freeze({
+    ownerId: userStub.admin.id,
+    x1: 100,
+    y1: 100,
+    x2: 200,
+    y2: 200,
+    oldHeight: 500,
+    oldWidth: 400,
+    type: AssetType.IMAGE,
+    originalPath: '/original/path.dng',
+    exifOrientation: '1',
+    previewPath: previewFile.path,
+  }),
+  negativeCoordinate: Object.freeze({
+    ownerId: userStub.admin.id,
+    x1: -176,
+    y1: -230,
+    x2: 193,
+    y2: 251,
+    oldHeight: 1440,
+    oldWidth: 2162,
+    type: AssetType.IMAGE,
+    originalPath: '/original/path.jpg',
+    exifOrientation: '1',
+    previewPath: previewFile.path,
+  }),
+  overflowingCoordinate: Object.freeze({
+    ownerId: userStub.admin.id,
+    x1: 2097,
+    y1: 0,
+    x2: 2171,
+    y2: 152,
+    oldHeight: 1440,
+    oldWidth: 2162,
+    type: AssetType.IMAGE,
+    originalPath: '/original/path.jpg',
+    exifOrientation: '1',
+    previewPath: previewFile.path,
+  }),
 };