refactor: handle detect faces job query (#17660)

This commit is contained in:
Daniel Dietzler 2025-04-16 22:52:54 +02:00 committed by GitHub
parent 1bbfacfc09
commit 586a7a173b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 80 additions and 32 deletions

View File

@@ -219,6 +219,46 @@ from
 where
   "assets"."id" = $2
 
+-- AssetJobRepository.getForDetectFacesJob
+select
+  "assets"."id",
+  "assets"."isVisible",
+  to_json("exif") as "exifInfo",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_faces".*
+        from
+          "asset_faces"
+        where
+          "asset_faces"."assetId" = "assets"."id"
+      ) as agg
+  ) as "faces",
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "asset_files"."id",
+          "asset_files"."path",
+          "asset_files"."type"
+        from
+          "asset_files"
+        where
+          "asset_files"."assetId" = "assets"."id"
+          and "asset_files"."type" = $1
+      ) as agg
+  ) as "files"
+from
+  "assets"
+  inner join "exif" on "assets"."id" = "exif"."assetId"
+where
+  "assets"."id" = $2
+
 -- AssetJobRepository.getForStorageTemplateJob
 select
   "assets"."id",

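For readers skimming the generated SQL above, here is a rough sketch of the row shape it produces. This interface is not part of the commit; the field types are assumptions read off the query, where `to_json("exif")` serializes the joined exif row and both subqueries fall back to empty JSON arrays:

```ts
// Assumed shape only; the real types are inferred from the Kysely schema.
interface DetectFacesJobRow {
  id: string;
  isVisible: boolean;
  exifInfo: Record<string, unknown>; // to_json("exif") on the joined exif row
  faces: Record<string, unknown>[]; // json_agg over asset_faces, '[]' when none match
  files: { id: string; path: string; type: string }[]; // only rows with type = $1 (the preview type)
}
```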
View File

@@ -137,6 +137,18 @@ export class AssetJobRepository {
       .executeTakeFirst();
   }
 
+  @GenerateSql({ params: [DummyValue.UUID] })
+  getForDetectFacesJob(id: string) {
+    return this.db
+      .selectFrom('assets')
+      .select(['assets.id', 'assets.isVisible'])
+      .$call(withExifInner)
+      .select((eb) => withFaces(eb, true))
+      .select((eb) => withFiles(eb, AssetFileType.PREVIEW))
+      .where('assets.id', '=', id)
+      .executeTakeFirst();
+  }
+
   private storageTemplateAssetQuery() {
     return this.db
       .selectFrom('assets')
View File

@@ -1,5 +1,4 @@
 import { BadRequestException, NotFoundException } from '@nestjs/common';
-import { AssetFace } from 'src/database';
 import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
 import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
 import { CacheControl, Colorspace, ImageFormat, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
@@ -719,24 +718,7 @@ describe(PersonService.name, () => {
     });
 
     it('should skip when no resize path', async () => {
-      mocks.asset.getByIds.mockResolvedValue([assetStub.noResizePath]);
-      await sut.handleDetectFaces({ id: assetStub.noResizePath.id });
-      expect(mocks.machineLearning.detectFaces).not.toHaveBeenCalled();
-    });
-
-    it('should skip it the asset has already been processed', async () => {
-      mocks.asset.getByIds.mockResolvedValue([
-        {
-          ...assetStub.noResizePath,
-          faces: [
-            {
-              id: 'asset-face-1',
-              assetId: assetStub.noResizePath.id,
-              personId: faceStub.face1.personId,
-            } as AssetFace,
-          ],
-        },
-      ]);
+      mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.noResizePath, files: [] });
       await sut.handleDetectFaces({ id: assetStub.noResizePath.id });
       expect(mocks.machineLearning.detectFaces).not.toHaveBeenCalled();
     });
@@ -745,7 +727,7 @@ describe(PersonService.name, () => {
       const start = Date.now();
       mocks.machineLearning.detectFaces.mockResolvedValue({ imageHeight: 500, imageWidth: 400, faces: [] });
-      mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
+      mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] });
       await sut.handleDetectFaces({ id: assetStub.image.id });
       expect(mocks.machineLearning.detectFaces).toHaveBeenCalledWith(
         ['http://immich-machine-learning:3003'],
@@ -766,7 +748,7 @@ describe(PersonService.name, () => {
     it('should create a face with no person and queue recognition job', async () => {
       mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock);
       mocks.search.searchFaces.mockResolvedValue([{ ...faceStub.face1, distance: 0.7 }]);
-      mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
+      mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] });
       mocks.person.refreshFaces.mockResolvedValue();
 
       await sut.handleDetectFaces({ id: assetStub.image.id });
@@ -782,7 +764,11 @@ describe(PersonService.name, () => {
     it('should delete an existing face not among the new detected faces', async () => {
       mocks.machineLearning.detectFaces.mockResolvedValue({ faces: [], imageHeight: 500, imageWidth: 400 });
-      mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.primaryFace1] }]);
+      mocks.assetJob.getForDetectFacesJob.mockResolvedValue({
+        ...assetStub.image,
+        faces: [faceStub.primaryFace1],
+        files: [assetStub.image.files[1]],
+      });
 
       await sut.handleDetectFaces({ id: assetStub.image.id });
@@ -794,7 +780,11 @@ describe(PersonService.name, () => {
     it('should add new face and delete an existing face not among the new detected faces', async () => {
       mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock);
-      mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.primaryFace1] }]);
+      mocks.assetJob.getForDetectFacesJob.mockResolvedValue({
+        ...assetStub.image,
+        faces: [faceStub.primaryFace1],
+        files: [assetStub.image.files[1]],
+      });
       mocks.person.refreshFaces.mockResolvedValue();
 
       await sut.handleDetectFaces({ id: assetStub.image.id });
@@ -810,7 +800,11 @@ describe(PersonService.name, () => {
     it('should add embedding to matching metadata face', async () => {
       mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock);
-      mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.fromExif1] }]);
+      mocks.assetJob.getForDetectFacesJob.mockResolvedValue({
+        ...assetStub.image,
+        faces: [faceStub.fromExif1],
+        files: [assetStub.image.files[1]],
+      });
       mocks.person.refreshFaces.mockResolvedValue();
 
       await sut.handleDetectFaces({ id: assetStub.image.id });
@@ -827,7 +821,11 @@ describe(PersonService.name, () => {
     it('should not add embedding to non-matching metadata face', async () => {
       mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock);
-      mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.fromExif2] }]);
+      mocks.assetJob.getForDetectFacesJob.mockResolvedValue({
+        ...assetStub.image,
+        faces: [faceStub.fromExif2],
+        files: [assetStub.image.files[1]],
+      });
 
       await sut.handleDetectFaces({ id: assetStub.image.id });

View File

@@ -24,7 +24,6 @@ import {
   PersonUpdateDto,
 } from 'src/dtos/person.dto';
 import {
-  AssetFileType,
   AssetType,
   CacheControl,
   ImageFormat,
@@ -41,7 +40,6 @@ import { BoundingBox } from 'src/repositories/machine-learning.repository';
 import { UpdateFacesData } from 'src/repositories/person.repository';
 import { BaseService } from 'src/services/base.service';
 import { CropOptions, ImageDimensions, InputDimensions, JobItem, JobOf } from 'src/types';
-import { getAssetFile } from 'src/utils/asset.util';
 import { ImmichFileResponse } from 'src/utils/file';
 import { mimeTypes } from 'src/utils/mime-types';
 import { isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
@@ -297,10 +295,9 @@ export class PersonService extends BaseService {
       return JobStatus.SKIPPED;
     }
 
-    const relations = { exifInfo: true, faces: { person: false, withDeleted: true }, files: true };
-    const [asset] = await this.assetRepository.getByIds([id], relations);
-    const previewFile = getAssetFile(asset.files, AssetFileType.PREVIEW);
-    if (!asset || !previewFile) {
+    const asset = await this.assetJobRepository.getForDetectFacesJob(id);
+    const previewFile = asset?.files[0];
+    if (!asset || asset.files.length !== 1 || !previewFile) {
       return JobStatus.FAILED;
     }

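The new guard is stricter than the old null check on `previewFile`: because the job query filters `asset_files` to the preview type, exactly one entry is the only well-formed result. A hypothetical standalone version of the check, with made-up type names, to make that intent explicit:

```ts
// Illustration only; the type names here are invented for the sketch.
type JobFile = { id: string; path: string; type: string };
type DetectFacesJobResult = { files: JobFile[] };

function getPreviewOrNull(asset: DetectFacesJobResult | undefined): JobFile | null {
  // Zero files means the preview has not been generated yet; more than one
  // would mean duplicate preview rows. Either way the job cannot proceed.
  if (!asset || asset.files.length !== 1) {
    return null;
  }
  return asset.files[0];
}
```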
View File

@@ -52,7 +52,7 @@ export const assetStub = {
     fileSizeInByte: 12_345,
     ...asset,
   }),
-  noResizePath: Object.freeze<AssetEntity>({
+  noResizePath: Object.freeze({
     id: 'asset-id',
     status: AssetStatus.ACTIVE,
     originalFileName: 'IMG_123.jpg',
@@ -79,6 +79,7 @@ export const assetStub = {
     livePhotoVideoId: null,
     sharedLinks: [],
     faces: [],
+    exifInfo: {} as Exif,
     sidecarPath: null,
     deletedAt: null,
     isExternal: false,
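
A side note on the fixture tweak in this last file: dropping the explicit type argument from `Object.freeze` lets TypeScript infer the literal's own shape instead of checking it against `AssetEntity`, presumably so test fixtures can drift away from the entity class as queries like `getForDetectFacesJob` return leaner shapes. A minimal illustration of the difference, with stand-in types:

```ts
// Stand-ins for illustration; the real Exif and AssetEntity types live in the codebase.
type Exif = Record<string, unknown>;
interface AssetEntity { id: string; faces: unknown[] } // deliberately missing exifInfo

// With an explicit type argument, the literal is checked against AssetEntity,
// so the extra exifInfo property would be a compile error (excess property check):
// const pinned = Object.freeze<AssetEntity>({ id: 'a', faces: [], exifInfo: {} as Exif });

// Without it, the inferred type keeps every field the literal declares:
const inferred = Object.freeze({ id: 'a', faces: [], exifInfo: {} as Exif });
```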