From f50e5d006cae5cdd1831797e8b571db150070cb2 Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Wed, 16 Apr 2025 20:08:49 +0200 Subject: [PATCH 01/13] refactor: dedicated queries for asset jobs (#17652) --- server/src/cores/storage.core.ts | 2 +- server/src/database.ts | 18 ++ server/src/entities/asset.entity.ts | 4 +- server/src/queries/asset.job.repository.sql | 68 ++++++ .../src/repositories/asset-job.repository.ts | 32 ++- server/src/services/media.service.spec.ts | 48 ++-- server/src/services/media.service.ts | 20 +- server/src/services/metadata.service.spec.ts | 211 +++++++++--------- server/src/services/metadata.service.ts | 38 +++- server/test/fixtures/asset.stub.ts | 27 ++- server/test/fixtures/shared-link.stub.ts | 2 + .../medium/specs/metadata.service.spec.ts | 3 +- 12 files changed, 310 insertions(+), 163 deletions(-) diff --git a/server/src/cores/storage.core.ts b/server/src/cores/storage.core.ts index dd3edd3ab1..9bd43a662d 100644 --- a/server/src/cores/storage.core.ts +++ b/server/src/cores/storage.core.ts @@ -28,7 +28,7 @@ export interface MoveRequest { export type GeneratedImageType = AssetPathType.PREVIEW | AssetPathType.THUMBNAIL | AssetPathType.FULLSIZE; export type GeneratedAssetType = GeneratedImageType | AssetPathType.ENCODED_VIDEO; -type ThumbnailPathEntity = { id: string; ownerId: string }; +export type ThumbnailPathEntity = { id: string; ownerId: string }; let instance: StorageCore | null; diff --git a/server/src/database.ts b/server/src/database.ts index 7766a13f02..b504d39579 100644 --- a/server/src/database.ts +++ b/server/src/database.ts @@ -263,6 +263,24 @@ export type AssetJobStatus = Selectable & { const userColumns = ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'] as const; export const columns = { + asset: [ + 'assets.id', + 'assets.checksum', + 'assets.deviceAssetId', + 'assets.deviceId', + 'assets.fileCreatedAt', + 'assets.fileModifiedAt', + 
'assets.isExternal', + 'assets.isVisible', + 'assets.libraryId', + 'assets.livePhotoVideoId', + 'assets.localDateTime', + 'assets.originalFileName', + 'assets.originalPath', + 'assets.ownerId', + 'assets.sidecarPath', + 'assets.type', + ], assetFiles: ['asset_files.id', 'asset_files.path', 'asset_files.type'], authUser: [ 'users.id', diff --git a/server/src/entities/asset.entity.ts b/server/src/entities/asset.entity.ts index 19e8de6702..b17328f790 100644 --- a/server/src/entities/asset.entity.ts +++ b/server/src/entities/asset.entity.ts @@ -56,13 +56,13 @@ export class AssetEntity { export function withExif(qb: SelectQueryBuilder) { return qb .leftJoin('exif', 'assets.id', 'exif.assetId') - .select((eb) => eb.fn.toJson(eb.table('exif')).$castTo().as('exifInfo')); + .select((eb) => eb.fn.toJson(eb.table('exif')).$castTo().as('exifInfo')); } export function withExifInner(qb: SelectQueryBuilder) { return qb .innerJoin('exif', 'assets.id', 'exif.assetId') - .select((eb) => eb.fn.toJson(eb.table('exif')).as('exifInfo')); + .select((eb) => eb.fn.toJson(eb.table('exif')).$castTo().as('exifInfo')); } export function withSmartSearch(qb: SelectQueryBuilder) { diff --git a/server/src/queries/asset.job.repository.sql b/server/src/queries/asset.job.repository.sql index 15172c55eb..5e28f2c4c9 100644 --- a/server/src/queries/asset.job.repository.sql +++ b/server/src/queries/asset.job.repository.sql @@ -115,6 +115,74 @@ from where "assets"."id" = $1 +-- AssetJobRepository.getForGenerateThumbnailJob +select + "assets"."id", + "assets"."isVisible", + "assets"."originalFileName", + "assets"."originalPath", + "assets"."ownerId", + "assets"."thumbhash", + "assets"."type", + ( + select + coalesce(json_agg(agg), '[]') + from + ( + select + "asset_files"."id", + "asset_files"."path", + "asset_files"."type" + from + "asset_files" + where + "asset_files"."assetId" = "assets"."id" + ) as agg + ) as "files", + to_json("exif") as "exifInfo" +from + "assets" + inner join "exif" on "assets"."id" 
= "exif"."assetId" +where + "assets"."id" = $1 + +-- AssetJobRepository.getForMetadataExtraction +select + "assets"."id", + "assets"."checksum", + "assets"."deviceAssetId", + "assets"."deviceId", + "assets"."fileCreatedAt", + "assets"."fileModifiedAt", + "assets"."isExternal", + "assets"."isVisible", + "assets"."libraryId", + "assets"."livePhotoVideoId", + "assets"."localDateTime", + "assets"."originalFileName", + "assets"."originalPath", + "assets"."ownerId", + "assets"."sidecarPath", + "assets"."type", + ( + select + coalesce(json_agg(agg), '[]') + from + ( + select + "asset_faces".* + from + "asset_faces" + where + "asset_faces"."assetId" = "assets"."id" + and "asset_faces"."deletedAt" is null + ) as agg + ) as "faces" +from + "assets" +where + "assets"."id" = $1 + -- AssetJobRepository.getForStorageTemplateJob select "assets"."id", diff --git a/server/src/repositories/asset-job.repository.ts b/server/src/repositories/asset-job.repository.ts index 3824c13519..770ca93891 100644 --- a/server/src/repositories/asset-job.repository.ts +++ b/server/src/repositories/asset-job.repository.ts @@ -2,9 +2,10 @@ import { Injectable } from '@nestjs/common'; import { Kysely } from 'kysely'; import { jsonArrayFrom } from 'kysely/helpers/postgres'; import { InjectKysely } from 'nestjs-kysely'; +import { columns } from 'src/database'; import { DB } from 'src/db'; import { DummyValue, GenerateSql } from 'src/decorators'; -import { withFiles } from 'src/entities/asset.entity'; +import { withExifInner, withFaces, withFiles } from 'src/entities/asset.entity'; import { AssetFileType } from 'src/enum'; import { StorageAsset } from 'src/types'; import { asUuid } from 'src/utils/database'; @@ -87,6 +88,35 @@ export class AssetJobRepository { .executeTakeFirst(); } + @GenerateSql({ params: [DummyValue.UUID] }) + getForGenerateThumbnailJob(id: string) { + return this.db + .selectFrom('assets') + .select([ + 'assets.id', + 'assets.isVisible', + 'assets.originalFileName', + 
'assets.originalPath', + 'assets.ownerId', + 'assets.thumbhash', + 'assets.type', + ]) + .select(withFiles) + .$call(withExifInner) + .where('assets.id', '=', id) + .executeTakeFirst(); + } + + @GenerateSql({ params: [DummyValue.UUID] }) + getForMetadataExtraction(id: string) { + return this.db + .selectFrom('assets') + .select(columns.asset) + .select(withFaces) + .where('assets.id', '=', id) + .executeTakeFirst(); + } + private storageTemplateAssetQuery() { return this.db .selectFrom('assets') diff --git a/server/src/services/media.service.spec.ts b/server/src/services/media.service.spec.ts index 6fe14b954b..8990ad86a6 100644 --- a/server/src/services/media.service.spec.ts +++ b/server/src/services/media.service.spec.ts @@ -2,7 +2,6 @@ import { OutputInfo } from 'sharp'; import { SystemConfig } from 'src/config'; import { Exif } from 'src/database'; import { AssetMediaSize } from 'src/dtos/asset-media.dto'; -import { AssetEntity } from 'src/entities/asset.entity'; import { AssetFileType, AssetPathType, @@ -249,6 +248,7 @@ describe(MediaService.name, () => { }); it('should skip thumbnail generation if asset not found', async () => { + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(void 0); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); expect(mocks.media.generateThumbnail).not.toHaveBeenCalled(); @@ -256,7 +256,7 @@ describe(MediaService.name, () => { }); it('should skip thumbnail generation if asset type is unknown', async () => { - mocks.asset.getById.mockResolvedValue({ ...assetStub.image, type: 'foo' } as never as AssetEntity); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({ ...assetStub.image, type: 'foo' as AssetType }); await expect(sut.handleGenerateThumbnails({ id: assetStub.image.id })).resolves.toBe(JobStatus.SKIPPED); expect(mocks.media.probe).not.toHaveBeenCalled(); @@ -266,14 +266,14 @@ describe(MediaService.name, () => { it('should skip video thumbnail generation if no video stream', async () => { 
mocks.media.probe.mockResolvedValue(probeStub.noVideoStreams); - mocks.asset.getById.mockResolvedValue(assetStub.video); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video); await expect(sut.handleGenerateThumbnails({ id: assetStub.video.id })).rejects.toThrowError(); expect(mocks.media.generateThumbnail).not.toHaveBeenCalled(); expect(mocks.asset.update).not.toHaveBeenCalledWith(); }); it('should skip invisible assets', async () => { - mocks.asset.getById.mockResolvedValue(assetStub.livePhotoMotionAsset); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.livePhotoMotionAsset); expect(await sut.handleGenerateThumbnails({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.SKIPPED); @@ -283,7 +283,7 @@ describe(MediaService.name, () => { it('should delete previous preview if different path', async () => { mocks.systemMetadata.get.mockResolvedValue({ image: { thumbnail: { format: ImageFormat.WEBP } } }); - mocks.asset.getById.mockResolvedValue(assetStub.image); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -291,7 +291,7 @@ describe(MediaService.name, () => { }); it('should generate P3 thumbnails for a wide gamut image', async () => { - mocks.asset.getById.mockResolvedValue({ + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({ ...assetStub.image, exifInfo: { profileDescription: 'Adobe RGB', bitsPerSample: 14 } as Exif, }); @@ -359,7 +359,7 @@ describe(MediaService.name, () => { it('should generate a thumbnail for a video', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); - mocks.asset.getById.mockResolvedValue(assetStub.video); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video); await sut.handleGenerateThumbnails({ id: assetStub.video.id }); expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se'); @@ -394,7 +394,7 @@ 
describe(MediaService.name, () => { it('should tonemap thumbnail for hdr video', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); - mocks.asset.getById.mockResolvedValue(assetStub.video); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video); await sut.handleGenerateThumbnails({ id: assetStub.video.id }); expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se'); @@ -432,7 +432,7 @@ describe(MediaService.name, () => { mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true, maxBitrate: '5000k' }, }); - mocks.asset.getById.mockResolvedValue(assetStub.video); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video); await sut.handleGenerateThumbnails({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -453,7 +453,7 @@ describe(MediaService.name, () => { }); it('should not skip intra frames for MTS file', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamMTS); - mocks.asset.getById.mockResolvedValue(assetStub.video); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video); await sut.handleGenerateThumbnails({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -471,7 +471,7 @@ describe(MediaService.name, () => { it('should use scaling divisible by 2 even when using quick sync', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } }); - mocks.asset.getById.mockResolvedValue(assetStub.video); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video); await sut.handleGenerateThumbnails({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -487,7 +487,7 @@ describe(MediaService.name, () => { it.each(Object.values(ImageFormat))('should generate an image preview in %s format', async (format) => { 
mocks.systemMetadata.get.mockResolvedValue({ image: { preview: { format } } }); - mocks.asset.getById.mockResolvedValue(assetStub.image); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8'); mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer); const previewPath = `upload/thumbs/user-id/as/se/asset-id-preview.${format}`; @@ -532,7 +532,7 @@ describe(MediaService.name, () => { it.each(Object.values(ImageFormat))('should generate an image thumbnail in %s format', async (format) => { mocks.systemMetadata.get.mockResolvedValue({ image: { thumbnail: { format } } }); - mocks.asset.getById.mockResolvedValue(assetStub.image); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8'); mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer); const previewPath = `upload/thumbs/user-id/as/se/asset-id-preview.jpeg`; @@ -577,7 +577,7 @@ describe(MediaService.name, () => { it('should delete previous thumbnail if different path', async () => { mocks.systemMetadata.get.mockResolvedValue({ image: { thumbnail: { format: ImageFormat.WEBP } } }); - mocks.asset.getById.mockResolvedValue(assetStub.image); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -588,7 +588,7 @@ describe(MediaService.name, () => { mocks.media.extract.mockResolvedValue(true); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } }); - mocks.asset.getById.mockResolvedValue(assetStub.imageDng); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -605,7 +605,7 @@ describe(MediaService.name, () => { 
mocks.media.extract.mockResolvedValue(true); mocks.media.getImageDimensions.mockResolvedValue({ width: 1000, height: 1000 }); mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } }); - mocks.asset.getById.mockResolvedValue(assetStub.imageDng); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -621,7 +621,7 @@ describe(MediaService.name, () => { it('should resize original image if embedded image not found', async () => { mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } }); - mocks.asset.getById.mockResolvedValue(assetStub.imageDng); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -636,7 +636,7 @@ describe(MediaService.name, () => { it('should resize original image if embedded image extraction is not enabled', async () => { mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: false } }); - mocks.asset.getById.mockResolvedValue(assetStub.imageDng); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -653,7 +653,7 @@ describe(MediaService.name, () => { it('should process invalid images if enabled', async () => { vi.stubEnv('IMMICH_PROCESS_INVALID_IMAGES', 'true'); - mocks.asset.getById.mockResolvedValue(assetStub.imageDng); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -689,7 +689,7 @@ describe(MediaService.name, () => { mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: true } }); mocks.media.extract.mockResolvedValue(true); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); - 
mocks.asset.getById.mockResolvedValue(assetStub.imageDng); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -719,7 +719,7 @@ describe(MediaService.name, () => { mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: false } }); mocks.media.extract.mockResolvedValue(true); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); - mocks.asset.getById.mockResolvedValue(assetStub.imageDng); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -760,7 +760,7 @@ describe(MediaService.name, () => { mocks.media.extract.mockResolvedValue(true); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); // HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari. - mocks.asset.getById.mockResolvedValue(assetStub.imageHif); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -788,7 +788,7 @@ describe(MediaService.name, () => { mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } }); mocks.media.extract.mockResolvedValue(true); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); - mocks.asset.getById.mockResolvedValue(assetStub.image); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -814,7 +814,7 @@ describe(MediaService.name, () => { mocks.media.extract.mockResolvedValue(true); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); // HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari. 
- mocks.asset.getById.mockResolvedValue(assetStub.imageHif); + mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); diff --git a/server/src/services/media.service.ts b/server/src/services/media.service.ts index f855168898..59d708772b 100644 --- a/server/src/services/media.service.ts +++ b/server/src/services/media.service.ts @@ -1,9 +1,9 @@ import { Injectable } from '@nestjs/common'; import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants'; -import { StorageCore } from 'src/cores/storage.core'; +import { StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core'; +import { Exif } from 'src/database'; import { OnEvent, OnJob } from 'src/decorators'; import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto'; -import { AssetEntity } from 'src/entities/asset.entity'; import { AssetFileType, AssetPathType, @@ -136,7 +136,7 @@ export class MediaService extends BaseService { @OnJob({ name: JobName.GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION }) async handleGenerateThumbnails({ id }: JobOf): Promise { - const asset = await this.assetRepository.getById(id, { exifInfo: true, files: true }); + const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id); if (!asset) { this.logger.warn(`Thumbnail generation failed for asset ${id}: not found`); return JobStatus.FAILED; @@ -213,7 +213,13 @@ export class MediaService extends BaseService { return JobStatus.SUCCESS; } - private async generateImageThumbnails(asset: AssetEntity) { + private async generateImageThumbnails(asset: { + id: string; + ownerId: string; + originalFileName: string; + originalPath: string; + exifInfo: Exif; + }) { const { image } = await this.getConfig({ withCache: true }); const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format); const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format); @@ 
-286,7 +292,7 @@ export class MediaService extends BaseService { return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer }; } - private async generateVideoThumbnails(asset: AssetEntity) { + private async generateVideoThumbnails(asset: ThumbnailPathEntity & { originalPath: string }) { const { image, ffmpeg } = await this.getConfig({ withCache: true }); const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format); const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format); @@ -515,8 +521,8 @@ export class MediaService extends BaseService { return name !== VideoContainer.MP4 && !ffmpegConfig.acceptedContainers.includes(name); } - isSRGB(asset: AssetEntity): boolean { - const { colorspace, profileDescription, bitsPerSample } = asset.exifInfo ?? {}; + isSRGB(asset: { exifInfo: Exif }): boolean { + const { colorspace, profileDescription, bitsPerSample } = asset.exifInfo; if (colorspace || profileDescription) { return [colorspace, profileDescription].some((s) => s?.toLowerCase().includes('srgb')); } else if (bitsPerSample) { diff --git a/server/src/services/metadata.service.spec.ts b/server/src/services/metadata.service.spec.ts index 358c0eea5e..ca1277a8c8 100644 --- a/server/src/services/metadata.service.spec.ts +++ b/server/src/services/metadata.service.spec.ts @@ -3,7 +3,6 @@ import { randomBytes } from 'node:crypto'; import { Stats } from 'node:fs'; import { constants } from 'node:fs/promises'; import { defaults } from 'src/config'; -import { Exif } from 'src/database'; import { AssetEntity } from 'src/entities/asset.entity'; import { AssetType, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum'; import { WithoutProperty } from 'src/repositories/asset.repository'; @@ -144,9 +143,10 @@ describe(MetadataService.name, () => { }); it('should handle an asset that could not be found', async () => { + 
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(void 0); await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).not.toHaveBeenCalled(); expect(mocks.asset.update).not.toHaveBeenCalled(); }); @@ -154,11 +154,11 @@ describe(MetadataService.name, () => { it('should handle a date in a sidecar file', async () => { const originalDate = new Date('2023-11-21T16:13:17.517Z'); const sidecarDate = new Date('2022-01-01T00:00:00.000Z'); - mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar); mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.sidecar.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.sidecar.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate })); expect(mocks.asset.update).toHaveBeenCalledWith( expect.objectContaining({ @@ -173,7 +173,7 @@ describe(MetadataService.name, () => { it('should take the file modification date when missing exif and earlier than creation date', async () => { const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z'); const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z'); - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mocks.storage.stat.mockResolvedValue({ size: 123_456, mtime: fileModifiedAt, @@ -183,7 +183,7 @@ describe(MetadataService.name, () => { mockReadTags(); 
await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith( expect.objectContaining({ dateTimeOriginal: fileModifiedAt }), ); @@ -199,7 +199,7 @@ describe(MetadataService.name, () => { it('should take the file creation date when missing exif and earlier than modification date', async () => { const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z'); const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z'); - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mocks.storage.stat.mockResolvedValue({ size: 123_456, mtime: fileModifiedAt, @@ -209,7 +209,7 @@ describe(MetadataService.name, () => { mockReadTags(); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: fileCreatedAt })); expect(mocks.asset.update).toHaveBeenCalledWith({ id: assetStub.image.id, @@ -222,7 +222,7 @@ describe(MetadataService.name, () => { it('should account for the server being in a non-UTC timezone', async () => { process.env.TZ = 'America/Los_Angeles'; - mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar); mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -240,7 +240,7 @@ describe(MetadataService.name, () => { }); it('should handle lists of numbers', async () => { - 
mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mocks.storage.stat.mockResolvedValue({ size: 123_456, mtime: assetStub.image.fileModifiedAt, @@ -252,7 +252,7 @@ describe(MetadataService.name, () => { }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ iso: 160 })); expect(mocks.asset.update).toHaveBeenCalledWith({ id: assetStub.image.id, @@ -265,7 +265,7 @@ describe(MetadataService.name, () => { it('should not delete latituide and longitude without reverse geocode', async () => { // regression test for issue 17511 - mocks.asset.getByIds.mockResolvedValue([assetStub.withLocation]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation); mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: false } }); mocks.storage.stat.mockResolvedValue({ size: 123_456, @@ -279,7 +279,7 @@ describe(MetadataService.name, () => { }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith( expect.objectContaining({ city: null, state: null, country: null }), ); @@ -293,7 +293,7 @@ describe(MetadataService.name, () => { }); it('should apply reverse geocoding', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.withLocation]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation); mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } }); 
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' }); mocks.storage.stat.mockResolvedValue({ @@ -308,7 +308,7 @@ describe(MetadataService.name, () => { }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith( expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }), ); @@ -322,19 +322,19 @@ describe(MetadataService.name, () => { }); it('should discard latitude and longitude on null island', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.withLocation]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation); mockReadTags({ GPSLatitude: 0, GPSLongitude: 0, }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ latitude: null, longitude: null })); }); it('should extract tags from TagsList', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ TagsList: ['Parent'] }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -344,7 +344,7 @@ describe(MetadataService.name, () => { }); it('should extract hierarchy from TagsList', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ TagsList: ['Parent/Child'] }); mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert); 
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert); @@ -364,7 +364,7 @@ describe(MetadataService.name, () => { }); it('should extract tags from Keywords as a string', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Keywords: 'Parent' }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -374,7 +374,7 @@ describe(MetadataService.name, () => { }); it('should extract tags from Keywords as a list', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Keywords: ['Parent'] }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -384,7 +384,7 @@ describe(MetadataService.name, () => { }); it('should extract tags from Keywords as a list with a number', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Keywords: ['Parent', 2024] }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -395,7 +395,7 @@ describe(MetadataService.name, () => { }); it('should extract hierarchal tags from Keywords', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Keywords: 'Parent/Child' }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -414,7 +414,7 @@ describe(MetadataService.name, () => { }); it('should ignore Keywords when TagsList is present', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -433,7 +433,7 @@ 
describe(MetadataService.name, () => { }); it('should extract hierarchy from HierarchicalSubject', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] }); mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert); mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert); @@ -454,7 +454,7 @@ describe(MetadataService.name, () => { }); it('should extract tags from HierarchicalSubject as a list with a number', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ HierarchicalSubject: ['Parent', 2024] }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -465,7 +465,7 @@ describe(MetadataService.name, () => { }); it('should extract ignore / characters in a HierarchicalSubject tag', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ HierarchicalSubject: ['Mom/Dad'] }); mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert); @@ -479,7 +479,7 @@ describe(MetadataService.name, () => { }); it('should ignore HierarchicalSubject when TagsList is present', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ HierarchicalSubject: ['Parent2|Child2'], TagsList: ['Parent/Child'] }); mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert); @@ -498,7 +498,7 @@ describe(MetadataService.name, () => { }); it('should remove existing tags', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({}); await 
sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -507,13 +507,11 @@ describe(MetadataService.name, () => { }); it('should not apply motion photos if asset is video', async () => { - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoMotionAsset, isVisible: true }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ ...assetStub.livePhotoMotionAsset, isVisible: true }); mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); await sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id], { - faces: { person: false }, - }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.id); expect(mocks.storage.createOrOverwriteFile).not.toHaveBeenCalled(); expect(mocks.job.queue).not.toHaveBeenCalled(); expect(mocks.job.queueAll).not.toHaveBeenCalled(); @@ -523,7 +521,7 @@ describe(MetadataService.name, () => { }); it('should handle an invalid Directory Item', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ MotionPhoto: 1, ContainerDirectory: [{ Foo: 100 }], @@ -533,19 +531,24 @@ describe(MetadataService.name, () => { }); it('should extract the correct video orientation', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.video]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.video); mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p); mockReadTags({}); await sut.handleMetadataExtraction({ id: assetStub.video.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.video.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith( expect.objectContaining({ orientation: 
ExifOrientation.Rotate270CW.toString() }), ); }); it('should extract the MotionPhotoVideo tag from Samsung HEIC motion photos', async () => { + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ + ...assetStub.livePhotoWithOriginalFileName, + livePhotoVideoId: null, + libraryId: null, + }); mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]); mocks.storage.stat.mockResolvedValue({ size: 123_456, @@ -573,9 +576,7 @@ describe(MetadataService.name, () => { assetStub.livePhotoWithOriginalFileName.originalPath, 'MotionPhotoVideo', ); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id], { - faces: { person: false }, - }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoWithOriginalFileName.id); expect(mocks.asset.create).toHaveBeenCalledWith({ checksum: expect.any(Buffer), deviceAssetId: 'NONE', @@ -607,7 +608,11 @@ describe(MetadataService.name, () => { mtimeMs: assetStub.livePhotoWithOriginalFileName.fileModifiedAt.valueOf(), birthtimeMs: assetStub.livePhotoWithOriginalFileName.fileCreatedAt.valueOf(), } as Stats); - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ + ...assetStub.livePhotoWithOriginalFileName, + livePhotoVideoId: null, + libraryId: null, + }); mockReadTags({ Directory: 'foo/bar/', EmbeddedVideoFile: new BinaryField(0, ''), @@ -625,9 +630,7 @@ describe(MetadataService.name, () => { assetStub.livePhotoWithOriginalFileName.originalPath, 'EmbeddedVideoFile', ); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id], { - faces: { person: false }, - }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoWithOriginalFileName.id); expect(mocks.asset.create).toHaveBeenCalledWith({ checksum: 
expect.any(Buffer), deviceAssetId: 'NONE', @@ -653,7 +656,11 @@ describe(MetadataService.name, () => { }); it('should extract the motion photo video from the XMP directory entry ', async () => { - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ + ...assetStub.livePhotoWithOriginalFileName, + livePhotoVideoId: null, + libraryId: null, + }); mocks.storage.stat.mockResolvedValue({ size: 123_456, mtime: assetStub.livePhotoWithOriginalFileName.fileModifiedAt, @@ -673,9 +680,7 @@ describe(MetadataService.name, () => { mocks.storage.readFile.mockResolvedValue(video); await sut.handleMetadataExtraction({ id: assetStub.livePhotoWithOriginalFileName.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id], { - faces: { person: false }, - }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoWithOriginalFileName.id); expect(mocks.storage.readFile).toHaveBeenCalledWith( assetStub.livePhotoWithOriginalFileName.originalPath, expect.any(Object), @@ -705,7 +710,7 @@ describe(MetadataService.name, () => { }); it('should delete old motion photo video assets if they do not match what is extracted', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoWithOriginalFileName]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.livePhotoWithOriginalFileName); mockReadTags({ Directory: 'foo/bar/', MotionPhoto: 1, @@ -727,7 +732,7 @@ describe(MetadataService.name, () => { }); it('should not create a new motion photo video asset if the hash of the extracted video matches an existing asset', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoStillAsset]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.livePhotoStillAsset); mockReadTags({ Directory: 'foo/bar/', MotionPhoto: 1, @@ -749,7 +754,10 
@@ describe(MetadataService.name, () => { }); it('should link and hide motion video asset to still asset if the hash of the extracted video matches an existing asset', async () => { - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ + ...assetStub.livePhotoStillAsset, + livePhotoVideoId: null, + }); mockReadTags({ Directory: 'foo/bar/', MotionPhoto: 1, @@ -774,9 +782,11 @@ describe(MetadataService.name, () => { }); it('should not update storage usage if motion photo is external', async () => { - mocks.asset.getByIds.mockResolvedValue([ - { ...assetStub.livePhotoStillAsset, livePhotoVideoId: null, isExternal: true }, - ]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ + ...assetStub.livePhotoStillAsset, + livePhotoVideoId: null, + isExternal: true, + }); mockReadTags({ Directory: 'foo/bar/', MotionPhoto: 1, @@ -818,11 +828,11 @@ describe(MetadataService.name, () => { tz: 'UTC-11:30', Rating: 3, }; - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags(tags); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith({ assetId: assetStub.image.id, bitsPerSample: expect.any(Number), @@ -878,11 +888,11 @@ describe(MetadataService.name, () => { DateTimeOriginal: ExifDateTime.fromISO(someDate + '+00:00'), tz: undefined, }; - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags(tags); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - 
expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalledWith( expect.objectContaining({ timeZone: 'UTC+0', @@ -891,7 +901,7 @@ describe(MetadataService.name, () => { }); it('should extract duration', async () => { - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.video }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.video); mocks.media.probe.mockResolvedValue({ ...probeStub.videoStreamH264, format: { @@ -902,7 +912,7 @@ describe(MetadataService.name, () => { await sut.handleMetadataExtraction({ id: assetStub.video.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.video.id); expect(mocks.asset.upsertExif).toHaveBeenCalled(); expect(mocks.asset.update).toHaveBeenCalledWith( expect.objectContaining({ @@ -913,7 +923,7 @@ describe(MetadataService.name, () => { }); it('should only extract duration for videos', async () => { - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mocks.media.probe.mockResolvedValue({ ...probeStub.videoStreamH264, format: { @@ -923,7 +933,7 @@ describe(MetadataService.name, () => { }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.upsertExif).toHaveBeenCalled(); expect(mocks.asset.update).toHaveBeenCalledWith( expect.objectContaining({ @@ -934,7 +944,7 @@ describe(MetadataService.name, () => { }); it('should omit duration of zero', async () => { - 
mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.video }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.video); mocks.media.probe.mockResolvedValue({ ...probeStub.videoStreamH264, format: { @@ -945,7 +955,7 @@ describe(MetadataService.name, () => { await sut.handleMetadataExtraction({ id: assetStub.video.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.video.id); expect(mocks.asset.upsertExif).toHaveBeenCalled(); expect(mocks.asset.update).toHaveBeenCalledWith( expect.objectContaining({ @@ -956,7 +966,7 @@ describe(MetadataService.name, () => { }); it('should a handle duration of 1 week', async () => { - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.video }]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.video); mocks.media.probe.mockResolvedValue({ ...probeStub.videoStreamH264, format: { @@ -967,7 +977,7 @@ describe(MetadataService.name, () => { await sut.handleMetadataExtraction({ id: assetStub.video.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.video.id); expect(mocks.asset.upsertExif).toHaveBeenCalled(); expect(mocks.asset.update).toHaveBeenCalledWith( expect.objectContaining({ @@ -978,7 +988,7 @@ describe(MetadataService.name, () => { }); it('should ignore duration from exif data', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({}, { Duration: { Value: 123 } }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -986,7 +996,7 @@ describe(MetadataService.name, () => { }); it('should trim whitespace from description', async () => { - 
mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Description: '\t \v \f \n \r' }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1006,7 +1016,7 @@ describe(MetadataService.name, () => { }); it('should handle a numeric description', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Description: 1000 }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1018,7 +1028,7 @@ describe(MetadataService.name, () => { }); it('should skip importing metadata when the feature is disabled', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.primaryImage]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.primaryImage); mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: false } } }); mockReadTags(makeFaceTags({ Name: 'Person 1' })); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1026,7 +1036,7 @@ describe(MetadataService.name, () => { }); it('should skip importing metadata face for assets without tags.RegionInfo', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.primaryImage]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.primaryImage); mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: true } } }); mockReadTags(); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1034,7 +1044,7 @@ describe(MetadataService.name, () => { }); it('should skip importing faces without name', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.primaryImage]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.primaryImage); mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: true } } }); mockReadTags(makeFaceTags()); 
mocks.person.getDistinctNames.mockResolvedValue([]); @@ -1046,7 +1056,7 @@ describe(MetadataService.name, () => { }); it('should skip importing faces with empty name', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.primaryImage]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.primaryImage); mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: true } } }); mockReadTags(makeFaceTags({ Name: '' })); mocks.person.getDistinctNames.mockResolvedValue([]); @@ -1058,14 +1068,14 @@ describe(MetadataService.name, () => { }); it('should apply metadata face tags creating new persons', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.primaryImage]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.primaryImage); mocks.systemMetadata.get.mockResolvedValue({ metadata: { faces: { import: true } } }); mockReadTags(makeFaceTags({ Name: personStub.withName.name })); mocks.person.getDistinctNames.mockResolvedValue([]); mocks.person.createAll.mockResolvedValue([personStub.withName.id]); mocks.person.update.mockResolvedValue(personStub.withName); await sut.handleMetadataExtraction({ id: assetStub.primaryImage.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.primaryImage.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.primaryImage.id); expect(mocks.person.getDistinctNames).toHaveBeenCalledWith(assetStub.primaryImage.ownerId, { withHidden: true }); expect(mocks.person.createAll).toHaveBeenCalledWith([ expect.objectContaining({ name: personStub.withName.name }), @@ -1099,14 +1109,14 @@ describe(MetadataService.name, () => { }); it('should assign metadata face tags to existing persons', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.primaryImage]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.primaryImage); mocks.systemMetadata.get.mockResolvedValue({ 
metadata: { faces: { import: true } } }); mockReadTags(makeFaceTags({ Name: personStub.withName.name })); mocks.person.getDistinctNames.mockResolvedValue([{ id: personStub.withName.id, name: personStub.withName.name }]); mocks.person.createAll.mockResolvedValue([]); mocks.person.update.mockResolvedValue(personStub.withName); await sut.handleMetadataExtraction({ id: assetStub.primaryImage.id }); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.primaryImage.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.primaryImage.id); expect(mocks.person.getDistinctNames).toHaveBeenCalledWith(assetStub.primaryImage.ownerId, { withHidden: true }); expect(mocks.person.createAll).not.toHaveBeenCalled(); expect(mocks.person.refreshFaces).toHaveBeenCalledWith( @@ -1131,7 +1141,7 @@ describe(MetadataService.name, () => { }); it('should handle invalid modify date', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ ModifyDate: '00:00:00.000' }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1143,7 +1153,7 @@ describe(MetadataService.name, () => { }); it('should handle invalid rating value', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Rating: 6 }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1155,7 +1165,7 @@ describe(MetadataService.name, () => { }); it('should handle valid rating value', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Rating: 5 }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1167,7 +1177,7 @@ describe(MetadataService.name, () => { }); it('should handle valid 
negative rating value', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags({ Rating: -1 }); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1179,11 +1189,11 @@ describe(MetadataService.name, () => { }); it('should handle livePhotoCID not set', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.findLivePhotoMatch).not.toHaveBeenCalled(); expect(mocks.asset.update).not.toHaveBeenCalledWith(expect.objectContaining({ isVisible: false })); expect(mocks.album.removeAsset).not.toHaveBeenCalled(); @@ -1191,20 +1201,19 @@ describe(MetadataService.name, () => { it('should handle not finding a match', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p); - mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.livePhotoMotionAsset); mockReadTags({ ContentIdentifier: 'CID' }); await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id })).resolves.toBe( JobStatus.SUCCESS, ); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id], { - faces: { person: false }, - }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.id); expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({ livePhotoCID: 'CID', ownerId: assetStub.livePhotoMotionAsset.ownerId, otherAssetId: assetStub.livePhotoMotionAsset.id, + 
libraryId: null, type: AssetType.IMAGE, }); expect(mocks.asset.update).not.toHaveBeenCalledWith(expect.objectContaining({ isVisible: false })); @@ -1212,7 +1221,7 @@ describe(MetadataService.name, () => { }); it('should link photo and video', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoStillAsset]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.livePhotoStillAsset); mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset); mockReadTags({ ContentIdentifier: 'CID' }); @@ -1220,9 +1229,7 @@ describe(MetadataService.name, () => { JobStatus.SUCCESS, ); - expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id], { - faces: { person: false }, - }); + expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoStillAsset.id); expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({ livePhotoCID: 'CID', ownerId: assetStub.livePhotoStillAsset.ownerId, @@ -1238,12 +1245,9 @@ describe(MetadataService.name, () => { }); it('should notify clients on live photo link', async () => { - mocks.asset.getByIds.mockResolvedValue([ - { - ...assetStub.livePhotoStillAsset, - exifInfo: { livePhotoCID: assetStub.livePhotoMotionAsset.id } as Exif, - }, - ]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ + ...assetStub.livePhotoStillAsset, + }); mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset); mockReadTags({ ContentIdentifier: 'CID' }); @@ -1258,12 +1262,11 @@ describe(MetadataService.name, () => { }); it('should search by libraryId', async () => { - mocks.asset.getByIds.mockResolvedValue([ - { - ...assetStub.livePhotoStillAsset, - libraryId: 'library-id', - }, - ]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ + ...assetStub.livePhotoStillAsset, + libraryId: 'library-id', + }); + mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset); mockReadTags({ 
ContentIdentifier: 'CID' }); await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe( @@ -1296,7 +1299,7 @@ describe(MetadataService.name, () => { }, { exif: { AndroidMake: '1', AndroidModel: '2' }, expected: { make: '1', model: '2' } }, ])('should read camera make and model $exif -> $expected', async ({ exif, expected }) => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags(exif); await sut.handleMetadataExtraction({ id: assetStub.image.id }); @@ -1318,7 +1321,7 @@ describe(MetadataService.name, () => { { exif: { LensID: ' Unknown 6-30mm' }, expected: null }, { exif: { LensID: '' }, expected: null }, ])('should read camera lens information $exif -> $expected', async ({ exif, expected }) => { - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image); mockReadTags(exif); await sut.handleMetadataExtraction({ id: assetStub.image.id }); diff --git a/server/src/services/metadata.service.ts b/server/src/services/metadata.service.ts index e460170968..ab62c38ed0 100644 --- a/server/src/services/metadata.service.ts +++ b/server/src/services/metadata.service.ts @@ -9,9 +9,9 @@ import { constants } from 'node:fs/promises'; import path from 'node:path'; import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants'; import { StorageCore } from 'src/cores/storage.core'; +import { Asset, AssetFace } from 'src/database'; import { AssetFaces, Exif, Person } from 'src/db'; import { OnEvent, OnJob } from 'src/decorators'; -import { AssetEntity } from 'src/entities/asset.entity'; import { AssetType, DatabaseLock, @@ -134,7 +134,10 @@ export class MetadataService extends BaseService { } } - private async linkLivePhotos(asset: AssetEntity, exifInfo: Insertable): Promise { + private async linkLivePhotos( + asset: { id: string; type: AssetType; ownerId: string; 
libraryId: string | null }, + exifInfo: Insertable, + ): Promise { if (!exifInfo.livePhotoCID) { return; } @@ -182,9 +185,9 @@ export class MetadataService extends BaseService { @OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION }) async handleMetadataExtraction(data: JobOf): Promise { - const [{ metadata, reverseGeocoding }, [asset]] = await Promise.all([ + const [{ metadata, reverseGeocoding }, asset] = await Promise.all([ this.getConfig({ withCache: true }), - this.assetRepository.getByIds([data.id], { faces: { person: false } }), + this.assetJobRepository.getForMetadataExtraction(data.id), ]); if (!asset) { @@ -268,7 +271,7 @@ export class MetadataService extends BaseService { ]; if (this.isMotionPhoto(asset, exifTags)) { - promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats)); + promises.push(this.applyMotionPhotos(asset as unknown as Asset, exifTags, dates, stats)); } if (isFaceImportEnabled(metadata) && this.hasTaggedFaces(exifTags)) { @@ -376,7 +379,11 @@ export class MetadataService extends BaseService { return { width, height }; } - private getExifTags(asset: AssetEntity): Promise { + private getExifTags(asset: { + originalPath: string; + sidecarPath: string | null; + type: AssetType; + }): Promise { if (!asset.sidecarPath && asset.type === AssetType.IMAGE) { return this.metadataRepository.readTags(asset.originalPath); } @@ -384,7 +391,11 @@ export class MetadataService extends BaseService { return this.mergeExifTags(asset); } - private async mergeExifTags(asset: AssetEntity): Promise { + private async mergeExifTags(asset: { + originalPath: string; + sidecarPath: string | null; + type: AssetType; + }): Promise { const [mediaTags, sidecarTags, videoTags] = await Promise.all([ this.metadataRepository.readTags(asset.originalPath), asset.sidecarPath ? 
this.metadataRepository.readTags(asset.sidecarPath) : null, @@ -434,7 +445,7 @@ export class MetadataService extends BaseService { return tags; } - private async applyTagList(asset: AssetEntity, exifTags: ImmichTags) { + private async applyTagList(asset: { id: string; ownerId: string }, exifTags: ImmichTags) { const tags = this.getTagList(exifTags); const results = await upsertTags(this.tagRepository, { userId: asset.ownerId, tags }); await this.tagRepository.replaceAssetTags( @@ -443,11 +454,11 @@ export class MetadataService extends BaseService { ); } - private isMotionPhoto(asset: AssetEntity, tags: ImmichTags): boolean { + private isMotionPhoto(asset: { type: AssetType }, tags: ImmichTags): boolean { return asset.type === AssetType.IMAGE && !!(tags.MotionPhoto || tags.MicroVideo); } - private async applyMotionPhotos(asset: AssetEntity, tags: ImmichTags, dates: Dates, stats: Stats) { + private async applyMotionPhotos(asset: Asset, tags: ImmichTags, dates: Dates, stats: Stats) { const isMotionPhoto = tags.MotionPhoto; const isMicroVideo = tags.MicroVideo; const videoOffset = tags.MicroVideoOffset; @@ -582,7 +593,10 @@ export class MetadataService extends BaseService { ); } - private async applyTaggedFaces(asset: AssetEntity, tags: ImmichTags) { + private async applyTaggedFaces( + asset: { id: string; ownerId: string; faces: AssetFace[]; originalPath: string }, + tags: ImmichTags, + ) { if (!tags.RegionInfo?.AppliedToDimensions || tags.RegionInfo.RegionList.length === 0) { return; } @@ -649,7 +663,7 @@ export class MetadataService extends BaseService { } } - private getDates(asset: AssetEntity, exifTags: ImmichTags, stats: Stats) { + private getDates(asset: { id: string; originalPath: string }, exifTags: ImmichTags, stats: Stats) { const dateTime = firstDateTime(exifTags as Maybe, EXIF_DATE_TAGS); this.logger.verbose(`Date and time is ${dateTime} for asset ${asset.id}: ${asset.originalPath}`); diff --git a/server/test/fixtures/asset.stub.ts 
b/server/test/fixtures/asset.stub.ts index 34d75fc762..b5c2f4fe00 100644 --- a/server/test/fixtures/asset.stub.ts +++ b/server/test/fixtures/asset.stub.ts @@ -157,7 +157,7 @@ export const assetStub = { isOffline: false, }), - primaryImage: Object.freeze({ + primaryImage: Object.freeze({ id: 'primary-asset-id', status: AssetStatus.ACTIVE, deviceAssetId: 'device-asset-id', @@ -200,9 +200,10 @@ export const assetStub = { ]), duplicateId: null, isOffline: false, + libraryId: null, }), - image: Object.freeze({ + image: Object.freeze({ id: 'asset-id', status: AssetStatus.ACTIVE, deviceAssetId: 'device-asset-id', @@ -239,6 +240,7 @@ export const assetStub = { } as Exif, duplicateId: null, isOffline: false, + libraryId: null, }), trashed: Object.freeze({ @@ -470,7 +472,7 @@ export const assetStub = { isOffline: false, }), - video: Object.freeze({ + video: Object.freeze({ id: 'asset-id', status: AssetStatus.ACTIVE, originalFileName: 'asset-id.ext', @@ -507,6 +509,7 @@ export const assetStub = { deletedAt: null, duplicateId: null, isOffline: false, + libraryId: null, }), livePhotoMotionAsset: Object.freeze({ @@ -522,7 +525,8 @@ export const assetStub = { fileSizeInByte: 100_000, timeZone: `America/New_York`, }, - } as AssetEntity), + libraryId: null, + } as AssetEntity & { libraryId: string | null; files: AssetFile[]; exifInfo: Exif }), livePhotoStillAsset: Object.freeze({ id: 'live-photo-still-asset', @@ -539,7 +543,7 @@ export const assetStub = { timeZone: `America/New_York`, }, files, - } as AssetEntity), + } as AssetEntity & { libraryId: string | null }), livePhotoWithOriginalFileName: Object.freeze({ id: 'live-photo-still-asset', @@ -556,9 +560,10 @@ export const assetStub = { fileSizeInByte: 25_000, timeZone: `America/New_York`, }, - } as AssetEntity), + libraryId: null, + } as AssetEntity & { libraryId: string | null }), - withLocation: Object.freeze({ + withLocation: Object.freeze({ id: 'asset-with-favorite-id', status: AssetStatus.ACTIVE, deviceAssetId: 
'device-asset-id', @@ -598,9 +603,10 @@ export const assetStub = { deletedAt: null, duplicateId: null, isOffline: false, + libraryId: null, }), - sidecar: Object.freeze({ + sidecar: Object.freeze({ id: 'asset-id', status: AssetStatus.ACTIVE, deviceAssetId: 'device-asset-id', @@ -632,6 +638,7 @@ export const assetStub = { deletedAt: null, duplicateId: null, isOffline: false, + libraryId: null, }), sidecarWithoutExt: Object.freeze({ @@ -743,7 +750,7 @@ export const assetStub = { isOffline: false, }), - imageDng: Object.freeze({ + imageDng: Object.freeze({ id: 'asset-id', status: AssetStatus.ACTIVE, deviceAssetId: 'device-asset-id', @@ -782,7 +789,7 @@ export const assetStub = { isOffline: false, }), - imageHif: Object.freeze({ + imageHif: Object.freeze({ id: 'asset-id', status: AssetStatus.ACTIVE, deviceAssetId: 'device-asset-id', diff --git a/server/test/fixtures/shared-link.stub.ts b/server/test/fixtures/shared-link.stub.ts index be69147e7a..4eba0de845 100644 --- a/server/test/fixtures/shared-link.stub.ts +++ b/server/test/fixtures/shared-link.stub.ts @@ -116,6 +116,8 @@ export const sharedLinkStub = { album: undefined, description: null, assets: [assetStub.image], + password: 'password', + albumId: null, } as SharedLinkEntity), valid: Object.freeze({ id: '123', diff --git a/server/test/medium/specs/metadata.service.spec.ts b/server/test/medium/specs/metadata.service.spec.ts index fbdb8b51ac..b25cce2724 100644 --- a/server/test/medium/specs/metadata.service.spec.ts +++ b/server/test/medium/specs/metadata.service.spec.ts @@ -2,7 +2,6 @@ import { Stats } from 'node:fs'; import { writeFile } from 'node:fs/promises'; import { tmpdir } from 'node:os'; import { join } from 'node:path'; -import { AssetEntity } from 'src/entities/asset.entity'; import { LoggingRepository } from 'src/repositories/logging.repository'; import { MetadataRepository } from 'src/repositories/metadata.repository'; import { MetadataService } from 'src/services/metadata.service'; @@ -119,7 +118,7 @@ 
describe(MetadataService.name, () => { process.env.TZ = serverTimeZone ?? undefined; const { filePath } = await createTestFile(exifData); - mocks.asset.getByIds.mockResolvedValue([{ id: 'asset-1', originalPath: filePath } as AssetEntity]); + mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as never); await sut.handleMetadataExtraction({ id: 'asset-1' }); From 8cefa0b84b34ac62bd7a105d8434db8774efc7b3 Mon Sep 17 00:00:00 2001 From: Jason Rasmussen Date: Wed, 16 Apr 2025 14:59:08 -0400 Subject: [PATCH 02/13] refactor: migrate some e2e to medium (#17640) --- e2e/src/api/specs/auth.e2e-spec.ts | 37 +----- e2e/src/api/specs/user.e2e-spec.ts | 60 --------- server/package-lock.json | 119 +++++++++++++++++ server/package.json | 1 + server/src/app.module.ts | 2 +- server/src/repositories/access.repository.ts | 2 + server/test/medium.factory.ts | 24 ++-- server/test/medium/responses.ts | 121 ++++++++++++++++++ .../specs/controllers/auth.controller.spec.ts | 60 +++++++++ .../specs/controllers/user.controller.spec.ts | 100 +++++++++++++++ .../{ => services}/asset.service.spec.ts | 0 .../{ => services}/audit.database.spec.ts | 0 .../{ => services}/memory.service.spec.ts | 0 .../{ => services}/metadata.service.spec.ts | 0 .../specs/{ => services}/sync.service.spec.ts | 0 .../specs/{ => services}/user.service.spec.ts | 19 +++ .../{ => services}/version.service.spec.ts | 0 server/test/medium/utils.ts | 100 +++++++++++++++ server/test/small.factory.ts | 1 + server/test/utils.ts | 16 ++- 20 files changed, 547 insertions(+), 115 deletions(-) create mode 100644 server/test/medium/responses.ts create mode 100644 server/test/medium/specs/controllers/auth.controller.spec.ts create mode 100644 server/test/medium/specs/controllers/user.controller.spec.ts rename server/test/medium/specs/{ => services}/asset.service.spec.ts (100%) rename server/test/medium/specs/{ => services}/audit.database.spec.ts (100%) rename server/test/medium/specs/{ => 
services}/memory.service.spec.ts (100%) rename server/test/medium/specs/{ => services}/metadata.service.spec.ts (100%) rename server/test/medium/specs/{ => services}/sync.service.spec.ts (100%) rename server/test/medium/specs/{ => services}/user.service.spec.ts (92%) rename server/test/medium/specs/{ => services}/version.service.spec.ts (100%) create mode 100644 server/test/medium/utils.ts diff --git a/e2e/src/api/specs/auth.e2e-spec.ts b/e2e/src/api/specs/auth.e2e-spec.ts index e871691ed5..1b653a781f 100644 --- a/e2e/src/api/specs/auth.e2e-spec.ts +++ b/e2e/src/api/specs/auth.e2e-spec.ts @@ -5,7 +5,7 @@ import { app, utils } from 'src/utils'; import request from 'supertest'; import { beforeEach, describe, expect, it } from 'vitest'; -const { name, email, password } = signupDto.admin; +const { email, password } = signupDto.admin; describe(`/auth/admin-sign-up`, () => { beforeEach(async () => { @@ -13,33 +13,6 @@ describe(`/auth/admin-sign-up`, () => { }); describe('POST /auth/admin-sign-up', () => { - const invalid = [ - { - should: 'require an email address', - data: { name, password }, - }, - { - should: 'require a password', - data: { name, email }, - }, - { - should: 'require a name', - data: { email, password }, - }, - { - should: 'require a valid email', - data: { name, email: 'immich', password }, - }, - ]; - - for (const { should, data } of invalid) { - it(`should ${should}`, async () => { - const { status, body } = await request(app).post('/auth/admin-sign-up').send(data); - expect(status).toEqual(400); - expect(body).toEqual(errorDto.badRequest()); - }); - } - it(`should sign up the admin`, async () => { const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin); expect(status).toBe(201); @@ -57,14 +30,6 @@ describe(`/auth/admin-sign-up`, () => { }); }); - it('should transform email to lower case', async () => { - const { status, body } = await request(app) - .post('/auth/admin-sign-up') - .send({ ...signupDto.admin, 
email: 'aDmIn@IMMICH.cloud' }); - expect(status).toEqual(201); - expect(body).toEqual(signupResponseDto.admin); - }); - it('should not allow a second admin to sign up', async () => { await signUpAdmin({ signUpDto: signupDto.admin }); diff --git a/e2e/src/api/specs/user.e2e-spec.ts b/e2e/src/api/specs/user.e2e-spec.ts index 9cffa5d754..54d11e5049 100644 --- a/e2e/src/api/specs/user.e2e-spec.ts +++ b/e2e/src/api/specs/user.e2e-spec.ts @@ -31,33 +31,7 @@ describe('/users', () => { ); }); - describe('GET /users', () => { - it('should require authentication', async () => { - const { status, body } = await request(app).get('/users'); - expect(status).toBe(401); - expect(body).toEqual(errorDto.unauthorized); - }); - - it('should get users', async () => { - const { status, body } = await request(app).get('/users').set('Authorization', `Bearer ${admin.accessToken}`); - expect(status).toEqual(200); - expect(body).toHaveLength(2); - expect(body).toEqual( - expect.arrayContaining([ - expect.objectContaining({ email: 'admin@immich.cloud' }), - expect.objectContaining({ email: 'user2@immich.cloud' }), - ]), - ); - }); - }); - describe('GET /users/me', () => { - it('should require authentication', async () => { - const { status, body } = await request(app).get(`/users/me`); - expect(status).toBe(401); - expect(body).toEqual(errorDto.unauthorized); - }); - it('should not work for shared links', async () => { const album = await utils.createAlbum(admin.accessToken, { albumName: 'Album' }); const sharedLink = await utils.createSharedLink(admin.accessToken, { @@ -99,24 +73,6 @@ describe('/users', () => { }); describe('PUT /users/me', () => { - it('should require authentication', async () => { - const { status, body } = await request(app).put(`/users/me`); - expect(status).toBe(401); - expect(body).toEqual(errorDto.unauthorized); - }); - - for (const key of ['email', 'name']) { - it(`should not allow null ${key}`, async () => { - const dto = { [key]: null }; - const { status, body } = 
await request(app) - .put(`/users/me`) - .set('Authorization', `Bearer ${admin.accessToken}`) - .send(dto); - expect(status).toBe(400); - expect(body).toEqual(errorDto.badRequest()); - }); - } - it('should update first and last name', async () => { const before = await getMyUser({ headers: asBearerAuth(admin.accessToken) }); @@ -269,11 +225,6 @@ describe('/users', () => { }); describe('GET /users/:id', () => { - it('should require authentication', async () => { - const { status } = await request(app).get(`/users/${admin.userId}`); - expect(status).toEqual(401); - }); - it('should get the user', async () => { const { status, body } = await request(app) .get(`/users/${admin.userId}`) @@ -292,12 +243,6 @@ describe('/users', () => { }); describe('GET /server/license', () => { - it('should require authentication', async () => { - const { status, body } = await request(app).get('/users/me/license'); - expect(status).toBe(401); - expect(body).toEqual(errorDto.unauthorized); - }); - it('should return the user license', async () => { await request(app) .put('/users/me/license') @@ -315,11 +260,6 @@ describe('/users', () => { }); describe('PUT /users/me/license', () => { - it('should require authentication', async () => { - const { status } = await request(app).put(`/users/me/license`); - expect(status).toEqual(401); - }); - it('should set the user license', async () => { const { status, body } = await request(app) .put(`/users/me/license`) diff --git a/server/package-lock.json b/server/package-lock.json index b5cbbc649c..8045976b3c 100644 --- a/server/package-lock.json +++ b/server/package-lock.json @@ -114,6 +114,7 @@ "rimraf": "^6.0.0", "source-map-support": "^0.5.21", "sql-formatter": "^15.0.0", + "supertest": "^7.1.0", "testcontainers": "^10.18.0", "tsconfig-paths": "^4.2.0", "typescript": "^5.3.3", @@ -7060,6 +7061,13 @@ "dev": true, "license": "MIT" }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + 
"integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true, + "license": "MIT" + }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -7999,6 +8007,16 @@ "node": ">= 6" } }, + "node_modules/component-emitter": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.1.tgz", + "integrity": "sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/compress-commons": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", @@ -8129,6 +8147,13 @@ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", "license": "MIT" }, + "node_modules/cookiejar": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", + "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", + "dev": true, + "license": "MIT" + }, "node_modules/core-js-compat": { "version": "3.41.0", "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.41.0.tgz", @@ -8446,6 +8471,17 @@ "node": ">=8" } }, + "node_modules/dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dev": true, + "license": "ISC", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, "node_modules/diacritics": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/diacritics/-/diacritics-1.3.0.tgz", @@ -9787,6 +9823,21 @@ "node": ">= 0.6" } }, + "node_modules/formidable": { + "version": "3.5.2", + 
"resolved": "https://registry.npmjs.org/formidable/-/formidable-3.5.2.tgz", + "integrity": "sha512-Jqc1btCy3QzRbJaICGwKcBfGWuLADRerLzDqi2NwSt/UkXLsHJw2TVResiaoBufHVHy9aSgClOHCeJsSsFLTbg==", + "dev": true, + "license": "MIT", + "dependencies": { + "dezalgo": "^1.0.4", + "hexoid": "^2.0.0", + "once": "^1.4.0" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -10325,6 +10376,16 @@ "he": "bin/he" } }, + "node_modules/hexoid": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-2.0.0.tgz", + "integrity": "sha512-qlspKUK7IlSQv2o+5I7yhUd7TxlOG2Vr5LTa3ve2XSNVKAL/n/u/7KLvKmFNimomDIKvZFXWHv0T12mv7rT8Aw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/hosted-git-info": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", @@ -11511,6 +11572,16 @@ "node": ">= 8" } }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/micromatch": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", @@ -11536,6 +11607,19 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "dev": true, + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, "node_modules/mime-db": { "version": "1.54.0", "resolved": 
"https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", @@ -15158,6 +15242,41 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/superagent": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-9.0.2.tgz", + "integrity": "sha512-xuW7dzkUpcJq7QnhOsnNUgtYp3xRwpt2F7abdRYIpCsAt0hhUqia0EdxyXZQQpNmGtsCzYHryaKSV3q3GJnq7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.4", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^3.5.1", + "methods": "^1.1.2", + "mime": "2.6.0", + "qs": "^6.11.0" + }, + "engines": { + "node": ">=14.18.0" + } + }, + "node_modules/supertest": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supertest/-/supertest-7.1.0.tgz", + "integrity": "sha512-5QeSO8hSrKghtcWEoPiO036fxH0Ii2wVQfFZSP0oqQhmjk8bOLhDFXr4JrvaFmPuEWUoq4znY3uSi8UzLKxGqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "methods": "^1.1.2", + "superagent": "^9.0.1" + }, + "engines": { + "node": ">=14.18.0" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", diff --git a/server/package.json b/server/package.json index 451f6c581d..76415da7c8 100644 --- a/server/package.json +++ b/server/package.json @@ -140,6 +140,7 @@ "rimraf": "^6.0.0", "source-map-support": "^0.5.21", "sql-formatter": "^15.0.0", + "supertest": "^7.1.0", "testcontainers": "^10.18.0", "tsconfig-paths": "^4.2.0", "typescript": "^5.3.3", diff --git a/server/src/app.module.ts b/server/src/app.module.ts index c8c806f4a4..5720f7af0b 100644 --- a/server/src/app.module.ts +++ b/server/src/app.module.ts @@ -27,7 +27,7 @@ import { getKyselyConfig } from 'src/utils/database'; const common = [...repositories, ...services, GlobalExceptionFilter]; -const middleware = [ +export const middleware = [ FileUploadInterceptor, { provide: APP_FILTER, 
useClass: GlobalExceptionFilter }, { provide: APP_PIPE, useValue: new ValidationPipe({ transform: true, whitelist: true }) }, diff --git a/server/src/repositories/access.repository.ts b/server/src/repositories/access.repository.ts index 9fa8b6243c..961cccbf3e 100644 --- a/server/src/repositories/access.repository.ts +++ b/server/src/repositories/access.repository.ts @@ -1,3 +1,4 @@ +import { Injectable } from '@nestjs/common'; import { Kysely, sql } from 'kysely'; import { InjectKysely } from 'nestjs-kysely'; import { DB } from 'src/db'; @@ -418,6 +419,7 @@ class TagAccess { } } +@Injectable() export class AccessRepository { activity: ActivityAccess; album: AlbumAccess; diff --git a/server/test/medium.factory.ts b/server/test/medium.factory.ts index ce356b898b..d3ab876e07 100644 --- a/server/test/medium.factory.ts +++ b/server/test/medium.factory.ts @@ -34,7 +34,7 @@ import { Mocked } from 'vitest'; const sha256 = (value: string) => createHash('sha256').update(value).digest('base64'); // type Repositories = Omit; -type Repositories = { +type RepositoriesTypes = { activity: ActivityRepository; album: AlbumRepository; asset: AssetRepository; @@ -54,22 +54,22 @@ type Repositories = { systemMetadata: SystemMetadataRepository; versionHistory: VersionHistoryRepository; }; -type RepositoryMocks = { [K in keyof Repositories]: Mocked> }; -type RepositoryOptions = Partial<{ [K in keyof Repositories]: 'mock' | 'real' }>; +type RepositoryMocks = { [K in keyof RepositoriesTypes]: Mocked> }; +type RepositoryOptions = Partial<{ [K in keyof RepositoriesTypes]: 'mock' | 'real' }>; type ContextRepositoryMocks = { - [K in keyof Repositories as R[K] extends 'mock' ? K : never]: Mocked>; + [K in keyof RepositoriesTypes as R[K] extends 'mock' ? K : never]: Mocked>; }; type ContextRepositories = { - [K in keyof Repositories as R[K] extends 'real' ? K : never]: Repositories[K]; + [K in keyof RepositoriesTypes as R[K] extends 'real' ? 
K : never]: RepositoriesTypes[K]; }; export type Context = { sut: S; mocks: ContextRepositoryMocks; repos: ContextRepositories; - getRepository(key: T): Repositories[T]; + getRepository(key: T): RepositoriesTypes[T]; }; export const newMediumService = ( @@ -79,7 +79,7 @@ export const newMediumService = => { - const repos: Partial = {}; + const repos: Partial = {}; const mocks: Partial = {}; const loggerMock = getRepositoryMock('logger') as Mocked; @@ -88,7 +88,7 @@ export const newMediumService = (key: K) => { + const makeRepository = (key: K) => { return repos[key] || getRepository(key, options.database); }; @@ -115,7 +115,7 @@ export const newMediumService = ; }; -export const getRepository = (key: K, db: Kysely) => { +export const getRepository = (key: K, db: Kysely) => { switch (key) { case 'activity': { return new ActivityRepository(db); @@ -189,10 +189,10 @@ export const getRepository = (key: K, db: Kysely(key: K) => { +const getRepositoryMock = (key: K) => { switch (key) { case 'activity': { - return automock(ActivityRepository); + return automock(ActivityRepository) as Mocked>; } case 'album': { diff --git a/server/test/medium/responses.ts b/server/test/medium/responses.ts new file mode 100644 index 0000000000..0148f2e1e9 --- /dev/null +++ b/server/test/medium/responses.ts @@ -0,0 +1,121 @@ +import { expect } from 'vitest'; + +export const errorDto = { + unauthorized: { + error: 'Unauthorized', + statusCode: 401, + message: 'Authentication required', + correlationId: expect.any(String), + }, + forbidden: { + error: 'Forbidden', + statusCode: 403, + message: expect.any(String), + correlationId: expect.any(String), + }, + missingPermission: (permission: string) => ({ + error: 'Forbidden', + statusCode: 403, + message: `Missing required permission: ${permission}`, + correlationId: expect.any(String), + }), + wrongPassword: { + error: 'Bad Request', + statusCode: 400, + message: 'Wrong password', + correlationId: expect.any(String), + }, + invalidToken: { + 
error: 'Unauthorized', + statusCode: 401, + message: 'Invalid user token', + correlationId: expect.any(String), + }, + invalidShareKey: { + error: 'Unauthorized', + statusCode: 401, + message: 'Invalid share key', + correlationId: expect.any(String), + }, + invalidSharePassword: { + error: 'Unauthorized', + statusCode: 401, + message: 'Invalid password', + correlationId: expect.any(String), + }, + badRequest: (message: any = null) => ({ + error: 'Bad Request', + statusCode: 400, + message: message ?? expect.anything(), + correlationId: expect.any(String), + }), + noPermission: { + error: 'Bad Request', + statusCode: 400, + message: expect.stringContaining('Not found or no'), + correlationId: expect.any(String), + }, + incorrectLogin: { + error: 'Unauthorized', + statusCode: 401, + message: 'Incorrect email or password', + correlationId: expect.any(String), + }, + alreadyHasAdmin: { + error: 'Bad Request', + statusCode: 400, + message: 'The server already has an admin', + correlationId: expect.any(String), + }, + invalidEmail: { + error: 'Bad Request', + statusCode: 400, + message: ['email must be an email'], + correlationId: expect.any(String), + }, +}; + +export const signupResponseDto = { + admin: { + avatarColor: expect.any(String), + id: expect.any(String), + name: 'Immich Admin', + email: 'admin@immich.cloud', + storageLabel: 'admin', + profileImagePath: '', + // why? 
lol + shouldChangePassword: true, + isAdmin: true, + createdAt: expect.any(String), + updatedAt: expect.any(String), + deletedAt: null, + oauthId: '', + quotaUsageInBytes: 0, + quotaSizeInBytes: null, + status: 'active', + license: null, + profileChangedAt: expect.any(String), + }, +}; + +export const loginResponseDto = { + admin: { + accessToken: expect.any(String), + name: 'Immich Admin', + isAdmin: true, + profileImagePath: '', + shouldChangePassword: true, + userEmail: 'admin@immich.cloud', + userId: expect.any(String), + }, +}; +export const deviceDto = { + current: { + id: expect.any(String), + createdAt: expect.any(String), + updatedAt: expect.any(String), + current: true, + deviceOS: '', + deviceType: '', + }, +}; diff --git a/server/test/medium/specs/controllers/auth.controller.spec.ts b/server/test/medium/specs/controllers/auth.controller.spec.ts new file mode 100644 index 0000000000..ef2b904f48 --- /dev/null +++ b/server/test/medium/specs/controllers/auth.controller.spec.ts @@ -0,0 +1,60 @@ +import { AuthController } from 'src/controllers/auth.controller'; +import { AuthService } from 'src/services/auth.service'; +import request from 'supertest'; +import { errorDto } from 'test/medium/responses'; +import { createControllerTestApp, TestControllerApp } from 'test/medium/utils'; + +describe(AuthController.name, () => { + let app: TestControllerApp; + + beforeAll(async () => { + app = await createControllerTestApp(); + }); + + describe('POST /auth/admin-sign-up', () => { + const name = 'admin'; + const email = 'admin@immich.cloud'; + const password = 'password'; + + const invalid = [ + { + should: 'require an email address', + data: { name, password }, + }, + { + should: 'require a password', + data: { name, email }, + }, + { + should: 'require a name', + data: { email, password }, + }, + { + should: 'require a valid email', + data: { name, email: 'immich', password }, + }, + ]; + + for (const { should, data } of invalid) { + it(`should ${should}`, async () 
=> { + const { status, body } = await request(app.getHttpServer()).post('/auth/admin-sign-up').send(data); + expect(status).toEqual(400); + expect(body).toEqual(errorDto.badRequest()); + }); + } + + it('should transform email to lower case', async () => { + const { status } = await request(app.getHttpServer()) + .post('/auth/admin-sign-up') + .send({ name: 'admin', password: 'password', email: 'aDmIn@IMMICH.cloud' }); + expect(status).toEqual(201); + expect(app.getMockedService(AuthService).adminSignUp).toHaveBeenCalledWith( + expect.objectContaining({ email: 'admin@immich.cloud' }), + ); + }); + }); + + afterAll(async () => { + await app.close(); + }); +}); diff --git a/server/test/medium/specs/controllers/user.controller.spec.ts b/server/test/medium/specs/controllers/user.controller.spec.ts new file mode 100644 index 0000000000..f4d90d5469 --- /dev/null +++ b/server/test/medium/specs/controllers/user.controller.spec.ts @@ -0,0 +1,100 @@ +import { UserController } from 'src/controllers/user.controller'; +import { AuthService } from 'src/services/auth.service'; +import { UserService } from 'src/services/user.service'; +import request from 'supertest'; +import { errorDto } from 'test/medium/responses'; +import { createControllerTestApp, TestControllerApp } from 'test/medium/utils'; +import { factory } from 'test/small.factory'; + +describe(UserController.name, () => { + let realApp: TestControllerApp; + let mockApp: TestControllerApp; + + beforeAll(async () => { + realApp = await createControllerTestApp({ authType: 'real' }); + mockApp = await createControllerTestApp({ authType: 'mock' }); + }); + + describe('GET /users', () => { + it('should require authentication', async () => { + const { status, body } = await request(realApp.getHttpServer()).get('/users'); + expect(status).toBe(401); + expect(body).toEqual(errorDto.unauthorized); + }); + + it('should call the service with an auth dto', async () => { + const user = factory.user(); + const authService = 
mockApp.getMockedService(AuthService); + const auth = factory.auth({ user }); + authService.authenticate.mockResolvedValue(auth); + + const userService = mockApp.getMockedService(UserService); + const { status } = await request(mockApp.getHttpServer()).get('/users').set('Authorization', `Bearer token`); + + expect(status).toBe(200); + expect(userService.search).toHaveBeenCalledWith(auth); + }); + }); + + describe('GET /users/me', () => { + it('should require authentication', async () => { + const { status, body } = await request(realApp.getHttpServer()).get(`/users/me`); + expect(status).toBe(401); + expect(body).toEqual(errorDto.unauthorized); + }); + }); + + describe('PUT /users/me', () => { + it('should require authentication', async () => { + const { status, body } = await request(realApp.getHttpServer()).put(`/users/me`); + expect(status).toBe(401); + expect(body).toEqual(errorDto.unauthorized); + }); + + for (const key of ['email', 'name']) { + it(`should not allow null ${key}`, async () => { + const dto = { [key]: null }; + const { status, body } = await request(mockApp.getHttpServer()) + .put(`/users/me`) + .set('Authorization', `Bearer token`) + .send(dto); + expect(status).toBe(400); + expect(body).toEqual(errorDto.badRequest()); + }); + } + }); + + describe('GET /users/:id', () => { + it('should require authentication', async () => { + const { status } = await request(realApp.getHttpServer()).get(`/users/${factory.uuid()}`); + expect(status).toEqual(401); + }); + }); + + describe('GET /server/license', () => { + it('should require authentication', async () => { + const { status, body } = await request(realApp.getHttpServer()).get('/users/me/license'); + expect(status).toBe(401); + expect(body).toEqual(errorDto.unauthorized); + }); + }); + + describe('PUT /users/me/license', () => { + it('should require authentication', async () => { + const { status } = await request(realApp.getHttpServer()).put(`/users/me/license`); + expect(status).toEqual(401); + }); 
+ });
 + 
 + describe('DELETE /users/me/license', () => {
 + it('should require authentication', async () => {
 + const { status } = await request(realApp.getHttpServer()).delete(`/users/me/license`);
 + expect(status).toEqual(401);
 + });
 + });
 + 
 + afterAll(async () => {
 + await realApp.close();
 + await mockApp.close();
 + });
 +});
 diff --git a/server/test/medium/specs/asset.service.spec.ts b/server/test/medium/specs/services/asset.service.spec.ts similarity index 100% rename from server/test/medium/specs/asset.service.spec.ts rename to server/test/medium/specs/services/asset.service.spec.ts diff --git a/server/test/medium/specs/audit.database.spec.ts b/server/test/medium/specs/services/audit.database.spec.ts similarity index 100% rename from server/test/medium/specs/audit.database.spec.ts rename to server/test/medium/specs/services/audit.database.spec.ts diff --git a/server/test/medium/specs/memory.service.spec.ts b/server/test/medium/specs/services/memory.service.spec.ts similarity index 100% rename from server/test/medium/specs/memory.service.spec.ts rename to server/test/medium/specs/services/memory.service.spec.ts diff --git a/server/test/medium/specs/metadata.service.spec.ts b/server/test/medium/specs/services/metadata.service.spec.ts similarity index 100% rename from server/test/medium/specs/metadata.service.spec.ts rename to server/test/medium/specs/services/metadata.service.spec.ts diff --git a/server/test/medium/specs/sync.service.spec.ts b/server/test/medium/specs/services/sync.service.spec.ts similarity index 100% rename from server/test/medium/specs/sync.service.spec.ts rename to server/test/medium/specs/services/sync.service.spec.ts diff --git a/server/test/medium/specs/user.service.spec.ts b/server/test/medium/specs/services/user.service.spec.ts similarity index 92% rename from server/test/medium/specs/user.service.spec.ts rename to server/test/medium/specs/services/user.service.spec.ts index 60b5a8fc92..0113c70158 100644 ---
a/server/test/medium/specs/user.service.spec.ts +++ b/server/test/medium/specs/services/user.service.spec.ts @@ -60,6 +60,25 @@ describe(UserService.name, () => { }); }); + describe('search', () => { + it('should get users', async () => { + const { sut, repos } = createSut(); + const user1 = mediumFactory.userInsert(); + const user2 = mediumFactory.userInsert(); + + await Promise.all([repos.user.create(user1), repos.user.create(user2)]); + + const auth = factory.auth({ user: user1 }); + + await expect(sut.search(auth)).resolves.toEqual( + expect.arrayContaining([ + expect.objectContaining({ email: user1.email }), + expect.objectContaining({ email: user2.email }), + ]), + ); + }); + }); + describe('get', () => { it('should get a user', async () => { const { sut, repos } = createSut(); diff --git a/server/test/medium/specs/version.service.spec.ts b/server/test/medium/specs/services/version.service.spec.ts similarity index 100% rename from server/test/medium/specs/version.service.spec.ts rename to server/test/medium/specs/services/version.service.spec.ts diff --git a/server/test/medium/utils.ts b/server/test/medium/utils.ts new file mode 100644 index 0000000000..030780b35b --- /dev/null +++ b/server/test/medium/utils.ts @@ -0,0 +1,100 @@ +import { Provider } from '@nestjs/common'; +import { SchedulerRegistry } from '@nestjs/schedule'; +import { Test } from '@nestjs/testing'; +import { ClassConstructor } from 'class-transformer'; +import { ClsService } from 'nestjs-cls'; +import { middleware } from 'src/app.module'; +import { controllers } from 'src/controllers'; +import { GlobalExceptionFilter } from 'src/middleware/global-exception.filter'; +import { LoggingRepository } from 'src/repositories/logging.repository'; +import { services } from 'src/services'; +import { ApiService } from 'src/services/api.service'; +import { AuthService } from 'src/services/auth.service'; +import { BaseService } from 'src/services/base.service'; +import { automock } from 'test/utils'; 
+import { Mocked } from 'vitest'; + +export const createControllerTestApp = async (options?: { authType?: 'mock' | 'real' }) => { + const { authType = 'mock' } = options || {}; + + const configMock = { getEnv: () => ({ noColor: true }) }; + const clsMock = { getId: vitest.fn().mockReturnValue('cls-id') }; + const loggerMock = automock(LoggingRepository, { args: [clsMock, configMock], strict: false }); + loggerMock.setContext.mockReturnValue(void 0); + loggerMock.error.mockImplementation((...args: any[]) => { + console.log('Logger.error was called with', ...args); + }); + + const mockBaseService = (service: ClassConstructor) => { + return automock(service, { args: [loggerMock], strict: false }); + }; + + const clsServiceMock = clsMock; + + const FAKE_MOCK = vitest.fn(); + + const providers: Provider[] = [ + ...middleware, + ...services.map((Service) => { + if ((authType === 'real' && Service === AuthService) || Service === ApiService) { + return Service; + } + return { provide: Service, useValue: mockBaseService(Service as ClassConstructor) }; + }), + GlobalExceptionFilter, + { provide: LoggingRepository, useValue: loggerMock }, + { provide: ClsService, useValue: clsServiceMock }, + ]; + + const moduleRef = await Test.createTestingModule({ + imports: [], + controllers: [...controllers], + providers, + }) + .useMocker((token) => { + if (token === LoggingRepository) { + return; + } + + if (token === SchedulerRegistry) { + return FAKE_MOCK; + } + + if (typeof token === 'function' && token.name.endsWith('Repository')) { + return FAKE_MOCK; + } + + if (typeof token === 'string' && token === 'KyselyModuleConnectionToken') { + return FAKE_MOCK; + } + }) + + .compile(); + + const app = moduleRef.createNestApplication(); + + await app.init(); + + const getMockedRepository = (token: ClassConstructor) => { + return app.get(token) as Mocked; + }; + + return { + getHttpServer: () => app.getHttpServer(), + getMockedService: (token: ClassConstructor) => { + if (authType === 'real' 
&& token === AuthService) { + throw new Error('Auth type is real, cannot get mocked service'); + } + return app.get(token) as Mocked; + }, + getMockedRepository, + close: () => app.close(), + }; +}; + +export type TestControllerApp = { + getHttpServer: () => any; + getMockedService: (token: ClassConstructor) => Mocked; + getMockedRepository: (token: ClassConstructor) => Mocked; + close: () => Promise; +}; diff --git a/server/test/small.factory.ts b/server/test/small.factory.ts index 3337106b93..94a1a28665 100644 --- a/server/test/small.factory.ts +++ b/server/test/small.factory.ts @@ -310,4 +310,5 @@ export const factory = { jobAssets: { sidecarWrite: assetSidecarWriteFactory, }, + uuid: newUuid, }; diff --git a/server/test/utils.ts b/server/test/utils.ts index ff56a12feb..52984d97a2 100644 --- a/server/test/utils.ts +++ b/server/test/utils.ts @@ -93,13 +93,17 @@ export const automock = ( continue; } - const label = `${Dependency.name}.${property}`; - // console.log(`Automocking ${label}`); + try { + const label = `${Dependency.name}.${property}`; + // console.log(`Automocking ${label}`); - const target = instance[property as keyof T]; - if (typeof target === 'function') { - mock[property] = mockFn(label, { strict }); - continue; + const target = instance[property as keyof T]; + if (typeof target === 'function') { + mock[property] = mockFn(label, { strict }); + continue; + } + } catch { + // noop } } From 85c2d36d992d73d90f5e631d3633af927f0761cb Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Wed, 16 Apr 2025 21:10:27 +0200 Subject: [PATCH 03/13] refactor: dedicated get album thumbnail files query (#17657) --- server/src/queries/asset.job.repository.sql | 10 ++++++ .../src/repositories/asset-job.repository.ts | 5 +++ .../src/services/notification.service.spec.ts | 32 +++++++++++-------- server/src/services/notification.service.ts | 11 ++++--- 4 files changed, 39 insertions(+), 19 deletions(-) diff --git 
a/server/src/queries/asset.job.repository.sql b/server/src/queries/asset.job.repository.sql index 5e28f2c4c9..7c2db418a4 100644 --- a/server/src/queries/asset.job.repository.sql +++ b/server/src/queries/asset.job.repository.sql @@ -183,6 +183,16 @@ from where "assets"."id" = $1 +-- AssetJobRepository.getAlbumThumbnailFile +select + "asset_files"."id", + "asset_files"."path", + "asset_files"."type" +from + "asset_files" +where + "asset_files"."assetId" = $1 + -- AssetJobRepository.getForStorageTemplateJob select "assets"."id", diff --git a/server/src/repositories/asset-job.repository.ts b/server/src/repositories/asset-job.repository.ts index 770ca93891..480972a8ae 100644 --- a/server/src/repositories/asset-job.repository.ts +++ b/server/src/repositories/asset-job.repository.ts @@ -117,6 +117,11 @@ export class AssetJobRepository { .executeTakeFirst(); } + @GenerateSql({ params: [DummyValue.UUID] }) + getAlbumThumbnailFile(id: string) { + return this.db.selectFrom('asset_files').select(columns.assetFiles).where('asset_files.assetId', '=', id).execute(); + } + private storageTemplateAssetQuery() { return this.db .selectFrom('assets') diff --git a/server/src/services/notification.service.spec.ts b/server/src/services/notification.service.spec.ts index c9a6f593ba..45663b4aa2 100644 --- a/server/src/services/notification.service.spec.ts +++ b/server/src/services/notification.service.spec.ts @@ -412,11 +412,12 @@ describe(NotificationService.name, () => { }); mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); + mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); - expect(mocks.asset.getById).toHaveBeenCalledWith(albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, { - files: true, - }); + expect(mocks.assetJob.getAlbumThumbnailFile).toHaveBeenCalledWith( + 
albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, + ); expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.SEND_EMAIL, data: expect.objectContaining({ @@ -439,15 +440,14 @@ describe(NotificationService.name, () => { }); mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.asset.getById.mockResolvedValue({ - ...assetStub.image, - files: [{ id: '1', type: AssetFileType.THUMBNAIL, path: 'path-to-thumb.jpg' }], - }); + mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([ + { id: '1', type: AssetFileType.THUMBNAIL, path: 'path-to-thumb.jpg' }, + ]); await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); - expect(mocks.asset.getById).toHaveBeenCalledWith(albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, { - files: true, - }); + expect(mocks.assetJob.getAlbumThumbnailFile).toHaveBeenCalledWith( + albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, + ); expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.SEND_EMAIL, data: expect.objectContaining({ @@ -470,12 +470,12 @@ describe(NotificationService.name, () => { }); mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.asset.getById.mockResolvedValue(assetStub.image); + mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue(assetStub.image.files); await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); - expect(mocks.asset.getById).toHaveBeenCalledWith(albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, { - files: true, - }); + expect(mocks.assetJob.getAlbumThumbnailFile).toHaveBeenCalledWith( + albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, + ); expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.SEND_EMAIL, data: expect.objectContaining({ @@ -506,6 +506,7 @@ describe(NotificationService.name, () => 
{ }); mocks.user.get.mockResolvedValueOnce(userStub.user1); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); + mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); @@ -527,6 +528,7 @@ describe(NotificationService.name, () => { ], }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); + mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); @@ -548,6 +550,7 @@ describe(NotificationService.name, () => { ], }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); + mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); @@ -561,6 +564,7 @@ describe(NotificationService.name, () => { }); mocks.user.get.mockResolvedValue(userStub.user1); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); + mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); diff --git a/server/src/services/notification.service.ts b/server/src/services/notification.service.ts index dcec865f89..9396681bfe 100644 --- a/server/src/services/notification.service.ts +++ b/server/src/services/notification.service.ts @@ -1,7 +1,6 @@ import { BadRequestException, Injectable } from '@nestjs/common'; import { OnEvent, OnJob } from 'src/decorators'; import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto'; -import { AlbumEntity } from 
'src/entities/album.entity'; import { AssetFileType, JobName, JobStatus, QueueName } from 'src/enum'; import { ArgOf } from 'src/repositories/event.repository'; import { EmailTemplate } from 'src/repositories/notification.repository'; @@ -392,17 +391,19 @@ export class NotificationService extends BaseService { return JobStatus.SUCCESS; } - private async getAlbumThumbnailAttachment(album: AlbumEntity): Promise { + private async getAlbumThumbnailAttachment(album: { + albumThumbnailAssetId: string | null; + }): Promise { if (!album.albumThumbnailAssetId) { return; } - const albumThumbnail = await this.assetRepository.getById(album.albumThumbnailAssetId, { files: true }); - if (!albumThumbnail) { + const albumThumbnailFiles = await this.assetJobRepository.getAlbumThumbnailFile(album.albumThumbnailAssetId); + if (albumThumbnailFiles.length === 0) { return; } - const thumbnailFile = getAssetFile(albumThumbnail.files, AssetFileType.THUMBNAIL); + const thumbnailFile = getAssetFile(albumThumbnailFiles, AssetFileType.THUMBNAIL); if (!thumbnailFile) { return; } From 1bbfacfc09a19bef4323a3a7dd21deef41e70851 Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Wed, 16 Apr 2025 22:10:20 +0200 Subject: [PATCH 04/13] refactor: more job query stuff (#17658) --- server/src/queries/asset.job.repository.sql | 28 ++++++++++++++++++- .../src/repositories/asset-job.repository.ts | 19 +++++++++++-- .../src/services/notification.service.spec.ts | 23 ++++++++------- server/src/services/notification.service.ts | 16 +++++------ .../src/services/smart-info.service.spec.ts | 11 ++++++-- server/src/services/smart-info.service.ts | 14 +++------- 6 files changed, 76 insertions(+), 35 deletions(-) diff --git a/server/src/queries/asset.job.repository.sql b/server/src/queries/asset.job.repository.sql index 7c2db418a4..d980ccb676 100644 --- a/server/src/queries/asset.job.repository.sql +++ b/server/src/queries/asset.job.repository.sql @@ -183,7 +183,7 
@@ from where "assets"."id" = $1 --- AssetJobRepository.getAlbumThumbnailFile +-- AssetJobRepository.getAlbumThumbnailFiles select "asset_files"."id", "asset_files"."path", @@ -192,6 +192,32 @@ from "asset_files" where "asset_files"."assetId" = $1 + and "asset_files"."type" = $2 + +-- AssetJobRepository.getForClipEncoding +select + "assets"."id", + "assets"."isVisible", + ( + select + coalesce(json_agg(agg), '[]') + from + ( + select + "asset_files"."id", + "asset_files"."path", + "asset_files"."type" + from + "asset_files" + where + "asset_files"."assetId" = "assets"."id" + and "asset_files"."type" = $1 + ) as agg + ) as "files" +from + "assets" +where + "assets"."id" = $2 -- AssetJobRepository.getForStorageTemplateJob select diff --git a/server/src/repositories/asset-job.repository.ts b/server/src/repositories/asset-job.repository.ts index 480972a8ae..9ed40050d8 100644 --- a/server/src/repositories/asset-job.repository.ts +++ b/server/src/repositories/asset-job.repository.ts @@ -117,9 +117,24 @@ export class AssetJobRepository { .executeTakeFirst(); } + @GenerateSql({ params: [DummyValue.UUID, AssetFileType.THUMBNAIL] }) + getAlbumThumbnailFiles(id: string, fileType?: AssetFileType) { + return this.db + .selectFrom('asset_files') + .select(columns.assetFiles) + .where('asset_files.assetId', '=', id) + .$if(!!fileType, (qb) => qb.where('asset_files.type', '=', fileType!)) + .execute(); + } + @GenerateSql({ params: [DummyValue.UUID] }) - getAlbumThumbnailFile(id: string) { - return this.db.selectFrom('asset_files').select(columns.assetFiles).where('asset_files.assetId', '=', id).execute(); + getForClipEncoding(id: string) { + return this.db + .selectFrom('assets') + .select(['assets.id', 'assets.isVisible']) + .select((eb) => withFiles(eb, AssetFileType.PREVIEW)) + .where('assets.id', '=', id) + .executeTakeFirst(); } private storageTemplateAssetQuery() { diff --git a/server/src/services/notification.service.spec.ts 
b/server/src/services/notification.service.spec.ts index 45663b4aa2..85e425b11f 100644 --- a/server/src/services/notification.service.spec.ts +++ b/server/src/services/notification.service.spec.ts @@ -412,11 +412,12 @@ describe(NotificationService.name, () => { }); mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); + mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]); await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); - expect(mocks.assetJob.getAlbumThumbnailFile).toHaveBeenCalledWith( + expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith( albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, + AssetFileType.THUMBNAIL, ); expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.SEND_EMAIL, @@ -440,13 +441,14 @@ describe(NotificationService.name, () => { }); mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([ + mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([ { id: '1', type: AssetFileType.THUMBNAIL, path: 'path-to-thumb.jpg' }, ]); await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); - expect(mocks.assetJob.getAlbumThumbnailFile).toHaveBeenCalledWith( + expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith( albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, + AssetFileType.THUMBNAIL, ); expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.SEND_EMAIL, @@ -470,11 +472,12 @@ describe(NotificationService.name, () => { }); mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - 
mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue(assetStub.image.files); + mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetStub.image.files[2]]); await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); - expect(mocks.assetJob.getAlbumThumbnailFile).toHaveBeenCalledWith( + expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith( albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, + AssetFileType.THUMBNAIL, ); expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.SEND_EMAIL, @@ -506,7 +509,7 @@ describe(NotificationService.name, () => { }); mocks.user.get.mockResolvedValueOnce(userStub.user1); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); + mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); @@ -528,7 +531,7 @@ describe(NotificationService.name, () => { ], }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); + mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); @@ -550,7 +553,7 @@ describe(NotificationService.name, () => { ], }); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); + mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); @@ -564,7 +567,7 @@ describe(NotificationService.name, () => { }); 
mocks.user.get.mockResolvedValue(userStub.user1); mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' }); - mocks.assetJob.getAlbumThumbnailFile.mockResolvedValue([]); + mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]); await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] }); expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false }); diff --git a/server/src/services/notification.service.ts b/server/src/services/notification.service.ts index 9396681bfe..2c4cc76756 100644 --- a/server/src/services/notification.service.ts +++ b/server/src/services/notification.service.ts @@ -6,7 +6,6 @@ import { ArgOf } from 'src/repositories/event.repository'; import { EmailTemplate } from 'src/repositories/notification.repository'; import { BaseService } from 'src/services/base.service'; import { EmailImageAttachment, IEntityJob, INotifyAlbumUpdateJob, JobItem, JobOf } from 'src/types'; -import { getAssetFile } from 'src/utils/asset.util'; import { getFilenameExtension } from 'src/utils/file'; import { getExternalDomain } from 'src/utils/misc'; import { isEqualObject } from 'src/utils/object'; @@ -398,19 +397,18 @@ export class NotificationService extends BaseService { return; } - const albumThumbnailFiles = await this.assetJobRepository.getAlbumThumbnailFile(album.albumThumbnailAssetId); - if (albumThumbnailFiles.length === 0) { - return; - } + const albumThumbnailFiles = await this.assetJobRepository.getAlbumThumbnailFiles( + album.albumThumbnailAssetId, + AssetFileType.THUMBNAIL, + ); - const thumbnailFile = getAssetFile(albumThumbnailFiles, AssetFileType.THUMBNAIL); - if (!thumbnailFile) { + if (albumThumbnailFiles.length !== 1) { return; } return { - filename: `album-thumbnail${getFilenameExtension(thumbnailFile.path)}`, - path: thumbnailFile.path, + filename: `album-thumbnail${getFilenameExtension(albumThumbnailFiles[0].path)}`, + path: albumThumbnailFiles[0].path, cid: 'album-thumbnail', }; } diff 
--git a/server/src/services/smart-info.service.spec.ts b/server/src/services/smart-info.service.spec.ts index aef83a813d..df26e69108 100644 --- a/server/src/services/smart-info.service.spec.ts +++ b/server/src/services/smart-info.service.spec.ts @@ -264,7 +264,7 @@ describe(SmartInfoService.name, () => { }); it('should skip assets without a resize path', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.noResizePath]); + mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.noResizePath, files: [] }); expect(await sut.handleEncodeClip({ id: assetStub.noResizePath.id })).toEqual(JobStatus.FAILED); @@ -274,6 +274,7 @@ describe(SmartInfoService.name, () => { it('should save the returned objects', async () => { mocks.machineLearning.encodeImage.mockResolvedValue('[0.01, 0.02, 0.03]'); + mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] }); expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.SUCCESS); @@ -286,7 +287,10 @@ describe(SmartInfoService.name, () => { }); it('should skip invisible assets', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset]); + mocks.assetJob.getForClipEncoding.mockResolvedValue({ + ...assetStub.livePhotoMotionAsset, + files: [assetStub.image.files[1]], + }); expect(await sut.handleEncodeClip({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.SKIPPED); @@ -295,7 +299,7 @@ describe(SmartInfoService.name, () => { }); it('should fail if asset could not be found', async () => { - mocks.asset.getByIds.mockResolvedValue([]); + mocks.assetJob.getForClipEncoding.mockResolvedValue(void 0); expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.FAILED); @@ -306,6 +310,7 @@ describe(SmartInfoService.name, () => { it('should wait for database', async () => { mocks.machineLearning.encodeImage.mockResolvedValue('[0.01, 0.02, 0.03]'); 
mocks.database.isBusy.mockReturnValue(true); + mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] }); expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.SUCCESS); diff --git a/server/src/services/smart-info.service.ts b/server/src/services/smart-info.service.ts index 1909d2bb99..411114eb17 100644 --- a/server/src/services/smart-info.service.ts +++ b/server/src/services/smart-info.service.ts @@ -2,12 +2,11 @@ import { Injectable } from '@nestjs/common'; import { SystemConfig } from 'src/config'; import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants'; import { OnEvent, OnJob } from 'src/decorators'; -import { AssetFileType, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum'; +import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum'; import { WithoutProperty } from 'src/repositories/asset.repository'; import { ArgOf } from 'src/repositories/event.repository'; import { BaseService } from 'src/services/base.service'; import { JobOf } from 'src/types'; -import { getAssetFile } from 'src/utils/asset.util'; import { getCLIPModelInfo, isSmartSearchEnabled } from 'src/utils/misc'; import { usePagination } from 'src/utils/pagination'; @@ -107,8 +106,8 @@ export class SmartInfoService extends BaseService { return JobStatus.SKIPPED; } - const [asset] = await this.assetRepository.getByIds([id], { files: true }); - if (!asset) { + const asset = await this.assetJobRepository.getForClipEncoding(id); + if (!asset || asset.files.length !== 1) { return JobStatus.FAILED; } @@ -116,14 +115,9 @@ export class SmartInfoService extends BaseService { return JobStatus.SKIPPED; } - const previewFile = getAssetFile(asset.files, AssetFileType.PREVIEW); - if (!previewFile) { - return JobStatus.FAILED; - } - const embedding = await this.machineLearningRepository.encodeImage( machineLearning.urls, - previewFile.path, + asset.files[0].path, 
machineLearning.clip, ); From 586a7a173bb6f8761e4e5fa6820b02923400d02b Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Wed, 16 Apr 2025 22:52:54 +0200 Subject: [PATCH 05/13] refactor: handle detect faces job query (#17660) --- server/src/queries/asset.job.repository.sql | 40 ++++++++++++++++ .../src/repositories/asset-job.repository.ts | 12 +++++ server/src/services/person.service.spec.ts | 48 +++++++++---------- server/src/services/person.service.ts | 9 ++-- server/test/fixtures/asset.stub.ts | 3 +- 5 files changed, 80 insertions(+), 32 deletions(-) diff --git a/server/src/queries/asset.job.repository.sql b/server/src/queries/asset.job.repository.sql index d980ccb676..bf96ae80d6 100644 --- a/server/src/queries/asset.job.repository.sql +++ b/server/src/queries/asset.job.repository.sql @@ -219,6 +219,46 @@ from where "assets"."id" = $2 +-- AssetJobRepository.getForDetectFacesJob +select + "assets"."id", + "assets"."isVisible", + to_json("exif") as "exifInfo", + ( + select + coalesce(json_agg(agg), '[]') + from + ( + select + "asset_faces".* + from + "asset_faces" + where + "asset_faces"."assetId" = "assets"."id" + ) as agg + ) as "faces", + ( + select + coalesce(json_agg(agg), '[]') + from + ( + select + "asset_files"."id", + "asset_files"."path", + "asset_files"."type" + from + "asset_files" + where + "asset_files"."assetId" = "assets"."id" + and "asset_files"."type" = $1 + ) as agg + ) as "files" +from + "assets" + inner join "exif" on "assets"."id" = "exif"."assetId" +where + "assets"."id" = $2 + -- AssetJobRepository.getForStorageTemplateJob select "assets"."id", diff --git a/server/src/repositories/asset-job.repository.ts b/server/src/repositories/asset-job.repository.ts index 9ed40050d8..b507fa5445 100644 --- a/server/src/repositories/asset-job.repository.ts +++ b/server/src/repositories/asset-job.repository.ts @@ -137,6 +137,18 @@ export class AssetJobRepository { .executeTakeFirst(); } + @GenerateSql({ 
params: [DummyValue.UUID] }) + getForDetectFacesJob(id: string) { + return this.db + .selectFrom('assets') + .select(['assets.id', 'assets.isVisible']) + .$call(withExifInner) + .select((eb) => withFaces(eb, true)) + .select((eb) => withFiles(eb, AssetFileType.PREVIEW)) + .where('assets.id', '=', id) + .executeTakeFirst(); + } + private storageTemplateAssetQuery() { return this.db .selectFrom('assets') diff --git a/server/src/services/person.service.spec.ts b/server/src/services/person.service.spec.ts index d907ae1714..9808522434 100644 --- a/server/src/services/person.service.spec.ts +++ b/server/src/services/person.service.spec.ts @@ -1,5 +1,4 @@ import { BadRequestException, NotFoundException } from '@nestjs/common'; -import { AssetFace } from 'src/database'; import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto'; import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto'; import { CacheControl, Colorspace, ImageFormat, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum'; @@ -719,24 +718,7 @@ describe(PersonService.name, () => { }); it('should skip when no resize path', async () => { - mocks.asset.getByIds.mockResolvedValue([assetStub.noResizePath]); - await sut.handleDetectFaces({ id: assetStub.noResizePath.id }); - expect(mocks.machineLearning.detectFaces).not.toHaveBeenCalled(); - }); - - it('should skip it the asset has already been processed', async () => { - mocks.asset.getByIds.mockResolvedValue([ - { - ...assetStub.noResizePath, - faces: [ - { - id: 'asset-face-1', - assetId: assetStub.noResizePath.id, - personId: faceStub.face1.personId, - } as AssetFace, - ], - }, - ]); + mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.noResizePath, files: [] }); await sut.handleDetectFaces({ id: assetStub.noResizePath.id }); expect(mocks.machineLearning.detectFaces).not.toHaveBeenCalled(); }); @@ -745,7 +727,7 @@ describe(PersonService.name, () => { const start = Date.now(); 
mocks.machineLearning.detectFaces.mockResolvedValue({ imageHeight: 500, imageWidth: 400, faces: [] }); - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] }); await sut.handleDetectFaces({ id: assetStub.image.id }); expect(mocks.machineLearning.detectFaces).toHaveBeenCalledWith( ['http://immich-machine-learning:3003'], @@ -766,7 +748,7 @@ describe(PersonService.name, () => { it('should create a face with no person and queue recognition job', async () => { mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock); mocks.search.searchFaces.mockResolvedValue([{ ...faceStub.face1, distance: 0.7 }]); - mocks.asset.getByIds.mockResolvedValue([assetStub.image]); + mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] }); mocks.person.refreshFaces.mockResolvedValue(); await sut.handleDetectFaces({ id: assetStub.image.id }); @@ -782,7 +764,11 @@ describe(PersonService.name, () => { it('should delete an existing face not among the new detected faces', async () => { mocks.machineLearning.detectFaces.mockResolvedValue({ faces: [], imageHeight: 500, imageWidth: 400 }); - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.primaryFace1] }]); + mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ + ...assetStub.image, + faces: [faceStub.primaryFace1], + files: [assetStub.image.files[1]], + }); await sut.handleDetectFaces({ id: assetStub.image.id }); @@ -794,7 +780,11 @@ describe(PersonService.name, () => { it('should add new face and delete an existing face not among the new detected faces', async () => { mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock); - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.primaryFace1] }]); + mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ + ...assetStub.image, + faces: 
[faceStub.primaryFace1], + files: [assetStub.image.files[1]], + }); mocks.person.refreshFaces.mockResolvedValue(); await sut.handleDetectFaces({ id: assetStub.image.id }); @@ -810,7 +800,11 @@ describe(PersonService.name, () => { it('should add embedding to matching metadata face', async () => { mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock); - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.fromExif1] }]); + mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ + ...assetStub.image, + faces: [faceStub.fromExif1], + files: [assetStub.image.files[1]], + }); mocks.person.refreshFaces.mockResolvedValue(); await sut.handleDetectFaces({ id: assetStub.image.id }); @@ -827,7 +821,11 @@ describe(PersonService.name, () => { it('should not add embedding to non-matching metadata face', async () => { mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock); - mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.fromExif2] }]); + mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ + ...assetStub.image, + faces: [faceStub.fromExif2], + files: [assetStub.image.files[1]], + }); await sut.handleDetectFaces({ id: assetStub.image.id }); diff --git a/server/src/services/person.service.ts b/server/src/services/person.service.ts index ae59e2d82c..a413c688f0 100644 --- a/server/src/services/person.service.ts +++ b/server/src/services/person.service.ts @@ -24,7 +24,6 @@ import { PersonUpdateDto, } from 'src/dtos/person.dto'; import { - AssetFileType, AssetType, CacheControl, ImageFormat, @@ -41,7 +40,6 @@ import { BoundingBox } from 'src/repositories/machine-learning.repository'; import { UpdateFacesData } from 'src/repositories/person.repository'; import { BaseService } from 'src/services/base.service'; import { CropOptions, ImageDimensions, InputDimensions, JobItem, JobOf } from 'src/types'; -import { getAssetFile } from 'src/utils/asset.util'; import { ImmichFileResponse } from 
'src/utils/file'; import { mimeTypes } from 'src/utils/mime-types'; import { isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc'; @@ -297,10 +295,9 @@ export class PersonService extends BaseService { return JobStatus.SKIPPED; } - const relations = { exifInfo: true, faces: { person: false, withDeleted: true }, files: true }; - const [asset] = await this.assetRepository.getByIds([id], relations); - const previewFile = getAssetFile(asset.files, AssetFileType.PREVIEW); - if (!asset || !previewFile) { + const asset = await this.assetJobRepository.getForDetectFacesJob(id); + const previewFile = asset?.files[0]; + if (!asset || asset.files.length !== 1 || !previewFile) { return JobStatus.FAILED; } diff --git a/server/test/fixtures/asset.stub.ts b/server/test/fixtures/asset.stub.ts index b5c2f4fe00..16e4f20bb3 100644 --- a/server/test/fixtures/asset.stub.ts +++ b/server/test/fixtures/asset.stub.ts @@ -52,7 +52,7 @@ export const assetStub = { fileSizeInByte: 12_345, ...asset, }), - noResizePath: Object.freeze({ + noResizePath: Object.freeze({ id: 'asset-id', status: AssetStatus.ACTIVE, originalFileName: 'IMG_123.jpg', @@ -79,6 +79,7 @@ export const assetStub = { livePhotoVideoId: null, sharedLinks: [], faces: [], + exifInfo: {} as Exif, sidecarPath: null, deletedAt: null, isExternal: false, From 29b30570bfa7fc177883aabe2347c00d66ef4f3c Mon Sep 17 00:00:00 2001 From: yparitcher Date: Wed, 16 Apr 2025 17:05:12 -0400 Subject: [PATCH 06/13] fix: use IMMICH_HOST in microservices (#17659) --- server/src/workers/microservices.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/server/src/workers/microservices.ts b/server/src/workers/microservices.ts index d20064c436..40a85a276d 100644 --- a/server/src/workers/microservices.ts +++ b/server/src/workers/microservices.ts @@ -16,14 +16,16 @@ export async function bootstrap() { const app = await NestFactory.create(MicroservicesModule, { bufferLogs: true }); const logger = await 
app.resolve(LoggingRepository); + const configRepository = app.get(ConfigRepository); + + const { environment, host } = configRepository.getEnv(); + logger.setContext('Bootstrap'); app.useLogger(logger); app.useWebSocketAdapter(new WebSocketAdapter(app)); - await app.listen(0); + await (host ? app.listen(0, host) : app.listen(0)); - const configRepository = app.get(ConfigRepository); - const { environment } = configRepository.getEnv(); logger.log(`Immich Microservices is running [v${serverVersion}] [${environment}] `); } From 8b38f8a58d8acb34c26d4db4525eb4f69932483f Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Wed, 16 Apr 2025 23:52:22 +0200 Subject: [PATCH 07/13] fix: do not select album in time bucket query (#17662) --- server/src/entities/asset.entity.ts | 30 ------------------- server/src/repositories/asset.repository.ts | 17 ++++------- .../repositories/asset.repository.mock.ts | 1 - 3 files changed, 5 insertions(+), 43 deletions(-) diff --git a/server/src/entities/asset.entity.ts b/server/src/entities/asset.entity.ts index b17328f790..cc3a8bb1a0 100644 --- a/server/src/entities/asset.entity.ts +++ b/server/src/entities/asset.entity.ts @@ -2,7 +2,6 @@ import { DeduplicateJoinsPlugin, ExpressionBuilder, Kysely, SelectQueryBuilder, import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres'; import { AssetFace, AssetFile, AssetJobStatus, columns, Exif, Stack, Tag, User } from 'src/database'; import { DB } from 'src/db'; -import { AlbumEntity } from 'src/entities/album.entity'; import { SharedLinkEntity } from 'src/entities/shared-link.entity'; import { AssetFileType, AssetStatus, AssetType } from 'src/enum'; import { TimeBucketSize } from 'src/repositories/asset.repository'; @@ -45,7 +44,6 @@ export class AssetEntity { exifInfo?: Exif; tags?: Tag[]; sharedLinks!: SharedLinkEntity[]; - albums?: AlbumEntity[]; faces!: AssetFace[]; stackId?: string | null; stack?: Stack | null; @@ -158,34 +156,6 
@@ export function withLibrary(eb: ExpressionBuilder) { ); } -export function withAlbums(qb: SelectQueryBuilder, { albumId }: { albumId?: string }) { - return qb - .select((eb) => - jsonArrayFrom( - eb - .selectFrom('albums') - .selectAll('albums') - .innerJoin('albums_assets_assets', (join) => - join - .onRef('albums.id', '=', 'albums_assets_assets.albumsId') - .onRef('assets.id', '=', 'albums_assets_assets.assetsId'), - ) - .whereRef('albums.id', '=', 'albums_assets_assets.albumsId') - .$if(!!albumId, (qb) => qb.where('albums.id', '=', asUuid(albumId!))), - ).as('albums'), - ) - .$if(!!albumId, (qb) => - qb.where((eb) => - eb.exists((eb) => - eb - .selectFrom('albums_assets_assets') - .whereRef('albums_assets_assets.assetsId', '=', 'assets.id') - .where('albums_assets_assets.albumsId', '=', asUuid(albumId!)), - ), - ), - ); -} - export function withTags(eb: ExpressionBuilder) { return jsonArrayFrom( eb diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index 9c0a4b1fad..52c390c162 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -9,7 +9,6 @@ import { hasPeople, searchAssetBuilder, truncatedDate, - withAlbums, withExif, withFaces, withFacesAndPeople, @@ -381,16 +380,6 @@ export class AssetRepository { await this.db.deleteFrom('assets').where('ownerId', '=', ownerId).execute(); } - async getByAlbumId(pagination: PaginationOptions, albumId: string): Paginated { - const items = await withAlbums(this.db.selectFrom('assets'), { albumId }) - .selectAll('assets') - .where('deletedAt', 'is', null) - .orderBy('fileCreatedAt', 'desc') - .execute(); - - return paginationHelper(items as any as AssetEntity[], pagination.take); - } - async getByDeviceIds(ownerId: string, deviceId: string, deviceAssetIds: string[]): Promise { const assets = await this.db .selectFrom('assets') @@ -760,7 +749,11 @@ export class AssetRepository { .selectFrom('assets') 
.selectAll('assets') .$call(withExif) - .$if(!!options.albumId, (qb) => withAlbums(qb, { albumId: options.albumId })) + .$if(!!options.albumId, (qb) => + qb + .innerJoin('albums_assets_assets', 'albums_assets_assets.assetsId', 'assets.id') + .where('albums_assets_assets.albumsId', '=', options.albumId!), + ) .$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!])) .$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!))) .$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!)) diff --git a/server/test/repositories/asset.repository.mock.ts b/server/test/repositories/asset.repository.mock.ts index f33b487888..2418b6aa64 100644 --- a/server/test/repositories/asset.repository.mock.ts +++ b/server/test/repositories/asset.repository.mock.ts @@ -12,7 +12,6 @@ export const newAssetRepositoryMock = (): Mocked Date: Thu, 17 Apr 2025 00:40:08 +0200 Subject: [PATCH 08/13] chore(server): don't check null dates (#17664) --- server/src/services/library.service.ts | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 0c98288d2d..8cc2cf48ff 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -582,12 +582,7 @@ export class LibraryService extends BaseService { return AssetSyncResult.CHECK_OFFLINE; } - if ( - !asset.fileCreatedAt || - !asset.localDateTime || - !asset.fileModifiedAt || - stat.mtime.valueOf() !== asset.fileModifiedAt.valueOf() - ) { + if (stat.mtime.valueOf() !== asset.fileModifiedAt.valueOf()) { this.logger.verbose(`Asset ${asset.originalPath} needs metadata extraction in library ${asset.libraryId}`); return AssetSyncResult.UPDATE; From 242a559e0f99de8d4886158c8760f966d17122b4 Mon Sep 17 00:00:00 2001 From: Mert <101130780+mertalev@users.noreply.github.com> Date: Wed, 16 Apr 2025 19:00:55 -0400 Subject: [PATCH 09/13] refactor: 
query for fetching faces and people of assets (#17661) * use json instead of jsonb * missing condition --- server/src/entities/asset.entity.ts | 37 +++++++++---------------- server/src/queries/asset.repository.sql | 34 +++++++++++++---------- 2 files changed, 32 insertions(+), 39 deletions(-) diff --git a/server/src/entities/asset.entity.ts b/server/src/entities/asset.entity.ts index cc3a8bb1a0..64c038a689 100644 --- a/server/src/entities/asset.entity.ts +++ b/server/src/entities/asset.entity.ts @@ -90,30 +90,19 @@ export function withFiles(eb: ExpressionBuilder, type?: AssetFileT } export function withFacesAndPeople(eb: ExpressionBuilder, withDeletedFace?: boolean) { - return eb - .selectFrom('asset_faces') - .leftJoin('person', 'person.id', 'asset_faces.personId') - .whereRef('asset_faces.assetId', '=', 'assets.id') - .$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)) - .select((eb) => - eb - .fn('jsonb_agg', [ - eb - .case() - .when('person.id', 'is not', null) - .then( - eb.fn('jsonb_insert', [ - eb.fn('to_jsonb', [eb.table('asset_faces')]), - sql`'{person}'::text[]`, - eb.fn('to_jsonb', [eb.table('person')]), - ]), - ) - .else(eb.fn('to_jsonb', [eb.table('asset_faces')])) - .end(), - ]) - .as('faces'), - ) - .as('faces'); + return jsonArrayFrom( + eb + .selectFrom('asset_faces') + .leftJoinLateral( + (eb) => + eb.selectFrom('person').selectAll('person').whereRef('asset_faces.personId', '=', 'person.id').as('person'), + (join) => join.onTrue(), + ) + .selectAll('asset_faces') + .select((eb) => eb.table('person').as('person')) + .whereRef('asset_faces.assetId', '=', 'assets.id') + .$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)), + ).as('faces'); } export function hasPeople(qb: SelectQueryBuilder, personIds: string[]) { diff --git a/server/src/queries/asset.repository.sql b/server/src/queries/asset.repository.sql index fa89814b8b..cf17bb0276 100644 --- a/server/src/queries/asset.repository.sql +++ 
b/server/src/queries/asset.repository.sql @@ -87,22 +87,26 @@ select "assets".*, ( select - jsonb_agg( - case - when "person"."id" is not null then jsonb_insert( - to_jsonb("asset_faces"), - '{person}'::text[], - to_jsonb("person") - ) - else to_jsonb("asset_faces") - end - ) as "faces" + coalesce(json_agg(agg), '[]') from - "asset_faces" - left join "person" on "person"."id" = "asset_faces"."personId" - where - "asset_faces"."assetId" = "assets"."id" - and "asset_faces"."deletedAt" is null + ( + select + "asset_faces".*, + "person" as "person" + from + "asset_faces" + left join lateral ( + select + "person".* + from + "person" + where + "asset_faces"."personId" = "person"."id" + ) as "person" on true + where + "asset_faces"."assetId" = "assets"."id" + and "asset_faces"."deletedAt" is null + ) as agg ) as "faces", ( select From 5e68f8c51940c48daab2ffcf4f16c3c691d5ee8b Mon Sep 17 00:00:00 2001 From: Min Idzelis Date: Wed, 16 Apr 2025 19:24:26 -0400 Subject: [PATCH 10/13] fix: longpress triggers contextmenu (#17602) --- .../assets/thumbnail/thumbnail.svelte | 28 +++++++++++++------ 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/web/src/lib/components/assets/thumbnail/thumbnail.svelte b/web/src/lib/components/assets/thumbnail/thumbnail.svelte index 93a4e3c6cc..c21acd8f86 100644 --- a/web/src/lib/components/assets/thumbnail/thumbnail.svelte +++ b/web/src/lib/components/assets/thumbnail/thumbnail.svelte @@ -130,18 +130,30 @@ }; let timer: ReturnType; - const clearLongPressTimer = () => clearTimeout(timer); + + const preventContextMenu = (evt: Event) => evt.preventDefault(); + let disposeables: (() => void)[] = []; + + const clearLongPressTimer = () => { + clearTimeout(timer); + for (const dispose of disposeables) { + dispose(); + } + disposeables = []; + }; let startX: number = 0; let startY: number = 0; function longPress(element: HTMLElement, { onLongPress }: { onLongPress: () => void }) { let didPress = false; - const start = (evt: TouchEvent) => { - 
startX = evt.changedTouches[0].clientX; - startY = evt.changedTouches[0].clientY; + const start = (evt: PointerEvent) => { + startX = evt.clientX; + startY = evt.clientY; didPress = false; timer = setTimeout(() => { onLongPress(); + element.addEventListener('contextmenu', preventContextMenu, { once: true }); + disposeables.push(() => element.removeEventListener('contextmenu', preventContextMenu)); didPress = true; }, 350); }; @@ -153,13 +165,13 @@ e.preventDefault(); }; element.addEventListener('click', click); - element.addEventListener('touchstart', start, true); - element.addEventListener('touchend', clearLongPressTimer, true); + element.addEventListener('pointerdown', start, true); + element.addEventListener('pointerup', clearLongPressTimer, true); return { destroy: () => { element.removeEventListener('click', click); - element.removeEventListener('touchstart', start, true); - element.removeEventListener('touchend', clearLongPressTimer, true); + element.removeEventListener('pointerdown', start, true); + element.removeEventListener('pointerup', clearLongPressTimer, true); }, }; } From 067338b0edd8e1ed9e3c3c4245c31c1a55978f6b Mon Sep 17 00:00:00 2001 From: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com> Date: Thu, 17 Apr 2025 16:46:52 +0200 Subject: [PATCH 11/13] chore: remove transfer encoding header (#17671) --- mobile/lib/services/backup.service.dart | 1 - 1 file changed, 1 deletion(-) diff --git a/mobile/lib/services/backup.service.dart b/mobile/lib/services/backup.service.dart index a6468f249b..596ad8dc2e 100644 --- a/mobile/lib/services/backup.service.dart +++ b/mobile/lib/services/backup.service.dart @@ -351,7 +351,6 @@ class BackupService { ); baseRequest.headers.addAll(ApiService.getRequestHeaders()); - baseRequest.headers["Transfer-Encoding"] = "chunked"; baseRequest.fields['deviceAssetId'] = asset.localId!; baseRequest.fields['deviceId'] = deviceId; baseRequest.fields['fileCreatedAt'] = From 81ed54aa61a0ff6e94de6e52709d04c1db0a0d44 
Mon Sep 17 00:00:00 2001 From: shenlong <139912620+shenlong-tanwen@users.noreply.github.com> Date: Thu, 17 Apr 2025 20:55:27 +0530 Subject: [PATCH 12/13] feat: user sync stream (#16862) * refactor: user entity * chore: rebase fixes * refactor: remove int user Id * refactor: migrate store userId from int to string * refactor: rename uid to id * feat: drift * pr feedback * refactor: move common overrides to mixin * refactor: remove int user Id * refactor: migrate store userId from int to string * refactor: rename uid to id * feat: user & partner sync stream * pr changes * refactor: sync service and add tests * chore: remove generated change * chore: move sync model * rebase: convert string ids to byte uuids * rebase * add processing logs * batch db calls * rewrite isolate manager * rewrite with worker_manager * misc fixes * add sync order test --------- Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com> Co-authored-by: Alex --- mobile/analysis_options.yaml | 8 +- mobile/devtools_options.yaml | 1 + mobile/lib/constants/constants.dart | 4 + .../domain/interfaces/sync_api.interface.dart | 7 +- .../interfaces/sync_stream.interface.dart | 10 + .../domain/models/sync/sync_event.model.dart | 14 - .../lib/domain/models/sync_event.model.dart | 13 + .../domain/services/sync_stream.service.dart | 217 +++++++-- mobile/lib/domain/utils/background_sync.dart | 37 ++ mobile/lib/extensions/string_extensions.dart | 9 + .../repositories/sync_api.repository.dart | 93 ++-- .../repositories/sync_stream.repository.dart | 104 ++++ mobile/lib/main.dart | 5 +- .../providers/background_sync.provider.dart | 8 + .../infrastructure/cancel.provider.dart | 12 + .../providers/infrastructure/db.provider.dart | 9 + .../infrastructure/sync_stream.provider.dart | 27 +- mobile/lib/services/auth.service.dart | 11 +- mobile/lib/utils/bootstrap.dart | 6 +- mobile/lib/utils/isolate.dart | 69 +++ mobile/pubspec.lock | 10 +- mobile/pubspec.yaml | 2 + 
mobile/test/domain/service.mock.dart | 3 + .../services/sync_stream_service_test.dart | 443 ++++++++++++++++++ mobile/test/fixtures/sync_stream.stub.dart | 45 ++ .../test/infrastructure/repository.mock.dart | 6 + mobile/test/service.mocks.dart | 1 - mobile/test/services/auth.service_test.dart | 8 + 28 files changed, 1065 insertions(+), 117 deletions(-) create mode 100644 mobile/lib/domain/interfaces/sync_stream.interface.dart delete mode 100644 mobile/lib/domain/models/sync/sync_event.model.dart create mode 100644 mobile/lib/domain/models/sync_event.model.dart create mode 100644 mobile/lib/domain/utils/background_sync.dart create mode 100644 mobile/lib/infrastructure/repositories/sync_stream.repository.dart create mode 100644 mobile/lib/providers/background_sync.provider.dart create mode 100644 mobile/lib/providers/infrastructure/cancel.provider.dart create mode 100644 mobile/lib/utils/isolate.dart create mode 100644 mobile/test/domain/services/sync_stream_service_test.dart create mode 100644 mobile/test/fixtures/sync_stream.stub.dart diff --git a/mobile/analysis_options.yaml b/mobile/analysis_options.yaml index 04f3145908..854f852e3c 100644 --- a/mobile/analysis_options.yaml +++ b/mobile/analysis_options.yaml @@ -35,6 +35,7 @@ linter: analyzer: exclude: - openapi/** + - build/** - lib/generated_plugin_registrant.dart - lib/**/*.g.dart - lib/**/*.drift.dart @@ -92,6 +93,9 @@ custom_lint: allowed: # required / wanted - lib/repositories/*_api.repository.dart + - lib/domain/models/sync_event.model.dart + - lib/{domain,infrastructure}/**/sync_stream.* + - lib/{domain,infrastructure}/**/sync_api.* - lib/infrastructure/repositories/*_api.repository.dart - lib/infrastructure/utils/*.converter.dart # acceptable exceptions for the time being @@ -144,7 +148,9 @@ dart_code_metrics: - avoid-global-state - avoid-inverted-boolean-checks - avoid-late-final-reassignment - - avoid-local-functions + - avoid-local-functions: + exclude: + - test/**.dart - avoid-negated-conditions - 
avoid-nested-streams-and-futures - avoid-referencing-subclasses diff --git a/mobile/devtools_options.yaml b/mobile/devtools_options.yaml index fa0b357c4f..f592d85a9b 100644 --- a/mobile/devtools_options.yaml +++ b/mobile/devtools_options.yaml @@ -1,3 +1,4 @@ description: This file stores settings for Dart & Flutter DevTools. documentation: https://docs.flutter.dev/tools/devtools/extensions#configure-extension-enablement-states extensions: + - drift: true \ No newline at end of file diff --git a/mobile/lib/constants/constants.dart b/mobile/lib/constants/constants.dart index 83d540d54c..a91e0a715d 100644 --- a/mobile/lib/constants/constants.dart +++ b/mobile/lib/constants/constants.dart @@ -5,5 +5,9 @@ const double downloadFailed = -2; // Number of log entries to retain on app start const int kLogTruncateLimit = 250; +// Sync +const int kSyncEventBatchSize = 5000; + +// Hash batch limits const int kBatchHashFileLimit = 128; const int kBatchHashSizeLimit = 1024 * 1024 * 1024; // 1GB diff --git a/mobile/lib/domain/interfaces/sync_api.interface.dart b/mobile/lib/domain/interfaces/sync_api.interface.dart index fb8f1aa46e..44e22c5894 100644 --- a/mobile/lib/domain/interfaces/sync_api.interface.dart +++ b/mobile/lib/domain/interfaces/sync_api.interface.dart @@ -1,7 +1,8 @@ -import 'package:immich_mobile/domain/models/sync/sync_event.model.dart'; +import 'package:immich_mobile/domain/models/sync_event.model.dart'; +import 'package:openapi/api.dart'; abstract interface class ISyncApiRepository { - Future ack(String data); + Future ack(List data); - Stream> watchUserSyncEvent(); + Stream> getSyncEvents(List type); } diff --git a/mobile/lib/domain/interfaces/sync_stream.interface.dart b/mobile/lib/domain/interfaces/sync_stream.interface.dart new file mode 100644 index 0000000000..f9c52d7ee0 --- /dev/null +++ b/mobile/lib/domain/interfaces/sync_stream.interface.dart @@ -0,0 +1,10 @@ +import 'package:immich_mobile/domain/interfaces/db.interface.dart'; +import 
'package:openapi/api.dart'; + +abstract interface class ISyncStreamRepository implements IDatabaseRepository { + Future updateUsersV1(Iterable data); + Future deleteUsersV1(Iterable data); + + Future updatePartnerV1(Iterable data); + Future deletePartnerV1(Iterable data); +} diff --git a/mobile/lib/domain/models/sync/sync_event.model.dart b/mobile/lib/domain/models/sync/sync_event.model.dart deleted file mode 100644 index f4642d59cf..0000000000 --- a/mobile/lib/domain/models/sync/sync_event.model.dart +++ /dev/null @@ -1,14 +0,0 @@ -class SyncEvent { - // dynamic - final dynamic data; - - final String ack; - - SyncEvent({ - required this.data, - required this.ack, - }); - - @override - String toString() => 'SyncEvent(data: $data, ack: $ack)'; -} diff --git a/mobile/lib/domain/models/sync_event.model.dart b/mobile/lib/domain/models/sync_event.model.dart new file mode 100644 index 0000000000..2ad8a75fe1 --- /dev/null +++ b/mobile/lib/domain/models/sync_event.model.dart @@ -0,0 +1,13 @@ +import 'package:openapi/api.dart'; + +class SyncEvent { + final SyncEntityType type; + // ignore: avoid-dynamic + final dynamic data; + final String ack; + + const SyncEvent({required this.type, required this.data, required this.ack}); + + @override + String toString() => 'SyncEvent(type: $type, data: $data, ack: $ack)'; +} diff --git a/mobile/lib/domain/services/sync_stream.service.dart b/mobile/lib/domain/services/sync_stream.service.dart index 72e29b3677..8d7d87e35e 100644 --- a/mobile/lib/domain/services/sync_stream.service.dart +++ b/mobile/lib/domain/services/sync_stream.service.dart @@ -1,49 +1,200 @@ +// ignore_for_file: avoid-passing-async-when-sync-expected + import 'dart:async'; -import 'package:flutter/foundation.dart'; +import 'package:collection/collection.dart'; import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart'; +import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart'; +import 'package:logging/logging.dart'; import 
'package:openapi/api.dart'; +import 'package:worker_manager/worker_manager.dart'; + +const _kSyncTypeOrder = [ + SyncEntityType.userDeleteV1, + SyncEntityType.userV1, + SyncEntityType.partnerDeleteV1, + SyncEntityType.partnerV1, + SyncEntityType.assetDeleteV1, + SyncEntityType.assetV1, + SyncEntityType.assetExifV1, + SyncEntityType.partnerAssetDeleteV1, + SyncEntityType.partnerAssetV1, + SyncEntityType.partnerAssetExifV1, +]; class SyncStreamService { + final Logger _logger = Logger('SyncStreamService'); + final ISyncApiRepository _syncApiRepository; + final ISyncStreamRepository _syncStreamRepository; + final bool Function()? _cancelChecker; - SyncStreamService(this._syncApiRepository); + SyncStreamService({ + required ISyncApiRepository syncApiRepository, + required ISyncStreamRepository syncStreamRepository, + bool Function()? cancelChecker, + }) : _syncApiRepository = syncApiRepository, + _syncStreamRepository = syncStreamRepository, + _cancelChecker = cancelChecker; - StreamSubscription? 
_userSyncSubscription; + Future _handleSyncData( + SyncEntityType type, + // ignore: avoid-dynamic + Iterable data, + ) async { + if (data.isEmpty) { + _logger.warning("Received empty sync data for $type"); + return false; + } - void syncUsers() { - _userSyncSubscription = - _syncApiRepository.watchUserSyncEvent().listen((events) async { - for (final event in events) { - if (event.data is SyncUserV1) { - final data = event.data as SyncUserV1; - debugPrint("User Update: $data"); + _logger.fine("Processing sync data for $type of length ${data.length}"); - // final user = await _userRepository.get(data.id); - - // if (user == null) { - // continue; - // } - - // user.name = data.name; - // user.email = data.email; - // user.updatedAt = DateTime.now(); - - // await _userRepository.update(user); - // await _syncApiRepository.ack(event.ack); - } - - if (event.data is SyncUserDeleteV1) { - final data = event.data as SyncUserDeleteV1; - - debugPrint("User delete: $data"); - // await _syncApiRepository.ack(event.ack); - } + try { + if (type == SyncEntityType.partnerV1) { + return await _syncStreamRepository.updatePartnerV1(data.cast()); } + + if (type == SyncEntityType.partnerDeleteV1) { + return await _syncStreamRepository.deletePartnerV1(data.cast()); + } + + if (type == SyncEntityType.userV1) { + return await _syncStreamRepository.updateUsersV1(data.cast()); + } + + if (type == SyncEntityType.userDeleteV1) { + return await _syncStreamRepository.deleteUsersV1(data.cast()); + } + } catch (error, stack) { + _logger.severe("Error processing sync data for $type", error, stack); + return false; + } + + _logger.warning("Unknown sync data type: $type"); + return false; + } + + Future _syncEvent(List types) { + _logger.info("Syncing Events: $types"); + final streamCompleter = Completer(); + bool shouldComplete = false; + // the onDone callback might fire before the events are processed + // the following flag ensures that the onDone callback is not called + // before the events 
are processed and also that events are processed sequentially + Completer? mutex; + StreamSubscription? subscription; + try { + subscription = _syncApiRepository.getSyncEvents(types).listen( + (events) async { + if (events.isEmpty) { + _logger.warning("Received empty sync events"); + return; + } + + // If previous events are still being processed, wait for them to finish + if (mutex != null) { + await mutex!.future; + } + + if (_cancelChecker?.call() ?? false) { + _logger.info("Sync cancelled, stopping stream"); + subscription?.cancel(); + if (!streamCompleter.isCompleted) { + streamCompleter.completeError( + CanceledError(), + StackTrace.current, + ); + } + return; + } + + // Take control of the mutex and process the events + mutex = Completer(); + + try { + final eventsMap = events.groupListsBy((event) => event.type); + final Map acks = {}; + + for (final type in _kSyncTypeOrder) { + final data = eventsMap[type]; + if (data == null) { + continue; + } + + if (_cancelChecker?.call() ?? false) { + _logger.info("Sync cancelled, stopping stream"); + mutex?.complete(); + mutex = null; + if (!streamCompleter.isCompleted) { + streamCompleter.completeError( + CanceledError(), + StackTrace.current, + ); + } + + return; + } + + if (data.isEmpty) { + _logger.warning("Received empty sync events for $type"); + continue; + } + + if (await _handleSyncData(type, data.map((e) => e.data))) { + // ignore: avoid-unsafe-collection-methods + acks[type] = data.last.ack; + } else { + _logger.warning("Failed to handle sync events for $type"); + } + } + + if (acks.isNotEmpty) { + await _syncApiRepository.ack(acks.values.toList()); + } + _logger.info("$types events processed"); + } catch (error, stack) { + _logger.warning("Error handling sync events", error, stack); + } finally { + mutex?.complete(); + mutex = null; + } + + if (shouldComplete) { + _logger.info("Sync done, completing stream"); + if (!streamCompleter.isCompleted) streamCompleter.complete(); + } + }, + onError: (error, stack) 
{ + _logger.warning("Error in sync stream for $types", error, stack); + // Do not proceed if the stream errors + if (!streamCompleter.isCompleted) { + // ignore: avoid-missing-completer-stack-trace + streamCompleter.completeError(error, stack); + } + }, + onDone: () { + _logger.info("$types stream done"); + if (mutex == null && !streamCompleter.isCompleted) { + streamCompleter.complete(); + } else { + // Marks the stream as done but does not complete the completer + // until the events are processed + shouldComplete = true; + } + }, + ); + } catch (error, stack) { + _logger.severe("Error starting sync stream", error, stack); + if (!streamCompleter.isCompleted) { + streamCompleter.completeError(error, stack); + } + } + return streamCompleter.future.whenComplete(() { + _logger.info("Sync stream completed"); + return subscription?.cancel(); }); } - Future dispose() async { - await _userSyncSubscription?.cancel(); - } + Future syncUsers() => + _syncEvent([SyncRequestType.usersV1, SyncRequestType.partnersV1]); } diff --git a/mobile/lib/domain/utils/background_sync.dart b/mobile/lib/domain/utils/background_sync.dart new file mode 100644 index 0000000000..0bd456f0bb --- /dev/null +++ b/mobile/lib/domain/utils/background_sync.dart @@ -0,0 +1,37 @@ +// ignore_for_file: avoid-passing-async-when-sync-expected + +import 'dart:async'; + +import 'package:immich_mobile/providers/infrastructure/sync_stream.provider.dart'; +import 'package:immich_mobile/utils/isolate.dart'; +import 'package:worker_manager/worker_manager.dart'; + +class BackgroundSyncManager { + Cancelable? 
_userSyncTask; + + BackgroundSyncManager(); + + Future cancel() { + final futures = []; + if (_userSyncTask != null) { + futures.add(_userSyncTask!.future); + } + _userSyncTask?.cancel(); + _userSyncTask = null; + return Future.wait(futures); + } + + Future syncUsers() { + if (_userSyncTask != null) { + return _userSyncTask!.future; + } + + _userSyncTask = runInIsolateGentle( + computation: (ref) => ref.read(syncStreamServiceProvider).syncUsers(), + ); + _userSyncTask!.whenComplete(() { + _userSyncTask = null; + }); + return _userSyncTask!.future; + } +} diff --git a/mobile/lib/extensions/string_extensions.dart b/mobile/lib/extensions/string_extensions.dart index 67411013ee..73c8c2d34c 100644 --- a/mobile/lib/extensions/string_extensions.dart +++ b/mobile/lib/extensions/string_extensions.dart @@ -1,3 +1,7 @@ +import 'dart:typed_data'; + +import 'package:uuid/parsing.dart'; + extension StringExtension on String { String capitalize() { return split(" ") @@ -29,3 +33,8 @@ extension DurationExtension on String { return int.parse(this); } } + +extension UUIDExtension on String { + Uint8List toUuidByte({bool shouldValidate = false}) => + UuidParsing.parseAsByteList(this, validate: shouldValidate); +} diff --git a/mobile/lib/infrastructure/repositories/sync_api.repository.dart b/mobile/lib/infrastructure/repositories/sync_api.repository.dart index 88a6838c44..a26b867df6 100644 --- a/mobile/lib/infrastructure/repositories/sync_api.repository.dart +++ b/mobile/lib/infrastructure/repositories/sync_api.repository.dart @@ -1,37 +1,36 @@ import 'dart:async'; import 'dart:convert'; -import 'package:flutter/foundation.dart'; -import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart'; -import 'package:immich_mobile/domain/models/sync/sync_event.model.dart'; -import 'package:immich_mobile/services/api.service.dart'; -import 'package:openapi/api.dart'; import 'package:http/http.dart' as http; +import 'package:immich_mobile/constants/constants.dart'; +import 
'package:immich_mobile/domain/interfaces/sync_api.interface.dart'; +import 'package:immich_mobile/domain/models/sync_event.model.dart'; +import 'package:immich_mobile/services/api.service.dart'; +import 'package:logging/logging.dart'; +import 'package:openapi/api.dart'; class SyncApiRepository implements ISyncApiRepository { + final Logger _logger = Logger('SyncApiRepository'); final ApiService _api; - const SyncApiRepository(this._api); + final int _batchSize; + SyncApiRepository(this._api, {int batchSize = kSyncEventBatchSize}) + : _batchSize = batchSize; @override - Stream> watchUserSyncEvent() { - return _getSyncStream( - SyncStreamDto(types: [SyncRequestType.usersV1]), - ); + Stream> getSyncEvents(List type) { + return _getSyncStream(SyncStreamDto(types: type)); } @override - Future ack(String data) { - return _api.syncApi.sendSyncAck(SyncAckSetDto(acks: [data])); + Future ack(List data) { + return _api.syncApi.sendSyncAck(SyncAckSetDto(acks: data)); } - Stream> _getSyncStream( - SyncStreamDto dto, { - int batchSize = 5000, - }) async* { + Stream> _getSyncStream(SyncStreamDto dto) async* { final client = http.Client(); final endpoint = "${_api.apiClient.basePath}/sync/stream"; - final headers = { + final headers = { 'Content-Type': 'application/json', 'Accept': 'application/jsonlines+json', }; @@ -61,52 +60,54 @@ class SyncApiRepository implements ISyncApiRepository { await for (final chunk in response.stream.transform(utf8.decoder)) { previousChunk += chunk; - final parts = previousChunk.split('\n'); + final parts = previousChunk.toString().split('\n'); previousChunk = parts.removeLast(); lines.addAll(parts); - if (lines.length < batchSize) { + if (lines.length < _batchSize) { continue; } - yield await compute(_parseSyncResponse, lines); + yield _parseSyncResponse(lines); lines.clear(); } } finally { if (lines.isNotEmpty) { - yield await compute(_parseSyncResponse, lines); + yield _parseSyncResponse(lines); } client.close(); } } + + List 
_parseSyncResponse(List lines) { + final List data = []; + + for (final line in lines) { + try { + final jsonData = jsonDecode(line); + final type = SyncEntityType.fromJson(jsonData['type'])!; + final dataJson = jsonData['data']; + final ack = jsonData['ack']; + final converter = _kResponseMap[type]; + if (converter == null) { + _logger.warning("[_parseSyncResponse] Unknown type $type"); + continue; + } + + data.add(SyncEvent(type: type, data: converter(dataJson), ack: ack)); + } catch (error, stack) { + _logger.severe("[_parseSyncResponse] Error parsing json", error, stack); + } + } + + return data; + } } +// ignore: avoid-dynamic const _kResponseMap = { SyncEntityType.userV1: SyncUserV1.fromJson, SyncEntityType.userDeleteV1: SyncUserDeleteV1.fromJson, + SyncEntityType.partnerV1: SyncPartnerV1.fromJson, + SyncEntityType.partnerDeleteV1: SyncPartnerDeleteV1.fromJson, }; - -// Need to be outside of the class to be able to use compute -List _parseSyncResponse(List lines) { - final List data = []; - - for (var line in lines) { - try { - final jsonData = jsonDecode(line); - final type = SyncEntityType.fromJson(jsonData['type'])!; - final dataJson = jsonData['data']; - final ack = jsonData['ack']; - final converter = _kResponseMap[type]; - if (converter == null) { - debugPrint("[_parseSyncReponse] Unknown type $type"); - continue; - } - - data.add(SyncEvent(data: converter(dataJson), ack: ack)); - } catch (error, stack) { - debugPrint("[_parseSyncReponse] Error parsing json $error $stack"); - } - } - - return data; -} diff --git a/mobile/lib/infrastructure/repositories/sync_stream.repository.dart b/mobile/lib/infrastructure/repositories/sync_stream.repository.dart new file mode 100644 index 0000000000..a947a9a66b --- /dev/null +++ b/mobile/lib/infrastructure/repositories/sync_stream.repository.dart @@ -0,0 +1,104 @@ +import 'package:drift/drift.dart'; +import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart'; +import 
'package:immich_mobile/extensions/string_extensions.dart'; +import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart'; +import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart'; +import 'package:immich_mobile/infrastructure/repositories/db.repository.dart'; +import 'package:logging/logging.dart'; +import 'package:openapi/api.dart'; + +class DriftSyncStreamRepository extends DriftDatabaseRepository + implements ISyncStreamRepository { + final Logger _logger = Logger('DriftSyncStreamRepository'); + final Drift _db; + + DriftSyncStreamRepository(super.db) : _db = db; + + @override + Future deleteUsersV1(Iterable data) async { + try { + await _db.batch((batch) { + for (final user in data) { + batch.delete( + _db.userEntity, + UserEntityCompanion(id: Value(user.userId.toUuidByte())), + ); + } + }); + return true; + } catch (e, s) { + _logger.severe('Error while processing SyncUserDeleteV1', e, s); + return false; + } + } + + @override + Future updateUsersV1(Iterable data) async { + try { + await _db.batch((batch) { + for (final user in data) { + final companion = UserEntityCompanion( + name: Value(user.name), + email: Value(user.email), + ); + + batch.insert( + _db.userEntity, + companion.copyWith(id: Value(user.id.toUuidByte())), + onConflict: DoUpdate((_) => companion), + ); + } + }); + return true; + } catch (e, s) { + _logger.severe('Error while processing SyncUserV1', e, s); + return false; + } + } + + @override + Future deletePartnerV1(Iterable data) async { + try { + await _db.batch((batch) { + for (final partner in data) { + batch.delete( + _db.partnerEntity, + PartnerEntityCompanion( + sharedById: Value(partner.sharedById.toUuidByte()), + sharedWithId: Value(partner.sharedWithId.toUuidByte()), + ), + ); + } + }); + return true; + } catch (e, s) { + _logger.severe('Error while processing SyncPartnerDeleteV1', e, s); + return false; + } + } + + @override + Future updatePartnerV1(Iterable data) async { + try { + await 
_db.batch((batch) { + for (final partner in data) { + final companion = + PartnerEntityCompanion(inTimeline: Value(partner.inTimeline)); + + batch.insert( + _db.partnerEntity, + companion.copyWith( + sharedById: Value(partner.sharedById.toUuidByte()), + sharedWithId: Value(partner.sharedWithId.toUuidByte()), + ), + onConflict: DoUpdate((_) => companion), + ); + } + }); + return true; + } catch (e, s) { + _logger.severe('Error while processing SyncPartnerV1', e, s); + return false; + } + } +} diff --git a/mobile/lib/main.dart b/mobile/lib/main.dart index 1a434aa359..73af81d69d 100644 --- a/mobile/lib/main.dart +++ b/mobile/lib/main.dart @@ -11,6 +11,7 @@ import 'package:flutter_displaymode/flutter_displaymode.dart'; import 'package:hooks_riverpod/hooks_riverpod.dart'; import 'package:immich_mobile/constants/locales.dart'; import 'package:immich_mobile/extensions/build_context_extensions.dart'; +import 'package:immich_mobile/generated/codegen_loader.g.dart'; import 'package:immich_mobile/providers/app_life_cycle.provider.dart'; import 'package:immich_mobile/providers/asset_viewer/share_intent_upload.provider.dart'; import 'package:immich_mobile/providers/db.provider.dart'; @@ -31,13 +32,15 @@ import 'package:immich_mobile/utils/migration.dart'; import 'package:intl/date_symbol_data_local.dart'; import 'package:logging/logging.dart'; import 'package:timezone/data/latest.dart'; -import 'package:immich_mobile/generated/codegen_loader.g.dart'; +import 'package:worker_manager/worker_manager.dart'; void main() async { ImmichWidgetsBinding(); final db = await Bootstrap.initIsar(); await Bootstrap.initDomain(db); await initApp(); + // Warm-up isolate pool for worker manager + await workerManager.init(dynamicSpawning: true); await migrateDatabaseIfNeeded(db); HttpOverrides.global = HttpSSLCertOverride(); diff --git a/mobile/lib/providers/background_sync.provider.dart b/mobile/lib/providers/background_sync.provider.dart new file mode 100644 index 0000000000..83d103bb3b --- 
/dev/null +++ b/mobile/lib/providers/background_sync.provider.dart @@ -0,0 +1,8 @@ +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/domain/utils/background_sync.dart'; + +final backgroundSyncProvider = Provider((ref) { + final manager = BackgroundSyncManager(); + ref.onDispose(manager.cancel); + return manager; +}); diff --git a/mobile/lib/providers/infrastructure/cancel.provider.dart b/mobile/lib/providers/infrastructure/cancel.provider.dart new file mode 100644 index 0000000000..6851861e1a --- /dev/null +++ b/mobile/lib/providers/infrastructure/cancel.provider.dart @@ -0,0 +1,12 @@ +import 'package:hooks_riverpod/hooks_riverpod.dart'; + +/// Provider holding a boolean function that returns true when cancellation is requested. +/// A computation running in the isolate uses the function to implement cooperative cancellation. +final cancellationProvider = Provider( + // This will be overridden in the isolate's container. + // Throwing ensures it's not used without an override. 
+ (ref) => throw UnimplementedError( + "cancellationProvider must be overridden in the isolate's ProviderContainer and not to be used in the root isolate", + ), + name: 'cancellationProvider', +); diff --git a/mobile/lib/providers/infrastructure/db.provider.dart b/mobile/lib/providers/infrastructure/db.provider.dart index 84010b3b96..4eefbc556c 100644 --- a/mobile/lib/providers/infrastructure/db.provider.dart +++ b/mobile/lib/providers/infrastructure/db.provider.dart @@ -1,4 +1,7 @@ +import 'dart:async'; + import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/infrastructure/repositories/db.repository.dart'; import 'package:isar/isar.dart'; import 'package:riverpod_annotation/riverpod_annotation.dart'; @@ -6,3 +9,9 @@ part 'db.provider.g.dart'; @Riverpod(keepAlive: true) Isar isar(Ref ref) => throw UnimplementedError('isar'); + +final driftProvider = Provider((ref) { + final drift = Drift(); + ref.onDispose(() => unawaited(drift.close())); + return drift; +}); diff --git a/mobile/lib/providers/infrastructure/sync_stream.provider.dart b/mobile/lib/providers/infrastructure/sync_stream.provider.dart index 64f1a6cb05..e313982a30 100644 --- a/mobile/lib/providers/infrastructure/sync_stream.provider.dart +++ b/mobile/lib/providers/infrastructure/sync_stream.provider.dart @@ -1,24 +1,23 @@ -import 'dart:async'; - import 'package:hooks_riverpod/hooks_riverpod.dart'; import 'package:immich_mobile/domain/services/sync_stream.service.dart'; import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart'; +import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart'; import 'package:immich_mobile/providers/api.provider.dart'; +import 'package:immich_mobile/providers/infrastructure/cancel.provider.dart'; +import 'package:immich_mobile/providers/infrastructure/db.provider.dart'; final syncStreamServiceProvider = Provider( - (ref) { - final instance = SyncStreamService( - 
ref.watch(syncApiRepositoryProvider), - ); - - ref.onDispose(() => unawaited(instance.dispose())); - - return instance; - }, + (ref) => SyncStreamService( + syncApiRepository: ref.watch(syncApiRepositoryProvider), + syncStreamRepository: ref.watch(syncStreamRepositoryProvider), + cancelChecker: ref.watch(cancellationProvider), + ), ); final syncApiRepositoryProvider = Provider( - (ref) => SyncApiRepository( - ref.watch(apiServiceProvider), - ), + (ref) => SyncApiRepository(ref.watch(apiServiceProvider)), +); + +final syncStreamRepositoryProvider = Provider( + (ref) => DriftSyncStreamRepository(ref.watch(driftProvider)), ); diff --git a/mobile/lib/services/auth.service.dart b/mobile/lib/services/auth.service.dart index 20fa62dc4b..ec053c078b 100644 --- a/mobile/lib/services/auth.service.dart +++ b/mobile/lib/services/auth.service.dart @@ -3,12 +3,14 @@ import 'dart:io'; import 'package:hooks_riverpod/hooks_riverpod.dart'; import 'package:immich_mobile/domain/models/store.model.dart'; +import 'package:immich_mobile/domain/utils/background_sync.dart'; import 'package:immich_mobile/entities/store.entity.dart'; import 'package:immich_mobile/interfaces/auth.interface.dart'; import 'package:immich_mobile/interfaces/auth_api.interface.dart'; import 'package:immich_mobile/models/auth/auxilary_endpoint.model.dart'; import 'package:immich_mobile/models/auth/login_response.model.dart'; import 'package:immich_mobile/providers/api.provider.dart'; +import 'package:immich_mobile/providers/background_sync.provider.dart'; import 'package:immich_mobile/repositories/auth.repository.dart'; import 'package:immich_mobile/repositories/auth_api.repository.dart'; import 'package:immich_mobile/services/api.service.dart'; @@ -22,6 +24,7 @@ final authServiceProvider = Provider( ref.watch(authRepositoryProvider), ref.watch(apiServiceProvider), ref.watch(networkServiceProvider), + ref.watch(backgroundSyncProvider), ), ); @@ -30,6 +33,7 @@ class AuthService { final IAuthRepository 
_authRepository; final ApiService _apiService; final NetworkService _networkService; + final BackgroundSyncManager _backgroundSyncManager; final _log = Logger("AuthService"); @@ -38,6 +42,7 @@ class AuthService { this._authRepository, this._apiService, this._networkService, + this._backgroundSyncManager, ); /// Validates the provided server URL by resolving and setting the endpoint. @@ -115,8 +120,10 @@ class AuthService { /// - Asset ETag /// /// All deletions are executed in parallel using [Future.wait]. - Future clearLocalData() { - return Future.wait([ + Future clearLocalData() async { + // Cancel any ongoing background sync operations before clearing data + await _backgroundSyncManager.cancel(); + await Future.wait([ _authRepository.clearLocalData(), Store.delete(StoreKey.currentUser), Store.delete(StoreKey.accessToken), diff --git a/mobile/lib/utils/bootstrap.dart b/mobile/lib/utils/bootstrap.dart index 570752c6d9..26f3b49242 100644 --- a/mobile/lib/utils/bootstrap.dart +++ b/mobile/lib/utils/bootstrap.dart @@ -48,11 +48,15 @@ abstract final class Bootstrap { ); } - static Future initDomain(Isar db) async { + static Future initDomain( + Isar db, { + bool shouldBufferLogs = true, + }) async { await StoreService.init(storeRepository: IsarStoreRepository(db)); await LogService.init( logRepository: IsarLogRepository(db), storeRepository: IsarStoreRepository(db), + shouldBuffer: shouldBufferLogs, ); } } diff --git a/mobile/lib/utils/isolate.dart b/mobile/lib/utils/isolate.dart new file mode 100644 index 0000000000..cfbb1b544f --- /dev/null +++ b/mobile/lib/utils/isolate.dart @@ -0,0 +1,69 @@ +import 'dart:async'; +import 'dart:ui'; + +import 'package:flutter/services.dart'; +import 'package:hooks_riverpod/hooks_riverpod.dart'; +import 'package:immich_mobile/providers/db.provider.dart'; +import 'package:immich_mobile/providers/infrastructure/cancel.provider.dart'; +import 'package:immich_mobile/providers/infrastructure/db.provider.dart'; +import 
'package:immich_mobile/utils/bootstrap.dart'; +import 'package:logging/logging.dart'; +import 'package:worker_manager/worker_manager.dart'; + +class InvalidIsolateUsageException implements Exception { + const InvalidIsolateUsageException(); + + @override + String toString() => + "IsolateHelper should only be used from the root isolate"; +} + +// !! Should be used only from the root isolate +Cancelable runInIsolateGentle({ + required Future Function(ProviderContainer ref) computation, + String? debugLabel, +}) { + final token = RootIsolateToken.instance; + if (token == null) { + throw const InvalidIsolateUsageException(); + } + + return workerManager.executeGentle((cancelledChecker) async { + BackgroundIsolateBinaryMessenger.ensureInitialized(token); + DartPluginRegistrant.ensureInitialized(); + + final db = await Bootstrap.initIsar(); + await Bootstrap.initDomain(db, shouldBufferLogs: false); + final ref = ProviderContainer( + overrides: [ + // TODO: Remove once isar is removed + dbProvider.overrideWithValue(db), + isarProvider.overrideWithValue(db), + cancellationProvider.overrideWithValue(cancelledChecker), + ], + ); + + Logger log = Logger("IsolateLogger"); + + try { + return await computation(ref); + } on CanceledError { + log.warning( + "Computation cancelled ${debugLabel == null ? '' : ' for $debugLabel'}", + ); + } catch (error, stack) { + log.severe( + "Error in runInIsolateGentle ${debugLabel == null ? 
'' : ' for $debugLabel'}", + error, + stack, + ); + } finally { + // Wait for the logs to flush + await Future.delayed(const Duration(seconds: 2)); + // Always close the new db connection on Isolate end + ref.read(driftProvider).close(); + ref.read(isarProvider).close(); + } + return null; + }); +} diff --git a/mobile/pubspec.lock b/mobile/pubspec.lock index 3731832296..235b3f71c3 100644 --- a/mobile/pubspec.lock +++ b/mobile/pubspec.lock @@ -1806,7 +1806,7 @@ packages: source: hosted version: "3.1.4" uuid: - dependency: transitive + dependency: "direct main" description: name: uuid sha256: a5be9ef6618a7ac1e964353ef476418026db906c4facdedaa299b7a2e71690ff @@ -1933,6 +1933,14 @@ packages: url: "https://pub.dev" source: hosted version: "0.0.3" + worker_manager: + dependency: "direct main" + description: + name: worker_manager + sha256: "086ed63e9b36266e851404ca90fd44e37c0f4c9bbf819e5f8d7c87f9741c0591" + url: "https://pub.dev" + source: hosted + version: "7.2.3" xdg_directories: dependency: transitive description: diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 03c39810f6..fdd91e1f87 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -60,7 +60,9 @@ dependencies: thumbhash: 0.1.0+1 timezone: ^0.9.4 url_launcher: ^6.3.1 + uuid: ^4.5.1 wakelock_plus: ^1.2.10 + worker_manager: ^7.2.3 native_video_player: git: diff --git a/mobile/test/domain/service.mock.dart b/mobile/test/domain/service.mock.dart index 53a173fc28..97a3f30294 100644 --- a/mobile/test/domain/service.mock.dart +++ b/mobile/test/domain/service.mock.dart @@ -1,7 +1,10 @@ import 'package:immich_mobile/domain/services/store.service.dart'; import 'package:immich_mobile/domain/services/user.service.dart'; +import 'package:immich_mobile/domain/utils/background_sync.dart'; import 'package:mocktail/mocktail.dart'; class MockStoreService extends Mock implements StoreService {} class MockUserService extends Mock implements UserService {} + +class MockBackgroundSyncManager extends Mock implements 
BackgroundSyncManager {} diff --git a/mobile/test/domain/services/sync_stream_service_test.dart b/mobile/test/domain/services/sync_stream_service_test.dart new file mode 100644 index 0000000000..e1d8e6987f --- /dev/null +++ b/mobile/test/domain/services/sync_stream_service_test.dart @@ -0,0 +1,443 @@ +// ignore_for_file: avoid-unnecessary-futures, avoid-async-call-in-sync-function + +import 'dart:async'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart'; +import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart'; +import 'package:immich_mobile/domain/models/sync_event.model.dart'; +import 'package:immich_mobile/domain/services/sync_stream.service.dart'; +import 'package:mocktail/mocktail.dart'; +import 'package:openapi/api.dart'; +import 'package:worker_manager/worker_manager.dart'; + +import '../../fixtures/sync_stream.stub.dart'; +import '../../infrastructure/repository.mock.dart'; + +class _CancellationWrapper { + const _CancellationWrapper(); + + bool isCancelled() => false; +} + +class _MockCancellationWrapper extends Mock implements _CancellationWrapper {} + +void main() { + late SyncStreamService sut; + late ISyncStreamRepository mockSyncStreamRepo; + late ISyncApiRepository mockSyncApiRepo; + late StreamController> streamController; + + successHandler(Invocation _) async => true; + failureHandler(Invocation _) async => false; + + setUp(() { + mockSyncStreamRepo = MockSyncStreamRepository(); + mockSyncApiRepo = MockSyncApiRepository(); + streamController = StreamController>.broadcast(); + + sut = SyncStreamService( + syncApiRepository: mockSyncApiRepo, + syncStreamRepository: mockSyncStreamRepo, + ); + + // Default stream setup - emits one batch and closes + when(() => mockSyncApiRepo.getSyncEvents(any())) + .thenAnswer((_) => streamController.stream); + + // Default ack setup + when(() => mockSyncApiRepo.ack(any())).thenAnswer((_) async => {}); + + // Register 
fallbacks for mocktail verification + registerFallbackValue([]); + registerFallbackValue([]); + registerFallbackValue([]); + registerFallbackValue([]); + + // Default successful repository calls + when(() => mockSyncStreamRepo.updateUsersV1(any())) + .thenAnswer(successHandler); + when(() => mockSyncStreamRepo.deleteUsersV1(any())) + .thenAnswer(successHandler); + when(() => mockSyncStreamRepo.updatePartnerV1(any())) + .thenAnswer(successHandler); + when(() => mockSyncStreamRepo.deletePartnerV1(any())) + .thenAnswer(successHandler); + }); + + tearDown(() async { + if (!streamController.isClosed) { + await streamController.close(); + } + }); + + // Helper to trigger sync and add events to the stream + Future triggerSyncAndEmit(List events) async { + final future = sut.syncUsers(); // Start listening + await Future.delayed(Duration.zero); // Allow listener to attach + if (!streamController.isClosed) { + streamController.add(events); + await streamController.close(); // Close after emitting + } + await future; // Wait for processing to complete + } + + group("SyncStreamService", () { + test( + "completes successfully when stream emits data and handlers succeed", + () async { + final events = [ + ...SyncStreamStub.userEvents, + ...SyncStreamStub.partnerEvents, + ]; + final future = triggerSyncAndEmit(events); + await expectLater(future, completes); + // Verify ack includes last ack from each successfully handled type + verify( + () => + mockSyncApiRepo.ack(any(that: containsAll(["5", "2", "4", "3"]))), + ).called(1); + }, + ); + + test("completes successfully when stream emits an error", () async { + when(() => mockSyncApiRepo.getSyncEvents(any())) + .thenAnswer((_) => Stream.error(Exception("Stream Error"))); + // Should complete gracefully without throwing + await expectLater(sut.syncUsers(), throwsException); + verifyNever(() => mockSyncApiRepo.ack(any())); // No ack on stream error + }); + + test("throws when initial getSyncEvents call fails", () async { + final 
apiException = Exception("API Error"); + when(() => mockSyncApiRepo.getSyncEvents(any())).thenThrow(apiException); + // Should rethrow the exception from the initial call + await expectLater(sut.syncUsers(), throwsA(apiException)); + verifyNever(() => mockSyncApiRepo.ack(any())); + }); + + test( + "completes successfully when a repository handler throws an exception", + () async { + when(() => mockSyncStreamRepo.updateUsersV1(any())) + .thenThrow(Exception("Repo Error")); + final events = [ + ...SyncStreamStub.userEvents, + ...SyncStreamStub.partnerEvents, + ]; + // Should complete, but ack only for the successful types + await triggerSyncAndEmit(events); + // Only partner delete was successful by default setup + verify(() => mockSyncApiRepo.ack(["2", "4", "3"])).called(1); + }, + ); + + test( + "completes successfully but sends no ack when all handlers fail", + () async { + when(() => mockSyncStreamRepo.updateUsersV1(any())) + .thenAnswer(failureHandler); + when(() => mockSyncStreamRepo.deleteUsersV1(any())) + .thenAnswer(failureHandler); + when(() => mockSyncStreamRepo.updatePartnerV1(any())) + .thenAnswer(failureHandler); + when(() => mockSyncStreamRepo.deletePartnerV1(any())) + .thenAnswer(failureHandler); + + final events = [ + ...SyncStreamStub.userEvents, + ...SyncStreamStub.partnerEvents, + ]; + await triggerSyncAndEmit(events); + verifyNever(() => mockSyncApiRepo.ack(any())); + }, + ); + + test("sends ack only for types where handler returns true", () async { + // Mock specific handlers: user update fails, user delete succeeds + when(() => mockSyncStreamRepo.updateUsersV1(any())) + .thenAnswer(failureHandler); + when(() => mockSyncStreamRepo.deleteUsersV1(any())) + .thenAnswer(successHandler); + // partner update fails, partner delete succeeds + when(() => mockSyncStreamRepo.updatePartnerV1(any())) + .thenAnswer(failureHandler); + + final events = [ + ...SyncStreamStub.userEvents, + ...SyncStreamStub.partnerEvents, + ]; + await triggerSyncAndEmit(events); 
+ + // Expect ack only for userDeleteV1 (ack: "2") and partnerDeleteV1 (ack: "4") + verify(() => mockSyncApiRepo.ack(any(that: containsAll(["2", "4"])))) + .called(1); + }); + + test("does not process or ack when stream emits an empty list", () async { + final future = sut.syncUsers(); + streamController.add([]); // Emit empty list + await streamController.close(); + await future; // Wait for completion + + verifyNever(() => mockSyncStreamRepo.updateUsersV1(any())); + verifyNever(() => mockSyncStreamRepo.deleteUsersV1(any())); + verifyNever(() => mockSyncStreamRepo.updatePartnerV1(any())); + verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any())); + verifyNever(() => mockSyncApiRepo.ack(any())); + }); + + test("processes multiple batches sequentially using mutex", () async { + final completer1 = Completer(); + final completer2 = Completer(); + int callOrder = 0; + int handler1StartOrder = -1; + int handler2StartOrder = -1; + int handler1Calls = 0; + int handler2Calls = 0; + + when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async { + handler1Calls++; + handler1StartOrder = ++callOrder; + await completer1.future; + return true; + }); + when(() => mockSyncStreamRepo.updatePartnerV1(any())) + .thenAnswer((_) async { + handler2Calls++; + handler2StartOrder = ++callOrder; + await completer2.future; + return true; + }); + + final batch1 = SyncStreamStub.userEvents; + final batch2 = SyncStreamStub.partnerEvents; + + final syncFuture = sut.syncUsers(); + await pumpEventQueue(); + + streamController.add(batch1); + await pumpEventQueue(); + // Small delay to ensure the first handler starts + await Future.delayed(const Duration(milliseconds: 20)); + + expect(handler1StartOrder, 1, reason: "Handler 1 should start first"); + expect(handler1Calls, 1); + + streamController.add(batch2); + await pumpEventQueue(); + // Small delay + await Future.delayed(const Duration(milliseconds: 20)); + + expect(handler2StartOrder, -1, reason: "Handler 2 should wait"); + 
expect(handler2Calls, 0); + + completer1.complete(); + await pumpEventQueue(times: 40); + // Small delay to ensure the second handler starts + await Future.delayed(const Duration(milliseconds: 20)); + + expect(handler2StartOrder, 2, reason: "Handler 2 should start after H1"); + expect(handler2Calls, 1); + + completer2.complete(); + await pumpEventQueue(times: 40); + // Small delay before closing the stream + await Future.delayed(const Duration(milliseconds: 20)); + + if (!streamController.isClosed) { + await streamController.close(); + } + await pumpEventQueue(times: 40); + // Small delay to ensure the sync completes + await Future.delayed(const Duration(milliseconds: 20)); + + await syncFuture; + + verify(() => mockSyncStreamRepo.updateUsersV1(any())).called(1); + verify(() => mockSyncStreamRepo.updatePartnerV1(any())).called(1); + verify(() => mockSyncApiRepo.ack(any())).called(2); + }); + + test( + "stops processing and ack when cancel checker is completed", + () async { + final cancellationChecker = _MockCancellationWrapper(); + when(() => cancellationChecker.isCancelled()).thenAnswer((_) => false); + + sut = SyncStreamService( + syncApiRepository: mockSyncApiRepo, + syncStreamRepository: mockSyncStreamRepo, + cancelChecker: cancellationChecker.isCancelled, + ); + + final processingCompleter = Completer(); + bool handlerStarted = false; + + // Make handler wait so we can cancel it mid-flight + when(() => mockSyncStreamRepo.deleteUsersV1(any())) + .thenAnswer((_) async { + handlerStarted = true; + await processingCompleter + .future; // Wait indefinitely until test completes it + return true; + }); + + final syncFuture = sut.syncUsers(); + await pumpEventQueue(times: 30); + + streamController.add(SyncStreamStub.userEvents); + // Ensure processing starts + await Future.delayed(const Duration(milliseconds: 10)); + + expect(handlerStarted, isTrue, reason: "Handler should have started"); + + when(() => cancellationChecker.isCancelled()).thenAnswer((_) => true); + + 
// Allow cancellation logic to propagate + await Future.delayed(const Duration(milliseconds: 10)); + + // Complete the handler's completer after cancellation signal + // to ensure the cancellation logic itself isn't blocked by the handler. + processingCompleter.complete(); + + await expectLater(syncFuture, throwsA(isA())); + + // Verify that ack was NOT called because processing was cancelled + verifyNever(() => mockSyncApiRepo.ack(any())); + }, + ); + + test("completes successfully when ack call throws an exception", () async { + when(() => mockSyncApiRepo.ack(any())).thenThrow(Exception("Ack Error")); + final events = [ + ...SyncStreamStub.userEvents, + ...SyncStreamStub.partnerEvents, + ]; + + // Should still complete even if ack fails + await triggerSyncAndEmit(events); + verify(() => mockSyncApiRepo.ack(any())) + .called(1); // Verify ack was attempted + }); + + test("waits for processing to finish if onDone called early", () async { + final processingCompleter = Completer(); + bool handlerFinished = false; + + when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async { + await processingCompleter.future; // Wait inside handler + handlerFinished = true; + return true; + }); + + final syncFuture = sut.syncUsers(); + // Allow listener to attach + // This is necessary to ensure the stream is ready to receive events + await Future.delayed(Duration.zero); + + streamController.add(SyncStreamStub.userEvents); // Emit batch + await Future.delayed( + const Duration(milliseconds: 10), + ); // Ensure processing starts + + await streamController + .close(); // Close stream (triggers onDone internally) + await Future.delayed( + const Duration(milliseconds: 10), + ); // Give onDone a chance to fire + + // At this point, onDone was called, but processing is blocked + expect(handlerFinished, isFalse); + + processingCompleter.complete(); // Allow processing to finish + await syncFuture; // Now the main future should complete + + expect(handlerFinished, isTrue); 
+ verify(() => mockSyncApiRepo.ack(any())).called(1); + }); + + test("processes events in the defined _kSyncTypeOrder", () async { + final future = sut.syncUsers(); + await pumpEventQueue(); + if (!streamController.isClosed) { + final events = [ + SyncEvent( + type: SyncEntityType.partnerV1, + data: SyncStreamStub.partnerV1, + ack: "1", + ), // Should be processed last + SyncEvent( + type: SyncEntityType.userV1, + data: SyncStreamStub.userV1Admin, + ack: "2", + ), // Should be processed second + SyncEvent( + type: SyncEntityType.partnerDeleteV1, + data: SyncStreamStub.partnerDeleteV1, + ack: "3", + ), // Should be processed third + SyncEvent( + type: SyncEntityType.userDeleteV1, + data: SyncStreamStub.userDeleteV1, + ack: "4", + ), // Should be processed first + ]; + + streamController.add(events); + await streamController.close(); + } + await future; + + verifyInOrder([ + () => mockSyncStreamRepo.deleteUsersV1(any()), + () => mockSyncStreamRepo.updateUsersV1(any()), + () => mockSyncStreamRepo.deletePartnerV1(any()), + () => mockSyncStreamRepo.updatePartnerV1(any()), + // Verify ack happens after all processing + () => mockSyncApiRepo.ack(any()), + ]); + }); + }); + + group("syncUsers", () { + test("calls getSyncEvents with correct types", () async { + // Need to close the stream for the future to complete + final future = sut.syncUsers(); + await streamController.close(); + await future; + + verify( + () => mockSyncApiRepo.getSyncEvents([ + SyncRequestType.usersV1, + SyncRequestType.partnersV1, + ]), + ).called(1); + }); + + test("calls repository methods with correctly grouped data", () async { + final events = [ + ...SyncStreamStub.userEvents, + ...SyncStreamStub.partnerEvents, + ]; + await triggerSyncAndEmit(events); + + // Verify each handler was called with the correct list of data payloads + verify( + () => mockSyncStreamRepo.updateUsersV1( + [SyncStreamStub.userV1Admin, SyncStreamStub.userV1User], + ), + ).called(1); + verify( + () => 
mockSyncStreamRepo.deleteUsersV1([SyncStreamStub.userDeleteV1]), + ).called(1); + verify( + () => mockSyncStreamRepo.updatePartnerV1([SyncStreamStub.partnerV1]), + ).called(1); + verify( + () => mockSyncStreamRepo + .deletePartnerV1([SyncStreamStub.partnerDeleteV1]), + ).called(1); + }); + }); +} diff --git a/mobile/test/fixtures/sync_stream.stub.dart b/mobile/test/fixtures/sync_stream.stub.dart new file mode 100644 index 0000000000..781e63a2bb --- /dev/null +++ b/mobile/test/fixtures/sync_stream.stub.dart @@ -0,0 +1,45 @@ +import 'package:immich_mobile/domain/models/sync_event.model.dart'; +import 'package:openapi/api.dart'; + +abstract final class SyncStreamStub { + static final userV1Admin = SyncUserV1( + deletedAt: DateTime(2020), + email: "admin@admin", + id: "1", + name: "Admin", + ); + static final userV1User = SyncUserV1( + deletedAt: DateTime(2021), + email: "user@user", + id: "2", + name: "User", + ); + static final userDeleteV1 = SyncUserDeleteV1(userId: "2"); + static final userEvents = [ + SyncEvent(type: SyncEntityType.userV1, data: userV1Admin, ack: "1"), + SyncEvent( + type: SyncEntityType.userDeleteV1, + data: userDeleteV1, + ack: "2", + ), + SyncEvent(type: SyncEntityType.userV1, data: userV1User, ack: "5"), + ]; + + static final partnerV1 = SyncPartnerV1( + inTimeline: true, + sharedById: "1", + sharedWithId: "2", + ); + static final partnerDeleteV1 = SyncPartnerDeleteV1( + sharedById: "3", + sharedWithId: "4", + ); + static final partnerEvents = [ + SyncEvent( + type: SyncEntityType.partnerDeleteV1, + data: partnerDeleteV1, + ack: "4", + ), + SyncEvent(type: SyncEntityType.partnerV1, data: partnerV1, ack: "3"), + ]; +} diff --git a/mobile/test/infrastructure/repository.mock.dart b/mobile/test/infrastructure/repository.mock.dart index 192858adff..c4a5680f71 100644 --- a/mobile/test/infrastructure/repository.mock.dart +++ b/mobile/test/infrastructure/repository.mock.dart @@ -1,6 +1,8 @@ import 
'package:immich_mobile/domain/interfaces/device_asset.interface.dart'; import 'package:immich_mobile/domain/interfaces/log.interface.dart'; import 'package:immich_mobile/domain/interfaces/store.interface.dart'; +import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart'; +import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart'; import 'package:immich_mobile/domain/interfaces/user.interface.dart'; import 'package:immich_mobile/domain/interfaces/user_api.interface.dart'; import 'package:mocktail/mocktail.dart'; @@ -14,5 +16,9 @@ class MockUserRepository extends Mock implements IUserRepository {} class MockDeviceAssetRepository extends Mock implements IDeviceAssetRepository {} +class MockSyncStreamRepository extends Mock implements ISyncStreamRepository {} + // API Repos class MockUserApiRepository extends Mock implements IUserApiRepository {} + +class MockSyncApiRepository extends Mock implements ISyncApiRepository {} diff --git a/mobile/test/service.mocks.dart b/mobile/test/service.mocks.dart index e1b8df40a3..87a8c01cf0 100644 --- a/mobile/test/service.mocks.dart +++ b/mobile/test/service.mocks.dart @@ -29,4 +29,3 @@ class MockSearchApi extends Mock implements SearchApi {} class MockAppSettingService extends Mock implements AppSettingsService {} class MockBackgroundService extends Mock implements BackgroundService {} - diff --git a/mobile/test/services/auth.service_test.dart b/mobile/test/services/auth.service_test.dart index e4f011d940..4ada98a6c9 100644 --- a/mobile/test/services/auth.service_test.dart +++ b/mobile/test/services/auth.service_test.dart @@ -8,6 +8,7 @@ import 'package:isar/isar.dart'; import 'package:mocktail/mocktail.dart'; import 'package:openapi/api.dart'; +import '../domain/service.mock.dart'; import '../repository.mocks.dart'; import '../service.mocks.dart'; import '../test_utils.dart'; @@ -18,6 +19,7 @@ void main() { late MockAuthRepository authRepository; late MockApiService apiService; late 
MockNetworkService networkService; + late MockBackgroundSyncManager backgroundSyncManager; late Isar db; setUp(() async { @@ -25,12 +27,14 @@ void main() { authRepository = MockAuthRepository(); apiService = MockApiService(); networkService = MockNetworkService(); + backgroundSyncManager = MockBackgroundSyncManager(); sut = AuthService( authApiRepository, authRepository, apiService, networkService, + backgroundSyncManager, ); registerFallbackValue(Uri()); @@ -116,24 +120,28 @@ void main() { group('logout', () { test('Should logout user', () async { when(() => authApiRepository.logout()).thenAnswer((_) async => {}); + when(() => backgroundSyncManager.cancel()).thenAnswer((_) async => {}); when(() => authRepository.clearLocalData()) .thenAnswer((_) => Future.value(null)); await sut.logout(); verify(() => authApiRepository.logout()).called(1); + verify(() => backgroundSyncManager.cancel()).called(1); verify(() => authRepository.clearLocalData()).called(1); }); test('Should clear local data even on server error', () async { when(() => authApiRepository.logout()) .thenThrow(Exception('Server error')); + when(() => backgroundSyncManager.cancel()).thenAnswer((_) async => {}); when(() => authRepository.clearLocalData()) .thenAnswer((_) => Future.value(null)); await sut.logout(); verify(() => authApiRepository.logout()).called(1); + verify(() => backgroundSyncManager.cancel()).called(1); verify(() => authRepository.clearLocalData()).called(1); }); }); From e275f2d8b390cb95cb861118747be27507e34493 Mon Sep 17 00:00:00 2001 From: Jason Rasmussen Date: Thu, 17 Apr 2025 14:41:06 -0400 Subject: [PATCH 13/13] feat: add foreign key indexes (#17672) --- server/src/decorators.ts | 3 +- .../1744900200559-AddForeignKeyIndexes.ts | 51 ++++++++++++++ server/src/schema/tables/activity.table.ts | 4 +- server/src/schema/tables/album-asset.table.ts | 18 +---- server/src/schema/tables/album.table.ts | 4 +- server/src/schema/tables/api-key.table.ts | 4 +- 
server/src/schema/tables/asset-audit.table.ts | 11 ++- server/src/schema/tables/asset-face.table.ts | 15 ++++- server/src/schema/tables/asset-files.table.ts | 11 +-- server/src/schema/tables/asset.table.ts | 16 ++--- server/src/schema/tables/exif.table.ts | 14 ++-- server/src/schema/tables/library.table.ts | 4 +- server/src/schema/tables/memory.table.ts | 4 +- .../src/schema/tables/memory_asset.table.ts | 4 +- .../src/schema/tables/partner-audit.table.ts | 11 ++- server/src/schema/tables/partner.table.ts | 20 +++--- server/src/schema/tables/person.table.ts | 4 +- server/src/schema/tables/session.table.ts | 4 +- .../schema/tables/shared-link-asset.table.ts | 4 +- server/src/schema/tables/shared-link.table.ts | 21 +++--- .../schema/tables/sync-checkpoint.table.ts | 13 +--- server/src/schema/tables/tag-asset.table.ts | 8 +-- server/src/schema/tables/tag-closure.table.ts | 8 +-- server/src/schema/tables/tag.table.ts | 11 +-- server/src/schema/tables/user-audit.table.ts | 5 +- .../src/schema/tables/user-metadata.table.ts | 8 ++- server/src/schema/tables/user.table.ts | 4 +- .../decorators/column-index.decorator.ts | 16 ----- .../from-code/decorators/column.decorator.ts | 6 +- .../foreign-key-column.decorator.ts | 2 - .../from-code/decorators/index.decorator.ts | 9 ++- server/src/sql-tools/from-code/index.ts | 16 +++-- .../processors/check-constraint.processor.ts | 4 +- .../processors/column-index.processor.ts | 32 --------- .../from-code/processors/column.processor.ts | 14 +--- .../foreign-key-constriant.processor.ts | 7 +- .../from-code/processors/index.processor.ts | 67 ++++++++++++++++++- .../primary-key-contraint.processor.ts | 4 +- .../from-code/processors/trigger.processor.ts | 6 +- .../sql-tools/from-code/processors/type.ts | 3 +- .../processors/unique-constraint.processor.ts | 33 ++++++++- .../sql-tools/from-code/register-function.ts | 35 +++++++++- .../src/sql-tools/from-code/register-item.ts | 2 - server/src/sql-tools/helpers.ts | 55 --------------- 
server/src/sql-tools/public_api.ts | 1 - .../sql-tools/column-index-name-default.ts | 5 +- server/test/sql-tools/column-index-name.ts | 46 +++++++++++++ .../foreign-key-inferred-type.stub.ts | 10 ++- ...foreign-key-with-unique-constraint.stub.ts | 10 ++- 49 files changed, 382 insertions(+), 285 deletions(-) create mode 100644 server/src/migrations/1744900200559-AddForeignKeyIndexes.ts delete mode 100644 server/src/sql-tools/from-code/decorators/column-index.decorator.ts delete mode 100644 server/src/sql-tools/from-code/processors/column-index.processor.ts create mode 100644 server/test/sql-tools/column-index-name.ts diff --git a/server/src/decorators.ts b/server/src/decorators.ts index 7085899af7..1af9342e0b 100644 --- a/server/src/decorators.ts +++ b/server/src/decorators.ts @@ -11,7 +11,8 @@ import { setUnion } from 'src/utils/set'; const GeneratedUuidV7Column = (options: Omit = {}) => Column({ ...options, type: 'uuid', nullable: false, default: () => `${immich_uuid_v7.name}()` }); -export const UpdateIdColumn = () => GeneratedUuidV7Column(); +export const UpdateIdColumn = (options: Omit = {}) => + GeneratedUuidV7Column(options); export const PrimaryGeneratedUuidV7Column = () => GeneratedUuidV7Column({ primary: true }); diff --git a/server/src/migrations/1744900200559-AddForeignKeyIndexes.ts b/server/src/migrations/1744900200559-AddForeignKeyIndexes.ts new file mode 100644 index 0000000000..db351d5bab --- /dev/null +++ b/server/src/migrations/1744900200559-AddForeignKeyIndexes.ts @@ -0,0 +1,51 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddForeignKeyIndexes1744900200559 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c" ON "libraries" ("ownerId")`); + await queryRunner.query(`CREATE INDEX "IDX_91704e101438fd0653f582426d" ON "asset_stack" ("primaryAssetId")`); + await queryRunner.query(`CREATE INDEX 
"IDX_c05079e542fd74de3b5ecb5c1c" ON "asset_stack" ("ownerId")`); + await queryRunner.query(`CREATE INDEX "IDX_2c5ac0d6fb58b238fd2068de67" ON "assets" ("ownerId")`); + await queryRunner.query(`CREATE INDEX "IDX_16294b83fa8c0149719a1f631e" ON "assets" ("livePhotoVideoId")`); + await queryRunner.query(`CREATE INDEX "IDX_9977c3c1de01c3d848039a6b90" ON "assets" ("libraryId")`); + await queryRunner.query(`CREATE INDEX "IDX_f15d48fa3ea5e4bda05ca8ab20" ON "assets" ("stackId")`); + await queryRunner.query(`CREATE INDEX "IDX_b22c53f35ef20c28c21637c85f" ON "albums" ("ownerId")`); + await queryRunner.query(`CREATE INDEX "IDX_05895aa505a670300d4816debc" ON "albums" ("albumThumbnailAssetId")`); + await queryRunner.query(`CREATE INDEX "IDX_1af8519996fbfb3684b58df280" ON "activity" ("albumId")`); + await queryRunner.query(`CREATE INDEX "IDX_3571467bcbe021f66e2bdce96e" ON "activity" ("userId")`); + await queryRunner.query(`CREATE INDEX "IDX_8091ea76b12338cb4428d33d78" ON "activity" ("assetId")`); + await queryRunner.query(`CREATE INDEX "IDX_6c2e267ae764a9413b863a2934" ON "api_keys" ("userId")`); + await queryRunner.query(`CREATE INDEX "IDX_5527cc99f530a547093f9e577b" ON "person" ("ownerId")`); + await queryRunner.query(`CREATE INDEX "IDX_2bbabe31656b6778c6b87b6102" ON "person" ("faceAssetId")`); + await queryRunner.query(`CREATE INDEX "IDX_575842846f0c28fa5da46c99b1" ON "memories" ("ownerId")`); + await queryRunner.query(`CREATE INDEX "IDX_d7e875c6c60e661723dbf372fd" ON "partners" ("sharedWithId")`); + await queryRunner.query(`CREATE INDEX "IDX_57de40bc620f456c7311aa3a1e" ON "sessions" ("userId")`); + await queryRunner.query(`CREATE INDEX "IDX_66fe3837414c5a9f1c33ca4934" ON "shared_links" ("userId")`); + await queryRunner.query(`CREATE INDEX "IDX_d8ddd9d687816cc490432b3d4b" ON "session_sync_checkpoints" ("sessionId")`); + await queryRunner.query(`CREATE INDEX "IDX_9f9590cc11561f1f48ff034ef9" ON "tags" ("parentId")`); + } + + public async down(queryRunner: QueryRunner): Promise { + 
await queryRunner.query(`DROP INDEX "IDX_66fe3837414c5a9f1c33ca4934";`); + await queryRunner.query(`DROP INDEX "IDX_91704e101438fd0653f582426d";`); + await queryRunner.query(`DROP INDEX "IDX_c05079e542fd74de3b5ecb5c1c";`); + await queryRunner.query(`DROP INDEX "IDX_5527cc99f530a547093f9e577b";`); + await queryRunner.query(`DROP INDEX "IDX_2bbabe31656b6778c6b87b6102";`); + await queryRunner.query(`DROP INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c";`); + await queryRunner.query(`DROP INDEX "IDX_9f9590cc11561f1f48ff034ef9";`); + await queryRunner.query(`DROP INDEX "IDX_2c5ac0d6fb58b238fd2068de67";`); + await queryRunner.query(`DROP INDEX "IDX_16294b83fa8c0149719a1f631e";`); + await queryRunner.query(`DROP INDEX "IDX_9977c3c1de01c3d848039a6b90";`); + await queryRunner.query(`DROP INDEX "IDX_f15d48fa3ea5e4bda05ca8ab20";`); + await queryRunner.query(`DROP INDEX "IDX_b22c53f35ef20c28c21637c85f";`); + await queryRunner.query(`DROP INDEX "IDX_05895aa505a670300d4816debc";`); + await queryRunner.query(`DROP INDEX "IDX_57de40bc620f456c7311aa3a1e";`); + await queryRunner.query(`DROP INDEX "IDX_d8ddd9d687816cc490432b3d4b";`); + await queryRunner.query(`DROP INDEX "IDX_d7e875c6c60e661723dbf372fd";`); + await queryRunner.query(`DROP INDEX "IDX_575842846f0c28fa5da46c99b1";`); + await queryRunner.query(`DROP INDEX "IDX_6c2e267ae764a9413b863a2934";`); + await queryRunner.query(`DROP INDEX "IDX_1af8519996fbfb3684b58df280";`); + await queryRunner.query(`DROP INDEX "IDX_3571467bcbe021f66e2bdce96e";`); + await queryRunner.query(`DROP INDEX "IDX_8091ea76b12338cb4428d33d78";`); + } +} diff --git a/server/src/schema/tables/activity.table.ts b/server/src/schema/tables/activity.table.ts index e7a144722c..802a86a303 100644 --- a/server/src/schema/tables/activity.table.ts +++ b/server/src/schema/tables/activity.table.ts @@ -5,7 +5,6 @@ import { UserTable } from 'src/schema/tables/user.table'; import { Check, Column, - ColumnIndex, CreateDateColumn, ForeignKeyColumn, Index, @@ -51,7 +50,6 @@ export 
class ActivityTable { @Column({ type: 'boolean', default: false }) isLiked!: boolean; - @ColumnIndex('IDX_activity_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_activity_update_id' }) updateId!: string; } diff --git a/server/src/schema/tables/album-asset.table.ts b/server/src/schema/tables/album-asset.table.ts index 1b931e3116..8054009c39 100644 --- a/server/src/schema/tables/album-asset.table.ts +++ b/server/src/schema/tables/album-asset.table.ts @@ -1,25 +1,13 @@ import { AlbumTable } from 'src/schema/tables/album.table'; import { AssetTable } from 'src/schema/tables/asset.table'; -import { ColumnIndex, CreateDateColumn, ForeignKeyColumn, Table } from 'src/sql-tools'; +import { CreateDateColumn, ForeignKeyColumn, Table } from 'src/sql-tools'; @Table({ name: 'albums_assets_assets', primaryConstraintName: 'PK_c67bc36fa845fb7b18e0e398180' }) export class AlbumAssetTable { - @ForeignKeyColumn(() => AlbumTable, { - onDelete: 'CASCADE', - onUpdate: 'CASCADE', - nullable: false, - primary: true, - }) - @ColumnIndex() + @ForeignKeyColumn(() => AlbumTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true }) albumsId!: string; - @ForeignKeyColumn(() => AssetTable, { - onDelete: 'CASCADE', - onUpdate: 'CASCADE', - nullable: false, - primary: true, - }) - @ColumnIndex() + @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true }) assetsId!: string; @CreateDateColumn() diff --git a/server/src/schema/tables/album.table.ts b/server/src/schema/tables/album.table.ts index cdfd092b1b..428947fa51 100644 --- a/server/src/schema/tables/album.table.ts +++ b/server/src/schema/tables/album.table.ts @@ -4,7 +4,6 @@ import { AssetTable } from 'src/schema/tables/asset.table'; import { UserTable } from 'src/schema/tables/user.table'; import { Column, - ColumnIndex, CreateDateColumn, DeleteDateColumn, ForeignKeyColumn, @@ -51,7 +50,6 @@ export class AlbumTable { @Column({ default: 
AssetOrder.DESC }) order!: AssetOrder; - @ColumnIndex('IDX_albums_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_albums_update_id' }) updateId?: string; } diff --git a/server/src/schema/tables/api-key.table.ts b/server/src/schema/tables/api-key.table.ts index 29c4ad2b0f..1d4cc83172 100644 --- a/server/src/schema/tables/api-key.table.ts +++ b/server/src/schema/tables/api-key.table.ts @@ -3,7 +3,6 @@ import { Permission } from 'src/enum'; import { UserTable } from 'src/schema/tables/user.table'; import { Column, - ColumnIndex, CreateDateColumn, ForeignKeyColumn, PrimaryGeneratedColumn, @@ -35,7 +34,6 @@ export class APIKeyTable { @Column({ array: true, type: 'character varying' }) permissions!: Permission[]; - @ColumnIndex({ name: 'IDX_api_keys_update_id' }) - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_api_keys_update_id' }) updateId?: string; } diff --git a/server/src/schema/tables/asset-audit.table.ts b/server/src/schema/tables/asset-audit.table.ts index 55d6f5c911..030256480c 100644 --- a/server/src/schema/tables/asset-audit.table.ts +++ b/server/src/schema/tables/asset-audit.table.ts @@ -1,20 +1,17 @@ import { PrimaryGeneratedUuidV7Column } from 'src/decorators'; -import { Column, ColumnIndex, CreateDateColumn, Table } from 'src/sql-tools'; +import { Column, CreateDateColumn, Table } from 'src/sql-tools'; @Table('assets_audit') export class AssetAuditTable { @PrimaryGeneratedUuidV7Column() id!: string; - @ColumnIndex('IDX_assets_audit_asset_id') - @Column({ type: 'uuid' }) + @Column({ type: 'uuid', indexName: 'IDX_assets_audit_asset_id' }) assetId!: string; - @ColumnIndex('IDX_assets_audit_owner_id') - @Column({ type: 'uuid' }) + @Column({ type: 'uuid', indexName: 'IDX_assets_audit_owner_id' }) ownerId!: string; - @ColumnIndex('IDX_assets_audit_deleted_at') - @CreateDateColumn({ default: () => 'clock_timestamp()' }) + @CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_assets_audit_deleted_at' }) deletedAt!: 
Date; } diff --git a/server/src/schema/tables/asset-face.table.ts b/server/src/schema/tables/asset-face.table.ts index 0ae99f44bf..52f4364a93 100644 --- a/server/src/schema/tables/asset-face.table.ts +++ b/server/src/schema/tables/asset-face.table.ts @@ -8,10 +8,21 @@ import { Column, DeleteDateColumn, ForeignKeyColumn, Index, PrimaryGeneratedColu @Index({ name: 'IDX_asset_faces_assetId_personId', columns: ['assetId', 'personId'] }) @Index({ columns: ['personId', 'assetId'] }) export class AssetFaceTable { - @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' }) + @ForeignKeyColumn(() => AssetTable, { + onDelete: 'CASCADE', + onUpdate: 'CASCADE', + // [assetId, personId] is the PK constraint + index: false, + }) assetId!: string; - @ForeignKeyColumn(() => PersonTable, { onDelete: 'SET NULL', onUpdate: 'CASCADE', nullable: true }) + @ForeignKeyColumn(() => PersonTable, { + onDelete: 'SET NULL', + onUpdate: 'CASCADE', + nullable: true, + // [personId, assetId] makes this redundant + index: false, + }) personId!: string | null; @Column({ default: 0, type: 'integer' }) diff --git a/server/src/schema/tables/asset-files.table.ts b/server/src/schema/tables/asset-files.table.ts index fb8750a8ef..0859bd5cf0 100644 --- a/server/src/schema/tables/asset-files.table.ts +++ b/server/src/schema/tables/asset-files.table.ts @@ -3,7 +3,6 @@ import { AssetFileType } from 'src/enum'; import { AssetTable } from 'src/schema/tables/asset.table'; import { Column, - ColumnIndex, CreateDateColumn, ForeignKeyColumn, PrimaryGeneratedColumn, @@ -19,8 +18,11 @@ export class AssetFileTable { @PrimaryGeneratedColumn() id!: string; - @ColumnIndex('IDX_asset_files_assetId') - @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' }) + @ForeignKeyColumn(() => AssetTable, { + onDelete: 'CASCADE', + onUpdate: 'CASCADE', + indexName: 'IDX_asset_files_assetId', + }) assetId?: string; @CreateDateColumn() @@ -35,7 +37,6 @@ export class AssetFileTable { 
@Column() path!: string; - @ColumnIndex('IDX_asset_files_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_asset_files_update_id' }) updateId?: string; } diff --git a/server/src/schema/tables/asset.table.ts b/server/src/schema/tables/asset.table.ts index 250c3546a2..9a9670cd0e 100644 --- a/server/src/schema/tables/asset.table.ts +++ b/server/src/schema/tables/asset.table.ts @@ -9,7 +9,6 @@ import { UserTable } from 'src/schema/tables/user.table'; import { AfterDeleteTrigger, Column, - ColumnIndex, CreateDateColumn, DeleteDateColumn, ForeignKeyColumn, @@ -78,8 +77,7 @@ export class AssetTable { @Column() originalPath!: string; - @ColumnIndex('idx_asset_file_created_at') - @Column({ type: 'timestamp with time zone' }) + @Column({ type: 'timestamp with time zone', indexName: 'idx_asset_file_created_at' }) fileCreatedAt!: Date; @Column({ type: 'timestamp with time zone' }) @@ -94,8 +92,7 @@ export class AssetTable { @Column({ type: 'character varying', nullable: true, default: '' }) encodedVideoPath!: string | null; - @Column({ type: 'bytea' }) - @ColumnIndex() + @Column({ type: 'bytea', index: true }) checksum!: Buffer; // sha1 checksum @Column({ type: 'boolean', default: true }) @@ -113,8 +110,7 @@ export class AssetTable { @Column({ type: 'boolean', default: false }) isArchived!: boolean; - @Column() - @ColumnIndex() + @Column({ index: true }) originalFileName!: string; @Column({ nullable: true }) @@ -141,14 +137,12 @@ export class AssetTable { @ForeignKeyColumn(() => StackTable, { nullable: true, onDelete: 'SET NULL', onUpdate: 'CASCADE' }) stackId?: string | null; - @ColumnIndex('IDX_assets_duplicateId') - @Column({ type: 'uuid', nullable: true }) + @Column({ type: 'uuid', nullable: true, indexName: 'IDX_assets_duplicateId' }) duplicateId!: string | null; @Column({ enum: assets_status_enum, default: AssetStatus.ACTIVE }) status!: AssetStatus; - @ColumnIndex('IDX_assets_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 
'IDX_assets_update_id' }) updateId?: string; } diff --git a/server/src/schema/tables/exif.table.ts b/server/src/schema/tables/exif.table.ts index e40ce94b4f..ca300945c3 100644 --- a/server/src/schema/tables/exif.table.ts +++ b/server/src/schema/tables/exif.table.ts @@ -1,6 +1,6 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { AssetTable } from 'src/schema/tables/asset.table'; -import { Column, ColumnIndex, ForeignKeyColumn, Table, UpdateDateColumn } from 'src/sql-tools'; +import { Column, ForeignKeyColumn, Table, UpdateDateColumn } from 'src/sql-tools'; @Table('exif') @UpdatedAtTrigger('asset_exif_updated_at') @@ -50,8 +50,7 @@ export class ExifTable { @Column({ type: 'double precision', nullable: true }) longitude!: number | null; - @ColumnIndex('exif_city') - @Column({ type: 'character varying', nullable: true }) + @Column({ type: 'character varying', nullable: true, indexName: 'exif_city' }) city!: string | null; @Column({ type: 'character varying', nullable: true }) @@ -69,8 +68,7 @@ export class ExifTable { @Column({ type: 'character varying', nullable: true }) exposureTime!: string | null; - @ColumnIndex('IDX_live_photo_cid') - @Column({ type: 'character varying', nullable: true }) + @Column({ type: 'character varying', nullable: true, indexName: 'IDX_live_photo_cid' }) livePhotoCID!: string | null; @Column({ type: 'character varying', nullable: true }) @@ -88,8 +86,7 @@ export class ExifTable { @Column({ type: 'integer', nullable: true }) bitsPerSample!: number | null; - @ColumnIndex('IDX_auto_stack_id') - @Column({ type: 'character varying', nullable: true }) + @Column({ type: 'character varying', nullable: true, indexName: 'IDX_auto_stack_id' }) autoStackId!: string | null; @Column({ type: 'integer', nullable: true }) @@ -98,7 +95,6 @@ export class ExifTable { @UpdateDateColumn({ default: () => 'clock_timestamp()' }) updatedAt?: Date; - @ColumnIndex('IDX_asset_exif_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 
'IDX_asset_exif_update_id' }) updateId?: string; } diff --git a/server/src/schema/tables/library.table.ts b/server/src/schema/tables/library.table.ts index 54b3752f41..8b21d5feb0 100644 --- a/server/src/schema/tables/library.table.ts +++ b/server/src/schema/tables/library.table.ts @@ -2,7 +2,6 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { UserTable } from 'src/schema/tables/user.table'; import { Column, - ColumnIndex, CreateDateColumn, DeleteDateColumn, ForeignKeyColumn, @@ -41,7 +40,6 @@ export class LibraryTable { @Column({ type: 'timestamp with time zone', nullable: true }) refreshedAt!: Date | null; - @ColumnIndex('IDX_libraries_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_libraries_update_id' }) updateId?: string; } diff --git a/server/src/schema/tables/memory.table.ts b/server/src/schema/tables/memory.table.ts index 1926405565..32dafe3384 100644 --- a/server/src/schema/tables/memory.table.ts +++ b/server/src/schema/tables/memory.table.ts @@ -3,7 +3,6 @@ import { MemoryType } from 'src/enum'; import { UserTable } from 'src/schema/tables/user.table'; import { Column, - ColumnIndex, CreateDateColumn, DeleteDateColumn, ForeignKeyColumn, @@ -55,7 +54,6 @@ export class MemoryTable { @Column({ type: 'timestamp with time zone', nullable: true }) hideAt?: Date; - @ColumnIndex('IDX_memories_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_memories_update_id' }) updateId?: string; } diff --git a/server/src/schema/tables/memory_asset.table.ts b/server/src/schema/tables/memory_asset.table.ts index 864e6291c7..0e5ca29a08 100644 --- a/server/src/schema/tables/memory_asset.table.ts +++ b/server/src/schema/tables/memory_asset.table.ts @@ -1,14 +1,12 @@ import { AssetTable } from 'src/schema/tables/asset.table'; import { MemoryTable } from 'src/schema/tables/memory.table'; -import { ColumnIndex, ForeignKeyColumn, Table } from 'src/sql-tools'; +import { ForeignKeyColumn, Table } from 'src/sql-tools'; 
@Table('memories_assets_assets') export class MemoryAssetTable { - @ColumnIndex() @ForeignKeyColumn(() => MemoryTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true }) memoriesId!: string; - @ColumnIndex() @ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true }) assetsId!: string; } diff --git a/server/src/schema/tables/partner-audit.table.ts b/server/src/schema/tables/partner-audit.table.ts index 08b6e94626..da5243dc75 100644 --- a/server/src/schema/tables/partner-audit.table.ts +++ b/server/src/schema/tables/partner-audit.table.ts @@ -1,20 +1,17 @@ import { PrimaryGeneratedUuidV7Column } from 'src/decorators'; -import { Column, ColumnIndex, CreateDateColumn, Table } from 'src/sql-tools'; +import { Column, CreateDateColumn, Table } from 'src/sql-tools'; @Table('partners_audit') export class PartnerAuditTable { @PrimaryGeneratedUuidV7Column() id!: string; - @ColumnIndex('IDX_partners_audit_shared_by_id') - @Column({ type: 'uuid' }) + @Column({ type: 'uuid', indexName: 'IDX_partners_audit_shared_by_id' }) sharedById!: string; - @ColumnIndex('IDX_partners_audit_shared_with_id') - @Column({ type: 'uuid' }) + @Column({ type: 'uuid', indexName: 'IDX_partners_audit_shared_with_id' }) sharedWithId!: string; - @ColumnIndex('IDX_partners_audit_deleted_at') - @CreateDateColumn({ default: () => 'clock_timestamp()' }) + @CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_partners_audit_deleted_at' }) deletedAt!: Date; } diff --git a/server/src/schema/tables/partner.table.ts b/server/src/schema/tables/partner.table.ts index 770107fe7a..0da60cfc0c 100644 --- a/server/src/schema/tables/partner.table.ts +++ b/server/src/schema/tables/partner.table.ts @@ -1,15 +1,7 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { partners_delete_audit } from 'src/schema/functions'; import { UserTable } from 'src/schema/tables/user.table'; -import { - AfterDeleteTrigger, - Column, - ColumnIndex, - 
CreateDateColumn, - ForeignKeyColumn, - Table, - UpdateDateColumn, -} from 'src/sql-tools'; +import { AfterDeleteTrigger, Column, CreateDateColumn, ForeignKeyColumn, Table, UpdateDateColumn } from 'src/sql-tools'; @Table('partners') @UpdatedAtTrigger('partners_updated_at') @@ -21,7 +13,12 @@ import { when: 'pg_trigger_depth() = 0', }) export class PartnerTable { - @ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', primary: true }) + @ForeignKeyColumn(() => UserTable, { + onDelete: 'CASCADE', + primary: true, + // [sharedById, sharedWithId] is the PK constraint + index: false, + }) sharedById!: string; @ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', primary: true }) @@ -36,7 +33,6 @@ export class PartnerTable { @Column({ type: 'boolean', default: false }) inTimeline!: boolean; - @ColumnIndex('IDX_partners_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_partners_update_id' }) updateId!: string; } diff --git a/server/src/schema/tables/person.table.ts b/server/src/schema/tables/person.table.ts index b96fc5b709..1320b91f18 100644 --- a/server/src/schema/tables/person.table.ts +++ b/server/src/schema/tables/person.table.ts @@ -4,7 +4,6 @@ import { UserTable } from 'src/schema/tables/user.table'; import { Check, Column, - ColumnIndex, CreateDateColumn, ForeignKeyColumn, PrimaryGeneratedColumn, @@ -49,7 +48,6 @@ export class PersonTable { @Column({ type: 'character varying', nullable: true, default: null }) color?: string | null; - @ColumnIndex('IDX_person_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_person_update_id' }) updateId!: string; } diff --git a/server/src/schema/tables/session.table.ts b/server/src/schema/tables/session.table.ts index a66732a7d9..ad43d0d6e4 100644 --- a/server/src/schema/tables/session.table.ts +++ b/server/src/schema/tables/session.table.ts @@ -2,7 +2,6 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { UserTable } from 'src/schema/tables/user.table'; import 
{ Column, - ColumnIndex, CreateDateColumn, ForeignKeyColumn, PrimaryGeneratedColumn, @@ -35,7 +34,6 @@ export class SessionTable { @Column({ default: '' }) deviceOS!: string; - @ColumnIndex('IDX_sessions_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_sessions_update_id' }) updateId!: string; } diff --git a/server/src/schema/tables/shared-link-asset.table.ts b/server/src/schema/tables/shared-link-asset.table.ts index 1eb294c1e8..66c9068441 100644 --- a/server/src/schema/tables/shared-link-asset.table.ts +++ b/server/src/schema/tables/shared-link-asset.table.ts @@ -1,14 +1,12 @@ import { AssetTable } from 'src/schema/tables/asset.table'; import { SharedLinkTable } from 'src/schema/tables/shared-link.table'; -import { ColumnIndex, ForeignKeyColumn, Table } from 'src/sql-tools'; +import { ForeignKeyColumn, Table } from 'src/sql-tools'; @Table('shared_link__asset') export class SharedLinkAssetTable { - @ColumnIndex() @ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true }) assetsId!: string; - @ColumnIndex() @ForeignKeyColumn(() => SharedLinkTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true }) sharedLinksId!: string; } diff --git a/server/src/schema/tables/shared-link.table.ts b/server/src/schema/tables/shared-link.table.ts index 36237c58ef..3bb36b36ed 100644 --- a/server/src/schema/tables/shared-link.table.ts +++ b/server/src/schema/tables/shared-link.table.ts @@ -1,15 +1,7 @@ import { SharedLinkType } from 'src/enum'; import { AlbumTable } from 'src/schema/tables/album.table'; import { UserTable } from 'src/schema/tables/user.table'; -import { - Column, - ColumnIndex, - CreateDateColumn, - ForeignKeyColumn, - PrimaryGeneratedColumn, - Table, - Unique, -} from 'src/sql-tools'; +import { Column, CreateDateColumn, ForeignKeyColumn, PrimaryGeneratedColumn, Table, Unique } from 'src/sql-tools'; @Table('shared_links') @Unique({ name: 'UQ_sharedlink_key', columns: ['key'] }) @@ -23,8 +15,7 @@ export 
class SharedLinkTable { @ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' }) userId!: string; - @ColumnIndex('IDX_sharedlink_key') - @Column({ type: 'bytea' }) + @Column({ type: 'bytea', indexName: 'IDX_sharedlink_key' }) key!: Buffer; // use to access the inidividual asset @Column() @@ -39,8 +30,12 @@ export class SharedLinkTable { @Column({ type: 'boolean', default: false }) allowUpload!: boolean; - @ColumnIndex('IDX_sharedlink_albumId') - @ForeignKeyColumn(() => AlbumTable, { nullable: true, onDelete: 'CASCADE', onUpdate: 'CASCADE' }) + @ForeignKeyColumn(() => AlbumTable, { + nullable: true, + onDelete: 'CASCADE', + onUpdate: 'CASCADE', + indexName: 'IDX_sharedlink_albumId', + }) albumId!: string; @Column({ type: 'boolean', default: true }) diff --git a/server/src/schema/tables/sync-checkpoint.table.ts b/server/src/schema/tables/sync-checkpoint.table.ts index 831205ce7a..21fd7983ac 100644 --- a/server/src/schema/tables/sync-checkpoint.table.ts +++ b/server/src/schema/tables/sync-checkpoint.table.ts @@ -1,15 +1,7 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { SyncEntityType } from 'src/enum'; import { SessionTable } from 'src/schema/tables/session.table'; -import { - Column, - ColumnIndex, - CreateDateColumn, - ForeignKeyColumn, - PrimaryColumn, - Table, - UpdateDateColumn, -} from 'src/sql-tools'; +import { Column, CreateDateColumn, ForeignKeyColumn, PrimaryColumn, Table, UpdateDateColumn } from 'src/sql-tools'; @Table('session_sync_checkpoints') @UpdatedAtTrigger('session_sync_checkpoints_updated_at') @@ -29,7 +21,6 @@ export class SessionSyncCheckpointTable { @Column() ack!: string; - @ColumnIndex('IDX_session_sync_checkpoints_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_session_sync_checkpoints_update_id' }) updateId!: string; } diff --git a/server/src/schema/tables/tag-asset.table.ts b/server/src/schema/tables/tag-asset.table.ts index 5f24799cec..8793af0a8a 100644 --- 
a/server/src/schema/tables/tag-asset.table.ts +++ b/server/src/schema/tables/tag-asset.table.ts @@ -1,15 +1,13 @@ import { AssetTable } from 'src/schema/tables/asset.table'; import { TagTable } from 'src/schema/tables/tag.table'; -import { ColumnIndex, ForeignKeyColumn, Index, Table } from 'src/sql-tools'; +import { ForeignKeyColumn, Index, Table } from 'src/sql-tools'; @Index({ name: 'IDX_tag_asset_assetsId_tagsId', columns: ['assetsId', 'tagsId'] }) @Table('tag_asset') export class TagAssetTable { - @ColumnIndex() - @ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true }) + @ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true }) assetsId!: string; - @ColumnIndex() - @ForeignKeyColumn(() => TagTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true }) + @ForeignKeyColumn(() => TagTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true }) tagsId!: string; } diff --git a/server/src/schema/tables/tag-closure.table.ts b/server/src/schema/tables/tag-closure.table.ts index acde84b91d..8829e802e1 100644 --- a/server/src/schema/tables/tag-closure.table.ts +++ b/server/src/schema/tables/tag-closure.table.ts @@ -1,13 +1,11 @@ import { TagTable } from 'src/schema/tables/tag.table'; -import { ColumnIndex, ForeignKeyColumn, Table } from 'src/sql-tools'; +import { ForeignKeyColumn, Table } from 'src/sql-tools'; @Table('tags_closure') export class TagClosureTable { - @ColumnIndex() - @ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION' }) + @ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION', index: true }) id_ancestor!: string; - @ColumnIndex() - @ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION' }) + @ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION', index: true }) id_descendant!: 
string; } diff --git a/server/src/schema/tables/tag.table.ts b/server/src/schema/tables/tag.table.ts index 5042e2eb0e..a9f2a57f27 100644 --- a/server/src/schema/tables/tag.table.ts +++ b/server/src/schema/tables/tag.table.ts @@ -2,7 +2,6 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators'; import { UserTable } from 'src/schema/tables/user.table'; import { Column, - ColumnIndex, CreateDateColumn, ForeignKeyColumn, PrimaryGeneratedColumn, @@ -18,7 +17,12 @@ export class TagTable { @PrimaryGeneratedColumn() id!: string; - @ForeignKeyColumn(() => UserTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE' }) + @ForeignKeyColumn(() => UserTable, { + onUpdate: 'CASCADE', + onDelete: 'CASCADE', + // [userId, value] makes this redundant + index: false, + }) userId!: string; @Column() @@ -36,7 +40,6 @@ export class TagTable { @ForeignKeyColumn(() => TagTable, { nullable: true, onDelete: 'CASCADE' }) parentId?: string; - @ColumnIndex('IDX_tags_update_id') - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_tags_update_id' }) updateId!: string; } diff --git a/server/src/schema/tables/user-audit.table.ts b/server/src/schema/tables/user-audit.table.ts index 0f881ccc9a..e0c9afcdc3 100644 --- a/server/src/schema/tables/user-audit.table.ts +++ b/server/src/schema/tables/user-audit.table.ts @@ -1,13 +1,12 @@ import { PrimaryGeneratedUuidV7Column } from 'src/decorators'; -import { Column, ColumnIndex, CreateDateColumn, Table } from 'src/sql-tools'; +import { Column, CreateDateColumn, Table } from 'src/sql-tools'; @Table('users_audit') export class UserAuditTable { @Column({ type: 'uuid' }) userId!: string; - @ColumnIndex('IDX_users_audit_deleted_at') - @CreateDateColumn({ default: () => 'clock_timestamp()' }) + @CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_users_audit_deleted_at' }) deletedAt!: Date; @PrimaryGeneratedUuidV7Column() diff --git a/server/src/schema/tables/user-metadata.table.ts b/server/src/schema/tables/user-metadata.table.ts 
index 6d03acaf80..04b457867f 100644 --- a/server/src/schema/tables/user-metadata.table.ts +++ b/server/src/schema/tables/user-metadata.table.ts @@ -5,7 +5,13 @@ import { UserMetadata, UserMetadataItem } from 'src/types'; @Table('user_metadata') export class UserMetadataTable implements UserMetadataItem { - @ForeignKeyColumn(() => UserTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true }) + @ForeignKeyColumn(() => UserTable, { + onUpdate: 'CASCADE', + onDelete: 'CASCADE', + primary: true, + // [userId, key] is the PK constraint + index: false, + }) userId!: string; @PrimaryColumn({ type: 'character varying' }) diff --git a/server/src/schema/tables/user.table.ts b/server/src/schema/tables/user.table.ts index 5160f979b9..eeef923796 100644 --- a/server/src/schema/tables/user.table.ts +++ b/server/src/schema/tables/user.table.ts @@ -5,7 +5,6 @@ import { users_delete_audit } from 'src/schema/functions'; import { AfterDeleteTrigger, Column, - ColumnIndex, CreateDateColumn, DeleteDateColumn, Index, @@ -77,7 +76,6 @@ export class UserTable { @Column({ type: 'timestamp with time zone', default: () => 'now()' }) profileChangedAt!: Generated; - @ColumnIndex({ name: 'IDX_users_update_id' }) - @UpdateIdColumn() + @UpdateIdColumn({ indexName: 'IDX_users_update_id' }) updateId!: Generated; } diff --git a/server/src/sql-tools/from-code/decorators/column-index.decorator.ts b/server/src/sql-tools/from-code/decorators/column-index.decorator.ts deleted file mode 100644 index ab15292612..0000000000 --- a/server/src/sql-tools/from-code/decorators/column-index.decorator.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { register } from 'src/sql-tools/from-code/register'; -import { asOptions } from 'src/sql-tools/helpers'; - -export type ColumnIndexOptions = { - name?: string; - unique?: boolean; - expression?: string; - using?: string; - with?: string; - where?: string; - synchronize?: boolean; -}; -export const ColumnIndex = (options: string | ColumnIndexOptions = {}): 
PropertyDecorator => { - return (object: object, propertyName: string | symbol) => - void register({ type: 'columnIndex', item: { object, propertyName, options: asOptions(options) } }); -}; diff --git a/server/src/sql-tools/from-code/decorators/column.decorator.ts b/server/src/sql-tools/from-code/decorators/column.decorator.ts index 74a83cbcf3..7b00af80cc 100644 --- a/server/src/sql-tools/from-code/decorators/column.decorator.ts +++ b/server/src/sql-tools/from-code/decorators/column.decorator.ts @@ -15,13 +15,15 @@ export type ColumnBaseOptions = { synchronize?: boolean; storage?: ColumnStorage; identity?: boolean; + index?: boolean; + indexName?: string; + unique?: boolean; + uniqueConstraintName?: string; }; export type ColumnOptions = ColumnBaseOptions & { enum?: DatabaseEnum; array?: boolean; - unique?: boolean; - uniqueConstraintName?: string; }; export const Column = (options: string | ColumnOptions = {}): PropertyDecorator => { diff --git a/server/src/sql-tools/from-code/decorators/foreign-key-column.decorator.ts b/server/src/sql-tools/from-code/decorators/foreign-key-column.decorator.ts index 070aa5cb51..beb3aa6fd6 100644 --- a/server/src/sql-tools/from-code/decorators/foreign-key-column.decorator.ts +++ b/server/src/sql-tools/from-code/decorators/foreign-key-column.decorator.ts @@ -7,8 +7,6 @@ export type ForeignKeyColumnOptions = ColumnBaseOptions & { onUpdate?: Action; onDelete?: Action; constraintName?: string; - unique?: boolean; - uniqueConstraintName?: string; }; export const ForeignKeyColumn = (target: () => object, options: ForeignKeyColumnOptions): PropertyDecorator => { diff --git a/server/src/sql-tools/from-code/decorators/index.decorator.ts b/server/src/sql-tools/from-code/decorators/index.decorator.ts index cd76b5e36d..5d90c4f58d 100644 --- a/server/src/sql-tools/from-code/decorators/index.decorator.ts +++ b/server/src/sql-tools/from-code/decorators/index.decorator.ts @@ -1,8 +1,13 @@ -import { ColumnIndexOptions } from 
'src/sql-tools/from-code/decorators/column-index.decorator'; import { register } from 'src/sql-tools/from-code/register'; import { asOptions } from 'src/sql-tools/helpers'; -export type IndexOptions = ColumnIndexOptions & { +export type IndexOptions = { + name?: string; + unique?: boolean; + expression?: string; + using?: string; + with?: string; + where?: string; columns?: string[]; synchronize?: boolean; }; diff --git a/server/src/sql-tools/from-code/index.ts b/server/src/sql-tools/from-code/index.ts index 3c74d2763c..95f1dbb22d 100644 --- a/server/src/sql-tools/from-code/index.ts +++ b/server/src/sql-tools/from-code/index.ts @@ -1,6 +1,5 @@ import 'reflect-metadata'; import { processCheckConstraints } from 'src/sql-tools/from-code/processors/check-constraint.processor'; -import { processColumnIndexes } from 'src/sql-tools/from-code/processors/column-index.processor'; import { processColumns } from 'src/sql-tools/from-code/processors/column.processor'; import { processConfigurationParameters } from 'src/sql-tools/from-code/processors/configuration-parameter.processor'; import { processDatabases } from 'src/sql-tools/from-code/processors/database.processor'; @@ -36,14 +35,21 @@ const processors: Processor[] = [ processUniqueConstraints, processCheckConstraints, processPrimaryKeyConstraints, - processIndexes, - processColumnIndexes, processForeignKeyConstraints, + processIndexes, processTriggers, ]; -export const schemaFromCode = () => { +export type SchemaFromCodeOptions = { + /** automatically create indexes on foreign key columns */ + createForeignKeyIndexes?: boolean; +}; +export const schemaFromCode = (options: SchemaFromCodeOptions = {}) => { if (!initialized) { + const globalOptions = { + createForeignKeyIndexes: options.createForeignKeyIndexes ?? 
true, + }; + const builder: SchemaBuilder = { name: 'postgres', schemaName: 'public', @@ -58,7 +64,7 @@ export const schemaFromCode = () => { const items = getRegisteredItems(); for (const processor of processors) { - processor(builder, items); + processor(builder, items, globalOptions); } schema = { ...builder, tables: builder.tables.map(({ metadata: _, ...table }) => table) }; diff --git a/server/src/sql-tools/from-code/processors/check-constraint.processor.ts b/server/src/sql-tools/from-code/processors/check-constraint.processor.ts index d61ee18277..feb21b9894 100644 --- a/server/src/sql-tools/from-code/processors/check-constraint.processor.ts +++ b/server/src/sql-tools/from-code/processors/check-constraint.processor.ts @@ -1,6 +1,6 @@ import { onMissingTable, resolveTable } from 'src/sql-tools/from-code/processors/table.processor'; import { Processor } from 'src/sql-tools/from-code/processors/type'; -import { asCheckConstraintName } from 'src/sql-tools/helpers'; +import { asKey } from 'src/sql-tools/helpers'; import { DatabaseConstraintType } from 'src/sql-tools/types'; export const processCheckConstraints: Processor = (builder, items) => { @@ -24,3 +24,5 @@ export const processCheckConstraints: Processor = (builder, items) => { }); } }; + +const asCheckConstraintName = (table: string, expression: string) => asKey('CHK_', table, [expression]); diff --git a/server/src/sql-tools/from-code/processors/column-index.processor.ts b/server/src/sql-tools/from-code/processors/column-index.processor.ts deleted file mode 100644 index 0e40fa1ee3..0000000000 --- a/server/src/sql-tools/from-code/processors/column-index.processor.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { onMissingColumn, resolveColumn } from 'src/sql-tools/from-code/processors/column.processor'; -import { onMissingTable } from 'src/sql-tools/from-code/processors/table.processor'; -import { Processor } from 'src/sql-tools/from-code/processors/type'; -import { asIndexName } from 'src/sql-tools/helpers'; - 
-export const processColumnIndexes: Processor = (builder, items) => { - for (const { - item: { object, propertyName, options }, - } of items.filter((item) => item.type === 'columnIndex')) { - const { table, column } = resolveColumn(builder, object, propertyName); - if (!table) { - onMissingTable(builder, '@ColumnIndex', object); - continue; - } - - if (!column) { - onMissingColumn(builder, `@ColumnIndex`, object, propertyName); - continue; - } - - table.indexes.push({ - name: options.name || asIndexName(table.name, [column.name], options.where), - tableName: table.name, - unique: options.unique ?? false, - expression: options.expression, - using: options.using, - where: options.where, - columnNames: [column.name], - synchronize: options.synchronize ?? true, - }); - } -}; diff --git a/server/src/sql-tools/from-code/processors/column.processor.ts b/server/src/sql-tools/from-code/processors/column.processor.ts index 37f3f5d082..e8c2544f87 100644 --- a/server/src/sql-tools/from-code/processors/column.processor.ts +++ b/server/src/sql-tools/from-code/processors/column.processor.ts @@ -1,8 +1,8 @@ import { ColumnOptions } from 'src/sql-tools/from-code/decorators/column.decorator'; import { onMissingTable, resolveTable } from 'src/sql-tools/from-code/processors/table.processor'; import { Processor, SchemaBuilder } from 'src/sql-tools/from-code/processors/type'; -import { asMetadataKey, asUniqueConstraintName, fromColumnValue } from 'src/sql-tools/helpers'; -import { DatabaseColumn, DatabaseConstraintType } from 'src/sql-tools/types'; +import { asMetadataKey, fromColumnValue } from 'src/sql-tools/helpers'; +import { DatabaseColumn } from 'src/sql-tools/types'; export const processColumns: Processor = (builder, items) => { for (const { @@ -54,16 +54,6 @@ export const processColumns: Processor = (builder, items) => { writeMetadata(object, propertyName, { name: column.name, options }); table.columns.push(column); - - if (type === 'column' && !options.primary && 
options.unique) { - table.constraints.push({ - type: DatabaseConstraintType.UNIQUE, - name: options.uniqueConstraintName || asUniqueConstraintName(table.name, [column.name]), - tableName: table.name, - columnNames: [column.name], - synchronize: options.synchronize ?? true, - }); - } } }; diff --git a/server/src/sql-tools/from-code/processors/foreign-key-constriant.processor.ts b/server/src/sql-tools/from-code/processors/foreign-key-constriant.processor.ts index 784a8b8e99..612b74c30f 100644 --- a/server/src/sql-tools/from-code/processors/foreign-key-constriant.processor.ts +++ b/server/src/sql-tools/from-code/processors/foreign-key-constriant.processor.ts @@ -1,7 +1,7 @@ import { onMissingColumn, resolveColumn } from 'src/sql-tools/from-code/processors/column.processor'; import { onMissingTable, resolveTable } from 'src/sql-tools/from-code/processors/table.processor'; import { Processor } from 'src/sql-tools/from-code/processors/type'; -import { asForeignKeyConstraintName, asRelationKeyConstraintName } from 'src/sql-tools/helpers'; +import { asKey } from 'src/sql-tools/helpers'; import { DatabaseActionType, DatabaseConstraintType } from 'src/sql-tools/types'; export const processForeignKeyConstraints: Processor = (builder, items) => { @@ -46,7 +46,7 @@ export const processForeignKeyConstraints: Processor = (builder, items) => { synchronize: options.synchronize ?? 
true, }); - if (options.unique) { + if (options.unique || options.uniqueConstraintName) { table.constraints.push({ name: options.uniqueConstraintName || asRelationKeyConstraintName(table.name, columnNames), tableName: table.name, @@ -57,3 +57,6 @@ export const processForeignKeyConstraints: Processor = (builder, items) => { } } }; + +const asForeignKeyConstraintName = (table: string, columns: string[]) => asKey('FK_', table, columns); +const asRelationKeyConstraintName = (table: string, columns: string[]) => asKey('REL_', table, columns); diff --git a/server/src/sql-tools/from-code/processors/index.processor.ts b/server/src/sql-tools/from-code/processors/index.processor.ts index 3625bf9784..f4c9c7cec1 100644 --- a/server/src/sql-tools/from-code/processors/index.processor.ts +++ b/server/src/sql-tools/from-code/processors/index.processor.ts @@ -1,8 +1,9 @@ +import { onMissingColumn, resolveColumn } from 'src/sql-tools/from-code/processors/column.processor'; import { onMissingTable, resolveTable } from 'src/sql-tools/from-code/processors/table.processor'; import { Processor } from 'src/sql-tools/from-code/processors/type'; -import { asIndexName } from 'src/sql-tools/helpers'; +import { asKey } from 'src/sql-tools/helpers'; -export const processIndexes: Processor = (builder, items) => { +export const processIndexes: Processor = (builder, items, config) => { for (const { item: { object, options }, } of items.filter((item) => item.type === 'index')) { @@ -24,4 +25,66 @@ export const processIndexes: Processor = (builder, items) => { synchronize: options.synchronize ?? 
true, }); } + + // column indexes + for (const { + type, + item: { object, propertyName, options }, + } of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) { + const { table, column } = resolveColumn(builder, object, propertyName); + if (!table) { + onMissingTable(builder, '@Column', object); + continue; + } + + if (!column) { + // should be impossible since they are created in `column.processor.ts` + onMissingColumn(builder, '@Column', object, propertyName); + continue; + } + + if (options.index === false) { + continue; + } + + const isIndexRequested = + options.indexName || options.index || (type === 'foreignKeyColumn' && config.createForeignKeyIndexes); + if (!isIndexRequested) { + continue; + } + + const indexName = options.indexName || asIndexName(table.name, [column.name]); + + const isIndexPresent = table.indexes.some((index) => index.name === indexName); + if (isIndexPresent) { + continue; + } + + const isOnlyPrimaryColumn = options.primary && table.columns.filter(({ primary }) => primary === true).length === 1; + if (isOnlyPrimaryColumn) { + // will have an index created by the primary key constraint + continue; + } + + table.indexes.push({ + name: indexName, + tableName: table.name, + unique: false, + columnNames: [column.name], + synchronize: options.synchronize ?? true, + }); + } +}; + +const asIndexName = (table: string, columns?: string[], where?: string) => { + const items: string[] = []; + for (const columnName of columns ?? 
[]) { + items.push(columnName); + } + + if (where) { + items.push(where); + } + + return asKey('IDX_', table, items); }; diff --git a/server/src/sql-tools/from-code/processors/primary-key-contraint.processor.ts b/server/src/sql-tools/from-code/processors/primary-key-contraint.processor.ts index f123f2e495..74aecc5ea0 100644 --- a/server/src/sql-tools/from-code/processors/primary-key-contraint.processor.ts +++ b/server/src/sql-tools/from-code/processors/primary-key-contraint.processor.ts @@ -1,5 +1,5 @@ import { Processor } from 'src/sql-tools/from-code/processors/type'; -import { asPrimaryKeyConstraintName } from 'src/sql-tools/helpers'; +import { asKey } from 'src/sql-tools/helpers'; import { DatabaseConstraintType } from 'src/sql-tools/types'; export const processPrimaryKeyConstraints: Processor = (builder) => { @@ -22,3 +22,5 @@ export const processPrimaryKeyConstraints: Processor = (builder) => { } } }; + +const asPrimaryKeyConstraintName = (table: string, columns: string[]) => asKey('PK_', table, columns); diff --git a/server/src/sql-tools/from-code/processors/trigger.processor.ts b/server/src/sql-tools/from-code/processors/trigger.processor.ts index 2f4cc04326..4b875f353b 100644 --- a/server/src/sql-tools/from-code/processors/trigger.processor.ts +++ b/server/src/sql-tools/from-code/processors/trigger.processor.ts @@ -1,6 +1,7 @@ +import { TriggerOptions } from 'src/sql-tools/from-code/decorators/trigger.decorator'; import { onMissingTable, resolveTable } from 'src/sql-tools/from-code/processors/table.processor'; import { Processor } from 'src/sql-tools/from-code/processors/type'; -import { asTriggerName } from 'src/sql-tools/helpers'; +import { asKey } from 'src/sql-tools/helpers'; export const processTriggers: Processor = (builder, items) => { for (const { @@ -26,3 +27,6 @@ export const processTriggers: Processor = (builder, items) => { }); } }; + +const asTriggerName = (table: string, trigger: TriggerOptions) => + asKey('TR_', table, [...trigger.actions, 
trigger.scope, trigger.timing, trigger.functionName]); diff --git a/server/src/sql-tools/from-code/processors/type.ts b/server/src/sql-tools/from-code/processors/type.ts index 5a69efbcf0..deb142d278 100644 --- a/server/src/sql-tools/from-code/processors/type.ts +++ b/server/src/sql-tools/from-code/processors/type.ts @@ -1,3 +1,4 @@ +import { SchemaFromCodeOptions } from 'src/sql-tools/from-code'; import { TableOptions } from 'src/sql-tools/from-code/decorators/table.decorator'; import { RegisterItem } from 'src/sql-tools/from-code/register-item'; import { DatabaseSchema, DatabaseTable } from 'src/sql-tools/types'; @@ -6,4 +7,4 @@ import { DatabaseSchema, DatabaseTable } from 'src/sql-tools/types'; export type TableWithMetadata = DatabaseTable & { metadata: { options: TableOptions; object: Function } }; export type SchemaBuilder = Omit & { tables: TableWithMetadata[] }; -export type Processor = (builder: SchemaBuilder, items: RegisterItem[]) => void; +export type Processor = (builder: SchemaBuilder, items: RegisterItem[], options: SchemaFromCodeOptions) => void; diff --git a/server/src/sql-tools/from-code/processors/unique-constraint.processor.ts b/server/src/sql-tools/from-code/processors/unique-constraint.processor.ts index 74c0504f7e..9014378085 100644 --- a/server/src/sql-tools/from-code/processors/unique-constraint.processor.ts +++ b/server/src/sql-tools/from-code/processors/unique-constraint.processor.ts @@ -1,6 +1,7 @@ +import { onMissingColumn, resolveColumn } from 'src/sql-tools/from-code/processors/column.processor'; import { onMissingTable, resolveTable } from 'src/sql-tools/from-code/processors/table.processor'; import { Processor } from 'src/sql-tools/from-code/processors/type'; -import { asUniqueConstraintName } from 'src/sql-tools/helpers'; +import { asKey } from 'src/sql-tools/helpers'; import { DatabaseConstraintType } from 'src/sql-tools/types'; export const processUniqueConstraints: Processor = (builder, items) => { @@ -24,4 +25,34 @@ export const 
processUniqueConstraints: Processor = (builder, items) => { synchronize: options.synchronize ?? true, }); } + + // column level constraints + for (const { + type, + item: { object, propertyName, options }, + } of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) { + const { table, column } = resolveColumn(builder, object, propertyName); + if (!table) { + onMissingTable(builder, '@Column', object); + continue; + } + + if (!column) { + // should be impossible since they are created in `column.processor.ts` + onMissingColumn(builder, '@Column', object, propertyName); + continue; + } + + if (type === 'column' && !options.primary && (options.unique || options.uniqueConstraintName)) { + table.constraints.push({ + type: DatabaseConstraintType.UNIQUE, + name: options.uniqueConstraintName || asUniqueConstraintName(table.name, [column.name]), + tableName: table.name, + columnNames: [column.name], + synchronize: options.synchronize ?? true, + }); + } + } }; + +const asUniqueConstraintName = (table: string, columns: string[]) => asKey('UQ_', table, columns); diff --git a/server/src/sql-tools/from-code/register-function.ts b/server/src/sql-tools/from-code/register-function.ts index 69e1a0f8f3..be71e0dfd7 100644 --- a/server/src/sql-tools/from-code/register-function.ts +++ b/server/src/sql-tools/from-code/register-function.ts @@ -1,5 +1,4 @@ import { register } from 'src/sql-tools/from-code/register'; -import { asFunctionExpression } from 'src/sql-tools/helpers'; import { ColumnType, DatabaseFunction } from 'src/sql-tools/types'; export type FunctionOptions = { @@ -27,3 +26,37 @@ export const registerFunction = (options: FunctionOptions) => { return item; }; + +const asFunctionExpression = (options: FunctionOptions) => { + const name = options.name; + const sql: string[] = [ + `CREATE OR REPLACE FUNCTION ${name}(${(options.arguments || []).join(', ')})`, + `RETURNS ${options.returnType}`, + ]; + + const flags = [ + options.parallel ? 
`PARALLEL ${options.parallel.toUpperCase()}` : undefined, + options.strict ? 'STRICT' : undefined, + options.behavior ? options.behavior.toUpperCase() : undefined, + `LANGUAGE ${options.language ?? 'SQL'}`, + ].filter((x) => x !== undefined); + + if (flags.length > 0) { + sql.push(flags.join(' ')); + } + + if ('return' in options) { + sql.push(` RETURN ${options.return}`); + } + + if ('body' in options) { + sql.push( + // + `AS $$`, + ' ' + options.body.trim(), + `$$;`, + ); + } + + return sql.join('\n ').trim(); +}; diff --git a/server/src/sql-tools/from-code/register-item.ts b/server/src/sql-tools/from-code/register-item.ts index 08200cbc4f..4889ae34b9 100644 --- a/server/src/sql-tools/from-code/register-item.ts +++ b/server/src/sql-tools/from-code/register-item.ts @@ -1,5 +1,4 @@ import { CheckOptions } from 'src/sql-tools/from-code/decorators/check.decorator'; -import { ColumnIndexOptions } from 'src/sql-tools/from-code/decorators/column-index.decorator'; import { ColumnOptions } from 'src/sql-tools/from-code/decorators/column.decorator'; import { ConfigurationParameterOptions } from 'src/sql-tools/from-code/decorators/configuration-parameter.decorator'; import { DatabaseOptions } from 'src/sql-tools/from-code/decorators/database.decorator'; @@ -21,7 +20,6 @@ export type RegisterItem = | { type: 'uniqueConstraint'; item: ClassBased<{ options: UniqueOptions }> } | { type: 'checkConstraint'; item: ClassBased<{ options: CheckOptions }> } | { type: 'column'; item: PropertyBased<{ options: ColumnOptions }> } - | { type: 'columnIndex'; item: PropertyBased<{ options: ColumnIndexOptions }> } | { type: 'function'; item: DatabaseFunction } | { type: 'enum'; item: DatabaseEnum } | { type: 'trigger'; item: ClassBased<{ options: TriggerOptions }> } diff --git a/server/src/sql-tools/helpers.ts b/server/src/sql-tools/helpers.ts index 364b695194..2802407ea6 100644 --- a/server/src/sql-tools/helpers.ts +++ b/server/src/sql-tools/helpers.ts @@ -1,7 +1,5 @@ import { createHash } 
from 'node:crypto'; import { ColumnValue } from 'src/sql-tools/from-code/decorators/column.decorator'; -import { TriggerOptions } from 'src/sql-tools/from-code/decorators/trigger.decorator'; -import { FunctionOptions } from 'src/sql-tools/from-code/register-function'; import { Comparer, DatabaseColumn, @@ -18,25 +16,6 @@ export const asSnakeCase = (name: string): string => name.replaceAll(/([a-z])([A // match TypeORM export const asKey = (prefix: string, tableName: string, values: string[]) => (prefix + sha1(`${tableName}_${values.toSorted().join('_')}`)).slice(0, 30); -export const asPrimaryKeyConstraintName = (table: string, columns: string[]) => asKey('PK_', table, columns); -export const asForeignKeyConstraintName = (table: string, columns: string[]) => asKey('FK_', table, columns); -export const asTriggerName = (table: string, trigger: TriggerOptions) => - asKey('TR_', table, [...trigger.actions, trigger.scope, trigger.timing, trigger.functionName]); -export const asRelationKeyConstraintName = (table: string, columns: string[]) => asKey('REL_', table, columns); -export const asUniqueConstraintName = (table: string, columns: string[]) => asKey('UQ_', table, columns); -export const asCheckConstraintName = (table: string, expression: string) => asKey('CHK_', table, [expression]); -export const asIndexName = (table: string, columns: string[] | undefined, where: string | undefined) => { - const items: string[] = []; - for (const columnName of columns ?? 
[]) { - items.push(columnName); - } - - if (where) { - items.push(where); - } - - return asKey('IDX_', table, items); -}; export const asOptions = (options: string | T): T => { if (typeof options === 'string') { @@ -46,40 +25,6 @@ export const asOptions = (options: string | T): T = return options; }; -export const asFunctionExpression = (options: FunctionOptions) => { - const name = options.name; - const sql: string[] = [ - `CREATE OR REPLACE FUNCTION ${name}(${(options.arguments || []).join(', ')})`, - `RETURNS ${options.returnType}`, - ]; - - const flags = [ - options.parallel ? `PARALLEL ${options.parallel.toUpperCase()}` : undefined, - options.strict ? 'STRICT' : undefined, - options.behavior ? options.behavior.toUpperCase() : undefined, - `LANGUAGE ${options.language ?? 'SQL'}`, - ].filter((x) => x !== undefined); - - if (flags.length > 0) { - sql.push(flags.join(' ')); - } - - if ('return' in options) { - sql.push(` RETURN ${options.return}`); - } - - if ('body' in options) { - sql.push( - // - `AS $$`, - ' ' + options.body.trim(), - `$$;`, - ); - } - - return sql.join('\n ').trim(); -}; - export const sha1 = (value: string) => createHash('sha1').update(value).digest('hex'); export const hasMask = (input: number, mask: number) => (input & mask) === mask; diff --git a/server/src/sql-tools/public_api.ts b/server/src/sql-tools/public_api.ts index d916678d4a..b41cce4ab5 100644 --- a/server/src/sql-tools/public_api.ts +++ b/server/src/sql-tools/public_api.ts @@ -3,7 +3,6 @@ export { schemaFromCode } from 'src/sql-tools/from-code'; export * from 'src/sql-tools/from-code/decorators/after-delete.decorator'; export * from 'src/sql-tools/from-code/decorators/before-update.decorator'; export * from 'src/sql-tools/from-code/decorators/check.decorator'; -export * from 'src/sql-tools/from-code/decorators/column-index.decorator'; export * from 'src/sql-tools/from-code/decorators/column.decorator'; export * from 
'src/sql-tools/from-code/decorators/configuration-parameter.decorator'; export * from 'src/sql-tools/from-code/decorators/create-date-column.decorator'; diff --git a/server/test/sql-tools/column-index-name-default.ts b/server/test/sql-tools/column-index-name-default.ts index e8b36ec119..cedae006be 100644 --- a/server/test/sql-tools/column-index-name-default.ts +++ b/server/test/sql-tools/column-index-name-default.ts @@ -1,9 +1,8 @@ -import { Column, ColumnIndex, DatabaseSchema, Table } from 'src/sql-tools'; +import { Column, DatabaseSchema, Table } from 'src/sql-tools'; @Table() export class Table1 { - @ColumnIndex() - @Column() + @Column({ index: true }) column1!: string; } diff --git a/server/test/sql-tools/column-index-name.ts b/server/test/sql-tools/column-index-name.ts new file mode 100644 index 0000000000..8ba18a8851 --- /dev/null +++ b/server/test/sql-tools/column-index-name.ts @@ -0,0 +1,46 @@ +import { Column, DatabaseSchema, Table } from 'src/sql-tools'; + +@Table() +export class Table1 { + @Column({ indexName: 'IDX_test' }) + column1!: string; +} + +export const description = 'should create a column with an index if a name is provided'; +export const schema: DatabaseSchema = { + name: 'postgres', + schemaName: 'public', + functions: [], + enums: [], + extensions: [], + parameters: [], + tables: [ + { + name: 'table1', + columns: [ + { + name: 'column1', + tableName: 'table1', + type: 'character varying', + nullable: false, + isArray: false, + primary: false, + synchronize: true, + }, + ], + indexes: [ + { + name: 'IDX_test', + columnNames: ['column1'], + tableName: 'table1', + unique: false, + synchronize: true, + }, + ], + triggers: [], + constraints: [], + synchronize: true, + }, + ], + warnings: [], +}; diff --git a/server/test/sql-tools/foreign-key-inferred-type.stub.ts b/server/test/sql-tools/foreign-key-inferred-type.stub.ts index 2ecaafdcad..0b66a1acd4 100644 --- a/server/test/sql-tools/foreign-key-inferred-type.stub.ts +++ 
b/server/test/sql-tools/foreign-key-inferred-type.stub.ts @@ -60,7 +60,15 @@ export const schema: DatabaseSchema = { synchronize: true, }, ], - indexes: [], + indexes: [ + { + name: 'IDX_3fcca5cc563abf256fc346e3ff', + tableName: 'table2', + columnNames: ['parentId'], + unique: false, + synchronize: true, + }, + ], triggers: [], constraints: [ { diff --git a/server/test/sql-tools/foreign-key-with-unique-constraint.stub.ts b/server/test/sql-tools/foreign-key-with-unique-constraint.stub.ts index 0601a02d42..109a3dfc85 100644 --- a/server/test/sql-tools/foreign-key-with-unique-constraint.stub.ts +++ b/server/test/sql-tools/foreign-key-with-unique-constraint.stub.ts @@ -60,7 +60,15 @@ export const schema: DatabaseSchema = { synchronize: true, }, ], - indexes: [], + indexes: [ + { + name: 'IDX_3fcca5cc563abf256fc346e3ff', + tableName: 'table2', + columnNames: ['parentId'], + unique: false, + synchronize: true, + }, + ], triggers: [], constraints: [ {