diff --git a/server/src/app.module.ts b/server/src/app.module.ts index c06087edea..9ea75d78c4 100644 --- a/server/src/app.module.ts +++ b/server/src/app.module.ts @@ -85,13 +85,13 @@ class BaseModule implements OnModuleInit, OnModuleDestroy { @Module({ imports: [...imports, ScheduleModule.forRoot()], controllers: [...controllers], - providers: [...common, ...middleware, { provide: IWorker, useValue: ImmichWorker.API }], + providers: [...common, ...middleware, { provide: IWorker, useValue: ImmichWorker.Api }], }) export class ApiModule extends BaseModule {} @Module({ imports: [...imports], - providers: [...common, { provide: IWorker, useValue: ImmichWorker.MICROSERVICES }, SchedulerRegistry], + providers: [...common, { provide: IWorker, useValue: ImmichWorker.Microservices }, SchedulerRegistry], }) export class MicroservicesModule extends BaseModule {} diff --git a/server/src/config.ts b/server/src/config.ts index 90ca2c1529..33a6f19ba1 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -8,7 +8,7 @@ import { OAuthTokenEndpointAuthMethod, QueueName, ToneMapping, - TranscodeHWAccel, + TranscodeHardwareAcceleration, TranscodePolicy, VideoCodec, VideoContainer, @@ -42,7 +42,7 @@ export interface SystemConfig { twoPass: boolean; preferredHwDevice: string; transcode: TranscodePolicy; - accel: TranscodeHWAccel; + accel: TranscodeHardwareAcceleration; accelDecode: boolean; tonemap: ToneMapping; }; @@ -190,39 +190,39 @@ export const defaults = Object.freeze({ preset: 'ultrafast', targetVideoCodec: VideoCodec.H264, acceptedVideoCodecs: [VideoCodec.H264], - targetAudioCodec: AudioCodec.AAC, - acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS, AudioCodec.PCMS16LE], - acceptedContainers: [VideoContainer.MOV, VideoContainer.OGG, VideoContainer.WEBM], + targetAudioCodec: AudioCodec.Aac, + acceptedAudioCodecs: [AudioCodec.Aac, AudioCodec.Mp3, AudioCodec.LibOpus, AudioCodec.PcmS16le], + acceptedContainers: [VideoContainer.Mov, VideoContainer.Ogg, 
VideoContainer.Webm], targetResolution: '720', maxBitrate: '0', bframes: -1, refs: 0, gopSize: 0, temporalAQ: false, - cqMode: CQMode.AUTO, + cqMode: CQMode.Auto, twoPass: false, preferredHwDevice: 'auto', - transcode: TranscodePolicy.REQUIRED, - tonemap: ToneMapping.HABLE, - accel: TranscodeHWAccel.DISABLED, + transcode: TranscodePolicy.Required, + tonemap: ToneMapping.Hable, + accel: TranscodeHardwareAcceleration.Disabled, accelDecode: false, }, job: { - [QueueName.BACKGROUND_TASK]: { concurrency: 5 }, - [QueueName.SMART_SEARCH]: { concurrency: 2 }, - [QueueName.METADATA_EXTRACTION]: { concurrency: 5 }, - [QueueName.FACE_DETECTION]: { concurrency: 2 }, - [QueueName.SEARCH]: { concurrency: 5 }, - [QueueName.SIDECAR]: { concurrency: 5 }, - [QueueName.LIBRARY]: { concurrency: 5 }, - [QueueName.MIGRATION]: { concurrency: 5 }, - [QueueName.THUMBNAIL_GENERATION]: { concurrency: 3 }, - [QueueName.VIDEO_CONVERSION]: { concurrency: 1 }, - [QueueName.NOTIFICATION]: { concurrency: 5 }, + [QueueName.BackgroundTask]: { concurrency: 5 }, + [QueueName.SmartSearch]: { concurrency: 2 }, + [QueueName.MetadataExtraction]: { concurrency: 5 }, + [QueueName.FaceDetection]: { concurrency: 2 }, + [QueueName.Search]: { concurrency: 5 }, + [QueueName.Sidecar]: { concurrency: 5 }, + [QueueName.Library]: { concurrency: 5 }, + [QueueName.Migration]: { concurrency: 5 }, + [QueueName.ThumbnailGeneration]: { concurrency: 3 }, + [QueueName.VideoConversion]: { concurrency: 1 }, + [QueueName.Notification]: { concurrency: 5 }, }, logging: { enabled: true, - level: LogLevel.LOG, + level: LogLevel.Log, }, machineLearning: { enabled: process.env.IMMICH_MACHINE_LEARNING_ENABLED !== 'false', @@ -273,7 +273,7 @@ export const defaults = Object.freeze({ storageLabelClaim: 'preferred_username', storageQuotaClaim: 'immich_quota', roleClaim: 'immich_role', - tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.CLIENT_SECRET_POST, + tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.ClientSecretPost, 
timeout: 30_000, }, passwordLogin: { @@ -286,12 +286,12 @@ export const defaults = Object.freeze({ }, image: { thumbnail: { - format: ImageFormat.WEBP, + format: ImageFormat.Webp, size: 250, quality: 80, }, preview: { - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, size: 1440, quality: 80, }, @@ -299,7 +299,7 @@ export const defaults = Object.freeze({ extractEmbedded: false, fullsize: { enabled: false, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, }, }, diff --git a/server/src/constants.ts b/server/src/constants.ts index 2e25797938..447d8a09c9 100644 --- a/server/src/constants.ts +++ b/server/src/constants.ts @@ -25,14 +25,14 @@ export const EXTENSION_NAMES: Record = { } as const; export const VECTOR_EXTENSIONS = [ - DatabaseExtension.VECTORCHORD, - DatabaseExtension.VECTORS, - DatabaseExtension.VECTOR, + DatabaseExtension.VectorChord, + DatabaseExtension.Vectors, + DatabaseExtension.Vector, ] as const; export const VECTOR_INDEX_TABLES = { - [VectorIndex.CLIP]: 'smart_search', - [VectorIndex.FACE]: 'face_search', + [VectorIndex.Clip]: 'smart_search', + [VectorIndex.Face]: 'face_search', } as const; export const VECTORCHORD_LIST_SLACK_FACTOR = 1.2; diff --git a/server/src/controllers/activity.controller.ts b/server/src/controllers/activity.controller.ts index b91f2902d5..d2d34da102 100644 --- a/server/src/controllers/activity.controller.ts +++ b/server/src/controllers/activity.controller.ts @@ -20,13 +20,13 @@ export class ActivityController { constructor(private service: ActivityService) {} @Get() - @Authenticated({ permission: Permission.ACTIVITY_READ }) + @Authenticated({ permission: Permission.ActivityRead }) getActivities(@Auth() auth: AuthDto, @Query() dto: ActivitySearchDto): Promise { return this.service.getAll(auth, dto); } @Post() - @Authenticated({ permission: Permission.ACTIVITY_CREATE }) + @Authenticated({ permission: Permission.ActivityCreate }) async createActivity( @Auth() auth: AuthDto, @Body() dto: ActivityCreateDto, 
@@ -40,14 +40,14 @@ export class ActivityController { } @Get('statistics') - @Authenticated({ permission: Permission.ACTIVITY_STATISTICS }) + @Authenticated({ permission: Permission.ActivityStatistics }) getActivityStatistics(@Auth() auth: AuthDto, @Query() dto: ActivityDto): Promise { return this.service.getStatistics(auth, dto); } @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.ACTIVITY_DELETE }) + @Authenticated({ permission: Permission.ActivityDelete }) deleteActivity(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.delete(auth, id); } diff --git a/server/src/controllers/album.controller.ts b/server/src/controllers/album.controller.ts index 49ec5a82ea..2e6d1e7e43 100644 --- a/server/src/controllers/album.controller.ts +++ b/server/src/controllers/album.controller.ts @@ -23,24 +23,24 @@ export class AlbumController { constructor(private service: AlbumService) {} @Get() - @Authenticated({ permission: Permission.ALBUM_READ }) + @Authenticated({ permission: Permission.AlbumRead }) getAllAlbums(@Auth() auth: AuthDto, @Query() query: GetAlbumsDto): Promise { return this.service.getAll(auth, query); } @Post() - @Authenticated({ permission: Permission.ALBUM_CREATE }) + @Authenticated({ permission: Permission.AlbumCreate }) createAlbum(@Auth() auth: AuthDto, @Body() dto: CreateAlbumDto): Promise { return this.service.create(auth, dto); } @Get('statistics') - @Authenticated({ permission: Permission.ALBUM_STATISTICS }) + @Authenticated({ permission: Permission.AlbumStatistics }) getAlbumStatistics(@Auth() auth: AuthDto): Promise { return this.service.getStatistics(auth); } - @Authenticated({ permission: Permission.ALBUM_READ, sharedLink: true }) + @Authenticated({ permission: Permission.AlbumRead, sharedLink: true }) @Get(':id') getAlbumInfo( @Auth() auth: AuthDto, @@ -51,7 +51,7 @@ export class AlbumController { } @Patch(':id') - @Authenticated({ permission: Permission.ALBUM_UPDATE }) + 
@Authenticated({ permission: Permission.AlbumUpdate }) updateAlbumInfo( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -61,7 +61,7 @@ export class AlbumController { } @Delete(':id') - @Authenticated({ permission: Permission.ALBUM_DELETE }) + @Authenticated({ permission: Permission.AlbumDelete }) deleteAlbum(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto) { return this.service.delete(auth, id); } diff --git a/server/src/controllers/api-key.controller.spec.ts b/server/src/controllers/api-key.controller.spec.ts index f58a53723a..993ad012cc 100644 --- a/server/src/controllers/api-key.controller.spec.ts +++ b/server/src/controllers/api-key.controller.spec.ts @@ -55,7 +55,7 @@ describe(APIKeyController.name, () => { it('should require a valid uuid', async () => { const { status, body } = await request(ctx.getHttpServer()) .put(`/api-keys/123`) - .send({ name: 'new name', permissions: [Permission.ALL] }); + .send({ name: 'new name', permissions: [Permission.All] }); expect(status).toBe(400); expect(body).toEqual(factory.responses.badRequest(['id must be a UUID'])); }); diff --git a/server/src/controllers/api-key.controller.ts b/server/src/controllers/api-key.controller.ts index 08efd753cf..6347a1274a 100644 --- a/server/src/controllers/api-key.controller.ts +++ b/server/src/controllers/api-key.controller.ts @@ -13,25 +13,25 @@ export class APIKeyController { constructor(private service: ApiKeyService) {} @Post() - @Authenticated({ permission: Permission.API_KEY_CREATE }) + @Authenticated({ permission: Permission.ApiKeyCreate }) createApiKey(@Auth() auth: AuthDto, @Body() dto: APIKeyCreateDto): Promise { return this.service.create(auth, dto); } @Get() - @Authenticated({ permission: Permission.API_KEY_READ }) + @Authenticated({ permission: Permission.ApiKeyRead }) getApiKeys(@Auth() auth: AuthDto): Promise { return this.service.getAll(auth); } @Get(':id') - @Authenticated({ permission: Permission.API_KEY_READ }) + @Authenticated({ permission: 
Permission.ApiKeyRead }) getApiKey(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.getById(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.API_KEY_UPDATE }) + @Authenticated({ permission: Permission.ApiKeyUpdate }) updateApiKey( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -42,7 +42,7 @@ export class APIKeyController { @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.API_KEY_DELETE }) + @Authenticated({ permission: Permission.ApiKeyDelete }) deleteApiKey(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.delete(auth, id); } diff --git a/server/src/controllers/asset-media.controller.ts b/server/src/controllers/asset-media.controller.ts index b2c9397580..ea6c9602c8 100644 --- a/server/src/controllers/asset-media.controller.ts +++ b/server/src/controllers/asset-media.controller.ts @@ -45,7 +45,7 @@ import { ImmichFileResponse, sendFile } from 'src/utils/file'; import { FileNotEmptyValidator, UUIDParamDto } from 'src/validation'; @ApiTags('Assets') -@Controller(RouteKey.ASSET) +@Controller(RouteKey.Asset) export class AssetMediaController { constructor( private logger: LoggingRepository, @@ -56,7 +56,7 @@ export class AssetMediaController { @UseInterceptors(AssetUploadInterceptor, FileUploadInterceptor) @ApiConsumes('multipart/form-data') @ApiHeader({ - name: ImmichHeader.CHECKSUM, + name: ImmichHeader.Checksum, description: 'sha1 checksum that can be used for duplicate detection before the file is uploaded', required: false, }) diff --git a/server/src/controllers/asset.controller.ts b/server/src/controllers/asset.controller.ts index 925b64c8a8..bb17daddf3 100644 --- a/server/src/controllers/asset.controller.ts +++ b/server/src/controllers/asset.controller.ts @@ -19,7 +19,7 @@ import { AssetService } from 'src/services/asset.service'; import { UUIDParamDto } from 'src/validation'; @ApiTags('Assets') -@Controller(RouteKey.ASSET) 
+@Controller(RouteKey.Asset) export class AssetController { constructor(private service: AssetService) {} diff --git a/server/src/controllers/auth.controller.ts b/server/src/controllers/auth.controller.ts index 78c611d761..9bc5fd0fbb 100644 --- a/server/src/controllers/auth.controller.ts +++ b/server/src/controllers/auth.controller.ts @@ -36,9 +36,9 @@ export class AuthController { return respondWithCookie(res, body, { isSecure: loginDetails.isSecure, values: [ - { key: ImmichCookie.ACCESS_TOKEN, value: body.accessToken }, - { key: ImmichCookie.AUTH_TYPE, value: AuthType.PASSWORD }, - { key: ImmichCookie.IS_AUTHENTICATED, value: 'true' }, + { key: ImmichCookie.AccessToken, value: body.accessToken }, + { key: ImmichCookie.AuthType, value: AuthType.Password }, + { key: ImmichCookie.IsAuthenticated, value: 'true' }, ], }); } @@ -70,13 +70,13 @@ export class AuthController { @Res({ passthrough: true }) res: Response, @Auth() auth: AuthDto, ): Promise { - const authType = (request.cookies || {})[ImmichCookie.AUTH_TYPE]; + const authType = (request.cookies || {})[ImmichCookie.AuthType]; const body = await this.service.logout(auth, authType); return respondWithoutCookie(res, body, [ - ImmichCookie.ACCESS_TOKEN, - ImmichCookie.AUTH_TYPE, - ImmichCookie.IS_AUTHENTICATED, + ImmichCookie.AccessToken, + ImmichCookie.AuthType, + ImmichCookie.IsAuthenticated, ]); } diff --git a/server/src/controllers/face.controller.ts b/server/src/controllers/face.controller.ts index d94cd532f7..20b6db6039 100644 --- a/server/src/controllers/face.controller.ts +++ b/server/src/controllers/face.controller.ts @@ -19,19 +19,19 @@ export class FaceController { constructor(private service: PersonService) {} @Post() - @Authenticated({ permission: Permission.FACE_CREATE }) + @Authenticated({ permission: Permission.FaceCreate }) createFace(@Auth() auth: AuthDto, @Body() dto: AssetFaceCreateDto) { return this.service.createFace(auth, dto); } @Get() - @Authenticated({ permission: Permission.FACE_READ }) 
+ @Authenticated({ permission: Permission.FaceRead }) getFaces(@Auth() auth: AuthDto, @Query() dto: FaceDto): Promise { return this.service.getFacesById(auth, dto); } @Put(':id') - @Authenticated({ permission: Permission.FACE_UPDATE }) + @Authenticated({ permission: Permission.FaceUpdate }) reassignFacesById( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -41,7 +41,7 @@ export class FaceController { } @Delete(':id') - @Authenticated({ permission: Permission.FACE_DELETE }) + @Authenticated({ permission: Permission.FaceDelete }) deleteFace(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Body() dto: AssetFaceDeleteDto) { return this.service.deleteFace(auth, id, dto); } diff --git a/server/src/controllers/library.controller.ts b/server/src/controllers/library.controller.ts index b8959ca288..e090586f57 100644 --- a/server/src/controllers/library.controller.ts +++ b/server/src/controllers/library.controller.ts @@ -19,32 +19,32 @@ export class LibraryController { constructor(private service: LibraryService) {} @Get() - @Authenticated({ permission: Permission.LIBRARY_READ, admin: true }) + @Authenticated({ permission: Permission.LibraryRead, admin: true }) getAllLibraries(): Promise { return this.service.getAll(); } @Post() - @Authenticated({ permission: Permission.LIBRARY_CREATE, admin: true }) + @Authenticated({ permission: Permission.LibraryCreate, admin: true }) createLibrary(@Body() dto: CreateLibraryDto): Promise { return this.service.create(dto); } @Get(':id') - @Authenticated({ permission: Permission.LIBRARY_READ, admin: true }) + @Authenticated({ permission: Permission.LibraryRead, admin: true }) getLibrary(@Param() { id }: UUIDParamDto): Promise { return this.service.get(id); } @Put(':id') - @Authenticated({ permission: Permission.LIBRARY_UPDATE, admin: true }) + @Authenticated({ permission: Permission.LibraryUpdate, admin: true }) updateLibrary(@Param() { id }: UUIDParamDto, @Body() dto: UpdateLibraryDto): Promise { return 
this.service.update(id, dto); } @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.LIBRARY_DELETE, admin: true }) + @Authenticated({ permission: Permission.LibraryDelete, admin: true }) deleteLibrary(@Param() { id }: UUIDParamDto): Promise { return this.service.delete(id); } @@ -58,14 +58,14 @@ export class LibraryController { } @Get(':id/statistics') - @Authenticated({ permission: Permission.LIBRARY_STATISTICS, admin: true }) + @Authenticated({ permission: Permission.LibraryStatistics, admin: true }) getLibraryStatistics(@Param() { id }: UUIDParamDto): Promise { return this.service.getStatistics(id); } @Post(':id/scan') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.LIBRARY_UPDATE, admin: true }) + @Authenticated({ permission: Permission.LibraryUpdate, admin: true }) scanLibrary(@Param() { id }: UUIDParamDto) { return this.service.queueScan(id); } diff --git a/server/src/controllers/memory.controller.ts b/server/src/controllers/memory.controller.ts index d33c5ec22c..a5bbbd7411 100644 --- a/server/src/controllers/memory.controller.ts +++ b/server/src/controllers/memory.controller.ts @@ -20,31 +20,31 @@ export class MemoryController { constructor(private service: MemoryService) {} @Get() - @Authenticated({ permission: Permission.MEMORY_READ }) + @Authenticated({ permission: Permission.MemoryRead }) searchMemories(@Auth() auth: AuthDto, @Query() dto: MemorySearchDto): Promise { return this.service.search(auth, dto); } @Post() - @Authenticated({ permission: Permission.MEMORY_CREATE }) + @Authenticated({ permission: Permission.MemoryCreate }) createMemory(@Auth() auth: AuthDto, @Body() dto: MemoryCreateDto): Promise { return this.service.create(auth, dto); } @Get('statistics') - @Authenticated({ permission: Permission.MEMORY_READ }) + @Authenticated({ permission: Permission.MemoryRead }) memoriesStatistics(@Auth() auth: AuthDto, @Query() dto: MemorySearchDto): Promise { return 
this.service.statistics(auth, dto); } @Get(':id') - @Authenticated({ permission: Permission.MEMORY_READ }) + @Authenticated({ permission: Permission.MemoryRead }) getMemory(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.get(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.MEMORY_UPDATE }) + @Authenticated({ permission: Permission.MemoryUpdate }) updateMemory( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -55,7 +55,7 @@ export class MemoryController { @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.MEMORY_DELETE }) + @Authenticated({ permission: Permission.MemoryDelete }) deleteMemory(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.remove(auth, id); } diff --git a/server/src/controllers/notification.controller.ts b/server/src/controllers/notification.controller.ts index c64f786850..af4eb198b6 100644 --- a/server/src/controllers/notification.controller.ts +++ b/server/src/controllers/notification.controller.ts @@ -19,31 +19,31 @@ export class NotificationController { constructor(private service: NotificationService) {} @Get() - @Authenticated({ permission: Permission.NOTIFICATION_READ }) + @Authenticated({ permission: Permission.NotificationRead }) getNotifications(@Auth() auth: AuthDto, @Query() dto: NotificationSearchDto): Promise { return this.service.search(auth, dto); } @Put() - @Authenticated({ permission: Permission.NOTIFICATION_UPDATE }) + @Authenticated({ permission: Permission.NotificationUpdate }) updateNotifications(@Auth() auth: AuthDto, @Body() dto: NotificationUpdateAllDto): Promise { return this.service.updateAll(auth, dto); } @Delete() - @Authenticated({ permission: Permission.NOTIFICATION_DELETE }) + @Authenticated({ permission: Permission.NotificationDelete }) deleteNotifications(@Auth() auth: AuthDto, @Body() dto: NotificationDeleteAllDto): Promise { return this.service.deleteAll(auth, dto); } 
@Get(':id') - @Authenticated({ permission: Permission.NOTIFICATION_READ }) + @Authenticated({ permission: Permission.NotificationRead }) getNotification(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.get(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.NOTIFICATION_UPDATE }) + @Authenticated({ permission: Permission.NotificationUpdate }) updateNotification( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -53,7 +53,7 @@ export class NotificationController { } @Delete(':id') - @Authenticated({ permission: Permission.NOTIFICATION_DELETE }) + @Authenticated({ permission: Permission.NotificationDelete }) deleteNotification(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.delete(auth, id); } diff --git a/server/src/controllers/oauth.controller.ts b/server/src/controllers/oauth.controller.ts index 23ddff5ddc..7da75f573a 100644 --- a/server/src/controllers/oauth.controller.ts +++ b/server/src/controllers/oauth.controller.ts @@ -41,8 +41,8 @@ export class OAuthController { { isSecure: loginDetails.isSecure, values: [ - { key: ImmichCookie.OAUTH_STATE, value: state }, - { key: ImmichCookie.OAUTH_CODE_VERIFIER, value: codeVerifier }, + { key: ImmichCookie.OAuthState, value: state }, + { key: ImmichCookie.OAuthCodeVerifier, value: codeVerifier }, ], }, ); @@ -56,14 +56,14 @@ export class OAuthController { @GetLoginDetails() loginDetails: LoginDetails, ): Promise { const body = await this.service.callback(dto, request.headers, loginDetails); - res.clearCookie(ImmichCookie.OAUTH_STATE); - res.clearCookie(ImmichCookie.OAUTH_CODE_VERIFIER); + res.clearCookie(ImmichCookie.OAuthState); + res.clearCookie(ImmichCookie.OAuthCodeVerifier); return respondWithCookie(res, body, { isSecure: loginDetails.isSecure, values: [ - { key: ImmichCookie.ACCESS_TOKEN, value: body.accessToken }, - { key: ImmichCookie.AUTH_TYPE, value: AuthType.OAUTH }, - { key: ImmichCookie.IS_AUTHENTICATED, value: 
'true' }, + { key: ImmichCookie.AccessToken, value: body.accessToken }, + { key: ImmichCookie.AuthType, value: AuthType.OAuth }, + { key: ImmichCookie.IsAuthenticated, value: 'true' }, ], }); } diff --git a/server/src/controllers/partner.controller.ts b/server/src/controllers/partner.controller.ts index 6830fdd52f..6b6efaa570 100644 --- a/server/src/controllers/partner.controller.ts +++ b/server/src/controllers/partner.controller.ts @@ -13,19 +13,19 @@ export class PartnerController { constructor(private service: PartnerService) {} @Get() - @Authenticated({ permission: Permission.PARTNER_READ }) + @Authenticated({ permission: Permission.PartnerRead }) getPartners(@Auth() auth: AuthDto, @Query() dto: PartnerSearchDto): Promise { return this.service.search(auth, dto); } @Post(':id') - @Authenticated({ permission: Permission.PARTNER_CREATE }) + @Authenticated({ permission: Permission.PartnerCreate }) createPartner(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.create(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.PARTNER_UPDATE }) + @Authenticated({ permission: Permission.PartnerUpdate }) updatePartner( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -35,7 +35,7 @@ export class PartnerController { } @Delete(':id') - @Authenticated({ permission: Permission.PARTNER_DELETE }) + @Authenticated({ permission: Permission.PartnerDelete }) removePartner(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.remove(auth, id); } diff --git a/server/src/controllers/person.controller.ts b/server/src/controllers/person.controller.ts index a9f6616426..ec66f7a9ca 100644 --- a/server/src/controllers/person.controller.ts +++ b/server/src/controllers/person.controller.ts @@ -45,38 +45,38 @@ export class PersonController { } @Get() - @Authenticated({ permission: Permission.PERSON_READ }) + @Authenticated({ permission: Permission.PersonRead }) getAllPeople(@Auth() auth: AuthDto, @Query() 
options: PersonSearchDto): Promise { return this.service.getAll(auth, options); } @Post() - @Authenticated({ permission: Permission.PERSON_CREATE }) + @Authenticated({ permission: Permission.PersonCreate }) createPerson(@Auth() auth: AuthDto, @Body() dto: PersonCreateDto): Promise { return this.service.create(auth, dto); } @Put() - @Authenticated({ permission: Permission.PERSON_UPDATE }) + @Authenticated({ permission: Permission.PersonUpdate }) updatePeople(@Auth() auth: AuthDto, @Body() dto: PeopleUpdateDto): Promise { return this.service.updateAll(auth, dto); } @Delete() @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.PERSON_DELETE }) + @Authenticated({ permission: Permission.PersonDelete }) deletePeople(@Auth() auth: AuthDto, @Body() dto: BulkIdsDto): Promise { return this.service.deleteAll(auth, dto); } @Get(':id') - @Authenticated({ permission: Permission.PERSON_READ }) + @Authenticated({ permission: Permission.PersonRead }) getPerson(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.getById(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.PERSON_UPDATE }) + @Authenticated({ permission: Permission.PersonUpdate }) updatePerson( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -87,20 +87,20 @@ export class PersonController { @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.PERSON_DELETE }) + @Authenticated({ permission: Permission.PersonDelete }) deletePerson(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.delete(auth, id); } @Get(':id/statistics') - @Authenticated({ permission: Permission.PERSON_STATISTICS }) + @Authenticated({ permission: Permission.PersonStatistics }) getPersonStatistics(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.getStatistics(auth, id); } @Get(':id/thumbnail') @FileResponse() - @Authenticated({ permission: Permission.PERSON_READ }) + 
@Authenticated({ permission: Permission.PersonRead }) async getPersonThumbnail( @Res() res: Response, @Next() next: NextFunction, @@ -111,7 +111,7 @@ export class PersonController { } @Put(':id/reassign') - @Authenticated({ permission: Permission.PERSON_REASSIGN }) + @Authenticated({ permission: Permission.PersonReassign }) reassignFaces( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -121,7 +121,7 @@ export class PersonController { } @Post(':id/merge') - @Authenticated({ permission: Permission.PERSON_MERGE }) + @Authenticated({ permission: Permission.PersonMerge }) mergePerson( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, diff --git a/server/src/controllers/session.controller.ts b/server/src/controllers/session.controller.ts index f5eb10b3dd..cbe8158fee 100644 --- a/server/src/controllers/session.controller.ts +++ b/server/src/controllers/session.controller.ts @@ -13,26 +13,26 @@ export class SessionController { constructor(private service: SessionService) {} @Post() - @Authenticated({ permission: Permission.SESSION_CREATE }) + @Authenticated({ permission: Permission.SessionCreate }) createSession(@Auth() auth: AuthDto, @Body() dto: SessionCreateDto): Promise { return this.service.create(auth, dto); } @Get() - @Authenticated({ permission: Permission.SESSION_READ }) + @Authenticated({ permission: Permission.SessionRead }) getSessions(@Auth() auth: AuthDto): Promise { return this.service.getAll(auth); } @Delete() - @Authenticated({ permission: Permission.SESSION_DELETE }) + @Authenticated({ permission: Permission.SessionDelete }) @HttpCode(HttpStatus.NO_CONTENT) deleteAllSessions(@Auth() auth: AuthDto): Promise { return this.service.deleteAll(auth); } @Put(':id') - @Authenticated({ permission: Permission.SESSION_UPDATE }) + @Authenticated({ permission: Permission.SessionUpdate }) updateSession( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -42,14 +42,14 @@ export class SessionController { } @Delete(':id') - @Authenticated({ permission: 
Permission.SESSION_DELETE }) + @Authenticated({ permission: Permission.SessionDelete }) @HttpCode(HttpStatus.NO_CONTENT) deleteSession(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.delete(auth, id); } @Post(':id/lock') - @Authenticated({ permission: Permission.SESSION_LOCK }) + @Authenticated({ permission: Permission.SessionLock }) @HttpCode(HttpStatus.NO_CONTENT) lockSession(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.lock(auth, id); diff --git a/server/src/controllers/shared-link.controller.ts b/server/src/controllers/shared-link.controller.ts index ca978f03da..273d625ca7 100644 --- a/server/src/controllers/shared-link.controller.ts +++ b/server/src/controllers/shared-link.controller.ts @@ -24,7 +24,7 @@ export class SharedLinkController { constructor(private service: SharedLinkService) {} @Get() - @Authenticated({ permission: Permission.SHARED_LINK_READ }) + @Authenticated({ permission: Permission.SharedLinkRead }) getAllSharedLinks(@Auth() auth: AuthDto, @Query() dto: SharedLinkSearchDto): Promise { return this.service.getAll(auth, dto); } @@ -38,31 +38,31 @@ export class SharedLinkController { @Res({ passthrough: true }) res: Response, @GetLoginDetails() loginDetails: LoginDetails, ): Promise { - const sharedLinkToken = request.cookies?.[ImmichCookie.SHARED_LINK_TOKEN]; + const sharedLinkToken = request.cookies?.[ImmichCookie.SharedLinkToken]; if (sharedLinkToken) { dto.token = sharedLinkToken; } const body = await this.service.getMine(auth, dto); return respondWithCookie(res, body, { isSecure: loginDetails.isSecure, - values: body.token ? [{ key: ImmichCookie.SHARED_LINK_TOKEN, value: body.token }] : [], + values: body.token ? 
[{ key: ImmichCookie.SharedLinkToken, value: body.token }] : [], }); } @Get(':id') - @Authenticated({ permission: Permission.SHARED_LINK_READ }) + @Authenticated({ permission: Permission.SharedLinkRead }) getSharedLinkById(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.get(auth, id); } @Post() - @Authenticated({ permission: Permission.SHARED_LINK_CREATE }) + @Authenticated({ permission: Permission.SharedLinkCreate }) createSharedLink(@Auth() auth: AuthDto, @Body() dto: SharedLinkCreateDto) { return this.service.create(auth, dto); } @Patch(':id') - @Authenticated({ permission: Permission.SHARED_LINK_UPDATE }) + @Authenticated({ permission: Permission.SharedLinkUpdate }) updateSharedLink( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -72,7 +72,7 @@ export class SharedLinkController { } @Delete(':id') - @Authenticated({ permission: Permission.SHARED_LINK_DELETE }) + @Authenticated({ permission: Permission.SharedLinkDelete }) removeSharedLink(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.remove(auth, id); } diff --git a/server/src/controllers/stack.controller.ts b/server/src/controllers/stack.controller.ts index 188952eba5..238753734c 100644 --- a/server/src/controllers/stack.controller.ts +++ b/server/src/controllers/stack.controller.ts @@ -14,32 +14,32 @@ export class StackController { constructor(private service: StackService) {} @Get() - @Authenticated({ permission: Permission.STACK_READ }) + @Authenticated({ permission: Permission.StackRead }) searchStacks(@Auth() auth: AuthDto, @Query() query: StackSearchDto): Promise { return this.service.search(auth, query); } @Post() - @Authenticated({ permission: Permission.STACK_CREATE }) + @Authenticated({ permission: Permission.StackCreate }) createStack(@Auth() auth: AuthDto, @Body() dto: StackCreateDto): Promise { return this.service.create(auth, dto); } @Delete() - @Authenticated({ permission: Permission.STACK_DELETE }) + 
@Authenticated({ permission: Permission.StackDelete }) @HttpCode(HttpStatus.NO_CONTENT) deleteStacks(@Auth() auth: AuthDto, @Body() dto: BulkIdsDto): Promise { return this.service.deleteAll(auth, dto); } @Get(':id') - @Authenticated({ permission: Permission.STACK_READ }) + @Authenticated({ permission: Permission.StackRead }) getStack(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.get(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.STACK_UPDATE }) + @Authenticated({ permission: Permission.StackUpdate }) updateStack( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -50,7 +50,7 @@ export class StackController { @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.STACK_DELETE }) + @Authenticated({ permission: Permission.StackDelete }) deleteStack(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.delete(auth, id); } diff --git a/server/src/controllers/system-config.controller.ts b/server/src/controllers/system-config.controller.ts index 58e8bde87b..69117f4d45 100644 --- a/server/src/controllers/system-config.controller.ts +++ b/server/src/controllers/system-config.controller.ts @@ -15,25 +15,25 @@ export class SystemConfigController { ) {} @Get() - @Authenticated({ permission: Permission.SYSTEM_CONFIG_READ, admin: true }) + @Authenticated({ permission: Permission.SystemConfigRead, admin: true }) getConfig(): Promise { return this.service.getSystemConfig(); } @Get('defaults') - @Authenticated({ permission: Permission.SYSTEM_CONFIG_READ, admin: true }) + @Authenticated({ permission: Permission.SystemConfigRead, admin: true }) getConfigDefaults(): SystemConfigDto { return this.service.getDefaults(); } @Put() - @Authenticated({ permission: Permission.SYSTEM_CONFIG_UPDATE, admin: true }) + @Authenticated({ permission: Permission.SystemConfigUpdate, admin: true }) updateConfig(@Body() dto: SystemConfigDto): Promise { return 
this.service.updateSystemConfig(dto); } @Get('storage-template-options') - @Authenticated({ permission: Permission.SYSTEM_CONFIG_READ, admin: true }) + @Authenticated({ permission: Permission.SystemConfigRead, admin: true }) getStorageTemplateOptions(): SystemConfigTemplateStorageOptionDto { return this.storageTemplateService.getStorageTemplateOptions(); } diff --git a/server/src/controllers/system-metadata.controller.ts b/server/src/controllers/system-metadata.controller.ts index 71c37d02c4..ad2245a391 100644 --- a/server/src/controllers/system-metadata.controller.ts +++ b/server/src/controllers/system-metadata.controller.ts @@ -15,26 +15,26 @@ export class SystemMetadataController { constructor(private service: SystemMetadataService) {} @Get('admin-onboarding') - @Authenticated({ permission: Permission.SYSTEM_METADATA_READ, admin: true }) + @Authenticated({ permission: Permission.SystemMetadataRead, admin: true }) getAdminOnboarding(): Promise { return this.service.getAdminOnboarding(); } @Post('admin-onboarding') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.SYSTEM_METADATA_UPDATE, admin: true }) + @Authenticated({ permission: Permission.SystemMetadataUpdate, admin: true }) updateAdminOnboarding(@Body() dto: AdminOnboardingUpdateDto): Promise { return this.service.updateAdminOnboarding(dto); } @Get('reverse-geocoding-state') - @Authenticated({ permission: Permission.SYSTEM_METADATA_READ, admin: true }) + @Authenticated({ permission: Permission.SystemMetadataRead, admin: true }) getReverseGeocodingState(): Promise { return this.service.getReverseGeocodingState(); } @Get('version-check-state') - @Authenticated({ permission: Permission.SYSTEM_METADATA_READ, admin: true }) + @Authenticated({ permission: Permission.SystemMetadataRead, admin: true }) getVersionCheckState(): Promise { return this.service.getVersionCheckState(); } diff --git a/server/src/controllers/tag.controller.ts b/server/src/controllers/tag.controller.ts index 
cf6b8ac695..4906bc0c6e 100644 --- a/server/src/controllers/tag.controller.ts +++ b/server/src/controllers/tag.controller.ts @@ -21,50 +21,50 @@ export class TagController { constructor(private service: TagService) {} @Post() - @Authenticated({ permission: Permission.TAG_CREATE }) + @Authenticated({ permission: Permission.TagCreate }) createTag(@Auth() auth: AuthDto, @Body() dto: TagCreateDto): Promise { return this.service.create(auth, dto); } @Get() - @Authenticated({ permission: Permission.TAG_READ }) + @Authenticated({ permission: Permission.TagRead }) getAllTags(@Auth() auth: AuthDto): Promise { return this.service.getAll(auth); } @Put() - @Authenticated({ permission: Permission.TAG_CREATE }) + @Authenticated({ permission: Permission.TagCreate }) upsertTags(@Auth() auth: AuthDto, @Body() dto: TagUpsertDto): Promise { return this.service.upsert(auth, dto); } @Put('assets') - @Authenticated({ permission: Permission.TAG_ASSET }) + @Authenticated({ permission: Permission.TagAsset }) bulkTagAssets(@Auth() auth: AuthDto, @Body() dto: TagBulkAssetsDto): Promise { return this.service.bulkTagAssets(auth, dto); } @Get(':id') - @Authenticated({ permission: Permission.TAG_READ }) + @Authenticated({ permission: Permission.TagRead }) getTagById(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.get(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.TAG_UPDATE }) + @Authenticated({ permission: Permission.TagUpdate }) updateTag(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Body() dto: TagUpdateDto): Promise { return this.service.update(auth, id, dto); } @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) - @Authenticated({ permission: Permission.TAG_DELETE }) + @Authenticated({ permission: Permission.TagDelete }) deleteTag(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.remove(auth, id); } @Put(':id/assets') - @Authenticated({ permission: Permission.TAG_ASSET }) + @Authenticated({ 
permission: Permission.TagAsset }) tagAssets( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -74,7 +74,7 @@ export class TagController { } @Delete(':id/assets') - @Authenticated({ permission: Permission.TAG_ASSET }) + @Authenticated({ permission: Permission.TagAsset }) untagAssets( @Auth() auth: AuthDto, @Body() dto: BulkIdsDto, diff --git a/server/src/controllers/timeline.controller.ts b/server/src/controllers/timeline.controller.ts index b4ee042625..8cab840ec8 100644 --- a/server/src/controllers/timeline.controller.ts +++ b/server/src/controllers/timeline.controller.ts @@ -12,13 +12,13 @@ export class TimelineController { constructor(private service: TimelineService) {} @Get('buckets') - @Authenticated({ permission: Permission.ASSET_READ, sharedLink: true }) + @Authenticated({ permission: Permission.AssetRead, sharedLink: true }) getTimeBuckets(@Auth() auth: AuthDto, @Query() dto: TimeBucketDto) { return this.service.getTimeBuckets(auth, dto); } @Get('bucket') - @Authenticated({ permission: Permission.ASSET_READ, sharedLink: true }) + @Authenticated({ permission: Permission.AssetRead, sharedLink: true }) @ApiOkResponse({ type: TimeBucketAssetResponseDto }) @Header('Content-Type', 'application/json') getTimeBucket(@Auth() auth: AuthDto, @Query() dto: TimeBucketAssetDto) { diff --git a/server/src/controllers/trash.controller.ts b/server/src/controllers/trash.controller.ts index dfcdfa6ba2..1bb46e4f98 100644 --- a/server/src/controllers/trash.controller.ts +++ b/server/src/controllers/trash.controller.ts @@ -14,21 +14,21 @@ export class TrashController { @Post('empty') @HttpCode(HttpStatus.OK) - @Authenticated({ permission: Permission.ASSET_DELETE }) + @Authenticated({ permission: Permission.AssetDelete }) emptyTrash(@Auth() auth: AuthDto): Promise { return this.service.empty(auth); } @Post('restore') @HttpCode(HttpStatus.OK) - @Authenticated({ permission: Permission.ASSET_DELETE }) + @Authenticated({ permission: Permission.AssetDelete }) 
restoreTrash(@Auth() auth: AuthDto): Promise { return this.service.restore(auth); } @Post('restore/assets') @HttpCode(HttpStatus.OK) - @Authenticated({ permission: Permission.ASSET_DELETE }) + @Authenticated({ permission: Permission.AssetDelete }) restoreAssets(@Auth() auth: AuthDto, @Body() dto: BulkIdsDto): Promise { return this.service.restoreAssets(auth, dto); } diff --git a/server/src/controllers/user-admin.controller.ts b/server/src/controllers/user-admin.controller.ts index 83d7caef08..d50bd174ad 100644 --- a/server/src/controllers/user-admin.controller.ts +++ b/server/src/controllers/user-admin.controller.ts @@ -21,25 +21,25 @@ export class UserAdminController { constructor(private service: UserAdminService) {} @Get() - @Authenticated({ permission: Permission.ADMIN_USER_READ, admin: true }) + @Authenticated({ permission: Permission.AdminUserRead, admin: true }) searchUsersAdmin(@Auth() auth: AuthDto, @Query() dto: UserAdminSearchDto): Promise { return this.service.search(auth, dto); } @Post() - @Authenticated({ permission: Permission.ADMIN_USER_CREATE, admin: true }) + @Authenticated({ permission: Permission.AdminUserCreate, admin: true }) createUserAdmin(@Body() createUserDto: UserAdminCreateDto): Promise { return this.service.create(createUserDto); } @Get(':id') - @Authenticated({ permission: Permission.ADMIN_USER_READ, admin: true }) + @Authenticated({ permission: Permission.AdminUserRead, admin: true }) getUserAdmin(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.get(auth, id); } @Put(':id') - @Authenticated({ permission: Permission.ADMIN_USER_UPDATE, admin: true }) + @Authenticated({ permission: Permission.AdminUserUpdate, admin: true }) updateUserAdmin( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -49,7 +49,7 @@ export class UserAdminController { } @Delete(':id') - @Authenticated({ permission: Permission.ADMIN_USER_DELETE, admin: true }) + @Authenticated({ permission: Permission.AdminUserDelete, admin: 
true }) deleteUserAdmin( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -59,7 +59,7 @@ export class UserAdminController { } @Get(':id/statistics') - @Authenticated({ permission: Permission.ADMIN_USER_READ, admin: true }) + @Authenticated({ permission: Permission.AdminUserRead, admin: true }) getUserStatisticsAdmin( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -69,13 +69,13 @@ export class UserAdminController { } @Get(':id/preferences') - @Authenticated({ permission: Permission.ADMIN_USER_READ, admin: true }) + @Authenticated({ permission: Permission.AdminUserRead, admin: true }) getUserPreferencesAdmin(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.getPreferences(auth, id); } @Put(':id/preferences') - @Authenticated({ permission: Permission.ADMIN_USER_UPDATE, admin: true }) + @Authenticated({ permission: Permission.AdminUserUpdate, admin: true }) updateUserPreferencesAdmin( @Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @@ -85,7 +85,7 @@ export class UserAdminController { } @Post(':id/restore') - @Authenticated({ permission: Permission.ADMIN_USER_DELETE, admin: true }) + @Authenticated({ permission: Permission.AdminUserDelete, admin: true }) @HttpCode(HttpStatus.OK) restoreUserAdmin(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise { return this.service.restore(auth, id); diff --git a/server/src/controllers/user.controller.ts b/server/src/controllers/user.controller.ts index 6c6eae15ff..76d2cf4c5a 100644 --- a/server/src/controllers/user.controller.ts +++ b/server/src/controllers/user.controller.ts @@ -30,7 +30,7 @@ import { sendFile } from 'src/utils/file'; import { UUIDParamDto } from 'src/validation'; @ApiTags('Users') -@Controller(RouteKey.USER) +@Controller(RouteKey.User) export class UserController { constructor( private service: UserService, diff --git a/server/src/cores/storage.core.ts b/server/src/cores/storage.core.ts index 1a8e31e86b..6576b397e3 100644 --- 
a/server/src/cores/storage.core.ts +++ b/server/src/cores/storage.core.ts @@ -25,8 +25,8 @@ export interface MoveRequest { }; } -export type GeneratedImageType = AssetPathType.PREVIEW | AssetPathType.THUMBNAIL | AssetPathType.FULLSIZE; -export type GeneratedAssetType = GeneratedImageType | AssetPathType.ENCODED_VIDEO; +export type GeneratedImageType = AssetPathType.Preview | AssetPathType.Thumbnail | AssetPathType.FullSize; +export type GeneratedAssetType = GeneratedImageType | AssetPathType.EncodedVideo; export type ThumbnailPathEntity = { id: string; ownerId: string }; @@ -79,7 +79,7 @@ export class StorageCore { } static getLibraryFolder(user: { storageLabel: string | null; id: string }) { - return join(StorageCore.getBaseFolder(StorageFolder.LIBRARY), user.storageLabel || user.id); + return join(StorageCore.getBaseFolder(StorageFolder.Library), user.storageLabel || user.id); } static getBaseFolder(folder: StorageFolder) { @@ -87,23 +87,23 @@ export class StorageCore { } static getPersonThumbnailPath(person: ThumbnailPathEntity) { - return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, person.ownerId, `${person.id}.jpeg`); + return StorageCore.getNestedPath(StorageFolder.Thumbnails, person.ownerId, `${person.id}.jpeg`); } static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: 'jpeg' | 'webp') { - return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, asset.ownerId, `${asset.id}-${type}.${format}`); + return StorageCore.getNestedPath(StorageFolder.Thumbnails, asset.ownerId, `${asset.id}-${type}.${format}`); } static getEncodedVideoPath(asset: ThumbnailPathEntity) { - return StorageCore.getNestedPath(StorageFolder.ENCODED_VIDEO, asset.ownerId, `${asset.id}.mp4`); + return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${asset.id}.mp4`); } static getAndroidMotionPath(asset: ThumbnailPathEntity, uuid: string) { - return StorageCore.getNestedPath(StorageFolder.ENCODED_VIDEO, asset.ownerId, `${uuid}-MP.mp4`); 
+ return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${uuid}-MP.mp4`); } static isAndroidMotionPath(originalPath: string) { - return originalPath.startsWith(StorageCore.getBaseFolder(StorageFolder.ENCODED_VIDEO)); + return originalPath.startsWith(StorageCore.getBaseFolder(StorageFolder.EncodedVideo)); } static isImmichPath(path: string) { @@ -130,7 +130,7 @@ export class StorageCore { async moveAssetVideo(asset: StorageAsset) { return this.moveFile({ entityId: asset.id, - pathType: AssetPathType.ENCODED_VIDEO, + pathType: AssetPathType.EncodedVideo, oldPath: asset.encodedVideoPath, newPath: StorageCore.getEncodedVideoPath(asset), }); @@ -139,7 +139,7 @@ export class StorageCore { async movePersonFile(person: { id: string; ownerId: string; thumbnailPath: string }, pathType: PersonPathType) { const { id: entityId, thumbnailPath } = person; switch (pathType) { - case PersonPathType.FACE: { + case PersonPathType.Face: { await this.moveFile({ entityId, pathType, @@ -188,7 +188,7 @@ export class StorageCore { move = await this.moveRepository.create({ entityId, pathType, oldPath, newPath }); } - if (pathType === AssetPathType.ORIGINAL && !assetInfo) { + if (pathType === AssetPathType.Original && !assetInfo) { this.logger.warn(`Unable to complete move. 
Missing asset info for ${entityId}`); return; } @@ -274,25 +274,25 @@ export class StorageCore { private savePath(pathType: PathType, id: string, newPath: string) { switch (pathType) { - case AssetPathType.ORIGINAL: { + case AssetPathType.Original: { return this.assetRepository.update({ id, originalPath: newPath }); } - case AssetPathType.FULLSIZE: { - return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.FULLSIZE, path: newPath }); + case AssetPathType.FullSize: { + return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.FullSize, path: newPath }); } - case AssetPathType.PREVIEW: { - return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.PREVIEW, path: newPath }); + case AssetPathType.Preview: { + return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Preview, path: newPath }); } - case AssetPathType.THUMBNAIL: { - return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.THUMBNAIL, path: newPath }); + case AssetPathType.Thumbnail: { + return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Thumbnail, path: newPath }); } - case AssetPathType.ENCODED_VIDEO: { + case AssetPathType.EncodedVideo: { return this.assetRepository.update({ id, encodedVideoPath: newPath }); } - case AssetPathType.SIDECAR: { + case AssetPathType.Sidecar: { return this.assetRepository.update({ id, sidecarPath: newPath }); } - case PersonPathType.FACE: { + case PersonPathType.Face: { return this.personRepository.update({ id, thumbnailPath: newPath }); } } diff --git a/server/src/decorators.ts b/server/src/decorators.ts index 766e7c70b9..b88f2d2d7e 100644 --- a/server/src/decorators.ts +++ b/server/src/decorators.ts @@ -131,7 +131,7 @@ export interface GenerateSqlQueries { } export const Telemetry = (options: { enabled?: boolean }) => - SetMetadata(MetadataKey.TELEMETRY_ENABLED, options?.enabled ?? true); + SetMetadata(MetadataKey.TelemetryEnabled, options?.enabled ?? 
true); /** Decorator to enable versioning/tracking of generated Sql */ export const GenerateSql = (...options: GenerateSqlQueries[]) => SetMetadata(GENERATE_SQL_KEY, options); @@ -145,13 +145,13 @@ export type EventConfig = { /** register events for these workers, defaults to all workers */ workers?: ImmichWorker[]; }; -export const OnEvent = (config: EventConfig) => SetMetadata(MetadataKey.EVENT_CONFIG, config); +export const OnEvent = (config: EventConfig) => SetMetadata(MetadataKey.EventConfig, config); export type JobConfig = { name: JobName; queue: QueueName; }; -export const OnJob = (config: JobConfig) => SetMetadata(MetadataKey.JOB_CONFIG, config); +export const OnJob = (config: JobConfig) => SetMetadata(MetadataKey.JobConfig, config); type LifecycleRelease = 'NEXT_RELEASE' | string; type LifecycleMetadata = { diff --git a/server/src/dtos/album.dto.ts b/server/src/dtos/album.dto.ts index c6cde0894f..3a88ba5be3 100644 --- a/server/src/dtos/album.dto.ts +++ b/server/src/dtos/album.dto.ts @@ -18,7 +18,7 @@ export class AlbumUserAddDto { @ValidateUUID() userId!: string; - @ValidateEnum({ enum: AlbumUserRole, name: 'AlbumUserRole', default: AlbumUserRole.EDITOR }) + @ValidateEnum({ enum: AlbumUserRole, name: 'AlbumUserRole', default: AlbumUserRole.Editor }) role?: AlbumUserRole; } diff --git a/server/src/dtos/asset-response.dto.ts b/server/src/dtos/asset-response.dto.ts index 5b587e59ba..98ed8669f0 100644 --- a/server/src/dtos/asset-response.dto.ts +++ b/server/src/dtos/asset-response.dto.ts @@ -205,7 +205,7 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset localDateTime: entity.localDateTime, updatedAt: entity.updatedAt, isFavorite: options.auth?.user.id === entity.ownerId ? entity.isFavorite : false, - isArchived: entity.visibility === AssetVisibility.ARCHIVE, + isArchived: entity.visibility === AssetVisibility.Archive, isTrashed: !!entity.deletedAt, visibility: entity.visibility, duration: entity.duration ?? 
'0:00:00.00000', diff --git a/server/src/dtos/asset.dto.ts b/server/src/dtos/asset.dto.ts index 727ab1625d..5728d21646 100644 --- a/server/src/dtos/asset.dto.ts +++ b/server/src/dtos/asset.dto.ts @@ -126,8 +126,8 @@ export class AssetStatsResponseDto { export const mapStats = (stats: AssetStats): AssetStatsResponseDto => { return { - images: stats[AssetType.IMAGE], - videos: stats[AssetType.VIDEO], + images: stats[AssetType.Image], + videos: stats[AssetType.Video], total: Object.values(stats).reduce((total, value) => total + value, 0), }; }; diff --git a/server/src/dtos/auth.dto.ts b/server/src/dtos/auth.dto.ts index e94818b2b5..2bb98b34a5 100644 --- a/server/src/dtos/auth.dto.ts +++ b/server/src/dtos/auth.dto.ts @@ -45,7 +45,7 @@ export class LoginResponseDto { export function mapLoginResponse(entity: UserAdmin, accessToken: string): LoginResponseDto { const onboardingMetadata = entity.metadata.find( - (item): item is UserMetadataItem => item.key === UserMetadataKey.ONBOARDING, + (item): item is UserMetadataItem => item.key === UserMetadataKey.Onboarding, )?.value; return { diff --git a/server/src/dtos/job.dto.ts b/server/src/dtos/job.dto.ts index 60124c877a..2123b65878 100644 --- a/server/src/dtos/job.dto.ts +++ b/server/src/dtos/job.dto.ts @@ -50,47 +50,47 @@ export class JobStatusDto { export class AllJobStatusResponseDto implements Record { @ApiProperty({ type: JobStatusDto }) - [QueueName.THUMBNAIL_GENERATION]!: JobStatusDto; + [QueueName.ThumbnailGeneration]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.METADATA_EXTRACTION]!: JobStatusDto; + [QueueName.MetadataExtraction]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.VIDEO_CONVERSION]!: JobStatusDto; + [QueueName.VideoConversion]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.SMART_SEARCH]!: JobStatusDto; + [QueueName.SmartSearch]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.STORAGE_TEMPLATE_MIGRATION]!: JobStatusDto; + 
[QueueName.StorageTemplateMigration]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.MIGRATION]!: JobStatusDto; + [QueueName.Migration]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.BACKGROUND_TASK]!: JobStatusDto; + [QueueName.BackgroundTask]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.SEARCH]!: JobStatusDto; + [QueueName.Search]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.DUPLICATE_DETECTION]!: JobStatusDto; + [QueueName.DuplicateDetection]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.FACE_DETECTION]!: JobStatusDto; + [QueueName.FaceDetection]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.FACIAL_RECOGNITION]!: JobStatusDto; + [QueueName.FacialRecognition]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.SIDECAR]!: JobStatusDto; + [QueueName.Sidecar]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.LIBRARY]!: JobStatusDto; + [QueueName.Library]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.NOTIFICATION]!: JobStatusDto; + [QueueName.Notification]!: JobStatusDto; @ApiProperty({ type: JobStatusDto }) - [QueueName.BACKUP_DATABASE]!: JobStatusDto; + [QueueName.BackupDatabase]!: JobStatusDto; } diff --git a/server/src/dtos/memory.dto.ts b/server/src/dtos/memory.dto.ts index e92e11bdfb..a79511c73e 100644 --- a/server/src/dtos/memory.dto.ts +++ b/server/src/dtos/memory.dto.ts @@ -50,7 +50,7 @@ export class MemoryCreateDto extends MemoryBaseDto { @ValidateNested() @Type((options) => { switch (options?.object.type) { - case MemoryType.ON_THIS_DAY: { + case MemoryType.OnThisDay: { return OnThisDayDto; } diff --git a/server/src/dtos/search.dto.ts b/server/src/dtos/search.dto.ts index 85c6fbf0de..aef78e51ea 100644 --- a/server/src/dtos/search.dto.ts +++ b/server/src/dtos/search.dto.ts @@ -170,7 +170,7 @@ export class MetadataSearchDto extends RandomSearchDto { @Optional() encodedVideoPath?: string; - 
@ValidateEnum({ enum: AssetOrder, name: 'AssetOrder', optional: true, default: AssetOrder.DESC }) + @ValidateEnum({ enum: AssetOrder, name: 'AssetOrder', optional: true, default: AssetOrder.Desc }) order?: AssetOrder; @IsInt() diff --git a/server/src/dtos/system-config.dto.ts b/server/src/dtos/system-config.dto.ts index 809f381dd6..8a58995de7 100644 --- a/server/src/dtos/system-config.dto.ts +++ b/server/src/dtos/system-config.dto.ts @@ -26,7 +26,7 @@ import { OAuthTokenEndpointAuthMethod, QueueName, ToneMapping, - TranscodeHWAccel, + TranscodeHardwareAcceleration, TranscodePolicy, VideoCodec, VideoContainer, @@ -136,8 +136,8 @@ export class SystemConfigFFmpegDto { @ValidateEnum({ enum: TranscodePolicy, name: 'TranscodePolicy' }) transcode!: TranscodePolicy; - @ValidateEnum({ enum: TranscodeHWAccel, name: 'TranscodeHWAccel' }) - accel!: TranscodeHWAccel; + @ValidateEnum({ enum: TranscodeHardwareAcceleration, name: 'TranscodeHWAccel' }) + accel!: TranscodeHardwareAcceleration; @ValidateBoolean() accelDecode!: boolean; @@ -158,67 +158,67 @@ class SystemConfigJobDto implements Record @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.THUMBNAIL_GENERATION]!: JobSettingsDto; + [QueueName.ThumbnailGeneration]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.METADATA_EXTRACTION]!: JobSettingsDto; + [QueueName.MetadataExtraction]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.VIDEO_CONVERSION]!: JobSettingsDto; + [QueueName.VideoConversion]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.SMART_SEARCH]!: JobSettingsDto; + [QueueName.SmartSearch]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.MIGRATION]!: JobSettingsDto; + 
[QueueName.Migration]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.BACKGROUND_TASK]!: JobSettingsDto; + [QueueName.BackgroundTask]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.SEARCH]!: JobSettingsDto; + [QueueName.Search]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.FACE_DETECTION]!: JobSettingsDto; + [QueueName.FaceDetection]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.SIDECAR]!: JobSettingsDto; + [QueueName.Sidecar]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.LIBRARY]!: JobSettingsDto; + [QueueName.Library]!: JobSettingsDto; @ApiProperty({ type: JobSettingsDto }) @ValidateNested() @IsObject() @Type(() => JobSettingsDto) - [QueueName.NOTIFICATION]!: JobSettingsDto; + [QueueName.Notification]!: JobSettingsDto; } class SystemConfigLibraryScanDto { diff --git a/server/src/dtos/user-preferences.dto.ts b/server/src/dtos/user-preferences.dto.ts index d165438061..b258158ae2 100644 --- a/server/src/dtos/user-preferences.dto.ts +++ b/server/src/dtos/user-preferences.dto.ts @@ -157,7 +157,7 @@ export class UserPreferencesUpdateDto { class AlbumsResponse { @ValidateEnum({ enum: AssetOrder, name: 'AssetOrder' }) - defaultAssetOrder: AssetOrder = AssetOrder.DESC; + defaultAssetOrder: AssetOrder = AssetOrder.Desc; } class RatingsResponse { diff --git a/server/src/dtos/user.dto.ts b/server/src/dtos/user.dto.ts index 3e3a92d42e..0da86bfcb5 100644 --- a/server/src/dtos/user.dto.ts +++ b/server/src/dtos/user.dto.ts @@ -171,7 +171,7 @@ export class UserAdminResponseDto extends UserResponseDto { export function mapUserAdmin(entity: UserAdmin): UserAdminResponseDto { const 
metadata = entity.metadata || []; const license = metadata.find( - (item): item is UserMetadataItem => item.key === UserMetadataKey.LICENSE, + (item): item is UserMetadataItem => item.key === UserMetadataKey.License, )?.value; return { ...mapUser(entity), diff --git a/server/src/enum.ts b/server/src/enum.ts index d7c74a71c6..1d8b8645d1 100644 --- a/server/src/enum.ts +++ b/server/src/enum.ts @@ -1,402 +1,397 @@ export enum AuthType { - PASSWORD = 'password', - OAUTH = 'oauth', + Password = 'password', + OAuth = 'oauth', } export enum ImmichCookie { - ACCESS_TOKEN = 'immich_access_token', - AUTH_TYPE = 'immich_auth_type', - IS_AUTHENTICATED = 'immich_is_authenticated', - SHARED_LINK_TOKEN = 'immich_shared_link_token', - OAUTH_STATE = 'immich_oauth_state', - OAUTH_CODE_VERIFIER = 'immich_oauth_code_verifier', + AccessToken = 'immich_access_token', + AuthType = 'immich_auth_type', + IsAuthenticated = 'immich_is_authenticated', + SharedLinkToken = 'immich_shared_link_token', + OAuthState = 'immich_oauth_state', + OAuthCodeVerifier = 'immich_oauth_code_verifier', } export enum ImmichHeader { - API_KEY = 'x-api-key', - USER_TOKEN = 'x-immich-user-token', - SESSION_TOKEN = 'x-immich-session-token', - SHARED_LINK_KEY = 'x-immich-share-key', - CHECKSUM = 'x-immich-checksum', - CID = 'x-immich-cid', + ApiKey = 'x-api-key', + UserToken = 'x-immich-user-token', + SessionToken = 'x-immich-session-token', + SharedLinkKey = 'x-immich-share-key', + Checksum = 'x-immich-checksum', + Cid = 'x-immich-cid', } export enum ImmichQuery { - SHARED_LINK_KEY = 'key', - API_KEY = 'apiKey', - SESSION_KEY = 'sessionKey', + SharedLinkKey = 'key', + ApiKey = 'apiKey', + SessionKey = 'sessionKey', } export enum AssetType { - IMAGE = 'IMAGE', - VIDEO = 'VIDEO', - AUDIO = 'AUDIO', - OTHER = 'OTHER', + Image = 'IMAGE', + Video = 'VIDEO', + Audio = 'AUDIO', + Other = 'OTHER', } export enum AssetFileType { /** * An full/large-size image extracted/converted from RAW photos */ - FULLSIZE = 'fullsize', - 
PREVIEW = 'preview', - THUMBNAIL = 'thumbnail', + FullSize = 'fullsize', + Preview = 'preview', + Thumbnail = 'thumbnail', } export enum AlbumUserRole { - EDITOR = 'editor', - VIEWER = 'viewer', + Editor = 'editor', + Viewer = 'viewer', } export enum AssetOrder { - ASC = 'asc', - DESC = 'desc', + Asc = 'asc', + Desc = 'desc', } export enum DatabaseAction { - CREATE = 'CREATE', - UPDATE = 'UPDATE', - DELETE = 'DELETE', + Create = 'CREATE', + Update = 'UPDATE', + Delete = 'DELETE', } export enum EntityType { - ASSET = 'ASSET', - ALBUM = 'ALBUM', + Asset = 'ASSET', + Album = 'ALBUM', } export enum MemoryType { /** pictures taken on this day X years ago */ - ON_THIS_DAY = 'on_this_day', + OnThisDay = 'on_this_day', } export enum Permission { - ALL = 'all', + All = 'all', - ACTIVITY_CREATE = 'activity.create', - ACTIVITY_READ = 'activity.read', - ACTIVITY_UPDATE = 'activity.update', - ACTIVITY_DELETE = 'activity.delete', - ACTIVITY_STATISTICS = 'activity.statistics', + ActivityCreate = 'activity.create', + ActivityRead = 'activity.read', + ActivityUpdate = 'activity.update', + ActivityDelete = 'activity.delete', + ActivityStatistics = 'activity.statistics', - API_KEY_CREATE = 'apiKey.create', - API_KEY_READ = 'apiKey.read', - API_KEY_UPDATE = 'apiKey.update', - API_KEY_DELETE = 'apiKey.delete', + ApiKeyCreate = 'apiKey.create', + ApiKeyRead = 'apiKey.read', + ApiKeyUpdate = 'apiKey.update', + ApiKeyDelete = 'apiKey.delete', // ASSET_CREATE = 'asset.create', - ASSET_READ = 'asset.read', - ASSET_UPDATE = 'asset.update', - ASSET_DELETE = 'asset.delete', - ASSET_SHARE = 'asset.share', - ASSET_VIEW = 'asset.view', - ASSET_DOWNLOAD = 'asset.download', - ASSET_UPLOAD = 'asset.upload', + AssetRead = 'asset.read', + AssetUpdate = 'asset.update', + AssetDelete = 'asset.delete', + AssetShare = 'asset.share', + AssetView = 'asset.view', + AssetDownload = 'asset.download', + AssetUpload = 'asset.upload', - ALBUM_CREATE = 'album.create', - ALBUM_READ = 'album.read', - ALBUM_UPDATE = 
'album.update', - ALBUM_DELETE = 'album.delete', - ALBUM_STATISTICS = 'album.statistics', + AlbumCreate = 'album.create', + AlbumRead = 'album.read', + AlbumUpdate = 'album.update', + AlbumDelete = 'album.delete', + AlbumStatistics = 'album.statistics', - ALBUM_ADD_ASSET = 'album.addAsset', - ALBUM_REMOVE_ASSET = 'album.removeAsset', - ALBUM_SHARE = 'album.share', - ALBUM_DOWNLOAD = 'album.download', + AlbumAddAsset = 'album.addAsset', + AlbumRemoveAsset = 'album.removeAsset', + AlbumShare = 'album.share', + AlbumDownload = 'album.download', - AUTH_DEVICE_DELETE = 'authDevice.delete', + AuthDeviceDelete = 'authDevice.delete', - ARCHIVE_READ = 'archive.read', + ArchiveRead = 'archive.read', - FACE_CREATE = 'face.create', - FACE_READ = 'face.read', - FACE_UPDATE = 'face.update', - FACE_DELETE = 'face.delete', + FaceCreate = 'face.create', + FaceRead = 'face.read', + FaceUpdate = 'face.update', + FaceDelete = 'face.delete', - LIBRARY_CREATE = 'library.create', - LIBRARY_READ = 'library.read', - LIBRARY_UPDATE = 'library.update', - LIBRARY_DELETE = 'library.delete', - LIBRARY_STATISTICS = 'library.statistics', + LibraryCreate = 'library.create', + LibraryRead = 'library.read', + LibraryUpdate = 'library.update', + LibraryDelete = 'library.delete', + LibraryStatistics = 'library.statistics', - TIMELINE_READ = 'timeline.read', - TIMELINE_DOWNLOAD = 'timeline.download', + TimelineRead = 'timeline.read', + TimelineDownload = 'timeline.download', - MEMORY_CREATE = 'memory.create', - MEMORY_READ = 'memory.read', - MEMORY_UPDATE = 'memory.update', - MEMORY_DELETE = 'memory.delete', + MemoryCreate = 'memory.create', + MemoryRead = 'memory.read', + MemoryUpdate = 'memory.update', + MemoryDelete = 'memory.delete', - NOTIFICATION_CREATE = 'notification.create', - NOTIFICATION_READ = 'notification.read', - NOTIFICATION_UPDATE = 'notification.update', - NOTIFICATION_DELETE = 'notification.delete', + NotificationCreate = 'notification.create', + NotificationRead = 
'notification.read', + NotificationUpdate = 'notification.update', + NotificationDelete = 'notification.delete', - PARTNER_CREATE = 'partner.create', - PARTNER_READ = 'partner.read', - PARTNER_UPDATE = 'partner.update', - PARTNER_DELETE = 'partner.delete', + PartnerCreate = 'partner.create', + PartnerRead = 'partner.read', + PartnerUpdate = 'partner.update', + PartnerDelete = 'partner.delete', - PERSON_CREATE = 'person.create', - PERSON_READ = 'person.read', - PERSON_UPDATE = 'person.update', - PERSON_DELETE = 'person.delete', - PERSON_STATISTICS = 'person.statistics', - PERSON_MERGE = 'person.merge', - PERSON_REASSIGN = 'person.reassign', + PersonCreate = 'person.create', + PersonRead = 'person.read', + PersonUpdate = 'person.update', + PersonDelete = 'person.delete', + PersonStatistics = 'person.statistics', + PersonMerge = 'person.merge', + PersonReassign = 'person.reassign', - SESSION_CREATE = 'session.create', - SESSION_READ = 'session.read', - SESSION_UPDATE = 'session.update', - SESSION_DELETE = 'session.delete', - SESSION_LOCK = 'session.lock', + SessionCreate = 'session.create', + SessionRead = 'session.read', + SessionUpdate = 'session.update', + SessionDelete = 'session.delete', + SessionLock = 'session.lock', - SHARED_LINK_CREATE = 'sharedLink.create', - SHARED_LINK_READ = 'sharedLink.read', - SHARED_LINK_UPDATE = 'sharedLink.update', - SHARED_LINK_DELETE = 'sharedLink.delete', + SharedLinkCreate = 'sharedLink.create', + SharedLinkRead = 'sharedLink.read', + SharedLinkUpdate = 'sharedLink.update', + SharedLinkDelete = 'sharedLink.delete', - STACK_CREATE = 'stack.create', - STACK_READ = 'stack.read', - STACK_UPDATE = 'stack.update', - STACK_DELETE = 'stack.delete', + StackCreate = 'stack.create', + StackRead = 'stack.read', + StackUpdate = 'stack.update', + StackDelete = 'stack.delete', - SYSTEM_CONFIG_READ = 'systemConfig.read', - SYSTEM_CONFIG_UPDATE = 'systemConfig.update', + SystemConfigRead = 'systemConfig.read', + SystemConfigUpdate = 
'systemConfig.update', - SYSTEM_METADATA_READ = 'systemMetadata.read', - SYSTEM_METADATA_UPDATE = 'systemMetadata.update', + SystemMetadataRead = 'systemMetadata.read', + SystemMetadataUpdate = 'systemMetadata.update', - TAG_CREATE = 'tag.create', - TAG_READ = 'tag.read', - TAG_UPDATE = 'tag.update', - TAG_DELETE = 'tag.delete', - TAG_ASSET = 'tag.asset', + TagCreate = 'tag.create', + TagRead = 'tag.read', + TagUpdate = 'tag.update', + TagDelete = 'tag.delete', + TagAsset = 'tag.asset', - ADMIN_USER_CREATE = 'admin.user.create', - ADMIN_USER_READ = 'admin.user.read', - ADMIN_USER_UPDATE = 'admin.user.update', - ADMIN_USER_DELETE = 'admin.user.delete', + AdminUserCreate = 'admin.user.create', + AdminUserRead = 'admin.user.read', + AdminUserUpdate = 'admin.user.update', + AdminUserDelete = 'admin.user.delete', } export enum SharedLinkType { - ALBUM = 'ALBUM', + Album = 'ALBUM', /** * Individual asset * or group of assets that are not in an album */ - INDIVIDUAL = 'INDIVIDUAL', + Individual = 'INDIVIDUAL', } export enum StorageFolder { - ENCODED_VIDEO = 'encoded-video', - LIBRARY = 'library', - UPLOAD = 'upload', - PROFILE = 'profile', - THUMBNAILS = 'thumbs', - BACKUPS = 'backups', + EncodedVideo = 'encoded-video', + Library = 'library', + Upload = 'upload', + Profile = 'profile', + Thumbnails = 'thumbs', + Backups = 'backups', } export enum SystemMetadataKey { - REVERSE_GEOCODING_STATE = 'reverse-geocoding-state', - FACIAL_RECOGNITION_STATE = 'facial-recognition-state', - MEMORIES_STATE = 'memories-state', - ADMIN_ONBOARDING = 'admin-onboarding', - SYSTEM_CONFIG = 'system-config', - SYSTEM_FLAGS = 'system-flags', - VERSION_CHECK_STATE = 'version-check-state', - LICENSE = 'license', + ReverseGeocodingState = 'reverse-geocoding-state', + FacialRecognitionState = 'facial-recognition-state', + MemoriesState = 'memories-state', + AdminOnboarding = 'admin-onboarding', + SystemConfig = 'system-config', + SystemFlags = 'system-flags', + VersionCheckState = 
'version-check-state', + License = 'license', } export enum UserMetadataKey { - PREFERENCES = 'preferences', - LICENSE = 'license', - ONBOARDING = 'onboarding', + Preferences = 'preferences', + License = 'license', + Onboarding = 'onboarding', } export enum UserAvatarColor { - PRIMARY = 'primary', - PINK = 'pink', - RED = 'red', - YELLOW = 'yellow', - BLUE = 'blue', - GREEN = 'green', - PURPLE = 'purple', - ORANGE = 'orange', - GRAY = 'gray', - AMBER = 'amber', + Primary = 'primary', + Pink = 'pink', + Red = 'red', + Yellow = 'yellow', + Blue = 'blue', + Green = 'green', + Purple = 'purple', + Orange = 'orange', + Gray = 'gray', + Amber = 'amber', } export enum UserStatus { - ACTIVE = 'active', - REMOVING = 'removing', - DELETED = 'deleted', + Active = 'active', + Removing = 'removing', + Deleted = 'deleted', } export enum AssetStatus { - ACTIVE = 'active', - TRASHED = 'trashed', - DELETED = 'deleted', + Active = 'active', + Trashed = 'trashed', + Deleted = 'deleted', } export enum SourceType { - MACHINE_LEARNING = 'machine-learning', - EXIF = 'exif', - MANUAL = 'manual', + MachineLearning = 'machine-learning', + Exif = 'exif', + Manual = 'manual', } export enum ManualJobName { - PERSON_CLEANUP = 'person-cleanup', - TAG_CLEANUP = 'tag-cleanup', - USER_CLEANUP = 'user-cleanup', - MEMORY_CLEANUP = 'memory-cleanup', - MEMORY_CREATE = 'memory-create', - BACKUP_DATABASE = 'backup-database', + PersonCleanup = 'person-cleanup', + TagCleanup = 'tag-cleanup', + UserCleanup = 'user-cleanup', + MemoryCleanup = 'memory-cleanup', + MemoryCreate = 'memory-create', + BackupDatabase = 'backup-database', } export enum AssetPathType { - ORIGINAL = 'original', - FULLSIZE = 'fullsize', - PREVIEW = 'preview', - THUMBNAIL = 'thumbnail', - ENCODED_VIDEO = 'encoded_video', - SIDECAR = 'sidecar', + Original = 'original', + FullSize = 'fullsize', + Preview = 'preview', + Thumbnail = 'thumbnail', + EncodedVideo = 'encoded_video', + Sidecar = 'sidecar', } export enum PersonPathType { - FACE = 
'face', + Face = 'face', } export enum UserPathType { - PROFILE = 'profile', + Profile = 'profile', } export type PathType = AssetPathType | PersonPathType | UserPathType; export enum TranscodePolicy { - ALL = 'all', - OPTIMAL = 'optimal', - BITRATE = 'bitrate', - REQUIRED = 'required', - DISABLED = 'disabled', + All = 'all', + Optimal = 'optimal', + Bitrate = 'bitrate', + Required = 'required', + Disabled = 'disabled', } export enum TranscodeTarget { - NONE, - AUDIO, - VIDEO, - ALL, + None = 'NONE', + Audio = 'AUDIO', + Video = 'VIDEO', + All = 'ALL', } export enum VideoCodec { H264 = 'h264', - HEVC = 'hevc', - VP9 = 'vp9', - AV1 = 'av1', + Hevc = 'hevc', + Vp9 = 'vp9', + Av1 = 'av1', } export enum AudioCodec { - MP3 = 'mp3', - AAC = 'aac', - LIBOPUS = 'libopus', - PCMS16LE = 'pcm_s16le', + Mp3 = 'mp3', + Aac = 'aac', + LibOpus = 'libopus', + PcmS16le = 'pcm_s16le', } export enum VideoContainer { - MOV = 'mov', - MP4 = 'mp4', - OGG = 'ogg', - WEBM = 'webm', + Mov = 'mov', + Mp4 = 'mp4', + Ogg = 'ogg', + Webm = 'webm', } -export enum TranscodeHWAccel { - NVENC = 'nvenc', - QSV = 'qsv', - VAAPI = 'vaapi', - RKMPP = 'rkmpp', - DISABLED = 'disabled', +export enum TranscodeHardwareAcceleration { + Nvenc = 'nvenc', + Qsv = 'qsv', + Vaapi = 'vaapi', + Rkmpp = 'rkmpp', + Disabled = 'disabled', } export enum ToneMapping { - HABLE = 'hable', - MOBIUS = 'mobius', - REINHARD = 'reinhard', - DISABLED = 'disabled', + Hable = 'hable', + Mobius = 'mobius', + Reinhard = 'reinhard', + Disabled = 'disabled', } export enum CQMode { - AUTO = 'auto', - CQP = 'cqp', - ICQ = 'icq', + Auto = 'auto', + Cqp = 'cqp', + Icq = 'icq', } export enum Colorspace { - SRGB = 'srgb', + Srgb = 'srgb', P3 = 'p3', } export enum ImageFormat { - JPEG = 'jpeg', - WEBP = 'webp', + Jpeg = 'jpeg', + Webp = 'webp', } export enum RawExtractedFormat { - JPEG = 'jpeg', - JXL = 'jxl', + Jpeg = 'jpeg', + Jxl = 'jxl', } export enum LogLevel { - VERBOSE = 'verbose', - DEBUG = 'debug', - LOG = 'log', - WARN = 'warn', 
- ERROR = 'error', - FATAL = 'fatal', + Verbose = 'verbose', + Debug = 'debug', + Log = 'log', + Warn = 'warn', + Error = 'error', + Fatal = 'fatal', } export enum MetadataKey { - AUTH_ROUTE = 'auth_route', - ADMIN_ROUTE = 'admin_route', - SHARED_ROUTE = 'shared_route', - API_KEY_SECURITY = 'api_key', - EVENT_CONFIG = 'event_config', - JOB_CONFIG = 'job_config', - TELEMETRY_ENABLED = 'telemetry_enabled', + AuthRoute = 'auth_route', + AdminRoute = 'admin_route', + SharedRoute = 'shared_route', + ApiKeySecurity = 'api_key', + EventConfig = 'event_config', + JobConfig = 'job_config', + TelemetryEnabled = 'telemetry_enabled', } export enum RouteKey { - ASSET = 'assets', - USER = 'users', + Asset = 'assets', + User = 'users', } export enum CacheControl { - PRIVATE_WITH_CACHE = 'private_with_cache', - PRIVATE_WITHOUT_CACHE = 'private_without_cache', - NONE = 'none', -} - -export enum PaginationMode { - LIMIT_OFFSET = 'limit-offset', - SKIP_TAKE = 'skip-take', + PrivateWithCache = 'private_with_cache', + PrivateWithoutCache = 'private_without_cache', + None = 'none', } export enum ImmichEnvironment { - DEVELOPMENT = 'development', - TESTING = 'testing', - PRODUCTION = 'production', + Development = 'development', + Testing = 'testing', + Production = 'production', } export enum ImmichWorker { - API = 'api', - MICROSERVICES = 'microservices', + Api = 'api', + Microservices = 'microservices', } export enum ImmichTelemetry { - HOST = 'host', - API = 'api', - IO = 'io', - REPO = 'repo', - JOB = 'job', + Host = 'host', + Api = 'api', + Io = 'io', + Repo = 'repo', + Job = 'job', } export enum ExifOrientation { @@ -411,11 +406,11 @@ export enum ExifOrientation { } export enum DatabaseExtension { - CUBE = 'cube', - EARTH_DISTANCE = 'earthdistance', - VECTOR = 'vector', - VECTORS = 'vectors', - VECTORCHORD = 'vchord', + Cube = 'cube', + EarthDistance = 'earthdistance', + Vector = 'vector', + Vectors = 'vectors', + VectorChord = 'vchord', } export enum BootstrapEventPriority { @@ 
-428,135 +423,135 @@ export enum BootstrapEventPriority { } export enum QueueName { - THUMBNAIL_GENERATION = 'thumbnailGeneration', - METADATA_EXTRACTION = 'metadataExtraction', - VIDEO_CONVERSION = 'videoConversion', - FACE_DETECTION = 'faceDetection', - FACIAL_RECOGNITION = 'facialRecognition', - SMART_SEARCH = 'smartSearch', - DUPLICATE_DETECTION = 'duplicateDetection', - BACKGROUND_TASK = 'backgroundTask', - STORAGE_TEMPLATE_MIGRATION = 'storageTemplateMigration', - MIGRATION = 'migration', - SEARCH = 'search', - SIDECAR = 'sidecar', - LIBRARY = 'library', - NOTIFICATION = 'notifications', - BACKUP_DATABASE = 'backupDatabase', + ThumbnailGeneration = 'thumbnailGeneration', + MetadataExtraction = 'metadataExtraction', + VideoConversion = 'videoConversion', + FaceDetection = 'faceDetection', + FacialRecognition = 'facialRecognition', + SmartSearch = 'smartSearch', + DuplicateDetection = 'duplicateDetection', + BackgroundTask = 'backgroundTask', + StorageTemplateMigration = 'storageTemplateMigration', + Migration = 'migration', + Search = 'search', + Sidecar = 'sidecar', + Library = 'library', + Notification = 'notifications', + BackupDatabase = 'backupDatabase', } export enum JobName { //backups - BACKUP_DATABASE = 'database-backup', + BackupDatabase = 'database-backup', // conversion - QUEUE_VIDEO_CONVERSION = 'queue-video-conversion', - VIDEO_CONVERSION = 'video-conversion', + QueueVideoConversion = 'queue-video-conversion', + VideoConversion = 'video-conversion', // thumbnails - QUEUE_GENERATE_THUMBNAILS = 'queue-generate-thumbnails', - GENERATE_THUMBNAILS = 'generate-thumbnails', - GENERATE_PERSON_THUMBNAIL = 'generate-person-thumbnail', + QueueGenerateThumbnails = 'queue-generate-thumbnails', + GenerateThumbnails = 'generate-thumbnails', + GeneratePersonThumbnail = 'generate-person-thumbnail', // metadata - QUEUE_METADATA_EXTRACTION = 'queue-metadata-extraction', - METADATA_EXTRACTION = 'metadata-extraction', + QueueMetadataExtraction = 
'queue-metadata-extraction', + MetadataExtraction = 'metadata-extraction', // user - USER_DELETION = 'user-deletion', - USER_DELETE_CHECK = 'user-delete-check', - USER_SYNC_USAGE = 'user-sync-usage', + UserDeletion = 'user-deletion', + UserDeleteCheck = 'user-delete-check', + UserSyncUsage = 'user-sync-usage', // asset - ASSET_DELETION = 'asset-deletion', - ASSET_DELETION_CHECK = 'asset-deletion-check', + AssetDeletion = 'asset-deletion', + AssetDeletionCheck = 'asset-deletion-check', // storage template - STORAGE_TEMPLATE_MIGRATION = 'storage-template-migration', - STORAGE_TEMPLATE_MIGRATION_SINGLE = 'storage-template-migration-single', + StorageTemplateMigration = 'storage-template-migration', + StorageTemplateMigrationSingle = 'storage-template-migration-single', // tags - TAG_CLEANUP = 'tag-cleanup', + TagCleanup = 'tag-cleanup', // migration - QUEUE_MIGRATION = 'queue-migration', - MIGRATE_ASSET = 'migrate-asset', - MIGRATE_PERSON = 'migrate-person', + QueueMigration = 'queue-migration', + MigrateAsset = 'migrate-asset', + MigratePerson = 'migrate-person', // facial recognition - PERSON_CLEANUP = 'person-cleanup', - QUEUE_FACE_DETECTION = 'queue-face-detection', - FACE_DETECTION = 'face-detection', - QUEUE_FACIAL_RECOGNITION = 'queue-facial-recognition', - FACIAL_RECOGNITION = 'facial-recognition', + PersonCleanup = 'person-cleanup', + QueueFaceDetection = 'queue-face-detection', + FaceDetection = 'face-detection', + QueueFacialRecognition = 'queue-facial-recognition', + FacialRecognition = 'facial-recognition', // library management - LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files', - LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets', - LIBRARY_SYNC_FILES = 'library-sync-files', - LIBRARY_SYNC_ASSETS = 'library-sync-assets', - LIBRARY_ASSET_REMOVAL = 'handle-library-file-deletion', - LIBRARY_DELETE = 'library-delete', - LIBRARY_QUEUE_SCAN_ALL = 'library-queue-scan-all', - LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup', + LibraryQueueSyncFiles = 
'library-queue-sync-files', + LibraryQueueSyncAssets = 'library-queue-sync-assets', + LibrarySyncFiles = 'library-sync-files', + LibrarySyncAssets = 'library-sync-assets', + LibraryAssetRemoval = 'handle-library-file-deletion', + LibraryDelete = 'library-delete', + LibraryQueueScanAll = 'library-queue-scan-all', + LibraryQueueCleanup = 'library-queue-cleanup', // cleanup - DELETE_FILES = 'delete-files', - CLEAN_OLD_AUDIT_LOGS = 'clean-old-audit-logs', - CLEAN_OLD_SESSION_TOKENS = 'clean-old-session-tokens', + DeleteFiles = 'delete-files', + CleanOldAuditLogs = 'clean-old-audit-logs', + CleanOldSessionTokens = 'clean-old-session-tokens', // memories - MEMORIES_CLEANUP = 'memories-cleanup', - MEMORIES_CREATE = 'memories-create', + MemoriesCleanup = 'memories-cleanup', + MemoriesCreate = 'memories-create', // smart search - QUEUE_SMART_SEARCH = 'queue-smart-search', - SMART_SEARCH = 'smart-search', + QueueSmartSearch = 'queue-smart-search', + SmartSearch = 'smart-search', - QUEUE_TRASH_EMPTY = 'queue-trash-empty', + QueueTrashEmpty = 'queue-trash-empty', // duplicate detection - QUEUE_DUPLICATE_DETECTION = 'queue-duplicate-detection', - DUPLICATE_DETECTION = 'duplicate-detection', + QueueDuplicateDetection = 'queue-duplicate-detection', + DuplicateDetection = 'duplicate-detection', // XMP sidecars - QUEUE_SIDECAR = 'queue-sidecar', - SIDECAR_DISCOVERY = 'sidecar-discovery', - SIDECAR_SYNC = 'sidecar-sync', - SIDECAR_WRITE = 'sidecar-write', + QueueSidecar = 'queue-sidecar', + SidecarDiscovery = 'sidecar-discovery', + SidecarSync = 'sidecar-sync', + SidecarWrite = 'sidecar-write', // Notification - NOTIFY_SIGNUP = 'notify-signup', - NOTIFY_ALBUM_INVITE = 'notify-album-invite', - NOTIFY_ALBUM_UPDATE = 'notify-album-update', - NOTIFICATIONS_CLEANUP = 'notifications-cleanup', - SEND_EMAIL = 'notification-send-email', + NotifySignup = 'notify-signup', + NotifyAlbumInvite = 'notify-album-invite', + NotifyAlbumUpdate = 'notify-album-update', + NotificationsCleanup = 
'notifications-cleanup', + SendMail = 'notification-send-email', // Version check - VERSION_CHECK = 'version-check', + VersionCheck = 'version-check', } export enum JobCommand { - START = 'start', - PAUSE = 'pause', - RESUME = 'resume', - EMPTY = 'empty', - CLEAR_FAILED = 'clear-failed', + Start = 'start', + Pause = 'pause', + Resume = 'resume', + Empty = 'empty', + ClearFailed = 'clear-failed', } export enum JobStatus { - SUCCESS = 'success', - FAILED = 'failed', - SKIPPED = 'skipped', + Success = 'success', + Failed = 'failed', + Skipped = 'skipped', } export enum QueueCleanType { - FAILED = 'failed', + Failed = 'failed', } export enum VectorIndex { - CLIP = 'clip_index', - FACE = 'face_index', + Clip = 'clip_index', + Face = 'face_index', } export enum DatabaseLock { @@ -663,8 +658,8 @@ export enum NotificationType { } export enum OAuthTokenEndpointAuthMethod { - CLIENT_SECRET_POST = 'client_secret_post', - CLIENT_SECRET_BASIC = 'client_secret_basic', + ClientSecretPost = 'client_secret_post', + ClientSecretBasic = 'client_secret_basic', } export enum DatabaseSslMode { @@ -676,14 +671,14 @@ export enum DatabaseSslMode { } export enum AssetVisibility { - ARCHIVE = 'archive', - TIMELINE = 'timeline', + Archive = 'archive', + Timeline = 'timeline', /** * Video part of the LivePhotos and MotionPhotos */ - HIDDEN = 'hidden', - LOCKED = 'locked', + Hidden = 'hidden', + Locked = 'locked', } export enum CronJob { diff --git a/server/src/main.ts b/server/src/main.ts index 95b35c6915..591fc156d9 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -20,7 +20,7 @@ const onExit = (name: string, exitCode: number | null) => { if (exitCode !== 0) { console.error(`${name} worker exited with code ${exitCode}`); - if (apiProcess && name !== ImmichWorker.API) { + if (apiProcess && name !== ImmichWorker.Api) { console.error('Killing api process'); apiProcess.kill('SIGTERM'); apiProcess = undefined; @@ -34,7 +34,7 @@ function bootstrapWorker(name: ImmichWorker) { 
console.log(`Starting ${name} worker`); let worker: Worker | ChildProcess; - if (name === ImmichWorker.API) { + if (name === ImmichWorker.Api) { worker = fork(`./dist/workers/${name}.js`, [], { execArgv: process.execArgv.map((arg) => (arg.startsWith('--inspect') ? '--inspect=0.0.0.0:9231' : arg)), }); @@ -50,7 +50,7 @@ function bootstrapWorker(name: ImmichWorker) { function bootstrap() { if (immichApp === 'immich-admin') { process.title = 'immich_admin_cli'; - process.env.IMMICH_LOG_LEVEL = LogLevel.WARN; + process.env.IMMICH_LOG_LEVEL = LogLevel.Warn; return CommandFactory.run(ImmichAdminModule); } diff --git a/server/src/middleware/asset-upload.interceptor.ts b/server/src/middleware/asset-upload.interceptor.ts index bc403ee562..0f1eaa4ce5 100644 --- a/server/src/middleware/asset-upload.interceptor.ts +++ b/server/src/middleware/asset-upload.interceptor.ts @@ -15,7 +15,7 @@ export class AssetUploadInterceptor implements NestInterceptor { const req = context.switchToHttp().getRequest(); const res = context.switchToHttp().getResponse>(); - const checksum = fromMaybeArray(req.headers[ImmichHeader.CHECKSUM]); + const checksum = fromMaybeArray(req.headers[ImmichHeader.Checksum]); const response = await this.service.getUploadAssetIdByChecksum(req.user, checksum); if (response) { res.status(200); diff --git a/server/src/middleware/auth.guard.ts b/server/src/middleware/auth.guard.ts index 438843436b..238f99257a 100644 --- a/server/src/middleware/auth.guard.ts +++ b/server/src/middleware/auth.guard.ts @@ -23,12 +23,12 @@ export const Authenticated = (options?: AuthenticatedOptions): MethodDecorator = const decorators: MethodDecorator[] = [ ApiBearerAuth(), ApiCookieAuth(), - ApiSecurity(MetadataKey.API_KEY_SECURITY), - SetMetadata(MetadataKey.AUTH_ROUTE, options || {}), + ApiSecurity(MetadataKey.ApiKeySecurity), + SetMetadata(MetadataKey.AuthRoute, options || {}), ]; if ((options as SharedLinkRoute)?.sharedLink) { - decorators.push(ApiQuery({ name: 
ImmichQuery.SHARED_LINK_KEY, type: String, required: false })); + decorators.push(ApiQuery({ name: ImmichQuery.SharedLinkKey, type: String, required: false })); } return applyDecorators(...decorators); @@ -76,7 +76,7 @@ export class AuthGuard implements CanActivate { async canActivate(context: ExecutionContext): Promise { const targets = [context.getHandler()]; - const options = this.reflector.getAllAndOverride(MetadataKey.AUTH_ROUTE, targets); + const options = this.reflector.getAllAndOverride(MetadataKey.AuthRoute, targets); if (!options) { return true; } diff --git a/server/src/middleware/file-upload.interceptor.ts b/server/src/middleware/file-upload.interceptor.ts index b6f37dbbd2..59c28849e1 100644 --- a/server/src/middleware/file-upload.interceptor.ts +++ b/server/src/middleware/file-upload.interceptor.ts @@ -154,11 +154,11 @@ export class FileUploadInterceptor implements NestInterceptor { private getHandler(route: RouteKey) { switch (route) { - case RouteKey.ASSET: { + case RouteKey.Asset: { return this.handlers.assetUpload; } - case RouteKey.USER: { + case RouteKey.User: { return this.handlers.userProfile; } diff --git a/server/src/migrations/1718486162779-AddFaceSearchRelation.ts b/server/src/migrations/1718486162779-AddFaceSearchRelation.ts index 68e1618775..2bd1acad34 100644 --- a/server/src/migrations/1718486162779-AddFaceSearchRelation.ts +++ b/server/src/migrations/1718486162779-AddFaceSearchRelation.ts @@ -6,7 +6,7 @@ import { MigrationInterface, QueryRunner } from 'typeorm'; export class AddFaceSearchRelation1718486162779 implements MigrationInterface { public async up(queryRunner: QueryRunner): Promise { const vectorExtension = await getVectorExtension(queryRunner); - if (vectorExtension === DatabaseExtension.VECTORS) { + if (vectorExtension === DatabaseExtension.Vectors) { await queryRunner.query(`SET search_path TO "$user", public, vectors`); } @@ -52,7 +52,7 @@ export class AddFaceSearchRelation1718486162779 implements MigrationInterface { 
public async down(queryRunner: QueryRunner): Promise { const vectorExtension = await getVectorExtension(queryRunner); - if (vectorExtension === DatabaseExtension.VECTORS) { + if (vectorExtension === DatabaseExtension.Vectors) { await queryRunner.query(`SET search_path TO "$user", public, vectors`); } diff --git a/server/src/repositories/access.repository.ts b/server/src/repositories/access.repository.ts index 14a765778e..5cceb6dbe0 100644 --- a/server/src/repositories/access.repository.ts +++ b/server/src/repositories/access.repository.ts @@ -91,7 +91,7 @@ class AlbumAccess { } const accessRole = - access === AlbumUserRole.EDITOR ? [AlbumUserRole.EDITOR] : [AlbumUserRole.EDITOR, AlbumUserRole.VIEWER]; + access === AlbumUserRole.Editor ? [AlbumUserRole.Editor] : [AlbumUserRole.Editor, AlbumUserRole.Viewer]; return this.db .selectFrom('album') @@ -178,7 +178,7 @@ class AssetAccess { .select('asset.id') .where('asset.id', 'in', [...assetIds]) .where('asset.ownerId', '=', userId) - .$if(!hasElevatedPermission, (eb) => eb.where('asset.visibility', '!=', AssetVisibility.LOCKED)) + .$if(!hasElevatedPermission, (eb) => eb.where('asset.visibility', '!=', AssetVisibility.Locked)) .execute() .then((assets) => new Set(assets.map((asset) => asset.id))); } @@ -200,8 +200,8 @@ class AssetAccess { .where('partner.sharedWithId', '=', userId) .where((eb) => eb.or([ - eb('asset.visibility', '=', sql.lit(AssetVisibility.TIMELINE)), - eb('asset.visibility', '=', sql.lit(AssetVisibility.HIDDEN)), + eb('asset.visibility', '=', sql.lit(AssetVisibility.Timeline)), + eb('asset.visibility', '=', sql.lit(AssetVisibility.Hidden)), ]), ) diff --git a/server/src/repositories/activity.repository.ts b/server/src/repositories/activity.repository.ts index 9b991ef17f..1a1104b118 100644 --- a/server/src/repositories/activity.repository.ts +++ b/server/src/repositories/activity.repository.ts @@ -90,7 +90,7 @@ export class ActivityRepository { .where('activity.albumId', '=', albumId) .where(({ or, and, 
eb }) => or([ - and([eb('asset.deletedAt', 'is', null), eb('asset.visibility', '!=', sql.lit(AssetVisibility.LOCKED))]), + and([eb('asset.deletedAt', 'is', null), eb('asset.visibility', '!=', sql.lit(AssetVisibility.Locked))]), eb('asset.id', 'is', null), ]), ) diff --git a/server/src/repositories/album-user.repository.ts b/server/src/repositories/album-user.repository.ts index d968ed100c..2fce797aff 100644 --- a/server/src/repositories/album-user.repository.ts +++ b/server/src/repositories/album-user.repository.ts @@ -24,7 +24,7 @@ export class AlbumUserRepository { .executeTakeFirstOrThrow(); } - @GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }, { role: AlbumUserRole.VIEWER }] }) + @GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] }) update({ usersId, albumsId }: AlbumPermissionId, dto: Updateable) { return this.db .updateTable('album_user') diff --git a/server/src/repositories/asset-job.repository.ts b/server/src/repositories/asset-job.repository.ts index c784ae276f..0500bb867f 100644 --- a/server/src/repositories/asset-job.repository.ts +++ b/server/src/repositories/asset-job.repository.ts @@ -62,7 +62,7 @@ export class AssetJobRepository { .select(['asset.id', 'asset.thumbhash']) .select(withFiles) .where('asset.deletedAt', 'is', null) - .where('asset.visibility', '!=', AssetVisibility.HIDDEN) + .where('asset.visibility', '!=', AssetVisibility.Hidden) .$if(!force, (qb) => qb // If there aren't any entries, metadata extraction hasn't run yet which is required for thumbnails @@ -117,7 +117,7 @@ export class AssetJobRepository { .executeTakeFirst(); } - @GenerateSql({ params: [DummyValue.UUID, AssetFileType.THUMBNAIL] }) + @GenerateSql({ params: [DummyValue.UUID, AssetFileType.Thumbnail] }) getAlbumThumbnailFiles(id: string, fileType?: AssetFileType) { return this.db .selectFrom('asset_file') @@ -130,7 +130,7 @@ export class AssetJobRepository { private 
assetsWithPreviews() { return this.db .selectFrom('asset') - .where('asset.visibility', '!=', AssetVisibility.HIDDEN) + .where('asset.visibility', '!=', AssetVisibility.Hidden) .where('asset.deletedAt', 'is', null) .innerJoin('asset_job_status as job_status', 'assetId', 'asset.id') .where('job_status.previewAt', 'is not', null); @@ -167,7 +167,7 @@ export class AssetJobRepository { return this.db .selectFrom('asset') .select(['asset.id', 'asset.visibility']) - .select((eb) => withFiles(eb, AssetFileType.PREVIEW)) + .select((eb) => withFiles(eb, AssetFileType.Preview)) .where('asset.id', '=', id) .executeTakeFirst(); } @@ -179,7 +179,7 @@ export class AssetJobRepository { .select(['asset.id', 'asset.visibility']) .$call(withExifInner) .select((eb) => withFaces(eb, true)) - .select((eb) => withFiles(eb, AssetFileType.PREVIEW)) + .select((eb) => withFiles(eb, AssetFileType.Preview)) .where('asset.id', '=', id) .executeTakeFirst(); } @@ -225,7 +225,7 @@ export class AssetJobRepository { .select(['stack.id', 'stack.primaryAssetId']) .select((eb) => eb.fn('array_agg', [eb.table('stacked')]).as('assets')) .where('stacked.deletedAt', 'is not', null) - .where('stacked.visibility', '=', AssetVisibility.TIMELINE) + .where('stacked.visibility', '=', AssetVisibility.Timeline) .whereRef('stacked.stackId', '=', 'stack.id') .groupBy('stack.id') .as('stacked_assets'), @@ -241,11 +241,11 @@ export class AssetJobRepository { return this.db .selectFrom('asset') .select(['asset.id']) - .where('asset.type', '=', AssetType.VIDEO) + .where('asset.type', '=', AssetType.Video) .$if(!force, (qb) => qb .where((eb) => eb.or([eb('asset.encodedVideoPath', 'is', null), eb('asset.encodedVideoPath', '=', '')])) - .where('asset.visibility', '!=', AssetVisibility.HIDDEN), + .where('asset.visibility', '!=', AssetVisibility.Hidden), ) .where('asset.deletedAt', 'is', null) .stream(); @@ -257,7 +257,7 @@ export class AssetJobRepository { .selectFrom('asset') .select(['asset.id', 'asset.ownerId', 
'asset.originalPath', 'asset.encodedVideoPath']) .where('asset.id', '=', id) - .where('asset.type', '=', AssetType.VIDEO) + .where('asset.type', '=', AssetType.Video) .executeTakeFirst(); } @@ -327,7 +327,7 @@ export class AssetJobRepository { .$if(!force, (qb) => qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])), ) - .where('asset.visibility', '!=', AssetVisibility.HIDDEN) + .where('asset.visibility', '!=', AssetVisibility.Hidden) .stream(); } diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index f00d0c170f..cb7e804f6f 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -230,13 +230,13 @@ export class AssetRepository { .where('asset_job_status.previewAt', 'is not', null) .where(sql`(asset."localDateTime" at time zone 'UTC')::date`, '=', sql`today.date`) .where('asset.ownerId', '=', anyUuid(ownerIds)) - .where('asset.visibility', '=', AssetVisibility.TIMELINE) + .where('asset.visibility', '=', AssetVisibility.Timeline) .where((eb) => eb.exists((qb) => qb .selectFrom('asset_file') .whereRef('assetId', '=', 'asset.id') - .where('asset_file.type', '=', AssetFileType.PREVIEW), + .where('asset_file.type', '=', AssetFileType.Preview), ), ) .where('asset.deletedAt', 'is', null) @@ -318,7 +318,7 @@ export class AssetRepository { .select(['deviceAssetId']) .where('ownerId', '=', asUuid(ownerId)) .where('deviceId', '=', deviceId) - .where('visibility', '!=', AssetVisibility.HIDDEN) + .where('visibility', '!=', AssetVisibility.Hidden) .where('deletedAt', 'is', null) .execute(); @@ -363,7 +363,7 @@ export class AssetRepository { .whereRef('stacked.stackId', '=', 'stack.id') .whereRef('stacked.id', '!=', 'stack.primaryAssetId') .where('stacked.deletedAt', 'is', null) - .where('stacked.visibility', '=', AssetVisibility.TIMELINE) + .where('stacked.visibility', '=', AssetVisibility.Timeline) .groupBy('stack.id') 
.as('stacked_assets'), (join) => join.on('stack.id', 'is not', null), @@ -463,15 +463,15 @@ export class AssetRepository { getStatistics(ownerId: string, { visibility, isFavorite, isTrashed }: AssetStatsOptions): Promise { return this.db .selectFrom('asset') - .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.AUDIO).as(AssetType.AUDIO)) - .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.IMAGE).as(AssetType.IMAGE)) - .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.VIDEO).as(AssetType.VIDEO)) - .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.OTHER).as(AssetType.OTHER)) + .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.Audio).as(AssetType.Audio)) + .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.Image).as(AssetType.Image)) + .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.Video).as(AssetType.Video)) + .select((eb) => eb.fn.countAll().filterWhere('type', '=', AssetType.Other).as(AssetType.Other)) .where('ownerId', '=', asUuid(ownerId)) .$if(visibility === undefined, withDefaultVisibility) .$if(!!visibility, (qb) => qb.where('asset.visibility', '=', visibility!)) .$if(isFavorite !== undefined, (qb) => qb.where('isFavorite', '=', isFavorite!)) - .$if(!!isTrashed, (qb) => qb.where('asset.status', '!=', AssetStatus.DELETED)) + .$if(!!isTrashed, (qb) => qb.where('asset.status', '!=', AssetStatus.Deleted)) .where('deletedAt', isTrashed ? 'is not' : 'is', null) .executeTakeFirstOrThrow(); } @@ -496,7 +496,7 @@ export class AssetRepository { qb .selectFrom('asset') .select(truncatedDate().as('timeBucket')) - .$if(!!options.isTrashed, (qb) => qb.where('asset.status', '!=', AssetStatus.DELETED)) + .$if(!!options.isTrashed, (qb) => qb.where('asset.status', '!=', AssetStatus.Deleted)) .where('asset.deletedAt', options.isTrashed ? 
'is not' : 'is', null) .$if(options.visibility === undefined, withDefaultVisibility) .$if(!!options.visibility, (qb) => qb.where('asset.visibility', '=', options.visibility!)) @@ -606,7 +606,7 @@ export class AssetRepository { .select(sql`array[stacked."stackId"::text, count('stacked')::text]`.as('stack')) .whereRef('stacked.stackId', '=', 'asset.stackId') .where('stacked.deletedAt', 'is', null) - .where('stacked.visibility', '=', AssetVisibility.TIMELINE) + .where('stacked.visibility', '=', AssetVisibility.Timeline) .groupBy('stacked.stackId') .as('stacked_assets'), (join) => join.onTrue(), @@ -617,7 +617,7 @@ export class AssetRepository { .$if(options.isDuplicate !== undefined, (qb) => qb.where('asset.duplicateId', options.isDuplicate ? 'is not' : 'is', null), ) - .$if(!!options.isTrashed, (qb) => qb.where('asset.status', '!=', AssetStatus.DELETED)) + .$if(!!options.isTrashed, (qb) => qb.where('asset.status', '!=', AssetStatus.Deleted)) .$if(!!options.tagId, (qb) => withTagId(qb, options.tagId!)) .orderBy('asset.fileCreatedAt', options.order ?? 
'desc'), ) @@ -671,8 +671,8 @@ export class AssetRepository { .select(['assetId as data', 'asset_exif.city as value']) .$narrowType<{ value: NotNull }>() .where('ownerId', '=', asUuid(ownerId)) - .where('visibility', '=', AssetVisibility.TIMELINE) - .where('type', '=', AssetType.IMAGE) + .where('visibility', '=', AssetVisibility.Timeline) + .where('type', '=', AssetType.Image) .where('deletedAt', 'is', null) .limit(maxFields) .execute(); @@ -710,7 +710,7 @@ export class AssetRepository { ) .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).$castTo().as('stack')) .where('asset.ownerId', '=', asUuid(ownerId)) - .where('asset.visibility', '!=', AssetVisibility.HIDDEN) + .where('asset.visibility', '!=', AssetVisibility.Hidden) .where('asset.updatedAt', '<=', updatedUntil) .$if(!!lastId, (qb) => qb.where('asset.id', '>', lastId!)) .orderBy('asset.id') @@ -738,7 +738,7 @@ export class AssetRepository { ) .select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo()).as('stack')) .where('asset.ownerId', '=', anyUuid(options.userIds)) - .where('asset.visibility', '!=', AssetVisibility.HIDDEN) + .where('asset.visibility', '!=', AssetVisibility.Hidden) .where('asset.updatedAt', '>', options.updatedAfter) .limit(options.limit) .execute(); diff --git a/server/src/repositories/audit.repository.ts b/server/src/repositories/audit.repository.ts index 85ca4a79f8..2d56eddc9a 100644 --- a/server/src/repositories/audit.repository.ts +++ b/server/src/repositories/audit.repository.ts @@ -18,7 +18,7 @@ export class AuditRepository { @GenerateSql({ params: [ DummyValue.DATE, - { action: DatabaseAction.CREATE, entityType: EntityType.ASSET, userIds: [DummyValue.UUID] }, + { action: DatabaseAction.Create, entityType: EntityType.Asset, userIds: [DummyValue.UUID] }, ], }) async getAfter(since: Date, options: AuditSearch): Promise { diff --git a/server/src/repositories/config.repository.spec.ts b/server/src/repositories/config.repository.spec.ts index 238b48bcef..a096c6a4bc 100644 --- 
a/server/src/repositories/config.repository.spec.ts +++ b/server/src/repositories/config.repository.spec.ts @@ -275,14 +275,14 @@ describe('getEnv', () => { process.env.IMMICH_TELEMETRY_EXCLUDE = 'job'; const { telemetry } = getEnv(); expect(telemetry.metrics).toEqual( - new Set([ImmichTelemetry.API, ImmichTelemetry.HOST, ImmichTelemetry.IO, ImmichTelemetry.REPO]), + new Set([ImmichTelemetry.Api, ImmichTelemetry.Host, ImmichTelemetry.Io, ImmichTelemetry.Repo]), ); }); it('should run with specific telemetry metrics', () => { process.env.IMMICH_TELEMETRY_INCLUDE = 'io, host, api'; const { telemetry } = getEnv(); - expect(telemetry.metrics).toEqual(new Set([ImmichTelemetry.API, ImmichTelemetry.HOST, ImmichTelemetry.IO])); + expect(telemetry.metrics).toEqual(new Set([ImmichTelemetry.Api, ImmichTelemetry.Host, ImmichTelemetry.Io])); }); }); }); diff --git a/server/src/repositories/config.repository.ts b/server/src/repositories/config.repository.ts index dbb57bb141..7038338927 100644 --- a/server/src/repositories/config.repository.ts +++ b/server/src/repositories/config.repository.ts @@ -136,7 +136,7 @@ const getEnv = (): EnvData => { ); } - const includedWorkers = asSet(dto.IMMICH_WORKERS_INCLUDE, [ImmichWorker.API, ImmichWorker.MICROSERVICES]); + const includedWorkers = asSet(dto.IMMICH_WORKERS_INCLUDE, [ImmichWorker.Api, ImmichWorker.Microservices]); const excludedWorkers = asSet(dto.IMMICH_WORKERS_EXCLUDE, []); const workers = [...setDifference(includedWorkers, excludedWorkers)]; for (const worker of workers) { @@ -145,8 +145,8 @@ const getEnv = (): EnvData => { } } - const environment = dto.IMMICH_ENV || ImmichEnvironment.PRODUCTION; - const isProd = environment === ImmichEnvironment.PRODUCTION; + const environment = dto.IMMICH_ENV || ImmichEnvironment.Production; + const isProd = environment === ImmichEnvironment.Production; const buildFolder = dto.IMMICH_BUILD_DATA || '/build'; const folders = { geodata: join(buildFolder, 'geodata'), @@ -199,15 +199,15 @@ const 
getEnv = (): EnvData => { let vectorExtension: VectorExtension | undefined; switch (dto.DB_VECTOR_EXTENSION) { case 'pgvector': { - vectorExtension = DatabaseExtension.VECTOR; + vectorExtension = DatabaseExtension.Vector; break; } case 'pgvecto.rs': { - vectorExtension = DatabaseExtension.VECTORS; + vectorExtension = DatabaseExtension.Vectors; break; } case 'vectorchord': { - vectorExtension = DatabaseExtension.VECTORCHORD; + vectorExtension = DatabaseExtension.VectorChord; break; } } @@ -254,11 +254,11 @@ const getEnv = (): EnvData => { mount: true, generateId: true, setup: (cls, req: Request, res: Response) => { - const headerValues = req.headers[ImmichHeader.CID]; + const headerValues = req.headers[ImmichHeader.Cid]; const headerValue = Array.isArray(headerValues) ? headerValues[0] : headerValues; const cid = headerValue || cls.get(CLS_ID); cls.set(CLS_ID, cid); - res.header(ImmichHeader.CID, cid); + res.header(ImmichHeader.Cid, cid); }, }, }, @@ -278,9 +278,9 @@ const getEnv = (): EnvData => { otel: { metrics: { - hostMetrics: telemetries.has(ImmichTelemetry.HOST), + hostMetrics: telemetries.has(ImmichTelemetry.Host), apiMetrics: { - enable: telemetries.has(ImmichTelemetry.API), + enable: telemetries.has(ImmichTelemetry.Api), ignoreRoutes: excludePaths, }, }, @@ -335,7 +335,7 @@ export class ConfigRepository { } isDev() { - return this.getEnv().environment === ImmichEnvironment.DEVELOPMENT; + return this.getEnv().environment === ImmichEnvironment.Development; } getWorker() { diff --git a/server/src/repositories/database.repository.ts b/server/src/repositories/database.repository.ts index b1aefe19f8..8b5c728ce4 100644 --- a/server/src/repositories/database.repository.ts +++ b/server/src/repositories/database.repository.ts @@ -53,8 +53,8 @@ export async function getVectorExtension(runner: Kysely | QueryRunner): Prom } export const probes: Record = { - [VectorIndex.CLIP]: 1, - [VectorIndex.FACE]: 1, + [VectorIndex.Clip]: 1, + [VectorIndex.Face]: 1, }; 
@Injectable() @@ -77,7 +77,7 @@ export class DatabaseRepository { return getVectorExtension(this.db); } - @GenerateSql({ params: [[DatabaseExtension.VECTORS]] }) + @GenerateSql({ params: [[DatabaseExtension.Vectors]] }) async getExtensionVersions(extensions: readonly DatabaseExtension[]): Promise { const { rows } = await sql` SELECT name, default_version as "availableVersion", installed_version as "installedVersion" @@ -89,13 +89,13 @@ export class DatabaseRepository { getExtensionVersionRange(extension: VectorExtension): string { switch (extension) { - case DatabaseExtension.VECTORCHORD: { + case DatabaseExtension.VectorChord: { return VECTORCHORD_VERSION_RANGE; } - case DatabaseExtension.VECTORS: { + case DatabaseExtension.Vectors: { return VECTORS_VERSION_RANGE; } - case DatabaseExtension.VECTOR: { + case DatabaseExtension.Vector: { return VECTOR_VERSION_RANGE; } default: { @@ -117,7 +117,7 @@ export class DatabaseRepository { async createExtension(extension: DatabaseExtension): Promise { this.logger.log(`Creating ${EXTENSION_NAMES[extension]} extension`); await sql`CREATE EXTENSION IF NOT EXISTS ${sql.raw(extension)} CASCADE`.execute(this.db); - if (extension === DatabaseExtension.VECTORCHORD) { + if (extension === DatabaseExtension.VectorChord) { const dbName = sql.id(await this.getDatabaseName()); await sql`ALTER DATABASE ${dbName} SET vchordrq.probes = 1`.execute(this.db); await sql`SET vchordrq.probes = 1`.execute(this.db); @@ -147,8 +147,8 @@ export class DatabaseRepository { } await Promise.all([ - this.db.schema.dropIndex(VectorIndex.CLIP).ifExists().execute(), - this.db.schema.dropIndex(VectorIndex.FACE).ifExists().execute(), + this.db.schema.dropIndex(VectorIndex.Clip).ifExists().execute(), + this.db.schema.dropIndex(VectorIndex.Face).ifExists().execute(), ]); await this.db.transaction().execute(async (tx) => { @@ -156,14 +156,14 @@ export class DatabaseRepository { await sql`ALTER EXTENSION ${sql.raw(extension)} UPDATE TO 
${sql.lit(targetVersion)}`.execute(tx); - if (extension === DatabaseExtension.VECTORS && (diff === 'major' || diff === 'minor')) { + if (extension === DatabaseExtension.Vectors && (diff === 'major' || diff === 'minor')) { await sql`SELECT pgvectors_upgrade()`.execute(tx); restartRequired = true; } }); if (!restartRequired) { - await Promise.all([this.reindexVectors(VectorIndex.CLIP), this.reindexVectors(VectorIndex.FACE)]); + await Promise.all([this.reindexVectors(VectorIndex.Clip), this.reindexVectors(VectorIndex.Face)]); } return { restartRequired }; @@ -171,7 +171,7 @@ export class DatabaseRepository { async prewarm(index: VectorIndex): Promise { const vectorExtension = await getVectorExtension(this.db); - if (vectorExtension !== DatabaseExtension.VECTORCHORD) { + if (vectorExtension !== DatabaseExtension.VectorChord) { return; } this.logger.debug(`Prewarming ${index}`); @@ -196,19 +196,19 @@ export class DatabaseRepository { } switch (vectorExtension) { - case DatabaseExtension.VECTOR: { + case DatabaseExtension.Vector: { if (!row.indexdef.toLowerCase().includes('using hnsw')) { promises.push(this.reindexVectors(indexName)); } break; } - case DatabaseExtension.VECTORS: { + case DatabaseExtension.Vectors: { if (!row.indexdef.toLowerCase().includes('using vectors')) { promises.push(this.reindexVectors(indexName)); } break; } - case DatabaseExtension.VECTORCHORD: { + case DatabaseExtension.VectorChord: { const matches = row.indexdef.match(/(?<=lists = \[)\d+/g); const lists = matches && matches.length > 0 ? Number(matches[0]) : 1; promises.push( @@ -264,7 +264,7 @@ export class DatabaseRepository { await sql`ALTER TABLE ${sql.raw(table)} ADD COLUMN embedding real[] NOT NULL`.execute(tx); } await sql`ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding SET DATA TYPE real[]`.execute(tx); - const schema = vectorExtension === DatabaseExtension.VECTORS ? 'vectors.' : ''; + const schema = vectorExtension === DatabaseExtension.Vectors ? 'vectors.' 
: ''; await sql` ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding @@ -329,11 +329,11 @@ export class DatabaseRepository { .alterColumn('embedding', (col) => col.setDataType(sql.raw(`vector(${dimSize})`))) .execute(); await sql - .raw(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: VectorIndex.CLIP })) + .raw(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: VectorIndex.Clip })) .execute(trx); await trx.schema.alterTable('smart_search').dropConstraint('dim_size_constraint').ifExists().execute(); }); - probes[VectorIndex.CLIP] = 1; + probes[VectorIndex.Clip] = 1; await sql`vacuum analyze ${sql.table('smart_search')}`.execute(this.db); } diff --git a/server/src/repositories/download.repository.ts b/server/src/repositories/download.repository.ts index 5645ca1217..ecc1e4d3ab 100644 --- a/server/src/repositories/download.repository.ts +++ b/server/src/repositories/download.repository.ts @@ -34,7 +34,7 @@ export class DownloadRepository { downloadUserId(userId: string) { return builder(this.db) .where('asset.ownerId', '=', userId) - .where('asset.visibility', '!=', AssetVisibility.HIDDEN) + .where('asset.visibility', '!=', AssetVisibility.Hidden) .stream(); } } diff --git a/server/src/repositories/duplicate.repository.ts b/server/src/repositories/duplicate.repository.ts index ac9e5798e5..140c42a643 100644 --- a/server/src/repositories/duplicate.repository.ts +++ b/server/src/repositories/duplicate.repository.ts @@ -109,14 +109,14 @@ export class DuplicateRepository { assetId: DummyValue.UUID, embedding: DummyValue.VECTOR, maxDistance: 0.6, - type: AssetType.IMAGE, + type: AssetType.Image, userIds: [DummyValue.UUID], }, ], }) search({ assetId, embedding, maxDistance, type, userIds }: DuplicateSearch) { return this.db.transaction().execute(async (trx) => { - await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx); + await sql`set local vchordrq.probes = 
${sql.lit(probes[VectorIndex.Clip])}`.execute(trx); return await trx .with('cte', (qb) => qb diff --git a/server/src/repositories/event.repository.ts b/server/src/repositories/event.repository.ts index 04494d5547..c1b26d5dde 100644 --- a/server/src/repositories/event.repository.ts +++ b/server/src/repositories/event.repository.ts @@ -166,7 +166,7 @@ export class EventRepository implements OnGatewayConnection, OnGatewayDisconnect continue; } - const event = reflector.get(MetadataKey.EVENT_CONFIG, handler); + const event = reflector.get(MetadataKey.EventConfig, handler); if (!event) { continue; } diff --git a/server/src/repositories/job.repository.ts b/server/src/repositories/job.repository.ts index 27c623cc89..3550529bfc 100644 --- a/server/src/repositories/job.repository.ts +++ b/server/src/repositories/job.repository.ts @@ -41,7 +41,7 @@ export class JobRepository { const instance = this.moduleRef.get(Service); for (const methodName of getMethodNames(instance)) { const handler = instance[methodName]; - const config = reflector.get(MetadataKey.JOB_CONFIG, handler); + const config = reflector.get(MetadataKey.JobConfig, handler); if (!config) { continue; } @@ -99,7 +99,7 @@ export class JobRepository { const item = this.handlers[name as JobName]; if (!item) { this.logger.warn(`Skipping unknown job: "${name}"`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } return item.handler(data); @@ -205,20 +205,20 @@ export class JobRepository { private getJobOptions(item: JobItem): JobsOptions | null { switch (item.name) { - case JobName.NOTIFY_ALBUM_UPDATE: { + case JobName.NotifyAlbumUpdate: { return { jobId: `${item.data.id}/${item.data.recipientId}`, delay: item.data?.delay, }; } - case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: { + case JobName.StorageTemplateMigrationSingle: { return { jobId: item.data.id }; } - case JobName.GENERATE_PERSON_THUMBNAIL: { + case JobName.GeneratePersonThumbnail: { return { priority: 1 }; } - case JobName.QUEUE_FACIAL_RECOGNITION: 
{ - return { jobId: JobName.QUEUE_FACIAL_RECOGNITION }; + case JobName.QueueFacialRecognition: { + return { jobId: JobName.QueueFacialRecognition }; } default: { return null; diff --git a/server/src/repositories/library.repository.ts b/server/src/repositories/library.repository.ts index 9f26191cfb..68102ab765 100644 --- a/server/src/repositories/library.repository.ts +++ b/server/src/repositories/library.repository.ts @@ -79,7 +79,7 @@ export class LibraryRepository { eb.fn .countAll() .filterWhere((eb) => - eb.and([eb('asset.type', '=', AssetType.IMAGE), eb('asset.visibility', '!=', AssetVisibility.HIDDEN)]), + eb.and([eb('asset.type', '=', AssetType.Image), eb('asset.visibility', '!=', AssetVisibility.Hidden)]), ) .as('photos'), ) @@ -87,7 +87,7 @@ export class LibraryRepository { eb.fn .countAll() .filterWhere((eb) => - eb.and([eb('asset.type', '=', AssetType.VIDEO), eb('asset.visibility', '!=', AssetVisibility.HIDDEN)]), + eb.and([eb('asset.type', '=', AssetType.Video), eb('asset.visibility', '!=', AssetVisibility.Hidden)]), ) .as('videos'), ) diff --git a/server/src/repositories/logging.repository.spec.ts b/server/src/repositories/logging.repository.spec.ts index 393eeb9496..99bb1dbf18 100644 --- a/server/src/repositories/logging.repository.spec.ts +++ b/server/src/repositories/logging.repository.spec.ts @@ -22,7 +22,7 @@ describe(LoggingRepository.name, () => { describe('formatContext', () => { it('should use colors', () => { sut = new LoggingRepository(clsMock, configMock); - sut.setAppName(ImmichWorker.API); + sut.setAppName(ImmichWorker.Api); const logger = new MyConsoleLogger(clsMock, { color: true }); @@ -31,7 +31,7 @@ describe(LoggingRepository.name, () => { it('should not use colors when color is false', () => { sut = new LoggingRepository(clsMock, configMock); - sut.setAppName(ImmichWorker.API); + sut.setAppName(ImmichWorker.Api); const logger = new MyConsoleLogger(clsMock, { color: false }); diff --git a/server/src/repositories/logging.repository.ts 
b/server/src/repositories/logging.repository.ts index 2ac3715a50..1833168f3e 100644 --- a/server/src/repositories/logging.repository.ts +++ b/server/src/repositories/logging.repository.ts @@ -8,7 +8,7 @@ import { ConfigRepository } from 'src/repositories/config.repository'; type LogDetails = any; type LogFunction = () => string; -const LOG_LEVELS = [LogLevel.VERBOSE, LogLevel.DEBUG, LogLevel.LOG, LogLevel.WARN, LogLevel.ERROR, LogLevel.FATAL]; +const LOG_LEVELS = [LogLevel.Verbose, LogLevel.Debug, LogLevel.Log, LogLevel.Warn, LogLevel.Error, LogLevel.Fatal]; enum LogColor { RED = 31, @@ -20,7 +20,7 @@ enum LogColor { } let appName: string | undefined; -let logLevels: LogLevel[] = [LogLevel.LOG, LogLevel.WARN, LogLevel.ERROR, LogLevel.FATAL]; +let logLevels: LogLevel[] = [LogLevel.Log, LogLevel.Warn, LogLevel.Error, LogLevel.Fatal]; export class MyConsoleLogger extends ConsoleLogger { private isColorEnabled: boolean; @@ -106,35 +106,35 @@ export class LoggingRepository { } verbose(message: string, ...details: LogDetails) { - this.handleMessage(LogLevel.VERBOSE, message, details); + this.handleMessage(LogLevel.Verbose, message, details); } verboseFn(message: LogFunction, ...details: LogDetails) { - this.handleFunction(LogLevel.VERBOSE, message, details); + this.handleFunction(LogLevel.Verbose, message, details); } debug(message: string, ...details: LogDetails) { - this.handleMessage(LogLevel.DEBUG, message, details); + this.handleMessage(LogLevel.Debug, message, details); } debugFn(message: LogFunction, ...details: LogDetails) { - this.handleFunction(LogLevel.DEBUG, message, details); + this.handleFunction(LogLevel.Debug, message, details); } log(message: string, ...details: LogDetails) { - this.handleMessage(LogLevel.LOG, message, details); + this.handleMessage(LogLevel.Log, message, details); } warn(message: string, ...details: LogDetails) { - this.handleMessage(LogLevel.WARN, message, details); + this.handleMessage(LogLevel.Warn, message, details); } 
error(message: string | Error, ...details: LogDetails) { - this.handleMessage(LogLevel.ERROR, message, details); + this.handleMessage(LogLevel.Error, message, details); } fatal(message: string, ...details: LogDetails) { - this.handleMessage(LogLevel.FATAL, message, details); + this.handleMessage(LogLevel.Fatal, message, details); } private handleFunction(level: LogLevel, message: LogFunction, details: LogDetails[]) { @@ -145,32 +145,32 @@ export class LoggingRepository { private handleMessage(level: LogLevel, message: string | Error, details: LogDetails[]) { switch (level) { - case LogLevel.VERBOSE: { + case LogLevel.Verbose: { this.logger.verbose(message, ...details); break; } - case LogLevel.DEBUG: { + case LogLevel.Debug: { this.logger.debug(message, ...details); break; } - case LogLevel.LOG: { + case LogLevel.Log: { this.logger.log(message, ...details); break; } - case LogLevel.WARN: { + case LogLevel.Warn: { this.logger.warn(message, ...details); break; } - case LogLevel.ERROR: { + case LogLevel.Error: { this.logger.error(message, ...details); break; } - case LogLevel.FATAL: { + case LogLevel.Fatal: { this.logger.fatal(message, ...details); break; } diff --git a/server/src/repositories/map.repository.ts b/server/src/repositories/map.repository.ts index 64c8a6229d..d1f60791c3 100644 --- a/server/src/repositories/map.repository.ts +++ b/server/src/repositories/map.repository.ts @@ -61,14 +61,14 @@ export class MapRepository { const geodataDate = await readFile(resourcePaths.geodata.dateFile, 'utf8'); // TODO move to service init - const geocodingMetadata = await this.metadataRepository.get(SystemMetadataKey.REVERSE_GEOCODING_STATE); + const geocodingMetadata = await this.metadataRepository.get(SystemMetadataKey.ReverseGeocodingState); if (geocodingMetadata?.lastUpdate === geodataDate) { return; } await Promise.all([this.importGeodata(), this.importNaturalEarthCountries()]); - await this.metadataRepository.set(SystemMetadataKey.REVERSE_GEOCODING_STATE, { + await 
this.metadataRepository.set(SystemMetadataKey.ReverseGeocodingState, { lastUpdate: geodataDate, lastImportFileName: citiesFile, }); @@ -102,13 +102,13 @@ export class MapRepository { .$if(isArchived === true, (qb) => qb.where((eb) => eb.or([ - eb('asset.visibility', '=', AssetVisibility.TIMELINE), - eb('asset.visibility', '=', AssetVisibility.ARCHIVE), + eb('asset.visibility', '=', AssetVisibility.Timeline), + eb('asset.visibility', '=', AssetVisibility.Archive), ]), ), ) .$if(isArchived === false || isArchived === undefined, (qb) => - qb.where('asset.visibility', '=', AssetVisibility.TIMELINE), + qb.where('asset.visibility', '=', AssetVisibility.Timeline), ) .$if(isFavorite !== undefined, (q) => q.where('isFavorite', '=', isFavorite!)) .$if(fileCreatedAfter !== undefined, (q) => q.where('fileCreatedAt', '>=', fileCreatedAfter!)) diff --git a/server/src/repositories/media.repository.ts b/server/src/repositories/media.repository.ts index 33cf4e3e03..6266acf0ed 100644 --- a/server/src/repositories/media.repository.ts +++ b/server/src/repositories/media.repository.ts @@ -55,28 +55,28 @@ export class MediaRepository { async extract(input: string): Promise { try { const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw2', input); - return { buffer, format: RawExtractedFormat.JPEG }; + return { buffer, format: RawExtractedFormat.Jpeg }; } catch (error: any) { this.logger.debug('Could not extract JpgFromRaw2 buffer from image, trying JPEG from RAW next', error.message); } try { const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw', input); - return { buffer, format: RawExtractedFormat.JPEG }; + return { buffer, format: RawExtractedFormat.Jpeg }; } catch (error: any) { this.logger.debug('Could not extract JPEG buffer from image, trying PreviewJXL next', error.message); } try { const buffer = await exiftool.extractBinaryTagToBuffer('PreviewJXL', input); - return { buffer, format: RawExtractedFormat.JXL }; + return { buffer, format: 
RawExtractedFormat.Jxl }; } catch (error: any) { this.logger.debug('Could not extract PreviewJXL buffer from image, trying PreviewImage next', error.message); } try { const buffer = await exiftool.extractBinaryTagToBuffer('PreviewImage', input); - return { buffer, format: RawExtractedFormat.JPEG }; + return { buffer, format: RawExtractedFormat.Jpeg }; } catch (error: any) { this.logger.debug('Could not extract preview buffer from image', error.message); return null; @@ -142,7 +142,7 @@ export class MediaRepository { limitInputPixels: false, raw: options.raw, }) - .pipelineColorspace(options.colorspace === Colorspace.SRGB ? 'srgb' : 'rgb16') + .pipelineColorspace(options.colorspace === Colorspace.Srgb ? 'srgb' : 'rgb16') .withIccProfile(options.colorspace); if (!options.raw) { @@ -267,7 +267,7 @@ export class MediaRepository { const { frameCount, percentInterval } = options.progress; const frameInterval = Math.ceil(frameCount / (100 / percentInterval)); - if (this.logger.isLevelEnabled(LogLevel.DEBUG) && frameCount && frameInterval) { + if (this.logger.isLevelEnabled(LogLevel.Debug) && frameCount && frameInterval) { let lastProgressFrame: number = 0; ffmpegCall.on('progress', (progress: ProgressEvent) => { if (progress.frames - lastProgressFrame < frameInterval) { diff --git a/server/src/repositories/memory.repository.ts b/server/src/repositories/memory.repository.ts index 7cf03508be..65b4cb3df7 100644 --- a/server/src/repositories/memory.repository.ts +++ b/server/src/repositories/memory.repository.ts @@ -19,7 +19,7 @@ export class MemoryRepository implements IBulkAsset { .deleteFrom('memory_asset') .using('asset') .whereRef('memory_asset.assetsId', '=', 'asset.id') - .where('asset.visibility', '!=', AssetVisibility.TIMELINE) + .where('asset.visibility', '!=', AssetVisibility.Timeline) .execute(); return this.db @@ -67,7 +67,7 @@ export class MemoryRepository implements IBulkAsset { .innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId') 
.whereRef('memory_asset.memoriesId', '=', 'memory.id') .orderBy('asset.fileCreatedAt', 'asc') - .where('asset.visibility', '=', sql.lit(AssetVisibility.TIMELINE)) + .where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline)) .where('asset.deletedAt', 'is', null), ).as('assets'), ) @@ -158,7 +158,7 @@ export class MemoryRepository implements IBulkAsset { .innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId') .whereRef('memory_asset.memoriesId', '=', 'memory.id') .orderBy('asset.fileCreatedAt', 'asc') - .where('asset.visibility', '=', sql.lit(AssetVisibility.TIMELINE)) + .where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline)) .where('asset.deletedAt', 'is', null), ).as('assets'), ) diff --git a/server/src/repositories/move.repository.ts b/server/src/repositories/move.repository.ts index e416a65249..2ea69eba27 100644 --- a/server/src/repositories/move.repository.ts +++ b/server/src/repositories/move.repository.ts @@ -48,7 +48,7 @@ export class MoveRepository { eb.selectFrom('asset').select('id').whereRef('asset.id', '=', 'move_history.entityId'), ), ) - .where('move_history.pathType', '=', sql.lit(AssetPathType.ORIGINAL)) + .where('move_history.pathType', '=', sql.lit(AssetPathType.Original)) .execute(); } @@ -56,7 +56,7 @@ export class MoveRepository { async cleanMoveHistorySingle(assetId: string): Promise { await this.db .deleteFrom('move_history') - .where('move_history.pathType', '=', sql.lit(AssetPathType.ORIGINAL)) + .where('move_history.pathType', '=', sql.lit(AssetPathType.Original)) .where('entityId', '=', assetId) .execute(); } diff --git a/server/src/repositories/oauth.repository.ts b/server/src/repositories/oauth.repository.ts index 357b52a77a..9a436e4b9a 100644 --- a/server/src/repositories/oauth.repository.ts +++ b/server/src/repositories/oauth.repository.ts @@ -138,11 +138,11 @@ export class OAuthRepository { } switch (tokenEndpointAuthMethod) { - case OAuthTokenEndpointAuthMethod.CLIENT_SECRET_POST: { + case 
OAuthTokenEndpointAuthMethod.ClientSecretPost: { return ClientSecretPost(clientSecret); } - case OAuthTokenEndpointAuthMethod.CLIENT_SECRET_BASIC: { + case OAuthTokenEndpointAuthMethod.ClientSecretBasic: { return ClientSecretBasic(clientSecret); } diff --git a/server/src/repositories/person.repository.ts b/server/src/repositories/person.repository.ts index 1885a196ff..5b7d1d3700 100644 --- a/server/src/repositories/person.repository.ts +++ b/server/src/repositories/person.repository.ts @@ -151,7 +151,7 @@ export class PersonRepository { .innerJoin('asset', (join) => join .onRef('asset_face.assetId', '=', 'asset.id') - .on('asset.visibility', '=', sql.lit(AssetVisibility.TIMELINE)) + .on('asset.visibility', '=', sql.lit(AssetVisibility.Timeline)) .on('asset.deletedAt', 'is', null), ) .where('person.ownerId', '=', userId) @@ -276,7 +276,7 @@ export class PersonRepository { .selectFrom('asset_file') .select('asset_file.path') .whereRef('asset_file.assetId', '=', 'asset.id') - .where('asset_file.type', '=', sql.lit(AssetFileType.PREVIEW)) + .where('asset_file.type', '=', sql.lit(AssetFileType.Preview)) .as('previewPath'), ) .where('person.id', '=', id) @@ -341,7 +341,7 @@ export class PersonRepository { join .onRef('asset.id', '=', 'asset_face.assetId') .on('asset_face.personId', '=', personId) - .on('asset.visibility', '=', sql.lit(AssetVisibility.TIMELINE)) + .on('asset.visibility', '=', sql.lit(AssetVisibility.Timeline)) .on('asset.deletedAt', 'is', null), ) .select((eb) => eb.fn.count(eb.fn('distinct', ['asset.id'])).as('count')) @@ -369,7 +369,7 @@ export class PersonRepository { eb .selectFrom('asset') .whereRef('asset.id', '=', 'asset_face.assetId') - .where('asset.visibility', '=', sql.lit(AssetVisibility.TIMELINE)) + .where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline)) .where('asset.deletedAt', 'is', null), ), ), diff --git a/server/src/repositories/search.repository.ts b/server/src/repositories/search.repository.ts index fe8ad563bb..61e0cc1e29 
100644 --- a/server/src/repositories/search.repository.ts +++ b/server/src/repositories/search.repository.ts @@ -256,7 +256,7 @@ export class SearchRepository { } return this.db.transaction().execute(async (trx) => { - await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx); + await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.Clip])}`.execute(trx); const items = await searchAssetBuilder(trx, options) .selectAll('asset') .innerJoin('smart_search', 'asset.id', 'smart_search.assetId') @@ -284,7 +284,7 @@ export class SearchRepository { } return this.db.transaction().execute(async (trx) => { - await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.FACE])}`.execute(trx); + await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.Face])}`.execute(trx); return await trx .with('cte', (qb) => qb @@ -351,8 +351,8 @@ export class SearchRepository { .select(['city', 'assetId']) .innerJoin('asset', 'asset.id', 'asset_exif.assetId') .where('asset.ownerId', '=', anyUuid(userIds)) - .where('asset.visibility', '=', AssetVisibility.TIMELINE) - .where('asset.type', '=', AssetType.IMAGE) + .where('asset.visibility', '=', AssetVisibility.Timeline) + .where('asset.type', '=', AssetType.Image) .where('asset.deletedAt', 'is', null) .orderBy('city') .limit(1); @@ -367,8 +367,8 @@ export class SearchRepository { .select(['city', 'assetId']) .innerJoin('asset', 'asset.id', 'asset_exif.assetId') .where('asset.ownerId', '=', anyUuid(userIds)) - .where('asset.visibility', '=', AssetVisibility.TIMELINE) - .where('asset.type', '=', AssetType.IMAGE) + .where('asset.visibility', '=', AssetVisibility.Timeline) + .where('asset.type', '=', AssetType.Image) .where('asset.deletedAt', 'is', null) .whereRef('asset_exif.city', '>', 'cte.city') .orderBy('city') @@ -450,7 +450,7 @@ export class SearchRepository { .distinctOn(field) .innerJoin('asset', 'asset.id', 'asset_exif.assetId') .where('ownerId', '=', anyUuid(userIds)) - 
.where('visibility', '=', AssetVisibility.TIMELINE) + .where('visibility', '=', AssetVisibility.Timeline) .where('deletedAt', 'is', null) .where(field, 'is not', null); } diff --git a/server/src/repositories/shared-link.repository.ts b/server/src/repositories/shared-link.repository.ts index d61333fcd6..d5fb3be47d 100644 --- a/server/src/repositories/shared-link.repository.ts +++ b/server/src/repositories/shared-link.repository.ts @@ -103,7 +103,7 @@ export class SharedLinkRepository { .select((eb) => eb.fn.toJson('album').$castTo().as('album')) .where('shared_link.id', '=', id) .where('shared_link.userId', '=', userId) - .where((eb) => eb.or([eb('shared_link.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)])) + .where((eb) => eb.or([eb('shared_link.type', '=', SharedLinkType.Individual), eb('album.id', 'is not', null)])) .orderBy('shared_link.createdAt', 'desc') .executeTakeFirst(); } @@ -165,7 +165,7 @@ export class SharedLinkRepository { (join) => join.onTrue(), ) .select((eb) => eb.fn.toJson('album').$castTo().as('album')) - .where((eb) => eb.or([eb('shared_link.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)])) + .where((eb) => eb.or([eb('shared_link.type', '=', SharedLinkType.Individual), eb('album.id', 'is not', null)])) .$if(!!albumId, (eb) => eb.where('shared_link.albumId', '=', albumId!)) .orderBy('shared_link.createdAt', 'desc') .distinctOn(['shared_link.createdAt']) @@ -185,7 +185,7 @@ export class SharedLinkRepository { eb.selectFrom('user').select(columns.authUser).whereRef('user.id', '=', 'shared_link.userId'), ).as('user'), ]) - .where((eb) => eb.or([eb('shared_link.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)])) + .where((eb) => eb.or([eb('shared_link.type', '=', SharedLinkType.Individual), eb('album.id', 'is not', null)])) .executeTakeFirst(); } diff --git a/server/src/repositories/telemetry.repository.ts b/server/src/repositories/telemetry.repository.ts index 
fc680ddcc5..5fbbb76cf7 100644 --- a/server/src/repositories/telemetry.repository.ts +++ b/server/src/repositories/telemetry.repository.ts @@ -112,21 +112,21 @@ export class TelemetryRepository { const { telemetry } = this.configRepository.getEnv(); const { metrics } = telemetry; - this.api = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.API) }); - this.host = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.HOST) }); - this.jobs = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.JOB) }); - this.repo = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.REPO) }); + this.api = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.Api) }); + this.host = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.Host) }); + this.jobs = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.Job) }); + this.repo = new MetricGroupRepository(metricService).configure({ enabled: metrics.has(ImmichTelemetry.Repo) }); } setup({ repositories }: { repositories: ClassConstructor[] }) { const { telemetry } = this.configRepository.getEnv(); const { metrics } = telemetry; - if (!metrics.has(ImmichTelemetry.REPO)) { + if (!metrics.has(ImmichTelemetry.Repo)) { return; } for (const Repository of repositories) { - const isEnabled = this.reflect.get(MetadataKey.TELEMETRY_ENABLED, Repository) ?? true; + const isEnabled = this.reflect.get(MetadataKey.TelemetryEnabled, Repository) ?? 
true; if (!isEnabled) { this.logger.debug(`Telemetry disabled for ${Repository.name}`); continue; diff --git a/server/src/repositories/trash.repository.ts b/server/src/repositories/trash.repository.ts index ee6fe3ace1..f6f13188d4 100644 --- a/server/src/repositories/trash.repository.ts +++ b/server/src/repositories/trash.repository.ts @@ -8,7 +8,7 @@ export class TrashRepository { constructor(@InjectKysely() private db: Kysely) {} getDeletedIds(): AsyncIterableIterator<{ id: string }> { - return this.db.selectFrom('asset').select(['id']).where('status', '=', AssetStatus.DELETED).stream(); + return this.db.selectFrom('asset').select(['id']).where('status', '=', AssetStatus.Deleted).stream(); } @GenerateSql({ params: [DummyValue.UUID] }) @@ -16,8 +16,8 @@ export class TrashRepository { const { numUpdatedRows } = await this.db .updateTable('asset') .where('ownerId', '=', userId) - .where('status', '=', AssetStatus.TRASHED) - .set({ status: AssetStatus.ACTIVE, deletedAt: null }) + .where('status', '=', AssetStatus.Trashed) + .set({ status: AssetStatus.Active, deletedAt: null }) .executeTakeFirst(); return Number(numUpdatedRows); @@ -28,8 +28,8 @@ export class TrashRepository { const { numUpdatedRows } = await this.db .updateTable('asset') .where('ownerId', '=', userId) - .where('status', '=', AssetStatus.TRASHED) - .set({ status: AssetStatus.DELETED }) + .where('status', '=', AssetStatus.Trashed) + .set({ status: AssetStatus.Deleted }) .executeTakeFirst(); return Number(numUpdatedRows); @@ -43,9 +43,9 @@ export class TrashRepository { const { numUpdatedRows } = await this.db .updateTable('asset') - .where('status', '=', AssetStatus.TRASHED) + .where('status', '=', AssetStatus.Trashed) .where('id', 'in', ids) - .set({ status: AssetStatus.ACTIVE, deletedAt: null }) + .set({ status: AssetStatus.Active, deletedAt: null }) .executeTakeFirst(); return Number(numUpdatedRows); diff --git a/server/src/repositories/user.repository.ts b/server/src/repositories/user.repository.ts 
index f809280d86..715aa2dc32 100644 --- a/server/src/repositories/user.repository.ts +++ b/server/src/repositories/user.repository.ts @@ -187,7 +187,7 @@ export class UserRepository { restore(id: string) { return this.db .updateTable('user') - .set({ status: UserStatus.ACTIVE, deletedAt: null }) + .set({ status: UserStatus.Active, deletedAt: null }) .where('user.id', '=', asUuid(id)) .returning(columns.userAdmin) .returning(withMetadata) @@ -229,8 +229,8 @@ export class UserRepository { .countAll() .filterWhere((eb) => eb.and([ - eb('asset.type', '=', sql.lit(AssetType.IMAGE)), - eb('asset.visibility', '!=', sql.lit(AssetVisibility.HIDDEN)), + eb('asset.type', '=', sql.lit(AssetType.Image)), + eb('asset.visibility', '!=', sql.lit(AssetVisibility.Hidden)), ]), ) .as('photos'), @@ -238,8 +238,8 @@ export class UserRepository { .countAll() .filterWhere((eb) => eb.and([ - eb('asset.type', '=', sql.lit(AssetType.VIDEO)), - eb('asset.visibility', '!=', sql.lit(AssetVisibility.HIDDEN)), + eb('asset.type', '=', sql.lit(AssetType.Video)), + eb('asset.visibility', '!=', sql.lit(AssetVisibility.Hidden)), ]), ) .as('videos'), @@ -254,7 +254,7 @@ export class UserRepository { eb.fn .sum('asset_exif.fileSizeInByte') .filterWhere((eb) => - eb.and([eb('asset.libraryId', 'is', null), eb('asset.type', '=', sql.lit(AssetType.IMAGE))]), + eb.and([eb('asset.libraryId', 'is', null), eb('asset.type', '=', sql.lit(AssetType.Image))]), ), eb.lit(0), ) @@ -264,7 +264,7 @@ export class UserRepository { eb.fn .sum('asset_exif.fileSizeInByte') .filterWhere((eb) => - eb.and([eb('asset.libraryId', 'is', null), eb('asset.type', '=', sql.lit(AssetType.VIDEO))]), + eb.and([eb('asset.libraryId', 'is', null), eb('asset.type', '=', sql.lit(AssetType.Video))]), ), eb.lit(0), ) diff --git a/server/src/repositories/view-repository.ts b/server/src/repositories/view-repository.ts index 0fd74d299f..93c1280191 100644 --- a/server/src/repositories/view-repository.ts +++ 
b/server/src/repositories/view-repository.ts @@ -15,7 +15,7 @@ export class ViewRepository { .select((eb) => eb.fn('substring', ['asset.originalPath', eb.val('^(.*/)[^/]*$')]).as('directoryPath')) .distinct() .where('ownerId', '=', asUuid(userId)) - .where('visibility', '=', AssetVisibility.TIMELINE) + .where('visibility', '=', AssetVisibility.Timeline) .where('deletedAt', 'is', null) .where('fileCreatedAt', 'is not', null) .where('fileModifiedAt', 'is not', null) @@ -34,7 +34,7 @@ export class ViewRepository { .selectAll('asset') .$call(withExif) .where('ownerId', '=', asUuid(userId)) - .where('visibility', '=', AssetVisibility.TIMELINE) + .where('visibility', '=', AssetVisibility.Timeline) .where('deletedAt', 'is', null) .where('fileCreatedAt', 'is not', null) .where('fileModifiedAt', 'is not', null) diff --git a/server/src/schema/migrations/1744910873969-InitialMigration.ts b/server/src/schema/migrations/1744910873969-InitialMigration.ts index 63625a69ad..53a55d860e 100644 --- a/server/src/schema/migrations/1744910873969-InitialMigration.ts +++ b/server/src/schema/migrations/1744910873969-InitialMigration.ts @@ -16,9 +16,7 @@ export async function up(db: Kysely): Promise { rows: [lastMigration], } = await lastMigrationSql.execute(db); if (lastMigration?.name !== 'AddMissingIndex1744910873956') { - throw new Error( - 'Invalid upgrade path. For more information, see https://immich.app/errors#typeorm-upgrade', - ); + throw new Error('Invalid upgrade path. 
For more information, see https://immich.app/errors#typeorm-upgrade'); } logger.log('Database has up to date TypeORM migrations, skipping initial Kysely migration'); return; @@ -108,152 +106,344 @@ export async function up(db: Kysely): Promise { RETURN NULL; END; $$;`.execute(db); - if (vectorExtension === DatabaseExtension.VECTORS) { + if (vectorExtension === DatabaseExtension.Vectors) { await sql`SET search_path TO "$user", public, vectors`.execute(db); } await sql`CREATE TYPE "assets_status_enum" AS ENUM ('active','trashed','deleted');`.execute(db); await sql`CREATE TYPE "sourcetype" AS ENUM ('machine-learning','exif','manual');`.execute(db); - await sql`CREATE TABLE "users" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, "password" character varying NOT NULL DEFAULT '', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "profileImagePath" character varying NOT NULL DEFAULT '', "isAdmin" boolean NOT NULL DEFAULT false, "shouldChangePassword" boolean NOT NULL DEFAULT true, "deletedAt" timestamp with time zone, "oauthId" character varying NOT NULL DEFAULT '', "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "storageLabel" character varying, "name" character varying NOT NULL DEFAULT '', "quotaSizeInBytes" bigint, "quotaUsageInBytes" bigint NOT NULL DEFAULT 0, "status" character varying NOT NULL DEFAULT 'active', "profileChangedAt" timestamp with time zone NOT NULL DEFAULT now(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "libraries" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "name" character varying NOT NULL, "ownerId" uuid NOT NULL, "importPaths" text[] NOT NULL, "exclusionPatterns" text[] NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "refreshedAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT 
immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "asset_stack" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "primaryAssetId" uuid NOT NULL, "ownerId" uuid NOT NULL);`.execute(db); - await sql`CREATE TABLE "assets" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "deviceAssetId" character varying NOT NULL, "ownerId" uuid NOT NULL, "deviceId" character varying NOT NULL, "type" character varying NOT NULL, "originalPath" character varying NOT NULL, "fileCreatedAt" timestamp with time zone NOT NULL, "fileModifiedAt" timestamp with time zone NOT NULL, "isFavorite" boolean NOT NULL DEFAULT false, "duration" character varying, "encodedVideoPath" character varying DEFAULT '', "checksum" bytea NOT NULL, "isVisible" boolean NOT NULL DEFAULT true, "livePhotoVideoId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "isArchived" boolean NOT NULL DEFAULT false, "originalFileName" character varying NOT NULL, "sidecarPath" character varying, "thumbhash" bytea, "isOffline" boolean NOT NULL DEFAULT false, "libraryId" uuid, "isExternal" boolean NOT NULL DEFAULT false, "deletedAt" timestamp with time zone, "localDateTime" timestamp with time zone NOT NULL, "stackId" uuid, "duplicateId" uuid, "status" assets_status_enum NOT NULL DEFAULT 'active', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "albums" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "ownerId" uuid NOT NULL, "albumName" character varying NOT NULL DEFAULT 'Untitled Album', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "albumThumbnailAssetId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "description" text NOT NULL DEFAULT '', "deletedAt" timestamp with time zone, "isActivityEnabled" boolean NOT NULL DEFAULT true, "order" character varying NOT NULL DEFAULT 'desc', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); + await sql`CREATE TABLE 
"users" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, "password" character varying NOT NULL DEFAULT '', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "profileImagePath" character varying NOT NULL DEFAULT '', "isAdmin" boolean NOT NULL DEFAULT false, "shouldChangePassword" boolean NOT NULL DEFAULT true, "deletedAt" timestamp with time zone, "oauthId" character varying NOT NULL DEFAULT '', "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "storageLabel" character varying, "name" character varying NOT NULL DEFAULT '', "quotaSizeInBytes" bigint, "quotaUsageInBytes" bigint NOT NULL DEFAULT 0, "status" character varying NOT NULL DEFAULT 'active', "profileChangedAt" timestamp with time zone NOT NULL DEFAULT now(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "libraries" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "name" character varying NOT NULL, "ownerId" uuid NOT NULL, "importPaths" text[] NOT NULL, "exclusionPatterns" text[] NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "refreshedAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "asset_stack" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "primaryAssetId" uuid NOT NULL, "ownerId" uuid NOT NULL);`.execute( + db, + ); + await sql`CREATE TABLE "assets" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "deviceAssetId" character varying NOT NULL, "ownerId" uuid NOT NULL, "deviceId" character varying NOT NULL, "type" character varying NOT NULL, "originalPath" character varying NOT NULL, "fileCreatedAt" timestamp with time zone NOT NULL, "fileModifiedAt" timestamp with time zone NOT NULL, "isFavorite" boolean NOT NULL DEFAULT false, "duration" character varying, "encodedVideoPath" character varying DEFAULT 
'', "checksum" bytea NOT NULL, "isVisible" boolean NOT NULL DEFAULT true, "livePhotoVideoId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "isArchived" boolean NOT NULL DEFAULT false, "originalFileName" character varying NOT NULL, "sidecarPath" character varying, "thumbhash" bytea, "isOffline" boolean NOT NULL DEFAULT false, "libraryId" uuid, "isExternal" boolean NOT NULL DEFAULT false, "deletedAt" timestamp with time zone, "localDateTime" timestamp with time zone NOT NULL, "stackId" uuid, "duplicateId" uuid, "status" assets_status_enum NOT NULL DEFAULT 'active', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "albums" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "ownerId" uuid NOT NULL, "albumName" character varying NOT NULL DEFAULT 'Untitled Album', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "albumThumbnailAssetId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "description" text NOT NULL DEFAULT '', "deletedAt" timestamp with time zone, "isActivityEnabled" boolean NOT NULL DEFAULT true, "order" character varying NOT NULL DEFAULT 'desc', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); await sql`COMMENT ON COLUMN "albums"."albumThumbnailAssetId" IS 'Asset ID to be used as thumbnail';`.execute(db); - await sql`CREATE TABLE "activity" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "albumId" uuid NOT NULL, "userId" uuid NOT NULL, "assetId" uuid, "comment" text, "isLiked" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "albums_assets_assets" ("albumsId" uuid NOT NULL, "assetsId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db); - await 
sql`CREATE TABLE "albums_shared_users_users" ("albumsId" uuid NOT NULL, "usersId" uuid NOT NULL, "role" character varying NOT NULL DEFAULT 'editor');`.execute(db); - await sql`CREATE TABLE "api_keys" ("name" character varying NOT NULL, "key" character varying NOT NULL, "userId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "permissions" character varying[] NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "assets_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "assetId" uuid NOT NULL, "ownerId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db); - await sql`CREATE TABLE "person" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ownerId" uuid NOT NULL, "name" character varying NOT NULL DEFAULT '', "thumbnailPath" character varying NOT NULL DEFAULT '', "isHidden" boolean NOT NULL DEFAULT false, "birthDate" date, "faceAssetId" uuid, "isFavorite" boolean NOT NULL DEFAULT false, "color" character varying, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "asset_faces" ("assetId" uuid NOT NULL, "personId" uuid, "imageWidth" integer NOT NULL DEFAULT 0, "imageHeight" integer NOT NULL DEFAULT 0, "boundingBoxX1" integer NOT NULL DEFAULT 0, "boundingBoxY1" integer NOT NULL DEFAULT 0, "boundingBoxX2" integer NOT NULL DEFAULT 0, "boundingBoxY2" integer NOT NULL DEFAULT 0, "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "sourceType" sourcetype NOT NULL DEFAULT 'machine-learning', "deletedAt" timestamp with time zone);`.execute(db); - await sql`CREATE TABLE "asset_files" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "assetId" uuid NOT NULL, "createdAt" timestamp with time zone 
NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "type" character varying NOT NULL, "path" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "asset_job_status" ("assetId" uuid NOT NULL, "facesRecognizedAt" timestamp with time zone, "metadataExtractedAt" timestamp with time zone, "duplicatesDetectedAt" timestamp with time zone, "previewAt" timestamp with time zone, "thumbnailAt" timestamp with time zone);`.execute(db); - await sql`CREATE TABLE "audit" ("id" serial NOT NULL, "entityType" character varying NOT NULL, "entityId" uuid NOT NULL, "action" character varying NOT NULL, "ownerId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db); - await sql`CREATE TABLE "exif" ("assetId" uuid NOT NULL, "make" character varying, "model" character varying, "exifImageWidth" integer, "exifImageHeight" integer, "fileSizeInByte" bigint, "orientation" character varying, "dateTimeOriginal" timestamp with time zone, "modifyDate" timestamp with time zone, "lensModel" character varying, "fNumber" double precision, "focalLength" double precision, "iso" integer, "latitude" double precision, "longitude" double precision, "city" character varying, "state" character varying, "country" character varying, "description" text NOT NULL DEFAULT '', "fps" double precision, "exposureTime" character varying, "livePhotoCID" character varying, "timeZone" character varying, "projectionType" character varying, "profileDescription" character varying, "colorspace" character varying, "bitsPerSample" integer, "autoStackId" character varying, "rating" integer, "updatedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); + await sql`CREATE TABLE "activity" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with 
time zone NOT NULL DEFAULT now(), "albumId" uuid NOT NULL, "userId" uuid NOT NULL, "assetId" uuid, "comment" text, "isLiked" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "albums_assets_assets" ("albumsId" uuid NOT NULL, "assetsId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute( + db, + ); + await sql`CREATE TABLE "albums_shared_users_users" ("albumsId" uuid NOT NULL, "usersId" uuid NOT NULL, "role" character varying NOT NULL DEFAULT 'editor');`.execute( + db, + ); + await sql`CREATE TABLE "api_keys" ("name" character varying NOT NULL, "key" character varying NOT NULL, "userId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "permissions" character varying[] NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "assets_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "assetId" uuid NOT NULL, "ownerId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute( + db, + ); + await sql`CREATE TABLE "person" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ownerId" uuid NOT NULL, "name" character varying NOT NULL DEFAULT '', "thumbnailPath" character varying NOT NULL DEFAULT '', "isHidden" boolean NOT NULL DEFAULT false, "birthDate" date, "faceAssetId" uuid, "isFavorite" boolean NOT NULL DEFAULT false, "color" character varying, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "asset_faces" ("assetId" uuid NOT NULL, "personId" uuid, "imageWidth" integer NOT NULL DEFAULT 0, "imageHeight" integer NOT NULL DEFAULT 0, "boundingBoxX1" integer NOT NULL 
DEFAULT 0, "boundingBoxY1" integer NOT NULL DEFAULT 0, "boundingBoxX2" integer NOT NULL DEFAULT 0, "boundingBoxY2" integer NOT NULL DEFAULT 0, "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "sourceType" sourcetype NOT NULL DEFAULT 'machine-learning', "deletedAt" timestamp with time zone);`.execute( + db, + ); + await sql`CREATE TABLE "asset_files" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "assetId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "type" character varying NOT NULL, "path" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "asset_job_status" ("assetId" uuid NOT NULL, "facesRecognizedAt" timestamp with time zone, "metadataExtractedAt" timestamp with time zone, "duplicatesDetectedAt" timestamp with time zone, "previewAt" timestamp with time zone, "thumbnailAt" timestamp with time zone);`.execute( + db, + ); + await sql`CREATE TABLE "audit" ("id" serial NOT NULL, "entityType" character varying NOT NULL, "entityId" uuid NOT NULL, "action" character varying NOT NULL, "ownerId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute( + db, + ); + await sql`CREATE TABLE "exif" ("assetId" uuid NOT NULL, "make" character varying, "model" character varying, "exifImageWidth" integer, "exifImageHeight" integer, "fileSizeInByte" bigint, "orientation" character varying, "dateTimeOriginal" timestamp with time zone, "modifyDate" timestamp with time zone, "lensModel" character varying, "fNumber" double precision, "focalLength" double precision, "iso" integer, "latitude" double precision, "longitude" double precision, "city" character varying, "state" character varying, "country" character varying, "description" text NOT NULL DEFAULT '', "fps" double precision, "exposureTime" character varying, "livePhotoCID" character varying, "timeZone" character varying, 
"projectionType" character varying, "profileDescription" character varying, "colorspace" character varying, "bitsPerSample" integer, "autoStackId" character varying, "rating" integer, "updatedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); await sql`CREATE TABLE "face_search" ("faceId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db); - await sql`CREATE TABLE "geodata_places" ("id" integer NOT NULL, "name" character varying(200) NOT NULL, "longitude" double precision NOT NULL, "latitude" double precision NOT NULL, "countryCode" character(2) NOT NULL, "admin1Code" character varying(20), "admin2Code" character varying(80), "modificationDate" date NOT NULL, "admin1Name" character varying, "admin2Name" character varying, "alternateNames" character varying);`.execute(db); - await sql`CREATE TABLE "memories" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "ownerId" uuid NOT NULL, "type" character varying NOT NULL, "data" jsonb NOT NULL, "isSaved" boolean NOT NULL DEFAULT false, "memoryAt" timestamp with time zone NOT NULL, "seenAt" timestamp with time zone, "showAt" timestamp with time zone, "hideAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); + await sql`CREATE TABLE "geodata_places" ("id" integer NOT NULL, "name" character varying(200) NOT NULL, "longitude" double precision NOT NULL, "latitude" double precision NOT NULL, "countryCode" character(2) NOT NULL, "admin1Code" character varying(20), "admin2Code" character varying(80), "modificationDate" date NOT NULL, "admin1Name" character varying, "admin2Name" character varying, "alternateNames" character varying);`.execute( + db, + ); + await sql`CREATE TABLE "memories" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), 
"createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "ownerId" uuid NOT NULL, "type" character varying NOT NULL, "data" jsonb NOT NULL, "isSaved" boolean NOT NULL DEFAULT false, "memoryAt" timestamp with time zone NOT NULL, "seenAt" timestamp with time zone, "showAt" timestamp with time zone, "hideAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); await sql`CREATE TABLE "memories_assets_assets" ("memoriesId" uuid NOT NULL, "assetsId" uuid NOT NULL);`.execute(db); - await sql`CREATE TABLE "move_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "entityId" uuid NOT NULL, "pathType" character varying NOT NULL, "oldPath" character varying NOT NULL, "newPath" character varying NOT NULL);`.execute(db); - await sql`CREATE TABLE "naturalearth_countries" ("id" integer NOT NULL GENERATED ALWAYS AS IDENTITY, "admin" character varying(50) NOT NULL, "admin_a3" character varying(3) NOT NULL, "type" character varying(50) NOT NULL, "coordinates" polygon NOT NULL);`.execute(db); - await sql`CREATE TABLE "partners_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db); - await sql`CREATE TABLE "partners" ("sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "inTimeline" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "sessions" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "token" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "userId" uuid NOT NULL, "deviceType" character 
varying NOT NULL DEFAULT '', "deviceOS" character varying NOT NULL DEFAULT '', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "shared_links" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "description" character varying, "userId" uuid NOT NULL, "key" bytea NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "expiresAt" timestamp with time zone, "allowUpload" boolean NOT NULL DEFAULT false, "albumId" uuid, "allowDownload" boolean NOT NULL DEFAULT true, "showExif" boolean NOT NULL DEFAULT true, "password" character varying);`.execute(db); + await sql`CREATE TABLE "move_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "entityId" uuid NOT NULL, "pathType" character varying NOT NULL, "oldPath" character varying NOT NULL, "newPath" character varying NOT NULL);`.execute( + db, + ); + await sql`CREATE TABLE "naturalearth_countries" ("id" integer NOT NULL GENERATED ALWAYS AS IDENTITY, "admin" character varying(50) NOT NULL, "admin_a3" character varying(3) NOT NULL, "type" character varying(50) NOT NULL, "coordinates" polygon NOT NULL);`.execute( + db, + ); + await sql`CREATE TABLE "partners_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute( + db, + ); + await sql`CREATE TABLE "partners" ("sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "inTimeline" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "sessions" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "token" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "userId" 
uuid NOT NULL, "deviceType" character varying NOT NULL DEFAULT '', "deviceOS" character varying NOT NULL DEFAULT '', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "shared_links" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "description" character varying, "userId" uuid NOT NULL, "key" bytea NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "expiresAt" timestamp with time zone, "allowUpload" boolean NOT NULL DEFAULT false, "albumId" uuid, "allowDownload" boolean NOT NULL DEFAULT true, "showExif" boolean NOT NULL DEFAULT true, "password" character varying);`.execute( + db, + ); await sql`CREATE TABLE "shared_link__asset" ("assetsId" uuid NOT NULL, "sharedLinksId" uuid NOT NULL);`.execute(db); await sql`CREATE TABLE "smart_search" ("assetId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db); await sql`ALTER TABLE "smart_search" ALTER COLUMN "embedding" SET STORAGE EXTERNAL;`.execute(db); - await sql`CREATE TABLE "session_sync_checkpoints" ("sessionId" uuid NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ack" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); + await sql`CREATE TABLE "session_sync_checkpoints" ("sessionId" uuid NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ack" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); await sql`CREATE TABLE "system_metadata" ("key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db); - await sql`CREATE TABLE "tags" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "userId" uuid NOT NULL, "value" character varying NOT NULL, "createdAt" timestamp with time zone NOT 
NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "color" character varying, "parentId" uuid, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); + await sql`CREATE TABLE "tags" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "userId" uuid NOT NULL, "value" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "color" character varying, "parentId" uuid, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); await sql`CREATE TABLE "tag_asset" ("assetsId" uuid NOT NULL, "tagsId" uuid NOT NULL);`.execute(db); await sql`CREATE TABLE "tags_closure" ("id_ancestor" uuid NOT NULL, "id_descendant" uuid NOT NULL);`.execute(db); - await sql`CREATE TABLE "users_audit" ("userId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "id" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db); - await sql`CREATE TABLE "user_metadata" ("userId" uuid NOT NULL, "key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db); - await sql`CREATE TABLE "version_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "version" character varying NOT NULL);`.execute(db); + await sql`CREATE TABLE "users_audit" ("userId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "id" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute( + db, + ); + await sql`CREATE TABLE "user_metadata" ("userId" uuid NOT NULL, "key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute( + db, + ); + await sql`CREATE TABLE "version_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "version" character varying NOT NULL);`.execute( + db, + ); await sql`ALTER TABLE "users" ADD CONSTRAINT "PK_a3ffb1c0c8416b9fc6f907b7433" PRIMARY KEY ("id");`.execute(db); 
await sql`ALTER TABLE "libraries" ADD CONSTRAINT "PK_505fedfcad00a09b3734b4223de" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "PK_74a27e7fcbd5852463d0af3034b" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "assets" ADD CONSTRAINT "PK_da96729a8b113377cfb6a62439c" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "albums" ADD CONSTRAINT "PK_7f71c7b5bc7c87b8f94c9a93a00" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "activity" ADD CONSTRAINT "PK_24625a1d6b1b089c8ae206fe467" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "PK_c67bc36fa845fb7b18e0e398180" PRIMARY KEY ("albumsId", "assetsId");`.execute(db); - await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "PK_7df55657e0b2e8b626330a0ebc8" PRIMARY KEY ("albumsId", "usersId");`.execute(db); + await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "PK_c67bc36fa845fb7b18e0e398180" PRIMARY KEY ("albumsId", "assetsId");`.execute( + db, + ); + await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "PK_7df55657e0b2e8b626330a0ebc8" PRIMARY KEY ("albumsId", "usersId");`.execute( + db, + ); await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "PK_5c8a79801b44bd27b79228e1dad" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "assets_audit" ADD CONSTRAINT "PK_99bd5c015f81a641927a32b4212" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "person" ADD CONSTRAINT "PK_5fdaf670315c4b7e70cce85daa3" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "PK_6df76ab2eb6f5b57b7c2f1fc684" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "PK_c41dc3e9ef5e1c57ca5a08a0004" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "PK_420bec36fc02813bddf5c8b73d4" PRIMARY KEY ("assetId");`.execute(db); + await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "PK_420bec36fc02813bddf5c8b73d4" PRIMARY 
KEY ("assetId");`.execute( + db, + ); await sql`ALTER TABLE "audit" ADD CONSTRAINT "PK_1d3d120ddaf7bc9b1ed68ed463a" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "exif" ADD CONSTRAINT "PK_c0117fdbc50b917ef9067740c44" PRIMARY KEY ("assetId");`.execute(db); await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_pkey" PRIMARY KEY ("faceId");`.execute(db); - await sql`ALTER TABLE "geodata_places" ADD CONSTRAINT "PK_c29918988912ef4036f3d7fbff4" PRIMARY KEY ("id");`.execute(db); + await sql`ALTER TABLE "geodata_places" ADD CONSTRAINT "PK_c29918988912ef4036f3d7fbff4" PRIMARY KEY ("id");`.execute( + db, + ); await sql`ALTER TABLE "memories" ADD CONSTRAINT "PK_aaa0692d9496fe827b0568612f8" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "PK_fcaf7112a013d1703c011c6793d" PRIMARY KEY ("memoriesId", "assetsId");`.execute(db); + await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "PK_fcaf7112a013d1703c011c6793d" PRIMARY KEY ("memoriesId", "assetsId");`.execute( + db, + ); await sql`ALTER TABLE "move_history" ADD CONSTRAINT "PK_af608f132233acf123f2949678d" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "naturalearth_countries" ADD CONSTRAINT "PK_21a6d86d1ab5d841648212e5353" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "partners_audit" ADD CONSTRAINT "PK_952b50217ff78198a7e380f0359" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "partners" ADD CONSTRAINT "PK_f1cc8f73d16b367f426261a8736" PRIMARY KEY ("sharedById", "sharedWithId");`.execute(db); + await sql`ALTER TABLE "naturalearth_countries" ADD CONSTRAINT "PK_21a6d86d1ab5d841648212e5353" PRIMARY KEY ("id");`.execute( + db, + ); + await sql`ALTER TABLE "partners_audit" ADD CONSTRAINT "PK_952b50217ff78198a7e380f0359" PRIMARY KEY ("id");`.execute( + db, + ); + await sql`ALTER TABLE "partners" ADD CONSTRAINT "PK_f1cc8f73d16b367f426261a8736" PRIMARY KEY ("sharedById", "sharedWithId");`.execute( + db, + ); await sql`ALTER 
TABLE "sessions" ADD CONSTRAINT "PK_48cb6b5c20faa63157b3c1baf7f" PRIMARY KEY ("id");`.execute(db); await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "PK_642e2b0f619e4876e5f90a43465" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "PK_9b4f3687f9b31d1e311336b05e3" PRIMARY KEY ("assetsId", "sharedLinksId");`.execute(db); + await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "PK_9b4f3687f9b31d1e311336b05e3" PRIMARY KEY ("assetsId", "sharedLinksId");`.execute( + db, + ); await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_pkey" PRIMARY KEY ("assetId");`.execute(db); - await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "PK_b846ab547a702863ef7cd9412fb" PRIMARY KEY ("sessionId", "type");`.execute(db); - await sql`ALTER TABLE "system_metadata" ADD CONSTRAINT "PK_fa94f6857470fb5b81ec6084465" PRIMARY KEY ("key");`.execute(db); + await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "PK_b846ab547a702863ef7cd9412fb" PRIMARY KEY ("sessionId", "type");`.execute( + db, + ); + await sql`ALTER TABLE "system_metadata" ADD CONSTRAINT "PK_fa94f6857470fb5b81ec6084465" PRIMARY KEY ("key");`.execute( + db, + ); await sql`ALTER TABLE "tags" ADD CONSTRAINT "PK_e7dc17249a1148a1970748eda99" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "PK_ef5346fe522b5fb3bc96454747e" PRIMARY KEY ("assetsId", "tagsId");`.execute(db); - await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "PK_eab38eb12a3ec6df8376c95477c" PRIMARY KEY ("id_ancestor", "id_descendant");`.execute(db); + await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "PK_ef5346fe522b5fb3bc96454747e" PRIMARY KEY ("assetsId", "tagsId");`.execute( + db, + ); + await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "PK_eab38eb12a3ec6df8376c95477c" PRIMARY KEY ("id_ancestor", "id_descendant");`.execute( + db, + ); await sql`ALTER TABLE "users_audit" ADD CONSTRAINT "PK_e9b2bdfd90e7eb5961091175180" PRIMARY KEY 
("id");`.execute(db); - await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "PK_5931462150b3438cbc83277fe5a" PRIMARY KEY ("userId", "key");`.execute(db); - await sql`ALTER TABLE "version_history" ADD CONSTRAINT "PK_5db259cbb09ce82c0d13cfd1b23" PRIMARY KEY ("id");`.execute(db); - await sql`ALTER TABLE "libraries" ADD CONSTRAINT "FK_0f6fc2fb195f24d19b0fb0d57c1" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_91704e101438fd0653f582426dc" FOREIGN KEY ("primaryAssetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION;`.execute(db); - await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_c05079e542fd74de3b5ecb5c1c8" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_2c5ac0d6fb58b238fd2068de67d" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_16294b83fa8c0149719a1f631ef" FOREIGN KEY ("livePhotoVideoId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db); - await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_9977c3c1de01c3d848039a6b90c" FOREIGN KEY ("libraryId") REFERENCES "libraries" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_f15d48fa3ea5e4bda05ca8ab207" FOREIGN KEY ("stackId") REFERENCES "asset_stack" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db); - await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_b22c53f35ef20c28c21637c85f4" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_05895aa505a670300d4816debce" FOREIGN KEY ("albumThumbnailAssetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db); - await sql`ALTER 
TABLE "activity" ADD CONSTRAINT "FK_1af8519996fbfb3684b58df280b" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_3571467bcbe021f66e2bdce96ea" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_8091ea76b12338cb4428d33d782" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_e590fa396c6898fcd4a50e40927" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_4bd1303d199f4e72ccdf998c621" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_427c350ad49bd3935a50baab737" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_f48513bf9bccefd6ff3ad30bd06" FOREIGN KEY ("usersId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "FK_6c2e267ae764a9413b863a29342" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_5527cc99f530a547093f9e577b6" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_2bbabe31656b6778c6b87b61023" FOREIGN KEY ("faceAssetId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE SET NULL;`.execute(db); - await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_02a43fd0b3c50fb6d7f0cb7282c" FOREIGN KEY 
("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_95ad7106dd7b484275443f580f9" FOREIGN KEY ("personId") REFERENCES "person" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db); - await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "FK_e3e103a5f1d8bc8402999286040" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "FK_420bec36fc02813bddf5c8b73d4" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "exif" ADD CONSTRAINT "FK_c0117fdbc50b917ef9067740c44" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_faceId_fkey" FOREIGN KEY ("faceId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "memories" ADD CONSTRAINT "FK_575842846f0c28fa5da46c99b19" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_984e5c9ab1f04d34538cd32334e" FOREIGN KEY ("memoriesId") REFERENCES "memories" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_6942ecf52d75d4273de19d2c16f" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_7e077a8b70b3530138610ff5e04" FOREIGN KEY ("sharedById") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_d7e875c6c60e661723dbf372fd3" FOREIGN KEY ("sharedWithId") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); 
- await sql`ALTER TABLE "sessions" ADD CONSTRAINT "FK_57de40bc620f456c7311aa3a1e6" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_66fe3837414c5a9f1c33ca49340" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_0c6ce9058c29f07cdf7014eac66" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_5b7decce6c8d3db9593d6111a66" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_c9fab4aa97ffd1b034f3d6581ab" FOREIGN KEY ("sharedLinksId") REFERENCES "shared_links" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "FK_d8ddd9d687816cc490432b3d4bc" FOREIGN KEY ("sessionId") REFERENCES "sessions" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_92e67dc508c705dd66c94615576" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_9f9590cc11561f1f48ff034ef99" FOREIGN KEY ("parentId") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_f8e8a9e893cb5c54907f1b798e9" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_e99f31ea4cdf3a2c35c7287eb42" FOREIGN KEY 
("tagsId") REFERENCES "tags" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_15fbcbc67663c6bfc07b354c22c" FOREIGN KEY ("id_ancestor") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_b1a2a7ed45c29179b5ad51548a1" FOREIGN KEY ("id_descendant") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db); - await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "FK_6afb43681a21cf7815932bc38ac" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db); + await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "PK_5931462150b3438cbc83277fe5a" PRIMARY KEY ("userId", "key");`.execute( + db, + ); + await sql`ALTER TABLE "version_history" ADD CONSTRAINT "PK_5db259cbb09ce82c0d13cfd1b23" PRIMARY KEY ("id");`.execute( + db, + ); + await sql`ALTER TABLE "libraries" ADD CONSTRAINT "FK_0f6fc2fb195f24d19b0fb0d57c1" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_91704e101438fd0653f582426dc" FOREIGN KEY ("primaryAssetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION;`.execute( + db, + ); + await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_c05079e542fd74de3b5ecb5c1c8" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_2c5ac0d6fb58b238fd2068de67d" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_16294b83fa8c0149719a1f631ef" FOREIGN KEY ("livePhotoVideoId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute( + db, + ); + await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_9977c3c1de01c3d848039a6b90c" 
FOREIGN KEY ("libraryId") REFERENCES "libraries" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_f15d48fa3ea5e4bda05ca8ab207" FOREIGN KEY ("stackId") REFERENCES "asset_stack" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute( + db, + ); + await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_b22c53f35ef20c28c21637c85f4" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_05895aa505a670300d4816debce" FOREIGN KEY ("albumThumbnailAssetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute( + db, + ); + await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_1af8519996fbfb3684b58df280b" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_3571467bcbe021f66e2bdce96ea" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_8091ea76b12338cb4428d33d782" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_e590fa396c6898fcd4a50e40927" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_4bd1303d199f4e72ccdf998c621" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_427c350ad49bd3935a50baab737" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_f48513bf9bccefd6ff3ad30bd06" 
FOREIGN KEY ("usersId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "FK_6c2e267ae764a9413b863a29342" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_5527cc99f530a547093f9e577b6" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_2bbabe31656b6778c6b87b61023" FOREIGN KEY ("faceAssetId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE SET NULL;`.execute( + db, + ); + await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_02a43fd0b3c50fb6d7f0cb7282c" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_95ad7106dd7b484275443f580f9" FOREIGN KEY ("personId") REFERENCES "person" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute( + db, + ); + await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "FK_e3e103a5f1d8bc8402999286040" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "FK_420bec36fc02813bddf5c8b73d4" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "exif" ADD CONSTRAINT "FK_c0117fdbc50b917ef9067740c44" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_faceId_fkey" FOREIGN KEY ("faceId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "memories" ADD CONSTRAINT "FK_575842846f0c28fa5da46c99b19" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON 
UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_984e5c9ab1f04d34538cd32334e" FOREIGN KEY ("memoriesId") REFERENCES "memories" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_6942ecf52d75d4273de19d2c16f" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_7e077a8b70b3530138610ff5e04" FOREIGN KEY ("sharedById") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_d7e875c6c60e661723dbf372fd3" FOREIGN KEY ("sharedWithId") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "sessions" ADD CONSTRAINT "FK_57de40bc620f456c7311aa3a1e6" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_66fe3837414c5a9f1c33ca49340" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_0c6ce9058c29f07cdf7014eac66" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_5b7decce6c8d3db9593d6111a66" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_c9fab4aa97ffd1b034f3d6581ab" FOREIGN KEY ("sharedLinksId") REFERENCES "shared_links" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON 
UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "FK_d8ddd9d687816cc490432b3d4bc" FOREIGN KEY ("sessionId") REFERENCES "sessions" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_92e67dc508c705dd66c94615576" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_9f9590cc11561f1f48ff034ef99" FOREIGN KEY ("parentId") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_f8e8a9e893cb5c54907f1b798e9" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_e99f31ea4cdf3a2c35c7287eb42" FOREIGN KEY ("tagsId") REFERENCES "tags" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_15fbcbc67663c6bfc07b354c22c" FOREIGN KEY ("id_ancestor") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_b1a2a7ed45c29179b5ad51548a1" FOREIGN KEY ("id_descendant") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute( + db, + ); + await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "FK_6afb43681a21cf7815932bc38ac" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute( + db, + ); await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_97672ac88f789774dd47f7c8be3" UNIQUE ("email");`.execute(db); await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_b309cf34fa58137c416b32cea3a" UNIQUE ("storageLabel");`.execute(db); - await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "REL_91704e101438fd0653f582426d" UNIQUE ("primaryAssetId");`.execute(db); + await sql`ALTER 
TABLE "asset_stack" ADD CONSTRAINT "REL_91704e101438fd0653f582426d" UNIQUE ("primaryAssetId");`.execute( + db, + ); await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "UQ_assetId_type" UNIQUE ("assetId", "type");`.execute(db); await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_newPath" UNIQUE ("newPath");`.execute(db); - await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_entityId_pathType" UNIQUE ("entityId", "pathType");`.execute(db); + await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_entityId_pathType" UNIQUE ("entityId", "pathType");`.execute( + db, + ); await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "UQ_sharedlink_key" UNIQUE ("key");`.execute(db); await sql`ALTER TABLE "tags" ADD CONSTRAINT "UQ_79d6f16e52bb2c7130375246793" UNIQUE ("userId", "value");`.execute(db); - await sql`ALTER TABLE "activity" ADD CONSTRAINT "CHK_2ab1e70f113f450eb40c1e3ec8" CHECK (("comment" IS NULL AND "isLiked" = true) OR ("comment" IS NOT NULL AND "isLiked" = false));`.execute(db); - await sql`ALTER TABLE "person" ADD CONSTRAINT "CHK_b0f82b0ed662bfc24fbb58bb45" CHECK ("birthDate" <= CURRENT_DATE);`.execute(db); + await sql`ALTER TABLE "activity" ADD CONSTRAINT "CHK_2ab1e70f113f450eb40c1e3ec8" CHECK (("comment" IS NULL AND "isLiked" = true) OR ("comment" IS NOT NULL AND "isLiked" = false));`.execute( + db, + ); + await sql`ALTER TABLE "person" ADD CONSTRAINT "CHK_b0f82b0ed662bfc24fbb58bb45" CHECK ("birthDate" <= CURRENT_DATE);`.execute( + db, + ); await sql`CREATE INDEX "IDX_users_updated_at_asc_id_asc" ON "users" ("updatedAt", "id")`.execute(db); await sql`CREATE INDEX "IDX_users_update_id" ON "users" ("updateId")`.execute(db); await sql`CREATE INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c" ON "libraries" ("ownerId")`.execute(db); await sql`CREATE INDEX "IDX_libraries_update_id" ON "libraries" ("updateId")`.execute(db); await sql`CREATE INDEX "IDX_91704e101438fd0653f582426d" ON "asset_stack" ("primaryAssetId")`.execute(db); await sql`CREATE INDEX 
"IDX_c05079e542fd74de3b5ecb5c1c" ON "asset_stack" ("ownerId")`.execute(db); - await sql`CREATE INDEX "idx_originalfilename_trigram" ON "assets" USING gin (f_unaccent("originalFileName") gin_trgm_ops)`.execute(db); + await sql`CREATE INDEX "idx_originalfilename_trigram" ON "assets" USING gin (f_unaccent("originalFileName") gin_trgm_ops)`.execute( + db, + ); await sql`CREATE INDEX "IDX_asset_id_stackId" ON "assets" ("id", "stackId")`.execute(db); await sql`CREATE INDEX "IDX_originalPath_libraryId" ON "assets" ("originalPath", "libraryId")`.execute(db); - await sql`CREATE INDEX "idx_local_date_time_month" ON "assets" ((date_trunc('MONTH'::text, ("localDateTime" AT TIME ZONE 'UTC'::text)) AT TIME ZONE 'UTC'::text))`.execute(db); + await sql`CREATE INDEX "idx_local_date_time_month" ON "assets" ((date_trunc('MONTH'::text, ("localDateTime" AT TIME ZONE 'UTC'::text)) AT TIME ZONE 'UTC'::text))`.execute( + db, + ); await sql`CREATE INDEX "idx_local_date_time" ON "assets" ((("localDateTime" at time zone 'UTC')::date))`.execute(db); - await sql`CREATE UNIQUE INDEX "UQ_assets_owner_library_checksum" ON "assets" ("ownerId", "libraryId", "checksum") WHERE ("libraryId" IS NOT NULL)`.execute(db); - await sql`CREATE UNIQUE INDEX "UQ_assets_owner_checksum" ON "assets" ("ownerId", "checksum") WHERE ("libraryId" IS NULL)`.execute(db); + await sql`CREATE UNIQUE INDEX "UQ_assets_owner_library_checksum" ON "assets" ("ownerId", "libraryId", "checksum") WHERE ("libraryId" IS NOT NULL)`.execute( + db, + ); + await sql`CREATE UNIQUE INDEX "UQ_assets_owner_checksum" ON "assets" ("ownerId", "checksum") WHERE ("libraryId" IS NULL)`.execute( + db, + ); await sql`CREATE INDEX "IDX_2c5ac0d6fb58b238fd2068de67" ON "assets" ("ownerId")`.execute(db); await sql`CREATE INDEX "idx_asset_file_created_at" ON "assets" ("fileCreatedAt")`.execute(db); await sql`CREATE INDEX "IDX_8d3efe36c0755849395e6ea866" ON "assets" ("checksum")`.execute(db); @@ -266,7 +456,9 @@ export async function up(db: Kysely): Promise 
{ await sql`CREATE INDEX "IDX_b22c53f35ef20c28c21637c85f" ON "albums" ("ownerId")`.execute(db); await sql`CREATE INDEX "IDX_05895aa505a670300d4816debc" ON "albums" ("albumThumbnailAssetId")`.execute(db); await sql`CREATE INDEX "IDX_albums_update_id" ON "albums" ("updateId")`.execute(db); - await sql`CREATE UNIQUE INDEX "IDX_activity_like" ON "activity" ("assetId", "userId", "albumId") WHERE ("isLiked" = true)`.execute(db); + await sql`CREATE UNIQUE INDEX "IDX_activity_like" ON "activity" ("assetId", "userId", "albumId") WHERE ("isLiked" = true)`.execute( + db, + ); await sql`CREATE INDEX "IDX_1af8519996fbfb3684b58df280" ON "activity" ("albumId")`.execute(db); await sql`CREATE INDEX "IDX_3571467bcbe021f66e2bdce96e" ON "activity" ("userId")`.execute(db); await sql`CREATE INDEX "IDX_8091ea76b12338cb4428d33d78" ON "activity" ("assetId")`.execute(db); @@ -295,11 +487,21 @@ export async function up(db: Kysely): Promise { await sql`CREATE INDEX "IDX_auto_stack_id" ON "exif" ("autoStackId")`.execute(db); await sql`CREATE INDEX "IDX_asset_exif_update_id" ON "exif" ("updateId")`.execute(db); await sql.raw(vectorIndexQuery({ vectorExtension, table: 'face_search', indexName: 'face_index' })).execute(db); - await sql`CREATE INDEX "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`.execute(db); - await sql`CREATE INDEX "idx_geodata_places_name" ON "geodata_places" USING gin (f_unaccent("name") gin_trgm_ops)`.execute(db); - await sql`CREATE INDEX "idx_geodata_places_admin2_name" ON "geodata_places" USING gin (f_unaccent("admin2Name") gin_trgm_ops)`.execute(db); - await sql`CREATE INDEX "idx_geodata_places_admin1_name" ON "geodata_places" USING gin (f_unaccent("admin1Name") gin_trgm_ops)`.execute(db); - await sql`CREATE INDEX "idx_geodata_places_alternate_names" ON "geodata_places" USING gin (f_unaccent("alternateNames") gin_trgm_ops)`.execute(db); + await sql`CREATE INDEX "IDX_geodata_gist_earthcoord" ON "geodata_places" 
(ll_to_earth_public(latitude, longitude))`.execute( + db, + ); + await sql`CREATE INDEX "idx_geodata_places_name" ON "geodata_places" USING gin (f_unaccent("name") gin_trgm_ops)`.execute( + db, + ); + await sql`CREATE INDEX "idx_geodata_places_admin2_name" ON "geodata_places" USING gin (f_unaccent("admin2Name") gin_trgm_ops)`.execute( + db, + ); + await sql`CREATE INDEX "idx_geodata_places_admin1_name" ON "geodata_places" USING gin (f_unaccent("admin1Name") gin_trgm_ops)`.execute( + db, + ); + await sql`CREATE INDEX "idx_geodata_places_alternate_names" ON "geodata_places" USING gin (f_unaccent("alternateNames") gin_trgm_ops)`.execute( + db, + ); await sql`CREATE INDEX "IDX_575842846f0c28fa5da46c99b1" ON "memories" ("ownerId")`.execute(db); await sql`CREATE INDEX "IDX_memories_update_id" ON "memories" ("updateId")`.execute(db); await sql`CREATE INDEX "IDX_984e5c9ab1f04d34538cd32334" ON "memories_assets_assets" ("memoriesId")`.execute(db); @@ -319,7 +521,9 @@ export async function up(db: Kysely): Promise { await sql`CREATE INDEX "IDX_c9fab4aa97ffd1b034f3d6581a" ON "shared_link__asset" ("sharedLinksId")`.execute(db); await sql.raw(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' })).execute(db); await sql`CREATE INDEX "IDX_d8ddd9d687816cc490432b3d4b" ON "session_sync_checkpoints" ("sessionId")`.execute(db); - await sql`CREATE INDEX "IDX_session_sync_checkpoints_update_id" ON "session_sync_checkpoints" ("updateId")`.execute(db); + await sql`CREATE INDEX "IDX_session_sync_checkpoints_update_id" ON "session_sync_checkpoints" ("updateId")`.execute( + db, + ); await sql`CREATE INDEX "IDX_92e67dc508c705dd66c9461557" ON "tags" ("userId")`.execute(db); await sql`CREATE INDEX "IDX_9f9590cc11561f1f48ff034ef9" ON "tags" ("parentId")`.execute(db); await sql`CREATE INDEX "IDX_tags_update_id" ON "tags" ("updateId")`.execute(db); @@ -407,5 +611,5 @@ export async function up(db: Kysely): Promise { } export async function down(): Promise { -// not 
implemented + // not implemented } diff --git a/server/src/schema/migrations/1749067526135-UserOnboardingDefault.ts b/server/src/schema/migrations/1749067526135-UserOnboardingDefault.ts index 376541410f..e6aabec27d 100644 --- a/server/src/schema/migrations/1749067526135-UserOnboardingDefault.ts +++ b/server/src/schema/migrations/1749067526135-UserOnboardingDefault.ts @@ -2,11 +2,11 @@ import { Kysely, sql } from 'kysely'; import { UserMetadataKey } from 'src/enum'; export async function up(db: Kysely): Promise { - await sql`INSERT INTO user_metadata SELECT id, ${UserMetadataKey.ONBOARDING}, '{"isOnboarded": true}' FROM users + await sql`INSERT INTO user_metadata SELECT id, ${UserMetadataKey.Onboarding}, '{"isOnboarded": true}' FROM users ON CONFLICT ("userId", key) DO NOTHING `.execute(db); } export async function down(db: Kysely): Promise { - await sql`DELETE FROM user_metadata WHERE key = ${UserMetadataKey.ONBOARDING}`.execute(db); + await sql`DELETE FROM user_metadata WHERE key = ${UserMetadataKey.Onboarding}`.execute(db); } diff --git a/server/src/schema/tables/album-user.table.ts b/server/src/schema/tables/album-user.table.ts index 6f20e25d90..94383218da 100644 --- a/server/src/schema/tables/album-user.table.ts +++ b/server/src/schema/tables/album-user.table.ts @@ -47,7 +47,7 @@ export class AlbumUserTable { }) usersId!: string; - @Column({ type: 'character varying', default: AlbumUserRole.EDITOR }) + @Column({ type: 'character varying', default: AlbumUserRole.Editor }) role!: Generated; @CreateIdColumn({ index: true }) diff --git a/server/src/schema/tables/album.table.ts b/server/src/schema/tables/album.table.ts index bca15d520b..5628db3d03 100644 --- a/server/src/schema/tables/album.table.ts +++ b/server/src/schema/tables/album.table.ts @@ -57,7 +57,7 @@ export class AlbumTable { @Column({ type: 'boolean', default: true }) isActivityEnabled!: Generated; - @Column({ default: AssetOrder.DESC }) + @Column({ default: AssetOrder.Desc }) order!: Generated; 
@UpdateIdColumn({ index: true }) diff --git a/server/src/schema/tables/asset-face.table.ts b/server/src/schema/tables/asset-face.table.ts index 483d655768..6e45a3a64d 100644 --- a/server/src/schema/tables/asset-face.table.ts +++ b/server/src/schema/tables/asset-face.table.ts @@ -56,7 +56,7 @@ export class AssetFaceTable { @Column({ default: 0, type: 'integer' }) boundingBoxY2!: Generated; - @Column({ default: SourceType.MACHINE_LEARNING, enum: asset_face_source_type }) + @Column({ default: SourceType.MachineLearning, enum: asset_face_source_type }) sourceType!: Generated; @DeleteDateColumn() diff --git a/server/src/schema/tables/asset.table.ts b/server/src/schema/tables/asset.table.ts index 4e1d073848..e92e01a1bd 100644 --- a/server/src/schema/tables/asset.table.ts +++ b/server/src/schema/tables/asset.table.ts @@ -132,12 +132,12 @@ export class AssetTable { @Column({ type: 'uuid', nullable: true, index: true }) duplicateId!: string | null; - @Column({ enum: assets_status_enum, default: AssetStatus.ACTIVE }) + @Column({ enum: assets_status_enum, default: AssetStatus.Active }) status!: Generated; @UpdateIdColumn({ index: true }) updateId!: Generated; - @Column({ enum: asset_visibility_enum, default: AssetVisibility.TIMELINE }) + @Column({ enum: asset_visibility_enum, default: AssetVisibility.Timeline }) visibility!: Generated; } diff --git a/server/src/schema/tables/user.table.ts b/server/src/schema/tables/user.table.ts index 97ac0ff295..46d6656382 100644 --- a/server/src/schema/tables/user.table.ts +++ b/server/src/schema/tables/user.table.ts @@ -73,7 +73,7 @@ export class UserTable { @Column({ type: 'bigint', default: 0 }) quotaUsageInBytes!: Generated>; - @Column({ type: 'character varying', default: UserStatus.ACTIVE }) + @Column({ type: 'character varying', default: UserStatus.Active }) status!: Generated; @Column({ type: 'timestamp with time zone', default: () => 'now()' }) diff --git a/server/src/services/activity.service.ts 
b/server/src/services/activity.service.ts index 8256a34f02..b1c25f8286 100644 --- a/server/src/services/activity.service.ts +++ b/server/src/services/activity.service.ts @@ -18,7 +18,7 @@ import { BaseService } from 'src/services/base.service'; @Injectable() export class ActivityService extends BaseService { async getAll(auth: AuthDto, dto: ActivitySearchDto): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [dto.albumId] }); + await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [dto.albumId] }); const activities = await this.activityRepository.search({ userId: dto.userId, albumId: dto.albumId, @@ -30,12 +30,12 @@ export class ActivityService extends BaseService { } async getStatistics(auth: AuthDto, dto: ActivityDto): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [dto.albumId] }); + await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [dto.albumId] }); return await this.activityRepository.getStatistics({ albumId: dto.albumId, assetId: dto.assetId }); } async create(auth: AuthDto, dto: ActivityCreateDto): Promise> { - await this.requireAccess({ auth, permission: Permission.ACTIVITY_CREATE, ids: [dto.albumId] }); + await this.requireAccess({ auth, permission: Permission.ActivityCreate, ids: [dto.albumId] }); const common = { userId: auth.user.id, @@ -69,7 +69,7 @@ export class ActivityService extends BaseService { } async delete(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.ACTIVITY_DELETE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.ActivityDelete, ids: [id] }); await this.activityRepository.delete(id); } } diff --git a/server/src/services/album.service.spec.ts b/server/src/services/album.service.spec.ts index cdace249c0..6f07a31dd9 100644 --- a/server/src/services/album.service.spec.ts +++ b/server/src/services/album.service.spec.ts @@ -146,7 +146,7 @@ 
describe(AlbumService.name, () => { await sut.create(authStub.admin, { albumName: 'Empty album', - albumUsers: [{ userId: 'user-id', role: AlbumUserRole.EDITOR }], + albumUsers: [{ userId: 'user-id', role: AlbumUserRole.Editor }], description: '', assetIds: ['123'], }); @@ -160,7 +160,7 @@ describe(AlbumService.name, () => { albumThumbnailAssetId: '123', }, ['123'], - [{ userId: 'user-id', role: AlbumUserRole.EDITOR }], + [{ userId: 'user-id', role: AlbumUserRole.Editor }], ); expect(mocks.user.get).toHaveBeenCalledWith('user-id', {}); @@ -177,10 +177,10 @@ describe(AlbumService.name, () => { mocks.user.get.mockResolvedValue(userStub.user1); mocks.user.getMetadata.mockResolvedValue([ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { albums: { - defaultAssetOrder: AssetOrder.ASC, + defaultAssetOrder: AssetOrder.Asc, }, }, }, @@ -189,7 +189,7 @@ describe(AlbumService.name, () => { await sut.create(authStub.admin, { albumName: 'Empty album', - albumUsers: [{ userId: 'user-id', role: AlbumUserRole.EDITOR }], + albumUsers: [{ userId: 'user-id', role: AlbumUserRole.Editor }], description: '', assetIds: ['123'], }); @@ -203,7 +203,7 @@ describe(AlbumService.name, () => { albumThumbnailAssetId: '123', }, ['123'], - [{ userId: 'user-id', role: AlbumUserRole.EDITOR }], + [{ userId: 'user-id', role: AlbumUserRole.Editor }], ); expect(mocks.user.get).toHaveBeenCalledWith('user-id', {}); @@ -220,7 +220,7 @@ describe(AlbumService.name, () => { await expect( sut.create(authStub.admin, { albumName: 'Empty album', - albumUsers: [{ userId: 'user-3', role: AlbumUserRole.EDITOR }], + albumUsers: [{ userId: 'user-3', role: AlbumUserRole.Editor }], }), ).rejects.toBeInstanceOf(BadRequestException); expect(mocks.user.get).toHaveBeenCalledWith('user-3', {}); @@ -262,7 +262,7 @@ describe(AlbumService.name, () => { await expect( sut.create(authStub.admin, { albumName: 'Empty album', - albumUsers: [{ userId: userStub.admin.id, role: AlbumUserRole.EDITOR }], + 
albumUsers: [{ userId: userStub.admin.id, role: AlbumUserRole.Editor }], }), ).rejects.toBeInstanceOf(BadRequestException); expect(mocks.album.create).not.toHaveBeenCalled(); @@ -404,7 +404,7 @@ describe(AlbumService.name, () => { mocks.albumUser.create.mockResolvedValue({ usersId: userStub.user2.id, albumsId: albumStub.sharedWithAdmin.id, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, }); await sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, { albumUsers: [{ userId: authStub.user2.user.id }], @@ -512,11 +512,11 @@ describe(AlbumService.name, () => { mocks.albumUser.update.mockResolvedValue(null as any); await sut.updateUser(authStub.user1, albumStub.sharedWithAdmin.id, userStub.admin.id, { - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, }); expect(mocks.albumUser.update).toHaveBeenCalledWith( { albumsId: albumStub.sharedWithAdmin.id, usersId: userStub.admin.id }, - { role: AlbumUserRole.EDITOR }, + { role: AlbumUserRole.Editor }, ); }); }); @@ -585,7 +585,7 @@ describe(AlbumService.name, () => { expect(mocks.access.album.checkSharedAlbumAccess).toHaveBeenCalledWith( authStub.user1.user.id, new Set(['album-123']), - AlbumUserRole.VIEWER, + AlbumUserRole.Viewer, ); }); @@ -596,7 +596,7 @@ describe(AlbumService.name, () => { expect(mocks.access.album.checkSharedAlbumAccess).toHaveBeenCalledWith( authStub.admin.user.id, new Set(['album-123']), - AlbumUserRole.VIEWER, + AlbumUserRole.Viewer, ); }); }); diff --git a/server/src/services/album.service.ts b/server/src/services/album.service.ts index 9008685e81..e4f2a44bfd 100644 --- a/server/src/services/album.service.ts +++ b/server/src/services/album.service.ts @@ -71,7 +71,7 @@ export class AlbumService extends BaseService { } async get(auth: AuthDto, id: string, dto: AlbumInfoDto): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [id] }); await 
this.albumRepository.updateThumbnails(); const withAssets = dto.withoutAssets === undefined ? true : !dto.withoutAssets; const album = await this.findOrFail(id, { withAssets }); @@ -102,7 +102,7 @@ export class AlbumService extends BaseService { const allowedAssetIdsSet = await this.checkAccess({ auth, - permission: Permission.ASSET_SHARE, + permission: Permission.AssetShare, ids: dto.assetIds || [], }); const assetIds = [...allowedAssetIdsSet].map((id) => id); @@ -129,7 +129,7 @@ export class AlbumService extends BaseService { } async update(auth: AuthDto, id: string, dto: UpdateAlbumDto): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumUpdate, ids: [id] }); const album = await this.findOrFail(id, { withAssets: true }); @@ -152,13 +152,13 @@ export class AlbumService extends BaseService { } async delete(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_DELETE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumDelete, ids: [id] }); await this.albumRepository.delete(id); } async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise { const album = await this.findOrFail(id, { withAssets: false }); - await this.requireAccess({ auth, permission: Permission.ALBUM_ADD_ASSET, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumAddAsset, ids: [id] }); const results = await addAssets( auth, @@ -187,13 +187,13 @@ export class AlbumService extends BaseService { } async removeAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_REMOVE_ASSET, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumRemoveAsset, ids: [id] }); const album = await this.findOrFail(id, { withAssets: false }); const results = await removeAssets( auth, { access: this.accessRepository, bulk: 
this.albumRepository }, - { parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.ALBUM_DELETE }, + { parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.AlbumDelete }, ); const removedIds = results.filter(({ success }) => success).map(({ id }) => id); @@ -205,7 +205,7 @@ export class AlbumService extends BaseService { } async addUsers(auth: AuthDto, id: string, { albumUsers }: AddUsersDto): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] }); const album = await this.findOrFail(id, { withAssets: false }); @@ -249,14 +249,14 @@ export class AlbumService extends BaseService { // non-admin can remove themselves if (auth.user.id !== userId) { - await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] }); } await this.albumUserRepository.delete({ albumsId: id, usersId: userId }); } async updateUser(auth: AuthDto, id: string, userId: string, dto: UpdateAlbumUserDto): Promise { - await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] }); await this.albumUserRepository.update({ albumsId: id, usersId: userId }, { role: dto.role }); } diff --git a/server/src/services/api-key.service.spec.ts b/server/src/services/api-key.service.spec.ts index 3448b4330f..fffe7bb536 100644 --- a/server/src/services/api-key.service.spec.ts +++ b/server/src/services/api-key.service.spec.ts @@ -15,7 +15,7 @@ describe(ApiKeyService.name, () => { describe('create', () => { it('should create a new key', async () => { const auth = factory.auth(); - const apiKey = factory.apiKey({ userId: auth.user.id, permissions: [Permission.ALL] }); + const apiKey = factory.apiKey({ userId: auth.user.id, permissions: [Permission.All] }); const key = 'super-secret'; 
mocks.crypto.randomBytesAsText.mockReturnValue(key); @@ -41,12 +41,12 @@ describe(ApiKeyService.name, () => { mocks.crypto.randomBytesAsText.mockReturnValue(key); mocks.apiKey.create.mockResolvedValue(apiKey); - await sut.create(auth, { permissions: [Permission.ALL] }); + await sut.create(auth, { permissions: [Permission.All] }); expect(mocks.apiKey.create).toHaveBeenCalledWith({ key: 'super-secret (hashed)', name: 'API Key', - permissions: [Permission.ALL], + permissions: [Permission.All], userId: auth.user.id, }); expect(mocks.crypto.randomBytesAsText).toHaveBeenCalled(); @@ -54,9 +54,9 @@ describe(ApiKeyService.name, () => { }); it('should throw an error if the api key does not have sufficient permissions', async () => { - const auth = factory.auth({ apiKey: { permissions: [Permission.ASSET_READ] } }); + const auth = factory.auth({ apiKey: { permissions: [Permission.AssetRead] } }); - await expect(sut.create(auth, { permissions: [Permission.ASSET_UPDATE] })).rejects.toBeInstanceOf( + await expect(sut.create(auth, { permissions: [Permission.AssetUpdate] })).rejects.toBeInstanceOf( BadRequestException, ); }); @@ -69,7 +69,7 @@ describe(ApiKeyService.name, () => { mocks.apiKey.getById.mockResolvedValue(void 0); - await expect(sut.update(auth, id, { name: 'New Name', permissions: [Permission.ALL] })).rejects.toBeInstanceOf( + await expect(sut.update(auth, id, { name: 'New Name', permissions: [Permission.All] })).rejects.toBeInstanceOf( BadRequestException, ); @@ -84,18 +84,18 @@ describe(ApiKeyService.name, () => { mocks.apiKey.getById.mockResolvedValue(apiKey); mocks.apiKey.update.mockResolvedValue(apiKey); - await sut.update(auth, apiKey.id, { name: newName, permissions: [Permission.ALL] }); + await sut.update(auth, apiKey.id, { name: newName, permissions: [Permission.All] }); expect(mocks.apiKey.update).toHaveBeenCalledWith(auth.user.id, apiKey.id, { name: newName, - permissions: [Permission.ALL], + permissions: [Permission.All], }); }); it('should update 
permissions', async () => { const auth = factory.auth(); const apiKey = factory.apiKey({ userId: auth.user.id }); - const newPermissions = [Permission.ACTIVITY_CREATE, Permission.ACTIVITY_READ, Permission.ACTIVITY_UPDATE]; + const newPermissions = [Permission.ActivityCreate, Permission.ActivityRead, Permission.ActivityUpdate]; mocks.apiKey.getById.mockResolvedValue(apiKey); mocks.apiKey.update.mockResolvedValue(apiKey); diff --git a/server/src/services/asset-media.service.spec.ts b/server/src/services/asset-media.service.spec.ts index bb8f7115b8..c881ebb497 100644 --- a/server/src/services/asset-media.service.spec.ts +++ b/server/src/services/asset-media.service.spec.ts @@ -157,7 +157,7 @@ const assetEntity = Object.freeze({ ownerId: 'user_id_1', deviceAssetId: 'device_asset_id_1', deviceId: 'device_id_1', - type: AssetType.VIDEO, + type: AssetType.Video, originalPath: 'fake_path/asset_1.jpeg', fileModifiedAt: new Date('2022-06-19T23:41:36.910Z'), fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'), @@ -177,7 +177,7 @@ const assetEntity = Object.freeze({ const existingAsset = Object.freeze({ ...assetEntity, duration: null, - type: AssetType.IMAGE, + type: AssetType.Image, checksum: Buffer.from('_getExistingAsset', 'utf8'), libraryId: 'libraryId', originalFileName: 'existing-filename.jpeg', @@ -384,7 +384,7 @@ describe(AssetMediaService.name, () => { }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: ['fake_path/asset_1.jpeg', undefined] }, }); expect(mocks.user.updateUsage).not.toHaveBeenCalled(); @@ -409,7 +409,7 @@ describe(AssetMediaService.name, () => { ); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: ['fake_path/asset_1.jpeg', undefined] }, }); expect(mocks.user.updateUsage).not.toHaveBeenCalled(); @@ -437,7 +437,7 @@ describe(AssetMediaService.name, () => { it('should hide the linked motion asset', async () => { 
mocks.asset.getById.mockResolvedValueOnce({ ...assetStub.livePhotoMotionAsset, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); mocks.asset.create.mockResolvedValueOnce(assetStub.livePhotoStillAsset); @@ -455,7 +455,7 @@ describe(AssetMediaService.name, () => { expect(mocks.asset.getById).toHaveBeenCalledWith('live-photo-motion-asset'); expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'live-photo-motion-asset', - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, }); }); @@ -506,7 +506,7 @@ describe(AssetMediaService.name, () => { new ImmichFileResponse({ path: '/original/path.jpg', contentType: 'image/jpeg', - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, }), ); }); @@ -546,7 +546,7 @@ describe(AssetMediaService.name, () => { { id: '42', path: '/path/to/preview', - type: AssetFileType.THUMBNAIL, + type: AssetFileType.Thumbnail, }, ], }); @@ -563,7 +563,7 @@ describe(AssetMediaService.name, () => { { id: '42', path: '/path/to/preview.jpg', - type: AssetFileType.PREVIEW, + type: AssetFileType.Preview, }, ], }); @@ -573,7 +573,7 @@ describe(AssetMediaService.name, () => { ).resolves.toEqual( new ImmichFileResponse({ path: '/path/to/preview.jpg', - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, contentType: 'image/jpeg', fileName: 'asset-id_thumbnail.jpg', }), @@ -588,7 +588,7 @@ describe(AssetMediaService.name, () => { ).resolves.toEqual( new ImmichFileResponse({ path: '/uploads/user-id/thumbs/path.jpg', - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, contentType: 'image/jpeg', fileName: 'asset-id_preview.jpg', }), @@ -603,7 +603,7 @@ describe(AssetMediaService.name, () => { ).resolves.toEqual( new ImmichFileResponse({ path: '/uploads/user-id/webp/path.ext', - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, contentType: 
'application/octet-stream', fileName: 'asset-id_thumbnail.ext', }), @@ -640,7 +640,7 @@ describe(AssetMediaService.name, () => { await expect(sut.playbackVideo(authStub.admin, assetStub.hasEncodedVideo.id)).resolves.toEqual( new ImmichFileResponse({ path: assetStub.hasEncodedVideo.encodedVideoPath!, - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, contentType: 'video/mp4', }), ); @@ -653,7 +653,7 @@ describe(AssetMediaService.name, () => { await expect(sut.playbackVideo(authStub.admin, assetStub.video.id)).resolves.toEqual( new ImmichFileResponse({ path: assetStub.video.originalPath, - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, contentType: 'application/octet-stream', }), ); @@ -723,7 +723,7 @@ describe(AssetMediaService.name, () => { expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], { deletedAt: expect.any(Date), - status: AssetStatus.TRASHED, + status: AssetStatus.Trashed, }); expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size); expect(mocks.storage.utimes).toHaveBeenCalledWith( @@ -754,7 +754,7 @@ describe(AssetMediaService.name, () => { expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], { deletedAt: expect.any(Date), - status: AssetStatus.TRASHED, + status: AssetStatus.Trashed, }); expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size); expect(mocks.storage.utimes).toHaveBeenCalledWith( @@ -783,7 +783,7 @@ describe(AssetMediaService.name, () => { expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], { deletedAt: expect.any(Date), - status: AssetStatus.TRASHED, + status: AssetStatus.Trashed, }); expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size); expect(mocks.storage.utimes).toHaveBeenCalledWith( @@ -815,7 +815,7 @@ describe(AssetMediaService.name, () => { 
expect(mocks.asset.create).not.toHaveBeenCalled(); expect(mocks.asset.updateAll).not.toHaveBeenCalled(); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: [updatedFile.originalPath, undefined] }, }); expect(mocks.user.updateUsage).not.toHaveBeenCalled(); @@ -912,7 +912,7 @@ describe(AssetMediaService.name, () => { await sut.onUploadError(request, file); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: ['upload/upload/user-id/ra/nd/random-uuid.jpg'] }, }); }); diff --git a/server/src/services/asset-media.service.ts b/server/src/services/asset-media.service.ts index 6fc438481d..127fdbe8eb 100644 --- a/server/src/services/asset-media.service.ts +++ b/server/src/services/asset-media.service.ts @@ -106,9 +106,9 @@ export class AssetMediaService extends BaseService { getUploadFolder({ auth, fieldName, file }: UploadRequest): string { auth = requireUploadAccess(auth); - let folder = StorageCore.getNestedFolder(StorageFolder.UPLOAD, auth.user.id, file.uuid); + let folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, file.uuid); if (fieldName === UploadFieldName.PROFILE_DATA) { - folder = StorageCore.getFolderLocation(StorageFolder.PROFILE, auth.user.id); + folder = StorageCore.getFolderLocation(StorageFolder.Profile, auth.user.id); } this.storageRepository.mkdirSync(folder); @@ -121,7 +121,7 @@ export class AssetMediaService extends BaseService { const uploadFolder = this.getUploadFolder(asRequest(request, file)); const uploadPath = `${uploadFolder}/${uploadFilename}`; - await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [uploadPath] } }); + await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [uploadPath] } }); } async uploadAsset( @@ -133,7 +133,7 @@ export class AssetMediaService extends BaseService { try { await this.requireAccess({ auth, - permission: 
Permission.ASSET_UPLOAD, + permission: Permission.AssetUpload, // do not need an id here, but the interface requires it ids: [auth.user.id], }); @@ -164,7 +164,7 @@ export class AssetMediaService extends BaseService { sidecarFile?: UploadFile, ): Promise { try { - await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] }); const asset = await this.assetRepository.getById(id); if (!asset) { @@ -179,7 +179,7 @@ export class AssetMediaService extends BaseService { // but the local variable holds the original file data paths. const copiedPhoto = await this.createCopy(asset); // and immediate trash it - await this.assetRepository.updateAll([copiedPhoto.id], { deletedAt: new Date(), status: AssetStatus.TRASHED }); + await this.assetRepository.updateAll([copiedPhoto.id], { deletedAt: new Date(), status: AssetStatus.Trashed }); await this.eventRepository.emit('AssetTrash', { assetId: copiedPhoto.id, userId: auth.user.id }); await this.userRepository.updateUsage(auth.user.id, file.size); @@ -191,14 +191,14 @@ export class AssetMediaService extends BaseService { } async downloadOriginal(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.ASSET_DOWNLOAD, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AssetDownload, ids: [id] }); const asset = await this.findOrFail(id); return new ImmichFileResponse({ path: asset.originalPath, contentType: mimeTypes.lookup(asset.originalPath), - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, }); } @@ -207,7 +207,7 @@ export class AssetMediaService extends BaseService { id: string, dto: AssetMediaOptionsDto, ): Promise { - await this.requireAccess({ auth, permission: Permission.ASSET_VIEW, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] }); const asset = await 
this.findOrFail(id); const size = dto.size ?? AssetMediaSize.THUMBNAIL; @@ -240,16 +240,16 @@ export class AssetMediaService extends BaseService { fileName, path: filepath, contentType: mimeTypes.lookup(filepath), - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, }); } async playbackVideo(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.ASSET_VIEW, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] }); const asset = await this.findOrFail(id); - if (asset.type !== AssetType.VIDEO) { + if (asset.type !== AssetType.Video) { throw new BadRequestException('Asset is not a video'); } @@ -258,7 +258,7 @@ export class AssetMediaService extends BaseService { return new ImmichFileResponse({ path: filepath, contentType: mimeTypes.lookup(filepath), - cacheControl: CacheControl.PRIVATE_WITH_CACHE, + cacheControl: CacheControl.PrivateWithCache, }); } @@ -312,7 +312,7 @@ export class AssetMediaService extends BaseService { ): Promise { // clean up files await this.jobRepository.queue({ - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: [file.originalPath, sidecarFile?.originalPath] }, }); @@ -365,7 +365,7 @@ export class AssetMediaService extends BaseService { await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt)); await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size }); await this.jobRepository.queue({ - name: JobName.METADATA_EXTRACTION, + name: JobName.MetadataExtraction, data: { id: assetId, source: 'upload' }, }); } @@ -394,7 +394,7 @@ export class AssetMediaService extends BaseService { const { size } = await this.storageRepository.stat(created.originalPath); await this.assetRepository.upsertExif({ assetId: created.id, fileSizeInByte: size }); - await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: created.id, source: 'copy' } }); 
+ await this.jobRepository.queue({ name: JobName.MetadataExtraction, data: { id: created.id, source: 'copy' } }); return created; } @@ -416,7 +416,7 @@ export class AssetMediaService extends BaseService { type: mimeTypes.assetType(file.originalPath), isFavorite: dto.isFavorite, duration: dto.duration || null, - visibility: dto.visibility ?? AssetVisibility.TIMELINE, + visibility: dto.visibility ?? AssetVisibility.Timeline, livePhotoVideoId: dto.livePhotoVideoId, originalFileName: dto.filename || file.originalName, sidecarPath: sidecarFile?.originalPath, @@ -427,7 +427,7 @@ export class AssetMediaService extends BaseService { } await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt)); await this.assetRepository.upsertExif({ assetId: asset.id, fileSizeInByte: file.size }); - await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id, source: 'upload' } }); + await this.jobRepository.queue({ name: JobName.MetadataExtraction, data: { id: asset.id, source: 'upload' } }); return asset; } diff --git a/server/src/services/asset.service.spec.ts b/server/src/services/asset.service.spec.ts index 65b14ca2da..606c93eab8 100755 --- a/server/src/services/asset.service.spec.ts +++ b/server/src/services/asset.service.spec.ts @@ -13,10 +13,10 @@ import { factory } from 'test/small.factory'; import { makeStream, newTestService, ServiceMocks } from 'test/utils'; const stats: AssetStats = { - [AssetType.IMAGE]: 10, - [AssetType.VIDEO]: 23, - [AssetType.AUDIO]: 0, - [AssetType.OTHER]: 0, + [AssetType.Image]: 10, + [AssetType.Video]: 23, + [AssetType.Audio]: 0, + [AssetType.Other]: 0, }; const statResponse: AssetStatsResponseDto = { @@ -46,21 +46,21 @@ describe(AssetService.name, () => { describe('getStatistics', () => { it('should get the statistics for a user, excluding archived assets', async () => { mocks.asset.getStatistics.mockResolvedValue(stats); - await expect(sut.getStatistics(authStub.admin, { visibility: 
AssetVisibility.TIMELINE })).resolves.toEqual( + await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Timeline })).resolves.toEqual( statResponse, ); expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, { - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); }); it('should get the statistics for a user for archived assets', async () => { mocks.asset.getStatistics.mockResolvedValue(stats); - await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.ARCHIVE })).resolves.toEqual( + await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Archive })).resolves.toEqual( statResponse, ); expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, { - visibility: AssetVisibility.ARCHIVE, + visibility: AssetVisibility.Archive, }); }); @@ -202,7 +202,7 @@ describe(AssetService.name, () => { describe('update', () => { it('should require asset write access for the id', async () => { await expect( - sut.update(authStub.admin, 'asset-1', { visibility: AssetVisibility.TIMELINE }), + sut.update(authStub.admin, 'asset-1', { visibility: AssetVisibility.Timeline }), ).rejects.toBeInstanceOf(BadRequestException); expect(mocks.asset.update).not.toHaveBeenCalled(); @@ -253,7 +253,7 @@ describe(AssetService.name, () => { }); expect(mocks.asset.update).not.toHaveBeenCalledWith({ id: assetStub.livePhotoMotionAsset.id, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', { assetId: assetStub.livePhotoMotionAsset.id, @@ -277,7 +277,7 @@ describe(AssetService.name, () => { }); expect(mocks.asset.update).not.toHaveBeenCalledWith({ id: assetStub.livePhotoMotionAsset.id, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', { assetId: assetStub.livePhotoMotionAsset.id, @@ 
-301,7 +301,7 @@ describe(AssetService.name, () => { }); expect(mocks.asset.update).not.toHaveBeenCalledWith({ id: assetStub.livePhotoMotionAsset.id, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', { assetId: assetStub.livePhotoMotionAsset.id, @@ -314,7 +314,7 @@ describe(AssetService.name, () => { mocks.asset.getById.mockResolvedValueOnce({ ...assetStub.livePhotoMotionAsset, ownerId: authStub.admin.user.id, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); mocks.asset.getById.mockResolvedValueOnce(assetStub.image); mocks.asset.update.mockResolvedValue(assetStub.image); @@ -325,7 +325,7 @@ describe(AssetService.name, () => { expect(mocks.asset.update).toHaveBeenCalledWith({ id: assetStub.livePhotoMotionAsset.id, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, }); expect(mocks.event.emit).toHaveBeenCalledWith('AssetHide', { assetId: assetStub.livePhotoMotionAsset.id, @@ -392,10 +392,10 @@ describe(AssetService.name, () => { it('should update all assets', async () => { mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2'])); - await sut.updateAll(authStub.admin, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.ARCHIVE }); + await sut.updateAll(authStub.admin, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.Archive }); expect(mocks.asset.updateAll).toHaveBeenCalledWith(['asset-1', 'asset-2'], { - visibility: AssetVisibility.ARCHIVE, + visibility: AssetVisibility.Archive, }); }); @@ -428,7 +428,7 @@ describe(AssetService.name, () => { expect(mocks.asset.updateAll).toHaveBeenCalled(); expect(mocks.asset.updateAllExif).toHaveBeenCalledWith(['asset-1'], { latitude: 0, longitude: 0 }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.SIDECAR_WRITE, data: { id: 'asset-1', latitude: 0, longitude: 0 } }, + { name: JobName.SidecarWrite, data: { id: 
'asset-1', latitude: 0, longitude: 0 } }, ]); }); @@ -451,7 +451,7 @@ describe(AssetService.name, () => { longitude: 50, }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.SIDECAR_WRITE, data: { id: 'asset-1', dateTimeOriginal, latitude: 30, longitude: 50 } }, + { name: JobName.SidecarWrite, data: { id: 'asset-1', dateTimeOriginal, latitude: 30, longitude: 50 } }, ]); }); @@ -497,7 +497,7 @@ describe(AssetService.name, () => { expect(mocks.asset.updateAll).toHaveBeenCalledWith(['asset1', 'asset2'], { deletedAt: expect.any(Date), - status: AssetStatus.TRASHED, + status: AssetStatus.Trashed, }); expect(mocks.job.queue.mock.calls).toEqual([]); }); @@ -518,11 +518,11 @@ describe(AssetService.name, () => { mocks.assetJob.streamForDeletedJob.mockReturnValue(makeStream([asset])); mocks.systemMetadata.get.mockResolvedValue({ trash: { enabled: false } }); - await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.streamForDeletedJob).toHaveBeenCalledWith(new Date()); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.ASSET_DELETION, data: { id: asset.id, deleteOnDisk: true } }, + { name: JobName.AssetDeletion, data: { id: asset.id, deleteOnDisk: true } }, ]); }); @@ -532,11 +532,11 @@ describe(AssetService.name, () => { mocks.assetJob.streamForDeletedJob.mockReturnValue(makeStream([asset])); mocks.systemMetadata.get.mockResolvedValue({ trash: { enabled: true, days: 7 } }); - await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.streamForDeletedJob).toHaveBeenCalledWith(DateTime.now().minus({ days: 7 }).toJSDate()); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.ASSET_DELETION, data: { id: asset.id, deleteOnDisk: true } }, + { name: JobName.AssetDeletion, data: { id: 
asset.id, deleteOnDisk: true } }, ]); }); }); @@ -552,7 +552,7 @@ describe(AssetService.name, () => { expect(mocks.job.queue.mock.calls).toEqual([ [ { - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: [ '/uploads/user-id/webp/path.ext', @@ -606,7 +606,7 @@ describe(AssetService.name, () => { expect(mocks.job.queue.mock.calls).toEqual([ [ { - name: JobName.ASSET_DELETION, + name: JobName.AssetDeletion, data: { id: assetStub.livePhotoMotionAsset.id, deleteOnDisk: true, @@ -615,7 +615,7 @@ describe(AssetService.name, () => { ], [ { - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: [ '/uploads/user-id/webp/path.ext', @@ -643,7 +643,7 @@ describe(AssetService.name, () => { expect(mocks.job.queue.mock.calls).toEqual([ [ { - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: [ '/uploads/user-id/webp/path.ext', @@ -668,7 +668,7 @@ describe(AssetService.name, () => { it('should fail if asset could not be found', async () => { mocks.assetJob.getForAssetDeletion.mockResolvedValue(void 0); await expect(sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true })).resolves.toBe( - JobStatus.FAILED, + JobStatus.Failed, ); }); }); @@ -679,7 +679,7 @@ describe(AssetService.name, () => { await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.REFRESH_FACES }); - expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.FACE_DETECTION, data: { id: 'asset-1' } }]); + expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.FaceDetection, data: { id: 'asset-1' } }]); }); it('should run the refresh metadata job', async () => { @@ -687,7 +687,7 @@ describe(AssetService.name, () => { await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.REFRESH_METADATA }); - expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.METADATA_EXTRACTION, data: { id: 'asset-1' } }]); + expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.MetadataExtraction, 
data: { id: 'asset-1' } }]); }); it('should run the refresh thumbnails job', async () => { @@ -695,7 +695,7 @@ await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.REGENERATE_THUMBNAIL }); - expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1' } }]); + expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.GenerateThumbnails, data: { id: 'asset-1' } }]); }); it('should run the transcode video', async () => { @@ -703,7 +703,7 @@ await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.TRANSCODE_VIDEO }); - expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.VIDEO_CONVERSION, data: { id: 'asset-1' } }]); + expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.VideoConversion, data: { id: 'asset-1' } }]); }); }); diff --git a/server/src/services/asset.service.ts b/server/src/services/asset.service.ts index 351e8827dd..d9b0527d5b 100644 --- a/server/src/services/asset.service.ts +++ b/server/src/services/asset.service.ts @@ -23,7 +23,7 @@ import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUn @Injectable() export class AssetService extends BaseService { async getStatistics(auth: AuthDto, dto: AssetStatsDto) { - if (dto.visibility === AssetVisibility.LOCKED) { + if (dto.visibility === AssetVisibility.Locked) { requireElevatedPermission(auth); } @@ -46,7 +46,7 @@ export class AssetService extends BaseService { } async get(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] }); const asset = await this.assetRepository.getById(id, { exifInfo: true, @@ -78,7 +78,7 @@ export class AssetService extends BaseService { } async update(auth: AuthDto, id: string, dto: UpdateAssetDto): Promise { - await 
this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] }); const { description, dateTimeOriginal, latitude, longitude, rating, ...rest } = dto; const repos = { asset: this.assetRepository, event: this.eventRepository }; @@ -114,7 +114,7 @@ export class AssetService extends BaseService { async updateAll(auth: AuthDto, dto: AssetBulkUpdateDto): Promise { const { ids, description, dateTimeOriginal, latitude, longitude, ...options } = dto; - await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids }); + await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids }); if ( description !== undefined || @@ -125,7 +125,7 @@ export class AssetService extends BaseService { await this.assetRepository.updateAllExif(ids, { description, dateTimeOriginal, latitude, longitude }); await this.jobRepository.queueAll( ids.map((id) => ({ - name: JobName.SIDECAR_WRITE, + name: JobName.SidecarWrite, data: { id, description, dateTimeOriginal, latitude, longitude }, })), ); @@ -139,13 +139,13 @@ export class AssetService extends BaseService { ) { await this.assetRepository.updateAll(ids, options); - if (options.visibility === AssetVisibility.LOCKED) { + if (options.visibility === AssetVisibility.Locked) { await this.albumRepository.removeAssetsFromAll(ids); } } } - @OnJob({ name: JobName.ASSET_DELETION_CHECK, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.AssetDeletionCheck, queue: QueueName.BackgroundTask }) async handleAssetDeletionCheck(): Promise { const config = await this.getConfig({ withCache: false }); const trashedDays = config.trash.enabled ? 
config.trash.days : 0; @@ -158,7 +158,7 @@ export class AssetService extends BaseService { if (chunk.length > 0) { await this.jobRepository.queueAll( chunk.map(({ id, isOffline }) => ({ - name: JobName.ASSET_DELETION, + name: JobName.AssetDeletion, data: { id, deleteOnDisk: !isOffline }, })), ); @@ -176,17 +176,17 @@ export class AssetService extends BaseService { await queueChunk(); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.ASSET_DELETION, queue: QueueName.BACKGROUND_TASK }) - async handleAssetDeletion(job: JobOf): Promise { + @OnJob({ name: JobName.AssetDeletion, queue: QueueName.BackgroundTask }) + async handleAssetDeletion(job: JobOf): Promise { const { id, deleteOnDisk } = job; const asset = await this.assetJobRepository.getForAssetDeletion(id); if (!asset) { - return JobStatus.FAILED; + return JobStatus.Failed; } // Replace the parent of the stack children with a new asset @@ -215,7 +215,7 @@ export class AssetService extends BaseService { const count = await this.assetRepository.getLivePhotoCount(asset.livePhotoVideoId); if (count === 0) { await this.jobRepository.queue({ - name: JobName.ASSET_DELETION, + name: JobName.AssetDeletion, data: { id: asset.livePhotoVideoId, deleteOnDisk }, }); } @@ -228,18 +228,18 @@ export class AssetService extends BaseService { files.push(asset.sidecarPath, asset.originalPath); } - await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files } }); + await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files } }); - return JobStatus.SUCCESS; + return JobStatus.Success; } async deleteAll(auth: AuthDto, dto: AssetBulkDeleteDto): Promise { const { ids, force } = dto; - await this.requireAccess({ auth, permission: Permission.ASSET_DELETE, ids }); + await this.requireAccess({ auth, permission: Permission.AssetDelete, ids }); await this.assetRepository.updateAll(ids, { deletedAt: new Date(), - status: force ? 
AssetStatus.DELETED : AssetStatus.TRASHED, + status: force ? AssetStatus.Deleted : AssetStatus.Trashed, }); await this.eventRepository.emit(force ? 'AssetDeleteAll' : 'AssetTrashAll', { assetIds: ids, @@ -248,29 +248,29 @@ } async run(auth: AuthDto, dto: AssetJobsDto) { - await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: dto.assetIds }); + await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: dto.assetIds }); const jobs: JobItem[] = []; for (const id of dto.assetIds) { switch (dto.name) { case AssetJobName.REFRESH_FACES: { - jobs.push({ name: JobName.FACE_DETECTION, data: { id } }); + jobs.push({ name: JobName.FaceDetection, data: { id } }); break; } case AssetJobName.REFRESH_METADATA: { - jobs.push({ name: JobName.METADATA_EXTRACTION, data: { id } }); + jobs.push({ name: JobName.MetadataExtraction, data: { id } }); break; } case AssetJobName.REGENERATE_THUMBNAIL: { - jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id } }); + jobs.push({ name: JobName.GenerateThumbnails, data: { id } }); break; } case AssetJobName.TRANSCODE_VIDEO: { - jobs.push({ name: JobName.VIDEO_CONVERSION, data: { id } }); + jobs.push({ name: JobName.VideoConversion, data: { id } }); break; } } @@ -292,7 +292,7 @@ export class AssetService extends BaseService { const writes = _.omitBy({ description, dateTimeOriginal, latitude, longitude, rating }, _.isUndefined); if (Object.keys(writes).length > 0) { await this.assetRepository.upsertExif({ assetId: id, ...writes }); - await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id, ...writes } }); + await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id, ...writes } }); } } } diff --git a/server/src/services/audit.service.spec.ts b/server/src/services/audit.service.spec.ts index 381b2ec7e8..7363ea74e1 100644 --- a/server/src/services/audit.service.spec.ts +++ b/server/src/services/audit.service.spec.ts @@ -18,7 +18,7 
@@ describe(AuditService.name, () => { it('should delete old audit entries', async () => { mocks.audit.removeBefore.mockResolvedValue(); - await expect(sut.handleCleanup()).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleCleanup()).resolves.toBe(JobStatus.Success); expect(mocks.audit.removeBefore).toHaveBeenCalledWith(expect.any(Date)); }); diff --git a/server/src/services/audit.service.ts b/server/src/services/audit.service.ts index 7c9a070dd0..eaa48de1f4 100644 --- a/server/src/services/audit.service.ts +++ b/server/src/services/audit.service.ts @@ -7,9 +7,9 @@ import { BaseService } from 'src/services/base.service'; @Injectable() export class AuditService extends BaseService { - @OnJob({ name: JobName.CLEAN_OLD_AUDIT_LOGS, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.CleanOldAuditLogs, queue: QueueName.BackgroundTask }) async handleCleanup(): Promise { await this.auditRepository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate()); - return JobStatus.SUCCESS; + return JobStatus.Success; } } diff --git a/server/src/services/auth.service.spec.ts b/server/src/services/auth.service.spec.ts index 93bd265ba0..f52fd9dd81 100644 --- a/server/src/services/auth.service.spec.ts +++ b/server/src/services/auth.service.spec.ts @@ -154,7 +154,7 @@ describe(AuthService.name, () => { mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled); - await expect(sut.logout(auth, AuthType.OAUTH)).resolves.toEqual({ + await expect(sut.logout(auth, AuthType.OAuth)).resolves.toEqual({ successful: true, redirectUri: 'http://end-session-endpoint', }); @@ -163,7 +163,7 @@ describe(AuthService.name, () => { it('should return the default redirect', async () => { const auth = factory.auth(); - await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({ + await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({ successful: true, redirectUri: '/auth/login?autoLaunch=0', }); @@ -173,7 +173,7 @@ describe(AuthService.name, 
() => { const auth = { user: { id: '123' }, session: { id: 'token123' } } as AuthDto; mocks.session.delete.mockResolvedValue(); - await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({ + await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({ successful: true, redirectUri: '/auth/login?autoLaunch=0', }); @@ -185,7 +185,7 @@ describe(AuthService.name, () => { it('should return the default redirect if auth type is OAUTH but oauth is not enabled', async () => { const auth = { user: { id: '123' } } as AuthDto; - await expect(sut.logout(auth, AuthType.OAUTH)).resolves.toEqual({ + await expect(sut.logout(auth, AuthType.OAuth)).resolves.toEqual({ successful: true, redirectUri: '/auth/login?autoLaunch=0', }); @@ -463,7 +463,7 @@ describe(AuthService.name, () => { sut.authenticate({ headers: { 'x-api-key': 'auth_token' }, queryParams: {}, - metadata: { adminRoute: false, sharedLinkRoute: false, uri: 'test', permission: Permission.ASSET_READ }, + metadata: { adminRoute: false, sharedLinkRoute: false, uri: 'test', permission: Permission.AssetRead }, }), ).rejects.toBeInstanceOf(ForbiddenException); }); diff --git a/server/src/services/auth.service.ts b/server/src/services/auth.service.ts index a7b0cb3259..00b7c1e9aa 100644 --- a/server/src/services/auth.service.ts +++ b/server/src/services/auth.service.ts @@ -194,13 +194,13 @@ export class AuthService extends BaseService { } private async validate({ headers, queryParams }: Omit): Promise { - const shareKey = (headers[ImmichHeader.SHARED_LINK_KEY] || queryParams[ImmichQuery.SHARED_LINK_KEY]) as string; - const session = (headers[ImmichHeader.USER_TOKEN] || - headers[ImmichHeader.SESSION_TOKEN] || - queryParams[ImmichQuery.SESSION_KEY] || + const shareKey = (headers[ImmichHeader.SharedLinkKey] || queryParams[ImmichQuery.SharedLinkKey]) as string; + const session = (headers[ImmichHeader.UserToken] || + headers[ImmichHeader.SessionToken] || + queryParams[ImmichQuery.SessionKey] || 
this.getBearerToken(headers) || this.getCookieToken(headers)) as string; - const apiKey = (headers[ImmichHeader.API_KEY] || queryParams[ImmichQuery.API_KEY]) as string; + const apiKey = (headers[ImmichHeader.ApiKey] || queryParams[ImmichQuery.ApiKey]) as string; if (shareKey) { return this.validateSharedLink(shareKey); @@ -321,7 +321,7 @@ export class AuthService extends BaseService { const { contentType, data } = await this.oauthRepository.getProfilePicture(url); const extensionWithDot = mimeTypes.toExtension(contentType || 'image/jpeg') ?? 'jpg'; const profileImagePath = join( - StorageCore.getFolderLocation(StorageFolder.PROFILE, user.id), + StorageCore.getFolderLocation(StorageFolder.Profile, user.id), `${this.cryptoRepository.randomUUID()}${extensionWithDot}`, ); @@ -330,7 +330,7 @@ export class AuthService extends BaseService { await this.userRepository.update(user.id, { profileImagePath, profileChangedAt: new Date() }); if (oldPath) { - await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [oldPath] } }); + await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [oldPath] } }); } } catch (error: Error | any) { this.logger.warn(`Unable to sync oauth profile picture: ${error}`, error?.stack); @@ -366,7 +366,7 @@ export class AuthService extends BaseService { } private async getLogoutEndpoint(authType: AuthType): Promise { - if (authType !== AuthType.OAUTH) { + if (authType !== AuthType.OAuth) { return LOGIN_URL; } @@ -389,17 +389,17 @@ export class AuthService extends BaseService { private getCookieToken(headers: IncomingHttpHeaders): string | null { const cookies = parse(headers.cookie || ''); - return cookies[ImmichCookie.ACCESS_TOKEN] || null; + return cookies[ImmichCookie.AccessToken] || null; } private getCookieOauthState(headers: IncomingHttpHeaders): string | null { const cookies = parse(headers.cookie || ''); - return cookies[ImmichCookie.OAUTH_STATE] || null; + return cookies[ImmichCookie.OAuthState] || null; } 
private getCookieCodeVerifier(headers: IncomingHttpHeaders): string | null { const cookies = parse(headers.cookie || ''); - return cookies[ImmichCookie.OAUTH_CODE_VERIFIER] || null; + return cookies[ImmichCookie.OAuthCodeVerifier] || null; } async validateSharedLink(key: string | string[]): Promise { diff --git a/server/src/services/backup.service.spec.ts b/server/src/services/backup.service.spec.ts index aa72fd588a..e36f699f53 100644 --- a/server/src/services/backup.service.spec.ts +++ b/server/src/services/backup.service.spec.ts @@ -38,7 +38,7 @@ describe(BackupService.name, () => { }); it('should not initialise backup database job when running on microservices', async () => { - mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES); + mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices); await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig }); expect(mocks.cron.create).not.toHaveBeenCalled(); @@ -98,10 +98,10 @@ describe(BackupService.name, () => { await sut.cleanupDatabaseBackups(); expect(mocks.storage.unlink).toHaveBeenCalledTimes(2); expect(mocks.storage.unlink).toHaveBeenCalledWith( - `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-123.sql.gz.tmp`, + `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-123.sql.gz.tmp`, ); expect(mocks.storage.unlink).toHaveBeenCalledWith( - `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-345.sql.gz.tmp`, + `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-345.sql.gz.tmp`, ); }); @@ -111,7 +111,7 @@ describe(BackupService.name, () => { await sut.cleanupDatabaseBackups(); expect(mocks.storage.unlink).toHaveBeenCalledTimes(1); expect(mocks.storage.unlink).toHaveBeenCalledWith( - `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz`, + `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz`, ); }); @@ -125,10 +125,10 @@ 
describe(BackupService.name, () => { await sut.cleanupDatabaseBackups(); expect(mocks.storage.unlink).toHaveBeenCalledTimes(2); expect(mocks.storage.unlink).toHaveBeenCalledWith( - `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz.tmp`, + `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz.tmp`, ); expect(mocks.storage.unlink).toHaveBeenCalledWith( - `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-2.sql.gz`, + `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-2.sql.gz`, ); }); }); @@ -145,13 +145,13 @@ describe(BackupService.name, () => { it('should run a database backup successfully', async () => { const result = await sut.handleBackupDatabase(); - expect(result).toBe(JobStatus.SUCCESS); + expect(result).toBe(JobStatus.Success); expect(mocks.storage.createWriteStream).toHaveBeenCalled(); }); it('should rename file on success', async () => { const result = await sut.handleBackupDatabase(); - expect(result).toBe(JobStatus.SUCCESS); + expect(result).toBe(JobStatus.Success); expect(mocks.storage.rename).toHaveBeenCalled(); }); @@ -219,7 +219,7 @@ describe(BackupService.name, () => { mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion); const result = await sut.handleBackupDatabase(); expect(mocks.process.spawn).not.toHaveBeenCalled(); - expect(result).toBe(JobStatus.FAILED); + expect(result).toBe(JobStatus.Failed); }); }); }); diff --git a/server/src/services/backup.service.ts b/server/src/services/backup.service.ts index 79c5deee57..a5882b4f80 100644 --- a/server/src/services/backup.service.ts +++ b/server/src/services/backup.service.ts @@ -14,7 +14,7 @@ import { handlePromiseError } from 'src/utils/misc'; export class BackupService extends BaseService { private backupLock = false; - @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.MICROSERVICES] }) + @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] }) async onConfigInit({ 
newConfig: { backup: { database }, @@ -26,7 +26,7 @@ export class BackupService extends BaseService { this.cronRepository.create({ name: 'backupDatabase', expression: database.cronExpression, - onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger), + onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.BackupDatabase }), this.logger), start: database.enabled, }); } @@ -51,7 +51,7 @@ export class BackupService extends BaseService { backup: { database: config }, } = await this.getConfig({ withCache: false }); - const backupsFolder = StorageCore.getBaseFolder(StorageFolder.BACKUPS); + const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups); const files = await this.storageRepository.readdir(backupsFolder); const failedBackups = files.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz\.tmp$/)); const backups = files @@ -68,7 +68,7 @@ export class BackupService extends BaseService { this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`); } - @OnJob({ name: JobName.BACKUP_DATABASE, queue: QueueName.BACKUP_DATABASE }) + @OnJob({ name: JobName.BackupDatabase, queue: QueueName.BackupDatabase }) async handleBackupDatabase(): Promise { this.logger.debug(`Database Backup Started`); const { database } = this.configRepository.getEnv(); @@ -92,7 +92,7 @@ export class BackupService extends BaseService { databaseParams.push('--clean', '--if-exists'); const databaseVersion = await this.databaseRepository.getPostgresVersion(); const backupFilePath = path.join( - StorageCore.getBaseFolder(StorageFolder.BACKUPS), + StorageCore.getBaseFolder(StorageFolder.Backups), `immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`, ); const databaseSemver = semver.coerce(databaseVersion); @@ -100,7 +100,7 @@ export class BackupService extends BaseService { if (!databaseMajorVersion 
|| !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <18.0.0')) { this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`); - return JobStatus.FAILED; + return JobStatus.Failed; } this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`); @@ -179,6 +179,6 @@ export class BackupService extends BaseService { this.logger.log(`Database Backup Success`); await this.cleanupDatabaseBackups(); - return JobStatus.SUCCESS; + return JobStatus.Success; } } diff --git a/server/src/services/database.service.spec.ts b/server/src/services/database.service.spec.ts index 09b22dfd5e..b4022ee864 100644 --- a/server/src/services/database.service.spec.ts +++ b/server/src/services/database.service.spec.ts @@ -19,7 +19,7 @@ describe(DatabaseService.name, () => { ({ sut, mocks } = newTestService(DatabaseService)); extensionRange = '0.2.x'; - mocks.database.getVectorExtension.mockResolvedValue(DatabaseExtension.VECTORCHORD); + mocks.database.getVectorExtension.mockResolvedValue(DatabaseExtension.VectorChord); mocks.database.getExtensionVersionRange.mockReturnValue(extensionRange); versionBelowRange = '0.1.0'; @@ -28,7 +28,7 @@ describe(DatabaseService.name, () => { versionAboveRange = '0.3.0'; mocks.database.getExtensionVersions.mockResolvedValue([ { - name: DatabaseExtension.VECTORCHORD, + name: DatabaseExtension.VectorChord, installedVersion: null, availableVersion: minVersionInRange, }, @@ -49,9 +49,9 @@ describe(DatabaseService.name, () => { }); describe.each(>[ - { extension: DatabaseExtension.VECTOR, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTOR] }, - { extension: DatabaseExtension.VECTORS, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTORS] }, - { extension: DatabaseExtension.VECTORCHORD, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTORCHORD] }, + { extension: DatabaseExtension.Vector, extensionName: EXTENSION_NAMES[DatabaseExtension.Vector] }, + { extension: 
DatabaseExtension.Vectors, extensionName: EXTENSION_NAMES[DatabaseExtension.Vectors] }, + { extension: DatabaseExtension.VectorChord, extensionName: EXTENSION_NAMES[DatabaseExtension.VectorChord] }, ])('should work with $extensionName', ({ extension, extensionName }) => { beforeEach(() => { mocks.database.getExtensionVersions.mockResolvedValue([ @@ -292,8 +292,8 @@ describe(DatabaseService.name, () => { await expect(sut.onBootstrap()).resolves.toBeUndefined(); expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledExactlyOnceWith([ - VectorIndex.CLIP, - VectorIndex.FACE, + VectorIndex.Clip, + VectorIndex.Face, ]); expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledTimes(1); expect(mocks.database.runMigrations).toHaveBeenCalledTimes(1); @@ -306,8 +306,8 @@ describe(DatabaseService.name, () => { await expect(sut.onBootstrap()).rejects.toBeDefined(); expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledExactlyOnceWith([ - VectorIndex.CLIP, - VectorIndex.FACE, + VectorIndex.Clip, + VectorIndex.Face, ]); expect(mocks.database.runMigrations).not.toHaveBeenCalled(); expect(mocks.logger.fatal).not.toHaveBeenCalled(); @@ -330,7 +330,7 @@ describe(DatabaseService.name, () => { database: 'immich', }, skipMigrations: true, - vectorExtension: DatabaseExtension.VECTORS, + vectorExtension: DatabaseExtension.Vectors, }, }), ); @@ -356,12 +356,12 @@ describe(DatabaseService.name, () => { it(`should drop unused extension`, async () => { mocks.database.getExtensionVersions.mockResolvedValue([ { - name: DatabaseExtension.VECTORS, + name: DatabaseExtension.Vectors, installedVersion: minVersionInRange, availableVersion: minVersionInRange, }, { - name: DatabaseExtension.VECTORCHORD, + name: DatabaseExtension.VectorChord, installedVersion: null, availableVersion: minVersionInRange, }, @@ -369,19 +369,19 @@ describe(DatabaseService.name, () => { await expect(sut.onBootstrap()).resolves.toBeUndefined(); - 
expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORCHORD); - expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORS); + expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VectorChord); + expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.Vectors); }); it(`should warn if unused extension could not be dropped`, async () => { mocks.database.getExtensionVersions.mockResolvedValue([ { - name: DatabaseExtension.VECTORS, + name: DatabaseExtension.Vectors, installedVersion: minVersionInRange, availableVersion: minVersionInRange, }, { - name: DatabaseExtension.VECTORCHORD, + name: DatabaseExtension.VectorChord, installedVersion: null, availableVersion: minVersionInRange, }, @@ -390,8 +390,8 @@ describe(DatabaseService.name, () => { await expect(sut.onBootstrap()).resolves.toBeUndefined(); - expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORCHORD); - expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORS); + expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VectorChord); + expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.Vectors); expect(mocks.logger.warn).toHaveBeenCalledTimes(1); expect(mocks.logger.warn.mock.calls[0][0]).toContain('DROP EXTENSION vectors'); }); @@ -399,12 +399,12 @@ describe(DatabaseService.name, () => { it(`should not try to drop pgvector when using vectorchord`, async () => { mocks.database.getExtensionVersions.mockResolvedValue([ { - name: DatabaseExtension.VECTOR, + name: DatabaseExtension.Vector, installedVersion: minVersionInRange, availableVersion: minVersionInRange, }, { - name: DatabaseExtension.VECTORCHORD, + name: DatabaseExtension.VectorChord, installedVersion: minVersionInRange, availableVersion: minVersionInRange, }, diff --git 
a/server/src/services/database.service.ts b/server/src/services/database.service.ts index fd59e3aa67..e54be28fc2 100644 --- a/server/src/services/database.service.ts +++ b/server/src/services/database.service.ts @@ -100,7 +100,7 @@ export class DatabaseService extends BaseService { } try { - await this.databaseRepository.reindexVectorsIfNeeded([VectorIndex.CLIP, VectorIndex.FACE]); + await this.databaseRepository.reindexVectorsIfNeeded([VectorIndex.Clip, VectorIndex.Face]); } catch (error) { this.logger.warn( 'Could not run vector reindexing checks. If the extension was updated, please restart the Postgres instance. If you are upgrading directly from a version below 1.107.2, please upgrade to 1.107.2 first.', @@ -109,7 +109,7 @@ export class DatabaseService extends BaseService { } for (const { name: dbName, installedVersion } of extensionVersions) { - const isDepended = dbName === DatabaseExtension.VECTOR && extension === DatabaseExtension.VECTORCHORD; + const isDepended = dbName === DatabaseExtension.Vector && extension === DatabaseExtension.VectorChord; if (dbName !== extension && installedVersion && !isDepended) { await this.dropExtension(dbName); } @@ -120,8 +120,8 @@ export class DatabaseService extends BaseService { await this.databaseRepository.runMigrations(); } await Promise.all([ - this.databaseRepository.prewarm(VectorIndex.CLIP), - this.databaseRepository.prewarm(VectorIndex.FACE), + this.databaseRepository.prewarm(VectorIndex.Clip), + this.databaseRepository.prewarm(VectorIndex.Face), ]); }); } diff --git a/server/src/services/download.service.ts b/server/src/services/download.service.ts index 02711b9bfd..a5f734e59c 100644 --- a/server/src/services/download.service.ts +++ b/server/src/services/download.service.ts @@ -17,15 +17,15 @@ export class DownloadService extends BaseService { if (dto.assetIds) { const assetIds = dto.assetIds; - await this.requireAccess({ auth, permission: Permission.ASSET_DOWNLOAD, ids: assetIds }); + await this.requireAccess({ 
auth, permission: Permission.AssetDownload, ids: assetIds }); assets = this.downloadRepository.downloadAssetIds(assetIds); } else if (dto.albumId) { const albumId = dto.albumId; - await this.requireAccess({ auth, permission: Permission.ALBUM_DOWNLOAD, ids: [albumId] }); + await this.requireAccess({ auth, permission: Permission.AlbumDownload, ids: [albumId] }); assets = this.downloadRepository.downloadAlbumId(albumId); } else if (dto.userId) { const userId = dto.userId; - await this.requireAccess({ auth, permission: Permission.TIMELINE_DOWNLOAD, ids: [userId] }); + await this.requireAccess({ auth, permission: Permission.TimelineDownload, ids: [userId] }); assets = this.downloadRepository.downloadUserId(userId); } else { throw new BadRequestException('assetIds, albumId, or userId is required'); @@ -81,7 +81,7 @@ export class DownloadService extends BaseService { } async downloadArchive(auth: AuthDto, dto: AssetIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.ASSET_DOWNLOAD, ids: dto.assetIds }); + await this.requireAccess({ auth, permission: Permission.AssetDownload, ids: dto.assetIds }); const zip = this.storageRepository.createZipStream(); const assets = await this.assetRepository.getByIds(dto.assetIds); diff --git a/server/src/services/duplicate.service.spec.ts b/server/src/services/duplicate.service.spec.ts index b7d6d5fc96..0a2243e322 100644 --- a/server/src/services/duplicate.service.spec.ts +++ b/server/src/services/duplicate.service.spec.ts @@ -12,10 +12,10 @@ const hasEmbedding = { id: 'asset-1', ownerId: 'user-id', stackId: null, - type: AssetType.IMAGE, + type: AssetType.Image, duplicateId: null, embedding: '[1, 2, 3, 4]', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }; const hasDupe = { @@ -78,7 +78,7 @@ describe(SearchService.name, () => { }, }); - await expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.SKIPPED); + await 
expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.Skipped); expect(mocks.job.queue).not.toHaveBeenCalled(); expect(mocks.job.queueAll).not.toHaveBeenCalled(); expect(mocks.systemMetadata.get).toHaveBeenCalled(); @@ -94,7 +94,7 @@ describe(SearchService.name, () => { }, }); - await expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.Skipped); expect(mocks.job.queue).not.toHaveBeenCalled(); expect(mocks.job.queueAll).not.toHaveBeenCalled(); expect(mocks.systemMetadata.get).toHaveBeenCalled(); @@ -108,7 +108,7 @@ describe(SearchService.name, () => { expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(undefined); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.DUPLICATE_DETECTION, + name: JobName.DuplicateDetection, data: { id: assetStub.image.id }, }, ]); @@ -122,7 +122,7 @@ describe(SearchService.name, () => { expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.DUPLICATE_DETECTION, + name: JobName.DuplicateDetection, data: { id: assetStub.image.id }, }, ]); @@ -154,7 +154,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id }); - expect(result).toBe(JobStatus.SKIPPED); + expect(result).toBe(JobStatus.Skipped); expect(mocks.assetJob.getForSearchDuplicatesJob).not.toHaveBeenCalled(); }); @@ -171,7 +171,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id }); - expect(result).toBe(JobStatus.SKIPPED); + expect(result).toBe(JobStatus.Skipped); expect(mocks.assetJob.getForSearchDuplicatesJob).not.toHaveBeenCalled(); }); @@ -180,7 +180,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id: assetStub.image.id }); - expect(result).toBe(JobStatus.FAILED); + expect(result).toBe(JobStatus.Failed); 
expect(mocks.logger.error).toHaveBeenCalledWith(`Asset ${assetStub.image.id} not found`); }); @@ -190,7 +190,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id }); - expect(result).toBe(JobStatus.SKIPPED); + expect(result).toBe(JobStatus.Skipped); expect(mocks.logger.debug).toHaveBeenCalledWith(`Asset ${id} is part of a stack, skipping`); }); @@ -198,12 +198,12 @@ describe(SearchService.name, () => { const id = assetStub.livePhotoMotionAsset.id; mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue({ ...hasEmbedding, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, }); const result = await sut.handleSearchDuplicates({ id }); - expect(result).toBe(JobStatus.SKIPPED); + expect(result).toBe(JobStatus.Skipped); expect(mocks.logger.debug).toHaveBeenCalledWith(`Asset ${id} is not visible, skipping`); }); @@ -212,7 +212,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id: assetStub.image.id }); - expect(result).toBe(JobStatus.FAILED); + expect(result).toBe(JobStatus.Failed); expect(mocks.logger.debug).toHaveBeenCalledWith(`Asset ${assetStub.image.id} is missing embedding`); }); @@ -226,7 +226,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id }); - expect(result).toBe(JobStatus.SUCCESS); + expect(result).toBe(JobStatus.Success); expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({ assetId: hasEmbedding.id, embedding: hasEmbedding.embedding, @@ -253,7 +253,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id }); - expect(result).toBe(JobStatus.SUCCESS); + expect(result).toBe(JobStatus.Success); expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({ assetId: hasEmbedding.id, embedding: hasEmbedding.embedding, @@ -277,7 +277,7 @@ describe(SearchService.name, () => { const result = await sut.handleSearchDuplicates({ id: 
hasDupe.id }); - expect(result).toBe(JobStatus.SUCCESS); + expect(result).toBe(JobStatus.Success); expect(mocks.asset.update).toHaveBeenCalledWith({ id: hasDupe.id, duplicateId: null }); expect(mocks.asset.upsertJobStatus).toHaveBeenCalledWith({ assetId: hasDupe.id, diff --git a/server/src/services/duplicate.service.ts b/server/src/services/duplicate.service.ts index 99674d4c36..1699cf73f6 100644 --- a/server/src/services/duplicate.service.ts +++ b/server/src/services/duplicate.service.ts @@ -29,11 +29,11 @@ export class DuplicateService extends BaseService { await this.duplicateRepository.deleteAll(auth.user.id, dto.ids); } - @OnJob({ name: JobName.QUEUE_DUPLICATE_DETECTION, queue: QueueName.DUPLICATE_DETECTION }) - async handleQueueSearchDuplicates({ force }: JobOf): Promise { + @OnJob({ name: JobName.QueueDuplicateDetection, queue: QueueName.DuplicateDetection }) + async handleQueueSearchDuplicates({ force }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: false }); if (!isDuplicateDetectionEnabled(machineLearning)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } let jobs: JobItem[] = []; @@ -44,7 +44,7 @@ export class DuplicateService extends BaseService { const assets = this.assetJobRepository.streamForSearchDuplicates(force); for await (const asset of assets) { - jobs.push({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } }); + jobs.push({ name: JobName.DuplicateDetection, data: { id: asset.id } }); if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) { await queueAll(); } @@ -52,40 +52,40 @@ export class DuplicateService extends BaseService { await queueAll(); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.DUPLICATE_DETECTION, queue: QueueName.DUPLICATE_DETECTION }) - async handleSearchDuplicates({ id }: JobOf): Promise { + @OnJob({ name: JobName.DuplicateDetection, queue: QueueName.DuplicateDetection }) + async handleSearchDuplicates({ id }: JobOf): Promise { const { 
machineLearning } = await this.getConfig({ withCache: true }); if (!isDuplicateDetectionEnabled(machineLearning)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const asset = await this.assetJobRepository.getForSearchDuplicatesJob(id); if (!asset) { this.logger.error(`Asset ${id} not found`); - return JobStatus.FAILED; + return JobStatus.Failed; } if (asset.stackId) { this.logger.debug(`Asset ${id} is part of a stack, skipping`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } - if (asset.visibility === AssetVisibility.HIDDEN) { + if (asset.visibility === AssetVisibility.Hidden) { this.logger.debug(`Asset ${id} is not visible, skipping`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } - if (asset.visibility === AssetVisibility.LOCKED) { + if (asset.visibility === AssetVisibility.Locked) { this.logger.debug(`Asset ${id} is locked, skipping`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } if (!asset.embedding) { this.logger.debug(`Asset ${id} is missing embedding`); - return JobStatus.FAILED; + return JobStatus.Failed; } const duplicateAssets = await this.duplicateRepository.search({ @@ -110,7 +110,7 @@ export class DuplicateService extends BaseService { const duplicatesDetectedAt = new Date(); await this.assetRepository.upsertJobStatus(...assetIds.map((assetId) => ({ assetId, duplicatesDetectedAt }))); - return JobStatus.SUCCESS; + return JobStatus.Success; } private async updateDuplicates( diff --git a/server/src/services/job.service.spec.ts b/server/src/services/job.service.spec.ts index a18eccdd8b..497d81b4ca 100644 --- a/server/src/services/job.service.spec.ts +++ b/server/src/services/job.service.spec.ts @@ -13,7 +13,7 @@ describe(JobService.name, () => { beforeEach(() => { ({ sut, mocks } = newTestService(JobService, {})); - mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES); + mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices); }); it('should work', () => { @@ -25,10 +25,10 @@ 
describe(JobService.name, () => { sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig }); expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(15); - expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1); - expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1); - expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5); - expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(9, QueueName.STORAGE_TEMPLATE_MIGRATION, 1); + expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FacialRecognition, 1); + expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DuplicateDetection, 1); + expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BackgroundTask, 5); + expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(9, QueueName.StorageTemplateMigration, 1); }); }); @@ -37,16 +37,16 @@ describe(JobService.name, () => { await sut.handleNightlyJobs(); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.ASSET_DELETION_CHECK }, - { name: JobName.USER_DELETE_CHECK }, - { name: JobName.PERSON_CLEANUP }, - { name: JobName.MEMORIES_CLEANUP }, - { name: JobName.CLEAN_OLD_SESSION_TOKENS }, - { name: JobName.CLEAN_OLD_AUDIT_LOGS }, - { name: JobName.MEMORIES_CREATE }, - { name: JobName.USER_SYNC_USAGE }, - { name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } }, - { name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false, nightly: true } }, + { name: JobName.AssetDeletionCheck }, + { name: JobName.UserDeleteCheck }, + { name: JobName.PersonCleanup }, + { name: JobName.MemoriesCleanup }, + { name: JobName.CleanOldSessionTokens }, + { name: JobName.CleanOldAuditLogs }, + { name: JobName.MemoriesCreate }, + { name: JobName.UserSyncUsage }, + { name: JobName.QueueGenerateThumbnails, data: { force: false } }, + { name: JobName.QueueFacialRecognition, data: { force: false, 
nightly: true } }, ]); }); }); @@ -82,49 +82,49 @@ describe(JobService.name, () => { }; await expect(sut.getAllJobsStatus()).resolves.toEqual({ - [QueueName.BACKGROUND_TASK]: expectedJobStatus, - [QueueName.DUPLICATE_DETECTION]: expectedJobStatus, - [QueueName.SMART_SEARCH]: expectedJobStatus, - [QueueName.METADATA_EXTRACTION]: expectedJobStatus, - [QueueName.SEARCH]: expectedJobStatus, - [QueueName.STORAGE_TEMPLATE_MIGRATION]: expectedJobStatus, - [QueueName.MIGRATION]: expectedJobStatus, - [QueueName.THUMBNAIL_GENERATION]: expectedJobStatus, - [QueueName.VIDEO_CONVERSION]: expectedJobStatus, - [QueueName.FACE_DETECTION]: expectedJobStatus, - [QueueName.FACIAL_RECOGNITION]: expectedJobStatus, - [QueueName.SIDECAR]: expectedJobStatus, - [QueueName.LIBRARY]: expectedJobStatus, - [QueueName.NOTIFICATION]: expectedJobStatus, - [QueueName.BACKUP_DATABASE]: expectedJobStatus, + [QueueName.BackgroundTask]: expectedJobStatus, + [QueueName.DuplicateDetection]: expectedJobStatus, + [QueueName.SmartSearch]: expectedJobStatus, + [QueueName.MetadataExtraction]: expectedJobStatus, + [QueueName.Search]: expectedJobStatus, + [QueueName.StorageTemplateMigration]: expectedJobStatus, + [QueueName.Migration]: expectedJobStatus, + [QueueName.ThumbnailGeneration]: expectedJobStatus, + [QueueName.VideoConversion]: expectedJobStatus, + [QueueName.FaceDetection]: expectedJobStatus, + [QueueName.FacialRecognition]: expectedJobStatus, + [QueueName.Sidecar]: expectedJobStatus, + [QueueName.Library]: expectedJobStatus, + [QueueName.Notification]: expectedJobStatus, + [QueueName.BackupDatabase]: expectedJobStatus, }); }); }); describe('handleCommand', () => { it('should handle a pause command', async () => { - await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.PAUSE, force: false }); + await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Pause, force: false }); - expect(mocks.job.pause).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION); + 
expect(mocks.job.pause).toHaveBeenCalledWith(QueueName.MetadataExtraction); }); it('should handle a resume command', async () => { - await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.RESUME, force: false }); + await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Resume, force: false }); - expect(mocks.job.resume).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION); + expect(mocks.job.resume).toHaveBeenCalledWith(QueueName.MetadataExtraction); }); it('should handle an empty command', async () => { - await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.EMPTY, force: false }); + await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Empty, force: false }); - expect(mocks.job.empty).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION); + expect(mocks.job.empty).toHaveBeenCalledWith(QueueName.MetadataExtraction); }); it('should not start a job that is already running', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: true, isPaused: false }); await expect( - sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false }), + sut.handleCommand(QueueName.VideoConversion, { command: JobCommand.Start, force: false }), ).rejects.toBeInstanceOf(BadRequestException); expect(mocks.job.queue).not.toHaveBeenCalled(); @@ -134,80 +134,80 @@ describe(JobService.name, () => { it('should handle a start video conversion command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.VideoConversion, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_VIDEO_CONVERSION, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueVideoConversion, data: { force: false } }); }); 
it('should handle a start storage template migration command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.STORAGE_TEMPLATE_MIGRATION, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.StorageTemplateMigration, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.STORAGE_TEMPLATE_MIGRATION }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.StorageTemplateMigration }); }); it('should handle a start smart search command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.SMART_SEARCH, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.SmartSearch, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_SMART_SEARCH, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueSmartSearch, data: { force: false } }); }); it('should handle a start metadata extraction command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueMetadataExtraction, data: { force: false } }); }); it('should handle a start sidecar command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.SIDECAR, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.Sidecar, { 
command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_SIDECAR, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueSidecar, data: { force: false } }); }); it('should handle a start thumbnail generation command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.THUMBNAIL_GENERATION, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.ThumbnailGeneration, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueGenerateThumbnails, data: { force: false } }); }); it('should handle a start face detection command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.FACE_DETECTION, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.FaceDetection, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACE_DETECTION, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueFaceDetection, data: { force: false } }); }); it('should handle a start facial recognition command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.FACIAL_RECOGNITION, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.FacialRecognition, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueFacialRecognition, data: { force: false 
} }); }); it('should handle a start backup database command', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); - await sut.handleCommand(QueueName.BACKUP_DATABASE, { command: JobCommand.START, force: false }); + await sut.handleCommand(QueueName.BackupDatabase, { command: JobCommand.Start, force: false }); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.BACKUP_DATABASE, data: { force: false } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.BackupDatabase, data: { force: false } }); }); it('should throw a bad request when an invalid queue is used', async () => { mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false }); await expect( - sut.handleCommand(QueueName.BACKGROUND_TASK, { command: JobCommand.START, force: false }), + sut.handleCommand(QueueName.BackgroundTask, { command: JobCommand.Start, force: false }), ).rejects.toBeInstanceOf(BadRequestException); expect(mocks.job.queue).not.toHaveBeenCalled(); @@ -217,10 +217,10 @@ describe(JobService.name, () => { describe('onJobStart', () => { it('should process a successful job', async () => { - mocks.job.run.mockResolvedValue(JobStatus.SUCCESS); + mocks.job.run.mockResolvedValue(JobStatus.Success); - await sut.onJobStart(QueueName.BACKGROUND_TASK, { - name: JobName.DELETE_FILES, + await sut.onJobStart(QueueName.BackgroundTask, { + name: JobName.DeleteFiles, data: { files: ['path/to/file'] }, }); @@ -232,55 +232,55 @@ describe(JobService.name, () => { const tests: Array<{ item: JobItem; jobs: JobName[]; stub?: any }> = [ { - item: { name: JobName.SIDECAR_SYNC, data: { id: 'asset-1' } }, - jobs: [JobName.METADATA_EXTRACTION], + item: { name: JobName.SidecarSync, data: { id: 'asset-1' } }, + jobs: [JobName.MetadataExtraction], }, { - item: { name: JobName.SIDECAR_DISCOVERY, data: { id: 'asset-1' } }, - jobs: [JobName.METADATA_EXTRACTION], + item: { name: JobName.SidecarDiscovery, data: { id: 'asset-1' } }, + jobs: 
[JobName.MetadataExtraction], }, { - item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1', source: 'upload' } }, - jobs: [JobName.GENERATE_THUMBNAILS], + item: { name: JobName.StorageTemplateMigrationSingle, data: { id: 'asset-1', source: 'upload' } }, + jobs: [JobName.GenerateThumbnails], }, { - item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1' } }, + item: { name: JobName.StorageTemplateMigrationSingle, data: { id: 'asset-1' } }, jobs: [], }, { - item: { name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'asset-1' } }, + item: { name: JobName.GeneratePersonThumbnail, data: { id: 'asset-1' } }, jobs: [], }, { - item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1' } }, + item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1' } }, jobs: [], stub: [assetStub.image], }, { - item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1' } }, + item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1' } }, jobs: [], stub: [assetStub.video], }, { - item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1', source: 'upload' } }, - jobs: [JobName.SMART_SEARCH, JobName.FACE_DETECTION], + item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1', source: 'upload' } }, + jobs: [JobName.SmartSearch, JobName.FaceDetection], stub: [assetStub.livePhotoStillAsset], }, { - item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1', source: 'upload' } }, - jobs: [JobName.SMART_SEARCH, JobName.FACE_DETECTION, JobName.VIDEO_CONVERSION], + item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1', source: 'upload' } }, + jobs: [JobName.SmartSearch, JobName.FaceDetection, JobName.VideoConversion], stub: [assetStub.video], }, { - item: { name: JobName.SMART_SEARCH, data: { id: 'asset-1' } }, + item: { name: JobName.SmartSearch, data: { id: 'asset-1' } }, jobs: [], }, { - item: { name: JobName.FACE_DETECTION, data: { id: 'asset-1' } }, + item: { name: JobName.FaceDetection, 
data: { id: 'asset-1' } }, jobs: [], }, { - item: { name: JobName.FACIAL_RECOGNITION, data: { id: 'asset-1' } }, + item: { name: JobName.FacialRecognition, data: { id: 'asset-1' } }, jobs: [], }, ]; @@ -291,9 +291,9 @@ describe(JobService.name, () => { mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue(stub); } - mocks.job.run.mockResolvedValue(JobStatus.SUCCESS); + mocks.job.run.mockResolvedValue(JobStatus.Success); - await sut.onJobStart(QueueName.BACKGROUND_TASK, item); + await sut.onJobStart(QueueName.BackgroundTask, item); if (jobs.length > 1) { expect(mocks.job.queueAll).toHaveBeenCalledWith( @@ -308,9 +308,9 @@ describe(JobService.name, () => { }); it(`should not queue any jobs when ${item.name} fails`, async () => { - mocks.job.run.mockResolvedValue(JobStatus.FAILED); + mocks.job.run.mockResolvedValue(JobStatus.Failed); - await sut.onJobStart(QueueName.BACKGROUND_TASK, item); + await sut.onJobStart(QueueName.BackgroundTask, item); expect(mocks.job.queueAll).not.toHaveBeenCalled(); }); diff --git a/server/src/services/job.service.ts b/server/src/services/job.service.ts index f0bbefc8f0..73254b9f38 100644 --- a/server/src/services/job.service.ts +++ b/server/src/services/job.service.ts @@ -27,28 +27,28 @@ import { handlePromiseError } from 'src/utils/misc'; const asJobItem = (dto: JobCreateDto): JobItem => { switch (dto.name) { - case ManualJobName.TAG_CLEANUP: { - return { name: JobName.TAG_CLEANUP }; + case ManualJobName.TagCleanup: { + return { name: JobName.TagCleanup }; } - case ManualJobName.PERSON_CLEANUP: { - return { name: JobName.PERSON_CLEANUP }; + case ManualJobName.PersonCleanup: { + return { name: JobName.PersonCleanup }; } - case ManualJobName.USER_CLEANUP: { - return { name: JobName.USER_DELETE_CHECK }; + case ManualJobName.UserCleanup: { + return { name: JobName.UserDeleteCheck }; } - case ManualJobName.MEMORY_CLEANUP: { - return { name: JobName.MEMORIES_CLEANUP }; + case ManualJobName.MemoryCleanup: { + return { name: 
JobName.MemoriesCleanup }; } - case ManualJobName.MEMORY_CREATE: { - return { name: JobName.MEMORIES_CREATE }; + case ManualJobName.MemoryCreate: { + return { name: JobName.MemoriesCreate }; } - case ManualJobName.BACKUP_DATABASE: { - return { name: JobName.BACKUP_DATABASE }; + case ManualJobName.BackupDatabase: { + return { name: JobName.BackupDatabase }; } default: { @@ -69,7 +69,7 @@ export class JobService extends BaseService { @OnEvent({ name: 'ConfigInit' }) async onConfigInit({ newConfig: config }: ArgOf<'ConfigInit'>) { - if (this.worker === ImmichWorker.MICROSERVICES) { + if (this.worker === ImmichWorker.Microservices) { this.updateQueueConcurrency(config); return; } @@ -89,7 +89,7 @@ export class JobService extends BaseService { @OnEvent({ name: 'ConfigUpdate', server: true }) onConfigUpdate({ newConfig: config }: ArgOf<'ConfigUpdate'>) { - if (this.worker === ImmichWorker.MICROSERVICES) { + if (this.worker === ImmichWorker.Microservices) { this.updateQueueConcurrency(config); return; } @@ -104,7 +104,7 @@ export class JobService extends BaseService { @OnEvent({ name: 'AppBootstrap', priority: BootstrapEventPriority.JobService }) onBootstrap() { this.jobRepository.setup(this.services); - if (this.worker === ImmichWorker.MICROSERVICES) { + if (this.worker === ImmichWorker.Microservices) { this.jobRepository.startWorkers(); } } @@ -133,28 +133,28 @@ export class JobService extends BaseService { this.logger.debug(`Handling command: queue=${queueName},command=${dto.command},force=${dto.force}`); switch (dto.command) { - case JobCommand.START: { + case JobCommand.Start: { await this.start(queueName, dto); break; } - case JobCommand.PAUSE: { + case JobCommand.Pause: { await this.jobRepository.pause(queueName); break; } - case JobCommand.RESUME: { + case JobCommand.Resume: { await this.jobRepository.resume(queueName); break; } - case JobCommand.EMPTY: { + case JobCommand.Empty: { await this.jobRepository.empty(queueName); break; } - case JobCommand.CLEAR_FAILED: 
{ - const failedJobs = await this.jobRepository.clear(queueName, QueueCleanType.FAILED); + case JobCommand.ClearFailed: { + const failedJobs = await this.jobRepository.clear(queueName, QueueCleanType.Failed); this.logger.debug(`Cleared failed jobs: ${failedJobs}`); break; } @@ -189,52 +189,52 @@ export class JobService extends BaseService { this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1); switch (name) { - case QueueName.VIDEO_CONVERSION: { - return this.jobRepository.queue({ name: JobName.QUEUE_VIDEO_CONVERSION, data: { force } }); + case QueueName.VideoConversion: { + return this.jobRepository.queue({ name: JobName.QueueVideoConversion, data: { force } }); } - case QueueName.STORAGE_TEMPLATE_MIGRATION: { - return this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION }); + case QueueName.StorageTemplateMigration: { + return this.jobRepository.queue({ name: JobName.StorageTemplateMigration }); } - case QueueName.MIGRATION: { - return this.jobRepository.queue({ name: JobName.QUEUE_MIGRATION }); + case QueueName.Migration: { + return this.jobRepository.queue({ name: JobName.QueueMigration }); } - case QueueName.SMART_SEARCH: { - return this.jobRepository.queue({ name: JobName.QUEUE_SMART_SEARCH, data: { force } }); + case QueueName.SmartSearch: { + return this.jobRepository.queue({ name: JobName.QueueSmartSearch, data: { force } }); } - case QueueName.DUPLICATE_DETECTION: { - return this.jobRepository.queue({ name: JobName.QUEUE_DUPLICATE_DETECTION, data: { force } }); + case QueueName.DuplicateDetection: { + return this.jobRepository.queue({ name: JobName.QueueDuplicateDetection, data: { force } }); } - case QueueName.METADATA_EXTRACTION: { - return this.jobRepository.queue({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force } }); + case QueueName.MetadataExtraction: { + return this.jobRepository.queue({ name: JobName.QueueMetadataExtraction, data: { force } }); } - case QueueName.SIDECAR: { - return 
this.jobRepository.queue({ name: JobName.QUEUE_SIDECAR, data: { force } }); + case QueueName.Sidecar: { + return this.jobRepository.queue({ name: JobName.QueueSidecar, data: { force } }); } - case QueueName.THUMBNAIL_GENERATION: { - return this.jobRepository.queue({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force } }); + case QueueName.ThumbnailGeneration: { + return this.jobRepository.queue({ name: JobName.QueueGenerateThumbnails, data: { force } }); } - case QueueName.FACE_DETECTION: { - return this.jobRepository.queue({ name: JobName.QUEUE_FACE_DETECTION, data: { force } }); + case QueueName.FaceDetection: { + return this.jobRepository.queue({ name: JobName.QueueFaceDetection, data: { force } }); } - case QueueName.FACIAL_RECOGNITION: { - return this.jobRepository.queue({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force } }); + case QueueName.FacialRecognition: { + return this.jobRepository.queue({ name: JobName.QueueFacialRecognition, data: { force } }); } - case QueueName.LIBRARY: { - return this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force } }); + case QueueName.Library: { + return this.jobRepository.queue({ name: JobName.LibraryQueueScanAll, data: { force } }); } - case QueueName.BACKUP_DATABASE: { - return this.jobRepository.queue({ name: JobName.BACKUP_DATABASE, data: { force } }); + case QueueName.BackupDatabase: { + return this.jobRepository.queue({ name: JobName.BackupDatabase, data: { force } }); } default: { @@ -251,7 +251,7 @@ export class JobService extends BaseService { const status = await this.jobRepository.run(job); const jobMetric = `immich.jobs.${job.name.replaceAll('-', '_')}.${status}`; this.telemetryRepository.jobs.addToCounter(jobMetric, 1); - if (status === JobStatus.SUCCESS || status == JobStatus.SKIPPED) { + if (status === JobStatus.Success || status == JobStatus.Skipped) { await this.onDone(job); } } catch (error: Error | any) { @@ -263,10 +263,10 @@ export class JobService extends BaseService { 
private isConcurrentQueue(name: QueueName): name is ConcurrentQueueName { return ![ - QueueName.FACIAL_RECOGNITION, - QueueName.STORAGE_TEMPLATE_MIGRATION, - QueueName.DUPLICATE_DETECTION, - QueueName.BACKUP_DATABASE, + QueueName.FacialRecognition, + QueueName.StorageTemplateMigration, + QueueName.DuplicateDetection, + QueueName.BackupDatabase, ].includes(name); } @@ -276,29 +276,29 @@ export class JobService extends BaseService { if (config.nightlyTasks.databaseCleanup) { jobs.push( - { name: JobName.ASSET_DELETION_CHECK }, - { name: JobName.USER_DELETE_CHECK }, - { name: JobName.PERSON_CLEANUP }, - { name: JobName.MEMORIES_CLEANUP }, - { name: JobName.CLEAN_OLD_SESSION_TOKENS }, - { name: JobName.CLEAN_OLD_AUDIT_LOGS }, + { name: JobName.AssetDeletionCheck }, + { name: JobName.UserDeleteCheck }, + { name: JobName.PersonCleanup }, + { name: JobName.MemoriesCleanup }, + { name: JobName.CleanOldSessionTokens }, + { name: JobName.CleanOldAuditLogs }, ); } if (config.nightlyTasks.generateMemories) { - jobs.push({ name: JobName.MEMORIES_CREATE }); + jobs.push({ name: JobName.MemoriesCreate }); } if (config.nightlyTasks.syncQuotaUsage) { - jobs.push({ name: JobName.USER_SYNC_USAGE }); + jobs.push({ name: JobName.UserSyncUsage }); } if (config.nightlyTasks.missingThumbnails) { - jobs.push({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } }); + jobs.push({ name: JobName.QueueGenerateThumbnails, data: { force: false } }); } if (config.nightlyTasks.clusterNewFaces) { - jobs.push({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false, nightly: true } }); + jobs.push({ name: JobName.QueueFacialRecognition, data: { force: false, nightly: true } }); } await this.jobRepository.queueAll(jobs); @@ -309,28 +309,28 @@ export class JobService extends BaseService { */ private async onDone(item: JobItem) { switch (item.name) { - case JobName.SIDECAR_SYNC: - case JobName.SIDECAR_DISCOVERY: { - await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, 
data: item.data }); + case JobName.SidecarSync: + case JobName.SidecarDiscovery: { + await this.jobRepository.queue({ name: JobName.MetadataExtraction, data: item.data }); break; } - case JobName.SIDECAR_WRITE: { + case JobName.SidecarWrite: { await this.jobRepository.queue({ - name: JobName.METADATA_EXTRACTION, + name: JobName.MetadataExtraction, data: { id: item.data.id, source: 'sidecar-write' }, }); break; } - case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: { + case JobName.StorageTemplateMigrationSingle: { if (item.data.source === 'upload' || item.data.source === 'copy') { - await this.jobRepository.queue({ name: JobName.GENERATE_THUMBNAILS, data: item.data }); + await this.jobRepository.queue({ name: JobName.GenerateThumbnails, data: item.data }); } break; } - case JobName.GENERATE_PERSON_THUMBNAIL: { + case JobName.GeneratePersonThumbnail: { const { id } = item.data; const person = await this.personRepository.getById(id); if (person) { @@ -339,7 +339,7 @@ export class JobService extends BaseService { break; } - case JobName.GENERATE_THUMBNAILS: { + case JobName.GenerateThumbnails: { if (!item.data.notify && item.data.source !== 'upload') { break; } @@ -351,16 +351,16 @@ export class JobService extends BaseService { } const jobs: JobItem[] = [ - { name: JobName.SMART_SEARCH, data: item.data }, - { name: JobName.FACE_DETECTION, data: item.data }, + { name: JobName.SmartSearch, data: item.data }, + { name: JobName.FaceDetection, data: item.data }, ]; - if (asset.type === AssetType.VIDEO) { - jobs.push({ name: JobName.VIDEO_CONVERSION, data: item.data }); + if (asset.type === AssetType.Video) { + jobs.push({ name: JobName.VideoConversion, data: item.data }); } await this.jobRepository.queueAll(jobs); - if (asset.visibility === AssetVisibility.TIMELINE || asset.visibility === AssetVisibility.ARCHIVE) { + if (asset.visibility === AssetVisibility.Timeline || asset.visibility === AssetVisibility.Archive) { this.eventRepository.clientSend('on_upload_success', 
asset.ownerId, mapAsset(asset)); if (asset.exifInfo) { const exif = asset.exifInfo; @@ -417,14 +417,14 @@ export class JobService extends BaseService { break; } - case JobName.SMART_SEARCH: { + case JobName.SmartSearch: { if (item.data.source === 'upload') { - await this.jobRepository.queue({ name: JobName.DUPLICATE_DETECTION, data: item.data }); + await this.jobRepository.queue({ name: JobName.DuplicateDetection, data: item.data }); } break; } - case JobName.USER_DELETION: { + case JobName.UserDeletion: { this.eventRepository.clientBroadcast('on_user_delete', item.data.id); break; } diff --git a/server/src/services/library.service.spec.ts b/server/src/services/library.service.spec.ts index ab69e22b99..aa2b80d928 100644 --- a/server/src/services/library.service.spec.ts +++ b/server/src/services/library.service.spec.ts @@ -27,7 +27,7 @@ describe(LibraryService.name, () => { ({ sut, mocks } = newTestService(LibraryService, {})); mocks.database.tryLock.mockResolvedValue(true); - mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES); + mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices); }); it('should work', () => { @@ -173,7 +173,7 @@ describe(LibraryService.name, () => { await sut.handleQueueSyncFiles({ id: library.id }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_SYNC_FILES, + name: JobName.LibrarySyncFiles, data: { libraryId: library.id, paths: ['/data/user1/photo.jpg'], @@ -185,7 +185,7 @@ describe(LibraryService.name, () => { it('should fail when library is not found', async () => { const library = factory.library({ importPaths: ['/foo', '/bar'] }); - await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.Skipped); }); it('should ignore import paths that do not exist', async () => { @@ -228,7 +228,7 @@ describe(LibraryService.name, () => { await sut.handleQueueSyncFiles({ id: library.id 
}); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_SYNC_FILES, + name: JobName.LibrarySyncFiles, data: { libraryId: library.id, paths: ['/data/user1/photo.jpg'], @@ -240,7 +240,7 @@ describe(LibraryService.name, () => { it("should fail when library can't be found", async () => { const library = factory.library({ importPaths: ['/foo', '/bar'] }); - await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.Skipped); }); it('should ignore import paths that do not exist', async () => { @@ -282,7 +282,7 @@ describe(LibraryService.name, () => { const response = await sut.handleQueueSyncAssets({ id: library.id }); - expect(response).toBe(JobStatus.SUCCESS); + expect(response).toBe(JobStatus.Success); expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith( library.id, library.importPaths, @@ -300,7 +300,7 @@ describe(LibraryService.name, () => { const response = await sut.handleQueueSyncAssets({ id: library.id }); - expect(response).toBe(JobStatus.SUCCESS); + expect(response).toBe(JobStatus.Success); expect(mocks.asset.detectOfflineExternalAssets).not.toHaveBeenCalled(); }); @@ -317,7 +317,7 @@ describe(LibraryService.name, () => { const response = await sut.handleQueueSyncAssets({ id: library.id }); expect(mocks.job.queue).toBeCalledWith({ - name: JobName.LIBRARY_SYNC_ASSETS, + name: JobName.LibrarySyncAssets, data: { libraryId: library.id, importPaths: library.importPaths, @@ -328,7 +328,7 @@ describe(LibraryService.name, () => { }, }); - expect(response).toBe(JobStatus.SUCCESS); + expect(response).toBe(JobStatus.Success); expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith( library.id, library.importPaths, @@ -337,7 +337,7 @@ describe(LibraryService.name, () => { }); it("should fail if library can't be found", async () => { - await expect(sut.handleQueueSyncAssets({ id: newUuid() 
})).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleQueueSyncAssets({ id: newUuid() })).resolves.toBe(JobStatus.Skipped); }); }); @@ -355,7 +355,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]); mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory')); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, @@ -376,7 +376,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]); mocks.storage.stat.mockRejectedValue(new Error('Could not read file')); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, @@ -397,7 +397,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]); mocks.storage.stat.mockRejectedValue(new Error('Could not read file')); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.updateAll).not.toHaveBeenCalled(); }); @@ -415,7 +415,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]); mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); 
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: false, @@ -436,7 +436,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]); mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.updateAll).not.toHaveBeenCalled(); @@ -456,7 +456,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]); mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.updateAll).not.toHaveBeenCalled(); @@ -476,7 +476,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]); mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.updateAll).not.toHaveBeenCalled(); }); @@ -494,7 +494,7 @@ describe(LibraryService.name, () => { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]); mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.updateAll).toHaveBeenCalledWith( [assetStub.trashedOffline.id], @@ -523,11 +523,11 @@ describe(LibraryService.name, () 
=> { mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]); mocks.storage.stat.mockResolvedValue({ mtime } as Stats); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.SIDECAR_DISCOVERY, + name: JobName.SidecarDiscovery, data: { id: assetStub.external.id, source: 'upload', @@ -557,7 +557,7 @@ describe(LibraryService.name, () => { mocks.asset.createAll.mockResolvedValue([assetStub.image]); mocks.library.get.mockResolvedValue(library); - await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.Success); expect(mocks.asset.createAll).toHaveBeenCalledWith([ expect.objectContaining({ @@ -565,7 +565,7 @@ describe(LibraryService.name, () => { libraryId: library.id, originalPath: '/data/user1/photo.jpg', deviceId: 'Library Import', - type: AssetType.IMAGE, + type: AssetType.Image, originalFileName: 'photo.jpg', isExternal: true, }), @@ -573,7 +573,7 @@ describe(LibraryService.name, () => { expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.SIDECAR_DISCOVERY, + name: JobName.SidecarDiscovery, data: { id: assetStub.image.id, source: 'upload', @@ -592,7 +592,7 @@ describe(LibraryService.name, () => { mocks.library.get.mockResolvedValue(library); - await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.FAILED); + await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.Failed); expect(mocks.asset.createAll.mock.calls).toEqual([]); }); @@ -607,7 +607,7 @@ describe(LibraryService.name, () => { await sut.delete(library.id); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.LIBRARY_DELETE, data: { id: library.id } }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.LibraryDelete, data: { id: 
library.id } }); expect(mocks.library.softDelete).toHaveBeenCalledWith(library.id); }); @@ -620,7 +620,7 @@ describe(LibraryService.name, () => { await sut.delete(library.id); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_DELETE, + name: JobName.LibraryDelete, data: { id: library.id }, }); @@ -838,11 +838,11 @@ describe(LibraryService.name, () => { const library2 = factory.library({ deletedAt: new Date() }); mocks.library.getAllDeleted.mockResolvedValue([library1, library2]); - await expect(sut.handleQueueCleanup()).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleQueueCleanup()).resolves.toBe(JobStatus.Success); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_DELETE, data: { id: library1.id } }, - { name: JobName.LIBRARY_DELETE, data: { id: library2.id } }, + { name: JobName.LibraryDelete, data: { id: library1.id } }, + { name: JobName.LibraryDelete, data: { id: library2.id } }, ]); }); }); @@ -968,7 +968,7 @@ describe(LibraryService.name, () => { await sut.watchAll(); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_SYNC_FILES, + name: JobName.LibrarySyncFiles, data: { libraryId: library.id, paths: ['/foo/photo.jpg'], @@ -989,7 +989,7 @@ describe(LibraryService.name, () => { await sut.watchAll(); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_SYNC_FILES, + name: JobName.LibrarySyncFiles, data: { libraryId: library.id, paths: ['/foo/photo.jpg'], @@ -1010,7 +1010,7 @@ describe(LibraryService.name, () => { await sut.watchAll(); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_ASSET_REMOVAL, + name: JobName.LibraryAssetRemoval, data: { libraryId: library.id, paths: [assetStub.image.originalPath], @@ -1106,7 +1106,7 @@ describe(LibraryService.name, () => { mocks.library.get.mockResolvedValue(library); mocks.library.streamAssetIds.mockReturnValue(makeStream([])); - await expect(sut.handleDeleteLibrary({ id: library.id 
})).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleDeleteLibrary({ id: library.id })).resolves.toBe(JobStatus.Success); expect(mocks.library.delete).toHaveBeenCalled(); }); @@ -1117,7 +1117,7 @@ describe(LibraryService.name, () => { mocks.library.get.mockResolvedValue(library); mocks.library.streamAssetIds.mockReturnValue(makeStream([assetStub.image1])); - await expect(sut.handleDeleteLibrary({ id: library.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleDeleteLibrary({ id: library.id })).resolves.toBe(JobStatus.Success); }); }); @@ -1131,11 +1131,11 @@ describe(LibraryService.name, () => { expect(mocks.job.queue).toHaveBeenCalledTimes(2); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_QUEUE_SYNC_FILES, + name: JobName.LibraryQueueSyncFiles, data: { id: library.id }, }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, + name: JobName.LibraryQueueSyncAssets, data: { id: library.id }, }); }); @@ -1147,14 +1147,14 @@ describe(LibraryService.name, () => { mocks.library.getAll.mockResolvedValue([library]); - await expect(sut.handleQueueScanAll()).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleQueueScanAll()).resolves.toBe(JobStatus.Success); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.LIBRARY_QUEUE_CLEANUP, + name: JobName.LibraryQueueCleanup, data: {}, }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_QUEUE_SYNC_FILES, data: { id: library.id } }, + { name: JobName.LibraryQueueSyncFiles, data: { id: library.id } }, ]); }); }); diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 911ea3b702..f8377081fa 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -32,7 +32,7 @@ export class LibraryService extends BaseService { private lock = false; private watchers: Record Promise> = {}; - @OnEvent({ name: 'ConfigInit', workers: 
[ImmichWorker.MICROSERVICES] }) + @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] }) async onConfigInit({ newConfig: { library: { watch, scan }, @@ -47,8 +47,7 @@ export class LibraryService extends BaseService { this.cronRepository.create({ name: CronJob.LibraryScan, expression: scan.cronExpression, - onTick: () => - handlePromiseError(this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL }), this.logger), + onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.LibraryQueueScanAll }), this.logger), start: scan.enabled, }); } @@ -103,7 +102,7 @@ export class LibraryService extends BaseService { if (matcher(path)) { this.logger.debug(`File ${event} event received for ${path} in library ${library.id}}`); await this.jobRepository.queue({ - name: JobName.LIBRARY_SYNC_FILES, + name: JobName.LibrarySyncFiles, data: { libraryId: library.id, paths: [path] }, }); } else { @@ -114,7 +113,7 @@ export class LibraryService extends BaseService { const deletionHandler = async (path: string) => { this.logger.debug(`File unlink event received for ${path} in library ${library.id}}`); await this.jobRepository.queue({ - name: JobName.LIBRARY_ASSET_REMOVAL, + name: JobName.LibraryAssetRemoval, data: { libraryId: library.id, paths: [path] }, }); }; @@ -199,7 +198,7 @@ export class LibraryService extends BaseService { return libraries.map((library) => mapLibrary(library)); } - @OnJob({ name: JobName.LIBRARY_QUEUE_CLEANUP, queue: QueueName.LIBRARY }) + @OnJob({ name: JobName.LibraryQueueCleanup, queue: QueueName.Library }) async handleQueueCleanup(): Promise { this.logger.log('Checking for any libraries pending deletion...'); const pendingDeletions = await this.libraryRepository.getAllDeleted(); @@ -208,11 +207,11 @@ export class LibraryService extends BaseService { this.logger.log(`Found ${pendingDeletions.length} ${libraryString} pending deletion, cleaning up...`); await this.jobRepository.queueAll( - 
pendingDeletions.map((libraryToDelete) => ({ name: JobName.LIBRARY_DELETE, data: { id: libraryToDelete.id } })), + pendingDeletions.map((libraryToDelete) => ({ name: JobName.LibraryDelete, data: { id: libraryToDelete.id } })), ); } - return JobStatus.SUCCESS; + return JobStatus.Success; } async create(dto: CreateLibraryDto): Promise { @@ -225,16 +224,16 @@ export class LibraryService extends BaseService { return mapLibrary(library); } - @OnJob({ name: JobName.LIBRARY_SYNC_FILES, queue: QueueName.LIBRARY }) - async handleSyncFiles(job: JobOf): Promise { + @OnJob({ name: JobName.LibrarySyncFiles, queue: QueueName.Library }) + async handleSyncFiles(job: JobOf): Promise { const library = await this.libraryRepository.get(job.libraryId); // We need to check if the library still exists as it could have been deleted after the scan was queued if (!library) { this.logger.debug(`Library ${job.libraryId} not found, skipping file import`); - return JobStatus.FAILED; + return JobStatus.Failed; } else if (library.deletedAt) { this.logger.debug(`Library ${job.libraryId} is deleted, won't import assets into it`); - return JobStatus.FAILED; + return JobStatus.Failed; } const assetImports: Insertable[] = []; @@ -263,7 +262,7 @@ export class LibraryService extends BaseService { await this.queuePostSyncJobs(assetIds); - return JobStatus.SUCCESS; + return JobStatus.Success; } private async validateImportPath(importPath: string): Promise { @@ -339,11 +338,11 @@ export class LibraryService extends BaseService { } await this.libraryRepository.softDelete(id); - await this.jobRepository.queue({ name: JobName.LIBRARY_DELETE, data: { id } }); + await this.jobRepository.queue({ name: JobName.LibraryDelete, data: { id } }); } - @OnJob({ name: JobName.LIBRARY_DELETE, queue: QueueName.LIBRARY }) - async handleDeleteLibrary(job: JobOf): Promise { + @OnJob({ name: JobName.LibraryDelete, queue: QueueName.Library }) + async handleDeleteLibrary(job: JobOf): Promise { const libraryId = job.id; await 
this.assetRepository.updateByLibraryId(libraryId, { deletedAt: new Date() }); @@ -356,7 +355,7 @@ export class LibraryService extends BaseService { assetsFound = true; this.logger.debug(`Queueing deletion of ${chunk.length} asset(s) in library ${libraryId}`); await this.jobRepository.queueAll( - chunk.map((id) => ({ name: JobName.ASSET_DELETION, data: { id, deleteOnDisk: false } })), + chunk.map((id) => ({ name: JobName.AssetDeletion, data: { id, deleteOnDisk: false } })), ); chunk = []; } @@ -379,7 +378,7 @@ export class LibraryService extends BaseService { await this.libraryRepository.delete(libraryId); } - return JobStatus.SUCCESS; + return JobStatus.Success; } private async processEntity(filePath: string, ownerId: string, libraryId: string) { @@ -398,7 +397,7 @@ export class LibraryService extends BaseService { // TODO: device asset id is deprecated, remove it deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''), deviceId: 'Library Import', - type: mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE, + type: mimeTypes.isVideo(assetPath) ? 
AssetType.Video : AssetType.Image, originalFileName: parse(assetPath).base, isExternal: true, livePhotoVideoId: null, @@ -411,7 +410,7 @@ export class LibraryService extends BaseService { // We queue a sidecar discovery which, in turn, queues metadata extraction await this.jobRepository.queueAll( assetIds.map((assetId) => ({ - name: JobName.SIDECAR_DISCOVERY, + name: JobName.SidecarDiscovery, data: { id: assetId, source: 'upload' }, })), ); @@ -423,30 +422,30 @@ export class LibraryService extends BaseService { this.logger.log(`Starting to scan library ${id}`); await this.jobRepository.queue({ - name: JobName.LIBRARY_QUEUE_SYNC_FILES, + name: JobName.LibraryQueueSyncFiles, data: { id, }, }); - await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } }); + await this.jobRepository.queue({ name: JobName.LibraryQueueSyncAssets, data: { id } }); } async queueScanAll() { - await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: {} }); + await this.jobRepository.queue({ name: JobName.LibraryQueueScanAll, data: {} }); } - @OnJob({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, queue: QueueName.LIBRARY }) + @OnJob({ name: JobName.LibraryQueueScanAll, queue: QueueName.Library }) async handleQueueScanAll(): Promise { this.logger.log(`Initiating scan of all external libraries...`); - await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} }); + await this.jobRepository.queue({ name: JobName.LibraryQueueCleanup, data: {} }); const libraries = await this.libraryRepository.getAll(true); await this.jobRepository.queueAll( libraries.map((library) => ({ - name: JobName.LIBRARY_QUEUE_SYNC_FILES, + name: JobName.LibraryQueueSyncFiles, data: { id: library.id, }, @@ -454,18 +453,18 @@ export class LibraryService extends BaseService { ); await this.jobRepository.queueAll( libraries.map((library) => ({ - name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, + name: JobName.LibraryQueueSyncAssets, data: { id: library.id, }, })), ); - 
return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY }) - async handleSyncAssets(job: JobOf): Promise { + @OnJob({ name: JobName.LibrarySyncAssets, queue: QueueName.Library }) + async handleSyncAssets(job: JobOf): Promise { const assets = await this.assetJobRepository.getForSyncAssets(job.assetIds); const assetIdsToOffline: string[] = []; @@ -486,7 +485,7 @@ export class LibraryService extends BaseService { const action = this.checkExistingAsset(asset, stat); switch (action) { case AssetSyncResult.OFFLINE: { - if (asset.status === AssetStatus.TRASHED) { + if (asset.status === AssetStatus.Trashed) { trashedAssetIdsToOffline.push(asset.id); } else { assetIdsToOffline.push(asset.id); @@ -511,7 +510,7 @@ export class LibraryService extends BaseService { if (!isExcluded) { this.logger.debug(`Offline asset ${asset.originalPath} is now online in library ${job.libraryId}`); - if (asset.status === AssetStatus.TRASHED) { + if (asset.status === AssetStatus.Trashed) { trashedAssetIdsToOnline.push(asset.id); } else { assetIdsToOnline.push(asset.id); @@ -557,7 +556,7 @@ export class LibraryService extends BaseService { `Checked existing asset(s): ${assetIdsToOffline.length + trashedAssetIdsToOffline.length} offlined, ${assetIdsToOnline.length + trashedAssetIdsToOnline.length} onlined, ${assetIdsToUpdate.length} updated, ${remainingCount} unchanged of current batch of ${assets.length} (Total progress: ${job.progressCounter} of ${job.totalAssets}, ${cumulativePercentage} %) in library ${job.libraryId}.`, ); - return JobStatus.SUCCESS; + return JobStatus.Success; } private checkExistingAsset( @@ -585,7 +584,7 @@ export class LibraryService extends BaseService { return AssetSyncResult.OFFLINE; } - if (asset.isOffline && asset.status !== AssetStatus.DELETED) { + if (asset.isOffline && asset.status !== AssetStatus.Deleted) { // Only perform the expensive check if the asset is offline return 
AssetSyncResult.CHECK_OFFLINE; } @@ -599,12 +598,12 @@ export class LibraryService extends BaseService { return AssetSyncResult.DO_NOTHING; } - @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, queue: QueueName.LIBRARY }) - async handleQueueSyncFiles(job: JobOf): Promise { + @OnJob({ name: JobName.LibraryQueueSyncFiles, queue: QueueName.Library }) + async handleQueueSyncFiles(job: JobOf): Promise { const library = await this.libraryRepository.get(job.id); if (!library) { this.logger.debug(`Library ${job.id} not found, skipping refresh`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } this.logger.debug(`Validating import paths for library ${library.id}...`); @@ -623,7 +622,7 @@ export class LibraryService extends BaseService { if (validImportPaths.length === 0) { this.logger.warn(`No valid import paths found for library ${library.id}`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const pathsOnDisk = this.storageRepository.walk({ @@ -646,7 +645,7 @@ export class LibraryService extends BaseService { importCount += paths.length; await this.jobRepository.queue({ - name: JobName.LIBRARY_SYNC_FILES, + name: JobName.LibrarySyncFiles, data: { libraryId: library.id, paths, @@ -666,11 +665,11 @@ export class LibraryService extends BaseService { await this.libraryRepository.update(job.id, { refreshedAt: new Date() }); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.LIBRARY_ASSET_REMOVAL, queue: QueueName.LIBRARY }) - async handleAssetRemoval(job: JobOf): Promise { + @OnJob({ name: JobName.LibraryAssetRemoval, queue: QueueName.Library }) + async handleAssetRemoval(job: JobOf): Promise { // This is only for handling file unlink events via the file watcher this.logger.verbose(`Deleting asset(s) ${job.paths} from library ${job.libraryId}`); for (const assetPath of job.paths) { @@ -680,20 +679,20 @@ export class LibraryService extends BaseService { } } - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ 
name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, queue: QueueName.LIBRARY }) - async handleQueueSyncAssets(job: JobOf): Promise { + @OnJob({ name: JobName.LibraryQueueSyncAssets, queue: QueueName.Library }) + async handleQueueSyncAssets(job: JobOf): Promise { const library = await this.libraryRepository.get(job.id); if (!library) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const assetCount = await this.assetRepository.getLibraryAssetCount(job.id); if (!assetCount) { this.logger.log(`Library ${library.id} is empty, no need to check assets`); - return JobStatus.SUCCESS; + return JobStatus.Success; } this.logger.log( @@ -713,7 +712,7 @@ export class LibraryService extends BaseService { ); if (affectedAssetCount === assetCount) { - return JobStatus.SUCCESS; + return JobStatus.Success; } let chunk: string[] = []; @@ -724,7 +723,7 @@ export class LibraryService extends BaseService { count += chunk.length; await this.jobRepository.queue({ - name: JobName.LIBRARY_SYNC_ASSETS, + name: JobName.LibrarySyncAssets, data: { libraryId: library.id, importPaths: library.importPaths, @@ -758,7 +757,7 @@ export class LibraryService extends BaseService { this.logger.log(`Finished queuing ${count} asset check(s) for library ${library.id}`); - return JobStatus.SUCCESS; + return JobStatus.Success; } private async findOrFail(id: string) { diff --git a/server/src/services/media.service.spec.ts b/server/src/services/media.service.spec.ts index 3b9eafde8f..f535bb65ff 100644 --- a/server/src/services/media.service.spec.ts +++ b/server/src/services/media.service.spec.ts @@ -12,7 +12,7 @@ import { JobName, JobStatus, RawExtractedFormat, - TranscodeHWAccel, + TranscodeHardwareAcceleration, TranscodePolicy, VideoCodec, } from 'src/enum'; @@ -49,7 +49,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_THUMBNAILS, + name: JobName.GenerateThumbnails, 
data: { id: assetStub.image.id }, }, ]); @@ -57,7 +57,7 @@ describe(MediaService.name, () => { expect(mocks.person.getAll).toHaveBeenCalledWith(undefined); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_PERSON_THUMBNAIL, + name: JobName.GeneratePersonThumbnail, data: { id: personStub.newThumbnail.id }, }, ]); @@ -72,7 +72,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_THUMBNAILS, + name: JobName.GenerateThumbnails, data: { id: assetStub.trashed.id }, }, ]); @@ -87,7 +87,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_THUMBNAILS, + name: JobName.GenerateThumbnails, data: { id: assetStub.archived.id }, }, ]); @@ -106,7 +106,7 @@ describe(MediaService.name, () => { expect(mocks.person.update).toHaveBeenCalledTimes(1); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_PERSON_THUMBNAIL, + name: JobName.GeneratePersonThumbnail, data: { id: personStub.newThumbnail.id, }, @@ -122,7 +122,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_THUMBNAILS, + name: JobName.GenerateThumbnails, data: { id: assetStub.image.id }, }, ]); @@ -138,7 +138,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_THUMBNAILS, + name: JobName.GenerateThumbnails, data: { id: assetStub.image.id }, }, ]); @@ -154,7 +154,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - 
name: JobName.GENERATE_THUMBNAILS, + name: JobName.GenerateThumbnails, data: { id: assetStub.image.id }, }, ]); @@ -169,14 +169,14 @@ describe(MediaService.name, () => { mocks.job.getJobCounts.mockResolvedValue({ active: 1, waiting: 0 } as JobCounts); mocks.person.getAll.mockReturnValue(makeStream([personStub.withName])); - await expect(sut.handleQueueMigration()).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleQueueMigration()).resolves.toBe(JobStatus.Success); expect(mocks.storage.removeEmptyDirs).toHaveBeenCalledTimes(2); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.MIGRATE_ASSET, data: { id: assetStub.image.id } }, + { name: JobName.MigrateAsset, data: { id: assetStub.image.id } }, ]); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.MIGRATE_PERSON, data: { id: personStub.withName.id } }, + { name: JobName.MigratePerson, data: { id: personStub.withName.id } }, ]); }); }); @@ -184,7 +184,7 @@ describe(MediaService.name, () => { describe('handleAssetMigration', () => { it('should fail if asset does not exist', async () => { mocks.assetJob.getForMigrationJob.mockResolvedValue(void 0); - await expect(sut.handleAssetMigration({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED); + await expect(sut.handleAssetMigration({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed); expect(mocks.move.getByEntity).not.toHaveBeenCalled(); }); @@ -196,25 +196,25 @@ describe(MediaService.name, () => { id: 'move-id', newPath: '/new/path', oldPath: '/old/path', - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, }); - await expect(sut.handleAssetMigration({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleAssetMigration({ id: assetStub.image.id })).resolves.toBe(JobStatus.Success); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: assetStub.image.id, - pathType: AssetPathType.FULLSIZE, + pathType: AssetPathType.FullSize, oldPath: 
'/uploads/user-id/fullsize/path.webp', newPath: 'upload/thumbs/user-id/as/se/asset-id-fullsize.jpeg', }); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: assetStub.image.id, - pathType: AssetPathType.PREVIEW, + pathType: AssetPathType.Preview, oldPath: '/uploads/user-id/thumbs/path.jpg', newPath: 'upload/thumbs/user-id/as/se/asset-id-preview.jpeg', }); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: assetStub.image.id, - pathType: AssetPathType.THUMBNAIL, + pathType: AssetPathType.Thumbnail, oldPath: '/uploads/user-id/webp/path.ext', newPath: 'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp', }); @@ -253,7 +253,7 @@ describe(MediaService.name, () => { it('should skip thumbnail generation if asset type is unknown', async () => { mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({ ...assetStub.image, type: 'foo' as AssetType }); - await expect(sut.handleGenerateThumbnails({ id: assetStub.image.id })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleGenerateThumbnails({ id: assetStub.image.id })).resolves.toBe(JobStatus.Skipped); expect(mocks.media.probe).not.toHaveBeenCalled(); expect(mocks.media.generateThumbnail).not.toHaveBeenCalled(); expect(mocks.asset.update).not.toHaveBeenCalledWith(); @@ -270,14 +270,14 @@ describe(MediaService.name, () => { it('should skip invisible assets', async () => { mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.livePhotoMotionAsset); - expect(await sut.handleGenerateThumbnails({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.SKIPPED); + expect(await sut.handleGenerateThumbnails({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.Skipped); expect(mocks.media.generateThumbnail).not.toHaveBeenCalled(); expect(mocks.asset.update).not.toHaveBeenCalledWith(); }); it('should delete previous preview if different path', async () => { - mocks.systemMetadata.get.mockResolvedValue({ image: { thumbnail: { format: ImageFormat.WEBP } } }); + 
mocks.systemMetadata.get.mockResolvedValue({ image: { thumbnail: { format: ImageFormat.Webp } } }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -309,7 +309,7 @@ describe(MediaService.name, () => { rawBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, size: 1440, quality: 80, processInvalidImages: false, @@ -321,7 +321,7 @@ describe(MediaService.name, () => { rawBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.WEBP, + format: ImageFormat.Webp, size: 250, quality: 80, processInvalidImages: false, @@ -340,12 +340,12 @@ describe(MediaService.name, () => { expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([ { assetId: 'asset-id', - type: AssetFileType.PREVIEW, + type: AssetFileType.Preview, path: 'upload/thumbs/user-id/as/se/asset-id-preview.jpeg', }, { assetId: 'asset-id', - type: AssetFileType.THUMBNAIL, + type: AssetFileType.Thumbnail, path: 'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp', }, ]); @@ -376,12 +376,12 @@ describe(MediaService.name, () => { expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([ { assetId: 'asset-id', - type: AssetFileType.PREVIEW, + type: AssetFileType.Preview, path: 'upload/thumbs/user-id/as/se/asset-id-preview.jpeg', }, { assetId: 'asset-id', - type: AssetFileType.THUMBNAIL, + type: AssetFileType.Thumbnail, path: 'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp', }, ]); @@ -411,12 +411,12 @@ describe(MediaService.name, () => { expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([ { assetId: 'asset-id', - type: AssetFileType.PREVIEW, + type: AssetFileType.Preview, path: 'upload/thumbs/user-id/as/se/asset-id-preview.jpeg', }, { assetId: 'asset-id', - type: AssetFileType.THUMBNAIL, + type: AssetFileType.Thumbnail, path: 'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp', }, ]); @@ -465,7 +465,7 @@ describe(MediaService.name, () => { it('should use scaling divisible by 2 
even when using quick sync', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video); await sut.handleGenerateThumbnails({ id: assetStub.video.id }); @@ -493,7 +493,7 @@ describe(MediaService.name, () => { expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se'); expect(mocks.media.decodeImage).toHaveBeenCalledOnce(); expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.image.originalPath, { - colorspace: Colorspace.SRGB, + colorspace: Colorspace.Srgb, processInvalidImages: false, size: 1440, }); @@ -502,7 +502,7 @@ describe(MediaService.name, () => { expect(mocks.media.generateThumbnail).toHaveBeenCalledWith( rawBuffer, { - colorspace: Colorspace.SRGB, + colorspace: Colorspace.Srgb, format, size: 1440, quality: 80, @@ -514,8 +514,8 @@ describe(MediaService.name, () => { expect(mocks.media.generateThumbnail).toHaveBeenCalledWith( rawBuffer, { - colorspace: Colorspace.SRGB, - format: ImageFormat.WEBP, + colorspace: Colorspace.Srgb, + format: ImageFormat.Webp, size: 250, quality: 80, processInvalidImages: false, @@ -538,7 +538,7 @@ describe(MediaService.name, () => { expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se'); expect(mocks.media.decodeImage).toHaveBeenCalledOnce(); expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.image.originalPath, { - colorspace: Colorspace.SRGB, + colorspace: Colorspace.Srgb, processInvalidImages: false, size: 1440, }); @@ -547,8 +547,8 @@ describe(MediaService.name, () => { expect(mocks.media.generateThumbnail).toHaveBeenCalledWith( rawBuffer, { - colorspace: Colorspace.SRGB, - format: ImageFormat.JPEG, + colorspace: Colorspace.Srgb, + format: ImageFormat.Jpeg, size: 
1440, quality: 80, processInvalidImages: false, @@ -559,7 +559,7 @@ describe(MediaService.name, () => { expect(mocks.media.generateThumbnail).toHaveBeenCalledWith( rawBuffer, { - colorspace: Colorspace.SRGB, + colorspace: Colorspace.Srgb, format, size: 250, quality: 80, @@ -571,7 +571,7 @@ describe(MediaService.name, () => { }); it('should delete previous thumbnail if different path', async () => { - mocks.systemMetadata.get.mockResolvedValue({ image: { thumbnail: { format: ImageFormat.WEBP } } }); + mocks.systemMetadata.get.mockResolvedValue({ image: { thumbnail: { format: ImageFormat.Webp } } }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); await sut.handleGenerateThumbnails({ id: assetStub.image.id }); @@ -580,7 +580,7 @@ describe(MediaService.name, () => { }); it('should extract embedded image if enabled and available', async () => { - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); @@ -596,7 +596,7 @@ describe(MediaService.name, () => { }); it('should resize original image if embedded image is too small', async () => { - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue({ width: 1000, height: 1000 }); mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); @@ -678,9 +678,9 @@ describe(MediaService.name, () => { it('should extract 
full-size JPEG preview from RAW', async () => { mocks.systemMetadata.get.mockResolvedValue({ - image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true }, + image: { fullsize: { enabled: true, format: ImageFormat.Webp }, extractEmbedded: true }, }); - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); @@ -698,7 +698,7 @@ describe(MediaService.name, () => { fullsizeBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, size: 1440, quality: 80, processInvalidImages: false, @@ -710,9 +710,9 @@ describe(MediaService.name, () => { it('should convert full-size WEBP preview from JXL preview of RAW', async () => { mocks.systemMetadata.get.mockResolvedValue({ - image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true }, + image: { fullsize: { enabled: true, format: ImageFormat.Webp }, extractEmbedded: true }, }); - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JXL }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jxl }); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); @@ -729,7 +729,7 @@ describe(MediaService.name, () => { fullsizeBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.WEBP, + format: ImageFormat.Webp, quality: 80, processInvalidImages: false, raw: rawInfo, @@ -740,7 +740,7 @@ describe(MediaService.name, () => { fullsizeBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, size: 1440, quality: 80, 
processInvalidImages: false, @@ -752,7 +752,7 @@ describe(MediaService.name, () => { it('should generate full-size preview directly from RAW images when extractEmbedded is false', async () => { mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: false } }); - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng); @@ -769,7 +769,7 @@ describe(MediaService.name, () => { rawBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, processInvalidImages: false, raw: rawInfo, @@ -780,7 +780,7 @@ describe(MediaService.name, () => { rawBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, size: 1440, processInvalidImages: false, @@ -792,7 +792,7 @@ describe(MediaService.name, () => { it('should generate full-size preview from non-web-friendly images', async () => { mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } }); - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); // HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari. 
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif); @@ -810,7 +810,7 @@ describe(MediaService.name, () => { rawBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, processInvalidImages: false, raw: rawInfo, @@ -821,7 +821,7 @@ describe(MediaService.name, () => { it('should skip generating full-size preview for web-friendly images', async () => { mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } }); - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image); @@ -829,7 +829,7 @@ describe(MediaService.name, () => { expect(mocks.media.decodeImage).toHaveBeenCalledOnce(); expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.image.originalPath, { - colorspace: Colorspace.SRGB, + colorspace: Colorspace.Srgb, processInvalidImages: false, size: 1440, }); @@ -844,9 +844,9 @@ describe(MediaService.name, () => { it('should respect encoding options when generating full-size preview', async () => { mocks.systemMetadata.get.mockResolvedValue({ - image: { fullsize: { enabled: true, format: ImageFormat.WEBP, quality: 90 } }, + image: { fullsize: { enabled: true, format: ImageFormat.Webp, quality: 90 } }, }); - mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 }); // HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari. 
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif); @@ -864,7 +864,7 @@ describe(MediaService.name, () => { rawBuffer, { colorspace: Colorspace.P3, - format: ImageFormat.WEBP, + format: ImageFormat.Webp, quality: 90, processInvalidImages: false, raw: rawInfo, @@ -878,7 +878,7 @@ describe(MediaService.name, () => { it('should skip if machine learning is disabled', async () => { mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled); - await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(JobStatus.Skipped); expect(mocks.asset.getByIds).not.toHaveBeenCalled(); expect(mocks.systemMetadata.get).toHaveBeenCalled(); }); @@ -907,7 +907,7 @@ describe(MediaService.name, () => { mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id); @@ -921,7 +921,7 @@ describe(MediaService.name, () => { data, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, crop: { left: 238, @@ -949,7 +949,7 @@ describe(MediaService.name, () => { mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id); @@ -963,7 +963,7 @@ describe(MediaService.name, () => { data, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, crop: { left: 238, @@ -991,7 +991,7 @@ describe(MediaService.name, () => { 
mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailStart.originalPath, { @@ -1003,7 +1003,7 @@ describe(MediaService.name, () => { data, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, crop: { left: 0, @@ -1028,7 +1028,7 @@ describe(MediaService.name, () => { mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailEnd.originalPath, { @@ -1040,7 +1040,7 @@ describe(MediaService.name, () => { data, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, crop: { left: 591, @@ -1065,7 +1065,7 @@ describe(MediaService.name, () => { mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.negativeCoordinate.originalPath, { @@ -1077,7 +1077,7 @@ describe(MediaService.name, () => { data, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, crop: { left: 0, @@ -1102,7 +1102,7 @@ describe(MediaService.name, () => { mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.overflowingCoordinate.originalPath, { @@ -1114,7 +1114,7 @@ 
describe(MediaService.name, () => { data, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, crop: { left: 4485, @@ -1138,12 +1138,12 @@ describe(MediaService.name, () => { const extracted = Buffer.from(''); const data = Buffer.from(''); const info = { width: 2160, height: 3840 } as OutputInfo; - mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.Jpeg }); mocks.media.decodeImage.mockResolvedValue({ data, info }); mocks.media.getImageDimensions.mockResolvedValue(info); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath); @@ -1156,7 +1156,7 @@ describe(MediaService.name, () => { data, { colorspace: Colorspace.P3, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, crop: { height: 844, @@ -1180,7 +1180,7 @@ describe(MediaService.name, () => { mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.extract).not.toHaveBeenCalled(); @@ -1196,7 +1196,7 @@ describe(MediaService.name, () => { mocks.media.decodeImage.mockResolvedValue({ data, info }); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath); @@ -1216,11 +1216,11 @@ describe(MediaService.name, () => { const data = Buffer.from(''); const info = { width: 1000, height: 1000 } as OutputInfo; mocks.media.decodeImage.mockResolvedValue({ data, info }); - 
mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.JPEG }); + mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.Jpeg }); mocks.media.getImageDimensions.mockResolvedValue(info); await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe( - JobStatus.SUCCESS, + JobStatus.Success, ); expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath); @@ -1243,7 +1243,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.VIDEO_CONVERSION, + name: JobName.VideoConversion, data: { id: assetStub.video.id }, }, ]); @@ -1257,7 +1257,7 @@ describe(MediaService.name, () => { expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(void 0); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.VIDEO_CONVERSION, + name: JobName.VideoConversion, data: { id: assetStub.video.id }, }, ]); @@ -1340,17 +1340,17 @@ describe(MediaService.name, () => { it('should throw an error if transcoding fails and hw acceleration is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { transcode: TranscodePolicy.ALL, accel: TranscodeHWAccel.DISABLED }, + ffmpeg: { transcode: TranscodePolicy.All, accel: TranscodeHardwareAcceleration.Disabled }, }); mocks.media.transcode.mockRejectedValue(new Error('Error transcoding video')); - await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED); + await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.Failed); expect(mocks.media.transcode).toHaveBeenCalledTimes(1); }); it('should transcode when set to all', async () => { 
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.ALL } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.All } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1365,7 +1365,7 @@ describe(MediaService.name, () => { it('should transcode when optimal and too big', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1380,7 +1380,7 @@ describe(MediaService.name, () => { it('should transcode when policy bitrate and bitrate higher than max bitrate', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream40Mbps); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.BITRATE, maxBitrate: '30M' } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Bitrate, maxBitrate: '30M' } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1395,7 +1395,7 @@ describe(MediaService.name, () => { it('should transcode when max bitrate is not a number', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream40Mbps); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.BITRATE, maxBitrate: 'foo' } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Bitrate, maxBitrate: 'foo' } }); await sut.handleVideoConversion({ id: assetStub.video.id }); 
expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1411,7 +1411,7 @@ describe(MediaService.name, () => { it('should not scale resolution if no target resolution', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' }, + ffmpeg: { transcode: TranscodePolicy.All, targetResolution: 'original' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -1427,7 +1427,7 @@ describe(MediaService.name, () => { it('should scale horizontally when video is horizontal', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1442,7 +1442,7 @@ describe(MediaService.name, () => { it('should scale vertically when video is vertical', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1458,7 +1458,7 @@ describe(MediaService.name, () => { it('should always scale video if height is uneven', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamOddHeight); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' }, + ffmpeg: { transcode: TranscodePolicy.All, 
targetResolution: 'original' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -1475,7 +1475,7 @@ describe(MediaService.name, () => { it('should always scale video if width is uneven', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamOddWidth); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' }, + ffmpeg: { transcode: TranscodePolicy.All, targetResolution: 'original' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -1492,7 +1492,7 @@ describe(MediaService.name, () => { it('should copy video stream when video matches target', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { targetVideoCodec: VideoCodec.HEVC, acceptedAudioCodecs: [AudioCodec.AAC] }, + ffmpeg: { targetVideoCodec: VideoCodec.Hevc, acceptedAudioCodecs: [AudioCodec.Aac] }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -1510,9 +1510,9 @@ describe(MediaService.name, () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamH264); mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { - targetVideoCodec: VideoCodec.HEVC, - acceptedVideoCodecs: [VideoCodec.H264, VideoCodec.HEVC], - acceptedAudioCodecs: [AudioCodec.AAC], + targetVideoCodec: VideoCodec.Hevc, + acceptedVideoCodecs: [VideoCodec.H264, VideoCodec.Hevc], + acceptedAudioCodecs: [AudioCodec.Aac], }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -1531,9 +1531,9 @@ describe(MediaService.name, () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { - targetVideoCodec: VideoCodec.HEVC, - acceptedVideoCodecs: [VideoCodec.H264, VideoCodec.HEVC], - 
acceptedAudioCodecs: [AudioCodec.AAC], + targetVideoCodec: VideoCodec.Hevc, + acceptedVideoCodecs: [VideoCodec.H264, VideoCodec.Hevc], + acceptedAudioCodecs: [AudioCodec.Aac], }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -1550,7 +1550,7 @@ describe(MediaService.name, () => { it('should copy audio stream when audio matches target', async () => { mocks.media.probe.mockResolvedValue(probeStub.audioStreamAac); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1587,14 +1587,14 @@ describe(MediaService.name, () => { it('should not transcode if transcoding is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Disabled } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).not.toHaveBeenCalled(); }); it('should not remux when input is not an accepted container and transcoding is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Disabled } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).not.toHaveBeenCalled(); }); @@ -1609,14 +1609,14 @@ describe(MediaService.name, () => { it('should delete existing transcode if current policy does not require transcoding', async () => { const asset = assetStub.hasEncodedVideo; 
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Disabled } }); mocks.assetJob.getForVideoConversion.mockResolvedValue(asset); await sut.handleVideoConversion({ id: asset.id }); expect(mocks.media.transcode).not.toHaveBeenCalled(); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.DELETE_FILES, + name: JobName.DeleteFiles, data: { files: [asset.encodedVideoPath] }, }); }); @@ -1687,7 +1687,7 @@ describe(MediaService.name, () => { ffmpeg: { maxBitrate: '4500k', twoPass: true, - targetVideoCodec: VideoCodec.VP9, + targetVideoCodec: VideoCodec.Vp9, }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -1708,7 +1708,7 @@ describe(MediaService.name, () => { ffmpeg: { maxBitrate: '0', twoPass: true, - targetVideoCodec: VideoCodec.VP9, + targetVideoCodec: VideoCodec.Vp9, }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -1725,7 +1725,7 @@ describe(MediaService.name, () => { it('should configure preset for vp9', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, preset: 'slow' } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Vp9, preset: 'slow' } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1740,7 +1740,7 @@ describe(MediaService.name, () => { it('should not configure preset for vp9 if invalid', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { preset: 'invalid', targetVideoCodec: VideoCodec.VP9 } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { preset: 'invalid', 
targetVideoCodec: VideoCodec.Vp9 } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1755,7 +1755,7 @@ describe(MediaService.name, () => { it('should configure threads if above 0', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, threads: 2 } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Vp9, threads: 2 } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1800,7 +1800,7 @@ describe(MediaService.name, () => { it('should disable thread pooling for hevc if thread limit is 1', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1, targetVideoCodec: VideoCodec.HEVC } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1, targetVideoCodec: VideoCodec.Hevc } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1815,7 +1815,7 @@ describe(MediaService.name, () => { it('should omit thread flags for hevc if thread limit is at or below 0', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0, targetVideoCodec: VideoCodec.HEVC } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0, targetVideoCodec: VideoCodec.Hevc } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1830,7 +1830,7 @@ describe(MediaService.name, () => { it('should use av1 if specified', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); - 
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1 } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1 } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1855,7 +1855,7 @@ describe(MediaService.name, () => { it('should map `veryslow` preset to 4 for av1', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, preset: 'veryslow' } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1, preset: 'veryslow' } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1870,7 +1870,7 @@ describe(MediaService.name, () => { it('should set max bitrate for av1 if specified', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, maxBitrate: '2M' } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1, maxBitrate: '2M' } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1885,7 +1885,7 @@ describe(MediaService.name, () => { it('should set threads for av1 if specified', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4 } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1, threads: 4 } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1901,7 +1901,7 @@ 
describe(MediaService.name, () => { it('should set both bitrate and threads for av1 if specified', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4, maxBitrate: '2M' }, + ffmpeg: { targetVideoCodec: VideoCodec.Av1, threads: 4, maxBitrate: '2M' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -1919,8 +1919,8 @@ describe(MediaService.name, () => { mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams); mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { - targetVideoCodec: VideoCodec.HEVC, - transcode: TranscodePolicy.OPTIMAL, + targetVideoCodec: VideoCodec.Hevc, + transcode: TranscodePolicy.Optimal, targetResolution: '1080p', }, }); @@ -1931,7 +1931,7 @@ describe(MediaService.name, () => { it('should fail if hwaccel is enabled for an unsupported codec', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.NVENC, targetVideoCodec: VideoCodec.VP9 }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, targetVideoCodec: VideoCodec.Vp9 }, }); await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError(); expect(mocks.media.transcode).not.toHaveBeenCalled(); @@ -1946,7 +1946,7 @@ describe(MediaService.name, () => { it('should set options for nvenc', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -1979,7 +1979,7 @@ describe(MediaService.name, () => { 
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { - accel: TranscodeHWAccel.NVENC, + accel: TranscodeHardwareAcceleration.Nvenc, maxBitrate: '10000k', twoPass: true, }, @@ -1998,7 +1998,9 @@ describe(MediaService.name, () => { it('should set vbr options for nvenc when max bitrate is enabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, maxBitrate: '10000k' }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2013,7 +2015,9 @@ describe(MediaService.name, () => { it('should set cq options for nvenc when max bitrate is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, maxBitrate: '10000k' }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2028,7 +2032,9 @@ describe(MediaService.name, () => { it('should omit preset for nvenc if invalid', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, preset: 'invalid' } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, preset: 'invalid' }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2043,7 +2049,7 @@ 
describe(MediaService.name, () => { it('should ignore two pass for nvenc if max bitrate is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2059,7 +2065,7 @@ describe(MediaService.name, () => { it('should use hardware decoding for nvenc if enabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2081,7 +2087,7 @@ describe(MediaService.name, () => { it('should use hardware tone-mapping for nvenc if hardware decoding is enabled and should tone map', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2102,7 +2108,7 @@ describe(MediaService.name, () => { it('should set format to nv12 for nvenc if input is not yuv420p', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, accelDecode: true }, }); await sut.handleVideoConversion({ id: 
assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2118,7 +2124,9 @@ describe(MediaService.name, () => { it('should set options for qsv', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, maxBitrate: '10000k' } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, maxBitrate: '10000k' }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2154,7 +2162,7 @@ describe(MediaService.name, () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { - accel: TranscodeHWAccel.QSV, + accel: TranscodeHardwareAcceleration.Qsv, maxBitrate: '10000k', preferredHwDevice: '/dev/dri/renderD128', }, @@ -2176,7 +2184,9 @@ describe(MediaService.name, () => { it('should omit preset for qsv if invalid', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, preset: 'invalid' } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, preset: 'invalid' }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2195,7 +2205,7 @@ describe(MediaService.name, () => { it('should set low power mode for qsv if target video codec is vp9', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.QSV, targetVideoCodec: VideoCodec.VP9 }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, targetVideoCodec: VideoCodec.Vp9 }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); 
expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2215,7 +2225,7 @@ describe(MediaService.name, () => { it('should fail for qsv if no hw devices', async () => { sut.videoInterfaces = { dri: [], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } }); await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError(); @@ -2225,7 +2235,7 @@ describe(MediaService.name, () => { it('should prefer higher index renderD* device for qsv', async () => { sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2244,7 +2254,7 @@ describe(MediaService.name, () => { it('should use hardware decoding for qsv if enabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2270,7 +2280,7 @@ describe(MediaService.name, () => { it('should use hardware tone-mapping for qsv if hardware decoding is enabled and should tone map', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true }, + ffmpeg: { accel: 
TranscodeHardwareAcceleration.Qsv, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2299,7 +2309,7 @@ describe(MediaService.name, () => { sut.videoInterfaces = { dri: ['renderD128', 'renderD129', 'renderD130'], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true, preferredHwDevice: 'renderD129' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, accelDecode: true, preferredHwDevice: 'renderD129' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2317,7 +2327,7 @@ describe(MediaService.name, () => { it('should set format to nv12 for qsv if input is not yuv420p', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2340,7 +2350,7 @@ describe(MediaService.name, () => { it('should set options for vaapi', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2370,7 +2380,9 @@ describe(MediaService.name, () => { it('should set vbr options for vaapi when max bitrate is enabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, maxBitrate: '10000k' } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: 
TranscodeHardwareAcceleration.Vaapi, maxBitrate: '10000k' }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2394,7 +2406,7 @@ describe(MediaService.name, () => { it('should set cq options for vaapi when max bitrate is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2418,7 +2430,9 @@ describe(MediaService.name, () => { it('should omit preset for vaapi if invalid', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, preset: 'invalid' } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, preset: 'invalid' }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2437,7 +2451,7 @@ describe(MediaService.name, () => { it('should prefer higher index renderD* device for vaapi', async () => { sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2457,7 +2471,7 @@ describe(MediaService.name, () => { 
sut.videoInterfaces = { dri: ['renderD129', 'card1', 'card0', 'renderD128'], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.VAAPI, preferredHwDevice: '/dev/dri/renderD128' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, preferredHwDevice: '/dev/dri/renderD128' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2477,7 +2491,7 @@ describe(MediaService.name, () => { it('should use hardware decoding for vaapi if enabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2502,7 +2516,7 @@ describe(MediaService.name, () => { it('should use hardware tone-mapping for vaapi if hardware decoding is enabled and should tone map', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2525,7 +2539,7 @@ describe(MediaService.name, () => { it('should set format to nv12 for vaapi if input is not yuv420p', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2545,7 +2559,7 @@ describe(MediaService.name, () => { sut.videoInterfaces = { dri: ['renderD128', 
'renderD129', 'renderD130'], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true, preferredHwDevice: 'renderD129' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true, preferredHwDevice: 'renderD129' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2562,7 +2576,9 @@ describe(MediaService.name, () => { it('should fallback to hw encoding and sw decoding if hw transcoding fails and hw decoding is enabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true }, + }); mocks.media.transcode.mockRejectedValueOnce(new Error('error')); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledTimes(2); @@ -2582,7 +2598,9 @@ describe(MediaService.name, () => { it('should fallback to sw decoding if fallback to sw decoding + hw encoding fails', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true }, + }); mocks.media.transcode.mockRejectedValueOnce(new Error('error')); mocks.media.transcode.mockRejectedValueOnce(new Error('error')); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2600,7 +2618,7 @@ describe(MediaService.name, () => { it('should fallback to sw transcoding if hw transcoding fails and hw decoding is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - 
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } }); mocks.media.transcode.mockRejectedValueOnce(new Error('error')); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledTimes(2); @@ -2618,14 +2636,16 @@ describe(MediaService.name, () => { it('should fail for vaapi if no hw devices', async () => { sut.videoInterfaces = { dri: [], mali: true }; mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } }); await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError(); expect(mocks.media.transcode).not.toHaveBeenCalled(); }); it('should set options for rkmpp', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true } }); + mocks.systemMetadata.get.mockResolvedValue({ + ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true }, + }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2660,10 +2680,10 @@ describe(MediaService.name, () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9); mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { - accel: TranscodeHWAccel.RKMPP, + accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, maxBitrate: '10000k', - targetVideoCodec: VideoCodec.HEVC, + targetVideoCodec: VideoCodec.Hevc, }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); @@ -2681,7 +2701,7 @@ describe(MediaService.name, () => { it('should set cqp options for rkmpp when max 
bitrate is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2698,7 +2718,7 @@ describe(MediaService.name, () => { it('should set OpenCL tonemapping options for rkmpp when OpenCL is available', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2720,7 +2740,7 @@ describe(MediaService.name, () => { sut.videoInterfaces = { dri: ['renderD128'], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2739,7 +2759,7 @@ describe(MediaService.name, () => { it('should use software decoding and tone-mapping if hardware decoding is disabled', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: false, crf: 30, maxBitrate: '0' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: false, crf: 
30, maxBitrate: '0' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2761,7 +2781,7 @@ describe(MediaService.name, () => { sut.videoInterfaces = { dri: ['renderD128'], mali: false }; mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); mocks.systemMetadata.get.mockResolvedValue({ - ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' }, + ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' }, }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( @@ -2781,7 +2801,7 @@ describe(MediaService.name, () => { it('should tonemap when policy is required and video is hdr', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Required } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2800,7 +2820,7 @@ describe(MediaService.name, () => { it('should tonemap when policy is optimal and video is hdr', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2819,7 +2839,7 @@ describe(MediaService.name, () => { it('should transcode when policy is required and video is not yuv420p', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { 
transcode: TranscodePolicy.REQUIRED } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Required } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', @@ -2834,7 +2854,7 @@ describe(MediaService.name, () => { it('should convert to yuv420p when scaling without tone-mapping', async () => { mocks.media.probe.mockResolvedValue(probeStub.videoStream4K10Bit); - mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } }); + mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Required } }); await sut.handleVideoConversion({ id: assetStub.video.id }); expect(mocks.media.transcode).toHaveBeenCalledWith( '/original/path.ext', diff --git a/server/src/services/media.service.ts b/server/src/services/media.service.ts index 452e4df5eb..b48df3797a 100644 --- a/server/src/services/media.service.ts +++ b/server/src/services/media.service.ts @@ -18,7 +18,7 @@ import { QueueName, RawExtractedFormat, StorageFolder, - TranscodeHWAccel, + TranscodeHardwareAcceleration, TranscodePolicy, TranscodeTarget, VideoCodec, @@ -57,8 +57,8 @@ export class MediaService extends BaseService { this.videoInterfaces = { dri, mali }; } - @OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION }) - async handleQueueGenerateThumbnails({ force }: JobOf): Promise { + @OnJob({ name: JobName.QueueGenerateThumbnails, queue: QueueName.ThumbnailGeneration }) + async handleQueueGenerateThumbnails({ force }: JobOf): Promise { let jobs: JobItem[] = []; const queueAll = async () => { @@ -70,7 +70,7 @@ export class MediaService extends BaseService { const { previewFile, thumbnailFile } = getAssetFiles(asset.files); if (!previewFile || !thumbnailFile || !asset.thumbhash || force) { - jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } }); + jobs.push({ name: 
JobName.GenerateThumbnails, data: { id: asset.id } }); } if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) { @@ -92,7 +92,7 @@ export class MediaService extends BaseService { await this.personRepository.update({ id: person.id, faceAssetId: face.id }); } - jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } }); + jobs.push({ name: JobName.GeneratePersonThumbnail, data: { id: person.id } }); if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) { await queueAll(); } @@ -100,21 +100,21 @@ export class MediaService extends BaseService { await queueAll(); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.QUEUE_MIGRATION, queue: QueueName.MIGRATION }) + @OnJob({ name: JobName.QueueMigration, queue: QueueName.Migration }) async handleQueueMigration(): Promise { - const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION); + const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.Migration); if (active === 1 && waiting === 0) { - await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS); - await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO); + await this.storageCore.removeEmptyDirs(StorageFolder.Thumbnails); + await this.storageCore.removeEmptyDirs(StorageFolder.EncodedVideo); } let jobs: JobItem[] = []; const assets = this.assetJobRepository.streamForMigrationJob(); for await (const asset of assets) { - jobs.push({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } }); + jobs.push({ name: JobName.MigrateAsset, data: { id: asset.id } }); if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) { await this.jobRepository.queueAll(jobs); jobs = []; @@ -125,7 +125,7 @@ export class MediaService extends BaseService { jobs = []; for await (const person of this.personRepository.getAll()) { - jobs.push({ name: JobName.MIGRATE_PERSON, data: { id: person.id } }); + jobs.push({ name: JobName.MigratePerson, data: { id: person.id } }); if (jobs.length === 
JOBS_ASSET_PAGINATION_SIZE) { await this.jobRepository.queueAll(jobs); @@ -135,36 +135,36 @@ export class MediaService extends BaseService { await this.jobRepository.queueAll(jobs); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.MIGRATE_ASSET, queue: QueueName.MIGRATION }) - async handleAssetMigration({ id }: JobOf): Promise { + @OnJob({ name: JobName.MigrateAsset, queue: QueueName.Migration }) + async handleAssetMigration({ id }: JobOf): Promise { const { image } = await this.getConfig({ withCache: true }); const asset = await this.assetJobRepository.getForMigrationJob(id); if (!asset) { - return JobStatus.FAILED; + return JobStatus.Failed; } - await this.storageCore.moveAssetImage(asset, AssetPathType.FULLSIZE, image.fullsize.format); - await this.storageCore.moveAssetImage(asset, AssetPathType.PREVIEW, image.preview.format); - await this.storageCore.moveAssetImage(asset, AssetPathType.THUMBNAIL, image.thumbnail.format); + await this.storageCore.moveAssetImage(asset, AssetPathType.FullSize, image.fullsize.format); + await this.storageCore.moveAssetImage(asset, AssetPathType.Preview, image.preview.format); + await this.storageCore.moveAssetImage(asset, AssetPathType.Thumbnail, image.thumbnail.format); await this.storageCore.moveAssetVideo(asset); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION }) - async handleGenerateThumbnails({ id }: JobOf): Promise { + @OnJob({ name: JobName.GenerateThumbnails, queue: QueueName.ThumbnailGeneration }) + async handleGenerateThumbnails({ id }: JobOf): Promise { const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id); if (!asset) { this.logger.warn(`Thumbnail generation failed for asset ${id}: not found`); - return JobStatus.FAILED; + return JobStatus.Failed; } - if (asset.visibility === AssetVisibility.HIDDEN) { + if (asset.visibility === AssetVisibility.Hidden) { 
this.logger.verbose(`Thumbnail generation skipped for asset ${id}: not visible`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } let generated: { @@ -173,27 +173,27 @@ export class MediaService extends BaseService { fullsizePath?: string; thumbhash: Buffer; }; - if (asset.type === AssetType.VIDEO || asset.originalFileName.toLowerCase().endsWith('.gif')) { + if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) { generated = await this.generateVideoThumbnails(asset); - } else if (asset.type === AssetType.IMAGE) { + } else if (asset.type === AssetType.Image) { generated = await this.generateImageThumbnails(asset); } else { this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const { previewFile, thumbnailFile, fullsizeFile } = getAssetFiles(asset.files); const toUpsert: UpsertFileOptions[] = []; if (previewFile?.path !== generated.previewPath) { - toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.PREVIEW }); + toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.Preview }); } if (thumbnailFile?.path !== generated.thumbnailPath) { - toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.THUMBNAIL }); + toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.Thumbnail }); } if (generated.fullsizePath && fullsizeFile?.path !== generated.fullsizePath) { - toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FULLSIZE }); + toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FullSize }); } if (toUpsert.length > 0) { @@ -230,7 +230,7 @@ export class MediaService extends BaseService { await this.assetRepository.upsertJobStatus({ assetId: asset.id, previewAt: new Date(), thumbnailAt: new Date() }); - return JobStatus.SUCCESS; + 
return JobStatus.Success; } private async extractImage(originalPath: string, minSize: number) { @@ -244,7 +244,7 @@ export class MediaService extends BaseService { private async decodeImage(thumbSource: string | Buffer, exifInfo: Exif, targetSize?: number) { const { image } = await this.getConfig({ withCache: true }); - const colorspace = this.isSRGB(exifInfo) ? Colorspace.SRGB : image.colorspace; + const colorspace = this.isSRGB(exifInfo) ? Colorspace.Srgb : image.colorspace; const decodeOptions: DecodeToBufferOptions = { colorspace, processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true', @@ -264,8 +264,8 @@ export class MediaService extends BaseService { exifInfo: Exif; }) { const { image } = await this.getConfig({ withCache: true }); - const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format); - const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format); + const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format); + const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format); this.storageCore.ensureFolders(previewPath); // Handle embedded preview extraction for RAW files @@ -294,11 +294,11 @@ export class MediaService extends BaseService { if (convertFullsize) { // convert a new fullsize image from the same source as the thumbnail - fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, image.fullsize.format); + fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, image.fullsize.format); const fullsizeOptions = { format: image.fullsize.format, quality: image.fullsize.quality, ...thumbnailOptions }; promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath)); - } else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.JPEG) { - fullsizePath = StorageCore.getImagePath(asset, 
AssetPathType.FULLSIZE, extracted.format); + } else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.Jpeg) { + fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, extracted.format); this.storageCore.ensureFolders(fullsizePath); // Write the buffer to disk with essential EXIF data @@ -317,25 +317,25 @@ export class MediaService extends BaseService { return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer }; } - @OnJob({ name: JobName.GENERATE_PERSON_THUMBNAIL, queue: QueueName.THUMBNAIL_GENERATION }) - async handleGeneratePersonThumbnail({ id }: JobOf): Promise { + @OnJob({ name: JobName.GeneratePersonThumbnail, queue: QueueName.ThumbnailGeneration }) + async handleGeneratePersonThumbnail({ id }: JobOf): Promise { const { machineLearning, metadata, image } = await this.getConfig({ withCache: true }); if (!isFacialRecognitionEnabled(machineLearning) && !isFaceImportEnabled(metadata)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const data = await this.personRepository.getDataForThumbnailGenerationJob(id); if (!data) { this.logger.error(`Could not generate person thumbnail for ${id}: missing data`); - return JobStatus.FAILED; + return JobStatus.Failed; } const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight, exifOrientation, previewPath, originalPath } = data; let inputImage: string | Buffer; - if (data.type === AssetType.VIDEO) { + if (data.type === AssetType.Video) { if (!previewPath) { this.logger.error(`Could not generate person thumbnail for video ${id}: missing preview path`); - return JobStatus.FAILED; + return JobStatus.Failed; } inputImage = previewPath; } else if (image.extractEmbedded && mimeTypes.isRaw(originalPath)) { @@ -357,7 +357,7 @@ export class MediaService extends BaseService { const thumbnailOptions = { colorspace: image.colorspace, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, raw: info, quality: image.thumbnail.quality, crop: this.getCrop( @@ -371,7 
+371,7 @@ export class MediaService extends BaseService { await this.mediaRepository.generateThumbnail(decodedImage, thumbnailOptions, thumbnailPath); await this.personRepository.update({ id, thumbnailPath }); - return JobStatus.SUCCESS; + return JobStatus.Success; } private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions { @@ -411,8 +411,8 @@ export class MediaService extends BaseService { private async generateVideoThumbnails(asset: ThumbnailPathEntity & { originalPath: string }) { const { image, ffmpeg } = await this.getConfig({ withCache: true }); - const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format); - const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format); + const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format); + const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format); this.storageCore.ensureFolders(previewPath); const { format, audioStreams, videoStreams } = await this.mediaRepository.probe(asset.originalPath); @@ -424,9 +424,9 @@ export class MediaService extends BaseService { const previewConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.preview.size.toString() }); const thumbnailConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() }); - const previewOptions = previewConfig.getCommand(TranscodeTarget.VIDEO, mainVideoStream, mainAudioStream, format); + const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, mainVideoStream, mainAudioStream, format); const thumbnailOptions = thumbnailConfig.getCommand( - TranscodeTarget.VIDEO, + TranscodeTarget.Video, mainVideoStream, mainAudioStream, format, @@ -443,13 +443,13 @@ export class MediaService extends BaseService { return { previewPath, thumbnailPath, thumbhash }; } - @OnJob({ name: 
JobName.QUEUE_VIDEO_CONVERSION, queue: QueueName.VIDEO_CONVERSION }) - async handleQueueVideoConversion(job: JobOf): Promise { + @OnJob({ name: JobName.QueueVideoConversion, queue: QueueName.VideoConversion }) + async handleQueueVideoConversion(job: JobOf): Promise { const { force } = job; - let queue: { name: JobName.VIDEO_CONVERSION; data: { id: string } }[] = []; + let queue: { name: JobName.VideoConversion; data: { id: string } }[] = []; for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) { - queue.push({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } }); + queue.push({ name: JobName.VideoConversion, data: { id: asset.id } }); if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) { await this.jobRepository.queueAll(queue); @@ -459,14 +459,14 @@ export class MediaService extends BaseService { await this.jobRepository.queueAll(queue); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.VIDEO_CONVERSION, queue: QueueName.VIDEO_CONVERSION }) - async handleVideoConversion({ id }: JobOf): Promise { + @OnJob({ name: JobName.VideoConversion, queue: QueueName.VideoConversion }) + async handleVideoConversion({ id }: JobOf): Promise { const asset = await this.assetJobRepository.getForVideoConversion(id); if (!asset) { - return JobStatus.FAILED; + return JobStatus.Failed; } const input = asset.originalPath; @@ -474,35 +474,35 @@ export class MediaService extends BaseService { this.storageCore.ensureFolders(output); const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, { - countFrames: this.logger.isLevelEnabled(LogLevel.DEBUG), // makes frame count more reliable for progress logs + countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs }); const videoStream = this.getMainStream(videoStreams); const audioStream = this.getMainStream(audioStreams); if (!videoStream || !format.formatName) { - return JobStatus.FAILED; + 
return JobStatus.Failed; } if (!videoStream.height || !videoStream.width) { this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`); - return JobStatus.FAILED; + return JobStatus.Failed; } let { ffmpeg } = await this.getConfig({ withCache: true }); const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream); - if (target === TranscodeTarget.NONE && !this.isRemuxRequired(ffmpeg, format)) { + if (target === TranscodeTarget.None && !this.isRemuxRequired(ffmpeg, format)) { if (asset.encodedVideoPath) { this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`); - await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [asset.encodedVideoPath] } }); + await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [asset.encodedVideoPath] } }); await this.assetRepository.update({ id: asset.id, encodedVideoPath: null }); } else { this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`); } - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream); - if (ffmpeg.accel === TranscodeHWAccel.DISABLED) { + if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) { this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`); } else { this.logger.log( @@ -514,8 +514,8 @@ export class MediaService extends BaseService { await this.mediaRepository.transcode(input, output, command); } catch (error: any) { this.logger.error(`Error occurred during transcoding: ${error.message}`); - if (ffmpeg.accel === TranscodeHWAccel.DISABLED) { - return JobStatus.FAILED; + if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) { + return JobStatus.Failed; } let partialFallbackSuccess = false; @@ -533,7 +533,7 @@ export class MediaService extends BaseService { if (!partialFallbackSuccess) { 
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`); - ffmpeg = { ...ffmpeg, accel: TranscodeHWAccel.DISABLED }; + ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled }; const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream); await this.mediaRepository.transcode(input, output, command); } @@ -543,7 +543,7 @@ export class MediaService extends BaseService { await this.assetRepository.update({ id: asset.id, encodedVideoPath: output }); - return JobStatus.SUCCESS; + return JobStatus.Success; } private getMainStream(streams: T[]): T { @@ -561,18 +561,18 @@ export class MediaService extends BaseService { const isVideoTranscodeRequired = this.isVideoTranscodeRequired(config, videoStream); if (isAudioTranscodeRequired && isVideoTranscodeRequired) { - return TranscodeTarget.ALL; + return TranscodeTarget.All; } if (isAudioTranscodeRequired) { - return TranscodeTarget.AUDIO; + return TranscodeTarget.Audio; } if (isVideoTranscodeRequired) { - return TranscodeTarget.VIDEO; + return TranscodeTarget.Video; } - return TranscodeTarget.NONE; + return TranscodeTarget.None; } private isAudioTranscodeRequired(ffmpegConfig: SystemConfigFFmpegDto, stream?: AudioStreamInfo): boolean { @@ -581,15 +581,15 @@ export class MediaService extends BaseService { } switch (ffmpegConfig.transcode) { - case TranscodePolicy.DISABLED: { + case TranscodePolicy.Disabled: { return false; } - case TranscodePolicy.ALL: { + case TranscodePolicy.All: { return true; } - case TranscodePolicy.REQUIRED: - case TranscodePolicy.OPTIMAL: - case TranscodePolicy.BITRATE: { + case TranscodePolicy.Required: + case TranscodePolicy.Optimal: + case TranscodePolicy.Bitrate: { return !ffmpegConfig.acceptedAudioCodecs.includes(stream.codecName as AudioCodec); } default: { @@ -608,19 +608,19 @@ export class MediaService extends BaseService { const isRequired = !isTargetVideoCodec || !stream.pixelFormat.endsWith('420p'); 
switch (ffmpegConfig.transcode) { - case TranscodePolicy.DISABLED: { + case TranscodePolicy.Disabled: { return false; } - case TranscodePolicy.ALL: { + case TranscodePolicy.All: { return true; } - case TranscodePolicy.REQUIRED: { + case TranscodePolicy.Required: { return isRequired; } - case TranscodePolicy.OPTIMAL: { + case TranscodePolicy.Optimal: { return isRequired || isLargerThanTargetRes; } - case TranscodePolicy.BITRATE: { + case TranscodePolicy.Bitrate: { return isRequired || isLargerThanTargetBitrate; } default: { @@ -630,12 +630,12 @@ export class MediaService extends BaseService { } private isRemuxRequired(ffmpegConfig: SystemConfigFFmpegDto, { formatName, formatLongName }: VideoFormat): boolean { - if (ffmpegConfig.transcode === TranscodePolicy.DISABLED) { + if (ffmpegConfig.transcode === TranscodePolicy.Disabled) { return false; } - const name = formatLongName === 'QuickTime / MOV' ? VideoContainer.MOV : (formatName as VideoContainer); - return name !== VideoContainer.MP4 && !ffmpegConfig.acceptedContainers.includes(name); + const name = formatLongName === 'QuickTime / MOV' ? 
VideoContainer.Mov : (formatName as VideoContainer); + return name !== VideoContainer.Mp4 && !ffmpegConfig.acceptedContainers.includes(name); } isSRGB({ colorspace, profileDescription, bitsPerSample }: Exif): boolean { diff --git a/server/src/services/memory.service.ts b/server/src/services/memory.service.ts index b0ea697edb..ff88b9029c 100644 --- a/server/src/services/memory.service.ts +++ b/server/src/services/memory.service.ts @@ -12,7 +12,7 @@ const DAYS = 3; @Injectable() export class MemoryService extends BaseService { - @OnJob({ name: JobName.MEMORIES_CREATE, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.MemoriesCreate, queue: QueueName.BackgroundTask }) async onMemoriesCreate() { const users = await this.userRepository.getList({ withDeleted: false }); const usersIds = await Promise.all( @@ -26,7 +26,7 @@ export class MemoryService extends BaseService { ); await this.databaseRepository.withLock(DatabaseLock.MemoryCreation, async () => { - const state = await this.systemMetadataRepository.get(SystemMetadataKey.MEMORIES_STATE); + const state = await this.systemMetadataRepository.get(SystemMetadataKey.MemoriesState); const start = DateTime.utc().startOf('day').minus({ days: DAYS }); const lastOnThisDayDate = state?.lastOnThisDayDate ? 
DateTime.fromISO(state.lastOnThisDayDate) : start; @@ -43,7 +43,7 @@ export class MemoryService extends BaseService { this.logger.error(`Failed to create memories for ${target.toISO()}`, error); } // update system metadata even when there is an error to minimize the chance of duplicates - await this.systemMetadataRepository.set(SystemMetadataKey.MEMORIES_STATE, { + await this.systemMetadataRepository.set(SystemMetadataKey.MemoriesState, { ...state, lastOnThisDayDate: target.toISO(), }); @@ -60,7 +60,7 @@ export class MemoryService extends BaseService { this.memoryRepository.create( { ownerId, - type: MemoryType.ON_THIS_DAY, + type: MemoryType.OnThisDay, data: { year }, memoryAt: target.set({ year }).toISO()!, showAt, @@ -72,7 +72,7 @@ export class MemoryService extends BaseService { ); } - @OnJob({ name: JobName.MEMORIES_CLEANUP, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.MemoriesCleanup, queue: QueueName.BackgroundTask }) async onMemoriesCleanup() { await this.memoryRepository.cleanup(); } @@ -87,7 +87,7 @@ export class MemoryService extends BaseService { } async get(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.MEMORY_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.MemoryRead, ids: [id] }); const memory = await this.findOrFail(id); return mapMemory(memory, auth); } @@ -98,7 +98,7 @@ export class MemoryService extends BaseService { const assetIds = dto.assetIds || []; const allowedAssetIds = await this.checkAccess({ auth, - permission: Permission.ASSET_SHARE, + permission: Permission.AssetShare, ids: assetIds, }); const memory = await this.memoryRepository.create( @@ -117,7 +117,7 @@ export class MemoryService extends BaseService { } async update(auth: AuthDto, id: string, dto: MemoryUpdateDto): Promise { - await this.requireAccess({ auth, permission: Permission.MEMORY_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.MemoryUpdate, ids: 
[id] }); const memory = await this.memoryRepository.update(id, { isSaved: dto.isSaved, @@ -129,12 +129,12 @@ export class MemoryService extends BaseService { } async remove(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.MEMORY_DELETE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.MemoryDelete, ids: [id] }); await this.memoryRepository.delete(id); } async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.MEMORY_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.MemoryRead, ids: [id] }); const repos = { access: this.accessRepository, bulk: this.memoryRepository }; const results = await addAssets(auth, repos, { parentId: id, assetIds: dto.ids }); @@ -148,13 +148,13 @@ export class MemoryService extends BaseService { } async removeAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.MEMORY_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.MemoryUpdate, ids: [id] }); const repos = { access: this.accessRepository, bulk: this.memoryRepository }; const results = await removeAssets(auth, repos, { parentId: id, assetIds: dto.ids, - canAlwaysRemove: Permission.MEMORY_DELETE, + canAlwaysRemove: Permission.MemoryDelete, }); const hasSuccess = results.find(({ success }) => success); diff --git a/server/src/services/metadata.service.spec.ts b/server/src/services/metadata.service.spec.ts index 881f25d5dd..23884256ea 100644 --- a/server/src/services/metadata.service.spec.ts +++ b/server/src/services/metadata.service.spec.ts @@ -50,7 +50,7 @@ describe(MetadataService.name, () => { mockReadTags(); - mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES); + mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices); delete process.env.TZ; }); @@ -102,11 +102,11 @@ describe(MetadataService.name, () 
=> { it('should queue metadata extraction for all assets without exif values', async () => { mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image])); - await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(false); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.METADATA_EXTRACTION, + name: JobName.MetadataExtraction, data: { id: assetStub.image.id }, }, ]); @@ -115,11 +115,11 @@ describe(MetadataService.name, () => { it('should queue metadata extraction for all assets', async () => { mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image])); - await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.METADATA_EXTRACTION, + name: JobName.MetadataExtraction, data: { id: assetStub.image.id }, }, ]); @@ -506,7 +506,7 @@ describe(MetadataService.name, () => { it('should not apply motion photos if asset is video', async () => { mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ ...assetStub.livePhotoMotionAsset, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer); @@ -516,7 +516,7 @@ describe(MetadataService.name, () => { expect(mocks.job.queue).not.toHaveBeenCalled(); expect(mocks.job.queueAll).not.toHaveBeenCalled(); expect(mocks.asset.update).not.toHaveBeenCalledWith( - expect.objectContaining({ assetType: AssetType.VIDEO, visibility: AssetVisibility.HIDDEN }), + 
expect.objectContaining({ assetType: AssetType.Video, visibility: AssetVisibility.Hidden }), ); }); @@ -583,13 +583,13 @@ describe(MetadataService.name, () => { fileCreatedAt: assetStub.livePhotoWithOriginalFileName.fileCreatedAt, fileModifiedAt: assetStub.livePhotoWithOriginalFileName.fileModifiedAt, id: fileStub.livePhotoMotion.uuid, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, libraryId: assetStub.livePhotoWithOriginalFileName.libraryId, localDateTime: assetStub.livePhotoWithOriginalFileName.fileCreatedAt, originalFileName: 'asset_1.mp4', originalPath: 'upload/encoded-video/user-id/li/ve/live-photo-motion-asset-MP.mp4', ownerId: assetStub.livePhotoWithOriginalFileName.ownerId, - type: AssetType.VIDEO, + type: AssetType.Video, }); expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.ownerId, 512); expect(mocks.storage.createFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video); @@ -599,7 +599,7 @@ describe(MetadataService.name, () => { }); expect(mocks.asset.update).toHaveBeenCalledTimes(3); expect(mocks.job.queue).toHaveBeenCalledExactlyOnceWith({ - name: JobName.VIDEO_CONVERSION, + name: JobName.VideoConversion, data: { id: assetStub.livePhotoMotionAsset.id }, }); }); @@ -641,13 +641,13 @@ describe(MetadataService.name, () => { fileCreatedAt: assetStub.livePhotoWithOriginalFileName.fileCreatedAt, fileModifiedAt: assetStub.livePhotoWithOriginalFileName.fileModifiedAt, id: fileStub.livePhotoMotion.uuid, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, libraryId: assetStub.livePhotoWithOriginalFileName.libraryId, localDateTime: assetStub.livePhotoWithOriginalFileName.fileCreatedAt, originalFileName: 'asset_1.mp4', originalPath: 'upload/encoded-video/user-id/li/ve/live-photo-motion-asset-MP.mp4', ownerId: assetStub.livePhotoWithOriginalFileName.ownerId, - type: AssetType.VIDEO, + type: AssetType.Video, }); 
expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.ownerId, 512); expect(mocks.storage.createFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video); @@ -657,7 +657,7 @@ describe(MetadataService.name, () => { }); expect(mocks.asset.update).toHaveBeenCalledTimes(3); expect(mocks.job.queue).toHaveBeenCalledExactlyOnceWith({ - name: JobName.VIDEO_CONVERSION, + name: JobName.VideoConversion, data: { id: assetStub.livePhotoMotionAsset.id }, }); }); @@ -699,13 +699,13 @@ describe(MetadataService.name, () => { fileCreatedAt: assetStub.livePhotoWithOriginalFileName.fileCreatedAt, fileModifiedAt: assetStub.livePhotoWithOriginalFileName.fileModifiedAt, id: fileStub.livePhotoMotion.uuid, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, libraryId: assetStub.livePhotoWithOriginalFileName.libraryId, localDateTime: assetStub.livePhotoWithOriginalFileName.fileCreatedAt, originalFileName: 'asset_1.mp4', originalPath: 'upload/encoded-video/user-id/li/ve/live-photo-motion-asset-MP.mp4', ownerId: assetStub.livePhotoWithOriginalFileName.ownerId, - type: AssetType.VIDEO, + type: AssetType.Video, }); expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.ownerId, 512); expect(mocks.storage.createFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video); @@ -715,7 +715,7 @@ describe(MetadataService.name, () => { }); expect(mocks.asset.update).toHaveBeenCalledTimes(3); expect(mocks.job.queue).toHaveBeenCalledExactlyOnceWith({ - name: JobName.VIDEO_CONVERSION, + name: JobName.VideoConversion, data: { id: assetStub.livePhotoMotionAsset.id }, }); }); @@ -737,7 +737,7 @@ describe(MetadataService.name, () => { await sut.handleMetadataExtraction({ id: assetStub.livePhotoWithOriginalFileName.id }); expect(mocks.job.queue).toHaveBeenNthCalledWith(1, { - name: JobName.ASSET_DELETION, + name: JobName.AssetDeletion, data: { id: 
assetStub.livePhotoWithOriginalFileName.livePhotoVideoId, deleteOnDisk: true }, }); }); @@ -778,7 +778,7 @@ describe(MetadataService.name, () => { mocks.crypto.hashSha1.mockReturnValue(randomBytes(512)); mocks.asset.getByChecksum.mockResolvedValue({ ...assetStub.livePhotoMotionAsset, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }); const video = randomBytes(512); mocks.storage.readFile.mockResolvedValue(video); @@ -786,7 +786,7 @@ describe(MetadataService.name, () => { await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id }); expect(mocks.asset.update).toHaveBeenCalledWith({ id: assetStub.livePhotoMotionAsset.id, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, }); expect(mocks.asset.update).toHaveBeenCalledWith({ id: assetStub.livePhotoStillAsset.id, @@ -1106,7 +1106,7 @@ describe(MetadataService.name, () => { boundingBoxX2: 200, boundingBoxY1: 20, boundingBoxY2: 60, - sourceType: SourceType.EXIF, + sourceType: SourceType.Exif, }, ], [], @@ -1116,7 +1116,7 @@ describe(MetadataService.name, () => { ]); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_PERSON_THUMBNAIL, + name: JobName.GeneratePersonThumbnail, data: { id: personStub.withName.id }, }, ]); @@ -1145,7 +1145,7 @@ describe(MetadataService.name, () => { boundingBoxX2: 200, boundingBoxY1: 20, boundingBoxY2: 60, - sourceType: SourceType.EXIF, + sourceType: SourceType.Exif, }, ], [], @@ -1234,7 +1234,7 @@ describe(MetadataService.name, () => { boundingBoxX2: x2, boundingBoxY1: y1, boundingBoxY2: y2, - sourceType: SourceType.EXIF, + sourceType: SourceType.Exif, }, ], [], @@ -1244,7 +1244,7 @@ describe(MetadataService.name, () => { ]); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_PERSON_THUMBNAIL, + name: JobName.GeneratePersonThumbnail, data: { id: personStub.withName.id }, }, ]); @@ -1308,7 +1308,7 @@ describe(MetadataService.name, () => { 
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id); expect(mocks.asset.findLivePhotoMatch).not.toHaveBeenCalled(); expect(mocks.asset.update).not.toHaveBeenCalledWith( - expect.objectContaining({ visibility: AssetVisibility.HIDDEN }), + expect.objectContaining({ visibility: AssetVisibility.Hidden }), ); expect(mocks.album.removeAssetsFromAll).not.toHaveBeenCalled(); }); @@ -1326,10 +1326,10 @@ describe(MetadataService.name, () => { ownerId: assetStub.livePhotoMotionAsset.ownerId, otherAssetId: assetStub.livePhotoMotionAsset.id, libraryId: null, - type: AssetType.IMAGE, + type: AssetType.Image, }); expect(mocks.asset.update).not.toHaveBeenCalledWith( - expect.objectContaining({ visibility: AssetVisibility.HIDDEN }), + expect.objectContaining({ visibility: AssetVisibility.Hidden }), ); expect(mocks.album.removeAssetsFromAll).not.toHaveBeenCalled(); }); @@ -1346,7 +1346,7 @@ describe(MetadataService.name, () => { livePhotoCID: 'CID', ownerId: assetStub.livePhotoStillAsset.ownerId, otherAssetId: assetStub.livePhotoStillAsset.id, - type: AssetType.VIDEO, + type: AssetType.Video, }); expect(mocks.asset.update).toHaveBeenCalledWith({ id: assetStub.livePhotoStillAsset.id, @@ -1354,7 +1354,7 @@ describe(MetadataService.name, () => { }); expect(mocks.asset.update).toHaveBeenCalledWith({ id: assetStub.livePhotoMotionAsset.id, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, }); expect(mocks.album.removeAssetsFromAll).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id]); }); @@ -1457,7 +1457,7 @@ describe(MetadataService.name, () => { expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.SIDECAR_SYNC, + name: JobName.SidecarSync, data: { id: assetStub.sidecar.id }, }, ]); @@ -1471,7 +1471,7 @@ describe(MetadataService.name, () => { expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.SIDECAR_DISCOVERY, + name: 
JobName.SidecarDiscovery, data: { id: assetStub.image.id }, }, ]); @@ -1481,13 +1481,13 @@ describe(MetadataService.name, () => { describe('handleSidecarSync', () => { it('should do nothing if asset could not be found', async () => { mocks.asset.getByIds.mockResolvedValue([]); - await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED); + await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed); expect(mocks.asset.update).not.toHaveBeenCalled(); }); it('should do nothing if asset has no sidecar path', async () => { mocks.asset.getByIds.mockResolvedValue([assetStub.image]); - await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED); + await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed); expect(mocks.asset.update).not.toHaveBeenCalled(); }); @@ -1495,7 +1495,7 @@ describe(MetadataService.name, () => { mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]); mocks.storage.checkFileExists.mockResolvedValue(true); - await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success); expect(mocks.storage.checkFileExists).toHaveBeenCalledWith( `${assetStub.sidecar.originalPath}.xmp`, constants.R_OK, @@ -1511,7 +1511,7 @@ describe(MetadataService.name, () => { mocks.storage.checkFileExists.mockResolvedValueOnce(false); mocks.storage.checkFileExists.mockResolvedValueOnce(true); - await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.Success); expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith( 2, assetStub.sidecarWithoutExt.sidecarPath, @@ -1528,7 +1528,7 @@ describe(MetadataService.name, () => { 
mocks.storage.checkFileExists.mockResolvedValueOnce(true); mocks.storage.checkFileExists.mockResolvedValueOnce(true); - await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success); expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(1, assetStub.sidecar.sidecarPath, constants.R_OK); expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith( 2, @@ -1545,7 +1545,7 @@ describe(MetadataService.name, () => { mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]); mocks.storage.checkFileExists.mockResolvedValue(false); - await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success); expect(mocks.storage.checkFileExists).toHaveBeenCalledWith( `${assetStub.sidecar.originalPath}.xmp`, constants.R_OK, @@ -1603,14 +1603,14 @@ describe(MetadataService.name, () => { describe('handleSidecarWrite', () => { it('should skip assets that do not exist anymore', async () => { mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0); - await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.FAILED); + await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed); expect(mocks.metadata.writeTags).not.toHaveBeenCalled(); }); it('should skip jobs with no metadata', async () => { const asset = factory.jobAssets.sidecarWrite(); mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(asset); - await expect(sut.handleSidecarWrite({ id: asset.id })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleSidecarWrite({ id: asset.id })).resolves.toBe(JobStatus.Skipped); expect(mocks.metadata.writeTags).not.toHaveBeenCalled(); }); @@ -1629,7 +1629,7 @@ describe(MetadataService.name, () => { longitude: gps, dateTimeOriginal: date, }), - 
).resolves.toBe(JobStatus.SUCCESS); + ).resolves.toBe(JobStatus.Success); expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.sidecarPath, { Description: description, ImageDescription: description, diff --git a/server/src/services/metadata.service.ts b/server/src/services/metadata.service.ts index ea3f810fa4..037bab3633 100644 --- a/server/src/services/metadata.service.ts +++ b/server/src/services/metadata.service.ts @@ -126,7 +126,7 @@ type Dates = { @Injectable() export class MetadataService extends BaseService { - @OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.MICROSERVICES] }) + @OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.Microservices] }) async onBootstrap() { this.logger.log('Bootstrapping metadata service'); await this.init(); @@ -137,12 +137,12 @@ export class MetadataService extends BaseService { await this.metadataRepository.teardown(); } - @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.MICROSERVICES] }) + @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] }) onConfigInit({ newConfig }: ArgOf<'ConfigInit'>) { this.metadataRepository.setMaxConcurrency(newConfig.job.metadataExtraction.concurrency); } - @OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.MICROSERVICES], server: true }) + @OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.Microservices], server: true }) onConfigUpdate({ newConfig }: ArgOf<'ConfigUpdate'>) { this.metadataRepository.setMaxConcurrency(newConfig.job.metadataExtraction.concurrency); } @@ -151,9 +151,9 @@ export class MetadataService extends BaseService { this.logger.log('Initializing metadata service'); try { - await this.jobRepository.pause(QueueName.METADATA_EXTRACTION); + await this.jobRepository.pause(QueueName.MetadataExtraction); await this.databaseRepository.withLock(DatabaseLock.GeodataImport, () => this.mapRepository.init()); - await this.jobRepository.resume(QueueName.METADATA_EXTRACTION); + await this.jobRepository.resume(QueueName.MetadataExtraction); 
this.logger.log(`Initialized local reverse geocoder`); } catch (error: Error | any) { @@ -170,7 +170,7 @@ export class MetadataService extends BaseService { return; } - const otherType = asset.type === AssetType.VIDEO ? AssetType.IMAGE : AssetType.VIDEO; + const otherType = asset.type === AssetType.Video ? AssetType.Image : AssetType.Video; const match = await this.assetRepository.findLivePhotoMatch({ livePhotoCID: exifInfo.livePhotoCID, ownerId: asset.ownerId, @@ -183,23 +183,23 @@ export class MetadataService extends BaseService { return; } - const [photoAsset, motionAsset] = asset.type === AssetType.IMAGE ? [asset, match] : [match, asset]; + const [photoAsset, motionAsset] = asset.type === AssetType.Image ? [asset, match] : [match, asset]; await Promise.all([ this.assetRepository.update({ id: photoAsset.id, livePhotoVideoId: motionAsset.id }), - this.assetRepository.update({ id: motionAsset.id, visibility: AssetVisibility.HIDDEN }), + this.assetRepository.update({ id: motionAsset.id, visibility: AssetVisibility.Hidden }), this.albumRepository.removeAssetsFromAll([motionAsset.id]), ]); await this.eventRepository.emit('AssetHide', { assetId: motionAsset.id, userId: motionAsset.ownerId }); } - @OnJob({ name: JobName.QUEUE_METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION }) - async handleQueueMetadataExtraction(job: JobOf): Promise { + @OnJob({ name: JobName.QueueMetadataExtraction, queue: QueueName.MetadataExtraction }) + async handleQueueMetadataExtraction(job: JobOf): Promise { const { force } = job; - let queue: { name: JobName.METADATA_EXTRACTION; data: { id: string } }[] = []; + let queue: { name: JobName.MetadataExtraction; data: { id: string } }[] = []; for await (const asset of this.assetJobRepository.streamForMetadataExtraction(force)) { - queue.push({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } }); + queue.push({ name: JobName.MetadataExtraction, data: { id: asset.id } }); if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) { await 
this.jobRepository.queueAll(queue); @@ -208,11 +208,11 @@ export class MetadataService extends BaseService { } await this.jobRepository.queueAll(queue); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION }) - async handleMetadataExtraction(data: JobOf) { + @OnJob({ name: JobName.MetadataExtraction, queue: QueueName.MetadataExtraction }) + async handleMetadataExtraction(data: JobOf) { const [{ metadata, reverseGeocoding }, asset] = await Promise.all([ this.getConfig({ withCache: true }), this.assetJobRepository.getForMetadataExtraction(data.id), @@ -320,8 +320,8 @@ export class MetadataService extends BaseService { }); } - @OnJob({ name: JobName.QUEUE_SIDECAR, queue: QueueName.SIDECAR }) - async handleQueueSidecar({ force }: JobOf): Promise { + @OnJob({ name: JobName.QueueSidecar, queue: QueueName.Sidecar }) + async handleQueueSidecar({ force }: JobOf): Promise { let jobs: JobItem[] = []; const queueAll = async () => { await this.jobRepository.queueAll(jobs); @@ -330,7 +330,7 @@ export class MetadataService extends BaseService { const assets = this.assetJobRepository.streamForSidecar(force); for await (const asset of assets) { - jobs.push({ name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY, data: { id: asset.id } }); + jobs.push({ name: force ? 
JobName.SidecarSync : JobName.SidecarDiscovery, data: { id: asset.id } }); if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) { await queueAll(); } @@ -338,35 +338,35 @@ export class MetadataService extends BaseService { await queueAll(); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.SIDECAR_SYNC, queue: QueueName.SIDECAR }) - handleSidecarSync({ id }: JobOf): Promise { + @OnJob({ name: JobName.SidecarSync, queue: QueueName.Sidecar }) + handleSidecarSync({ id }: JobOf): Promise { return this.processSidecar(id, true); } - @OnJob({ name: JobName.SIDECAR_DISCOVERY, queue: QueueName.SIDECAR }) - handleSidecarDiscovery({ id }: JobOf): Promise { + @OnJob({ name: JobName.SidecarDiscovery, queue: QueueName.Sidecar }) + handleSidecarDiscovery({ id }: JobOf): Promise { return this.processSidecar(id, false); } @OnEvent({ name: 'AssetTag' }) async handleTagAsset({ assetId }: ArgOf<'AssetTag'>) { - await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id: assetId, tags: true } }); + await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } }); } @OnEvent({ name: 'AssetUntag' }) async handleUntagAsset({ assetId }: ArgOf<'AssetUntag'>) { - await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id: assetId, tags: true } }); + await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } }); } - @OnJob({ name: JobName.SIDECAR_WRITE, queue: QueueName.SIDECAR }) - async handleSidecarWrite(job: JobOf): Promise { + @OnJob({ name: JobName.SidecarWrite, queue: QueueName.Sidecar }) + async handleSidecarWrite(job: JobOf): Promise { const { id, description, dateTimeOriginal, latitude, longitude, rating, tags } = job; const asset = await this.assetJobRepository.getForSidecarWriteJob(id); if (!asset) { - return JobStatus.FAILED; + return JobStatus.Failed; } const tagsList = (asset.tags || []).map((tag) => tag.value); @@ -386,7 +386,7 @@ export class 
MetadataService extends BaseService { ); if (Object.keys(exif).length === 0) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } await this.metadataRepository.writeTags(sidecarPath, exif); @@ -395,7 +395,7 @@ export class MetadataService extends BaseService { await this.assetRepository.update({ id, sidecarPath }); } - return JobStatus.SUCCESS; + return JobStatus.Success; } private getImageDimensions(exifTags: ImmichTags): { width?: number; height?: number } { @@ -416,7 +416,7 @@ export class MetadataService extends BaseService { sidecarPath: string | null; type: AssetType; }): Promise { - if (!asset.sidecarPath && asset.type === AssetType.IMAGE) { + if (!asset.sidecarPath && asset.type === AssetType.Image) { return this.metadataRepository.readTags(asset.originalPath); } @@ -431,7 +431,7 @@ export class MetadataService extends BaseService { const [mediaTags, sidecarTags, videoTags] = await Promise.all([ this.metadataRepository.readTags(asset.originalPath), asset.sidecarPath ? this.metadataRepository.readTags(asset.sidecarPath) : null, - asset.type === AssetType.VIDEO ? this.getVideoTags(asset.originalPath) : null, + asset.type === AssetType.Video ? 
this.getVideoTags(asset.originalPath) : null, ]); // prefer dates from sidecar tags @@ -488,7 +488,7 @@ export class MetadataService extends BaseService { } private isMotionPhoto(asset: { type: AssetType }, tags: ImmichTags): boolean { - return asset.type === AssetType.IMAGE && !!(tags.MotionPhoto || tags.MicroVideo); + return asset.type === AssetType.Image && !!(tags.MotionPhoto || tags.MicroVideo); } private async applyMotionPhotos(asset: Asset, tags: ImmichTags, dates: Dates, stats: Stats) { @@ -558,10 +558,10 @@ export class MetadataService extends BaseService { }); // Hide the motion photo video asset if it's not already hidden to prepare for linking - if (motionAsset.visibility === AssetVisibility.TIMELINE) { + if (motionAsset.visibility === AssetVisibility.Timeline) { await this.assetRepository.update({ id: motionAsset.id, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, }); this.logger.log(`Hid unlinked motion photo video asset (${motionAsset.id})`); } @@ -570,7 +570,7 @@ export class MetadataService extends BaseService { motionAsset = await this.assetRepository.create({ id: motionAssetId, libraryId: asset.libraryId, - type: AssetType.VIDEO, + type: AssetType.Video, fileCreatedAt: dates.dateTimeOriginal, fileModifiedAt: stats.mtime, localDateTime: dates.localDateTime, @@ -578,7 +578,7 @@ export class MetadataService extends BaseService { ownerId: asset.ownerId, originalPath: StorageCore.getAndroidMotionPath(asset, motionAssetId), originalFileName: `${path.parse(asset.originalFileName).name}.mp4`, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, deviceAssetId: 'NONE', deviceId: 'NONE', }); @@ -597,7 +597,7 @@ export class MetadataService extends BaseService { // note asset.livePhotoVideoId is not motionAsset.id yet if (asset.livePhotoVideoId) { await this.jobRepository.queue({ - name: JobName.ASSET_DELETION, + name: JobName.AssetDeletion, data: { id: asset.livePhotoVideoId, deleteOnDisk: true }, }); 
this.logger.log(`Removed old motion photo video asset (${asset.livePhotoVideoId})`); @@ -612,7 +612,7 @@ export class MetadataService extends BaseService { this.logger.log(`Wrote motion photo video to ${motionAsset.originalPath}`); await this.handleMetadataExtraction({ id: motionAsset.id }); - await this.jobRepository.queue({ name: JobName.VIDEO_CONVERSION, data: { id: motionAsset.id } }); + await this.jobRepository.queue({ name: JobName.VideoConversion, data: { id: motionAsset.id } }); } this.logger.debug(`Finished motion photo video extraction for asset ${asset.id}: ${asset.originalPath}`); @@ -740,7 +740,7 @@ export class MetadataService extends BaseService { boundingBoxY1: Math.floor((region.Area.Y - region.Area.H / 2) * imageHeight), boundingBoxX2: Math.floor((region.Area.X + region.Area.W / 2) * imageWidth), boundingBoxY2: Math.floor((region.Area.Y + region.Area.H / 2) * imageHeight), - sourceType: SourceType.EXIF, + sourceType: SourceType.Exif, }; facesToAdd.push(face); @@ -753,11 +753,11 @@ describe(MetadataService.name, () => { if (missing.length > 0) { this.logger.debugFn(() => `Creating missing persons: ${missing.map((p) => `${p.name}/${p.id}`)}`); const newPersonIds = await this.personRepository.createAll(missing); - const jobs = newPersonIds.map((id) => ({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } }) as const); + const jobs = newPersonIds.map((id) => ({ name: JobName.GeneratePersonThumbnail, data: { id } }) as const); await this.jobRepository.queueAll(jobs); } - const facesToRemove = asset.faces.filter((face) => face.sourceType === SourceType.EXIF).map((face) => face.id); + const facesToRemove = asset.faces.filter((face) => face.sourceType === SourceType.Exif).map((face) => face.id); if (facesToRemove.length > 0) { this.logger.debug(`Removing ${facesToRemove.length} faces for asset ${asset.id}: ${asset.originalPath}`); } @@ -894,15 +894,15 @@ describe(MetadataService.name, () => { const [asset] = await 
this.assetRepository.getByIds([id]); if (!asset) { - return JobStatus.FAILED; + return JobStatus.Failed; } if (isSync && !asset.sidecarPath) { - return JobStatus.FAILED; + return JobStatus.Failed; } - if (!isSync && (asset.visibility === AssetVisibility.HIDDEN || asset.sidecarPath) && !asset.isExternal) { - return JobStatus.FAILED; + if (!isSync && (asset.visibility === AssetVisibility.Hidden || asset.sidecarPath) && !asset.isExternal) { + return JobStatus.Failed; } // XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp @@ -927,22 +927,22 @@ export class MetadataService extends BaseService { if (sidecarPath !== asset.sidecarPath) { await this.assetRepository.update({ id: asset.id, sidecarPath }); } - return JobStatus.SUCCESS; + return JobStatus.Success; } if (sidecarPath) { this.logger.debug(`Detected sidecar at '${sidecarPath}' for asset ${asset.id}: ${asset.originalPath}`); await this.assetRepository.update({ id: asset.id, sidecarPath }); - return JobStatus.SUCCESS; + return JobStatus.Success; } if (!isSync) { - return JobStatus.FAILED; + return JobStatus.Failed; } this.logger.debug(`No sidecar found for asset ${asset.id}: ${asset.originalPath}`); await this.assetRepository.update({ id: asset.id, sidecarPath: null }); - return JobStatus.SUCCESS; + return JobStatus.Success; } } diff --git a/server/src/services/notification.service.spec.ts b/server/src/services/notification.service.spec.ts index bca7074194..97e5ddf106 100644 --- a/server/src/services/notification.service.spec.ts +++ b/server/src/services/notification.service.spec.ts @@ -131,7 +131,7 @@ describe(NotificationService.name, () => { it('should queue the generate thumbnail job', async () => { await sut.onAssetShow({ assetId: 'asset-id', userId: 'user-id' }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.GENERATE_THUMBNAILS, + name: JobName.GenerateThumbnails, data: { id: 'asset-id', notify: true }, }); }); @@ -146,7 
+146,7 @@ describe(NotificationService.name, () => { it('should queue notify signup event if notify is true', async () => { await sut.onUserSignup({ id: '', notify: true }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.NOTIFY_SIGNUP, + name: JobName.NotifySignup, data: { id: '', tempPassword: undefined }, }); }); @@ -156,7 +156,7 @@ describe(NotificationService.name, () => { it('should queue notify album update event', async () => { await sut.onAlbumUpdate({ id: 'album', recipientId: '42' }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.NOTIFY_ALBUM_UPDATE, + name: JobName.NotifyAlbumUpdate, data: { id: 'album', recipientId: '42', delay: 300_000 }, }); }); @@ -166,7 +166,7 @@ describe(NotificationService.name, () => { it('should queue notify album invite event', async () => { await sut.onAlbumInvite({ id: '', userId: '42' }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.NOTIFY_ALBUM_INVITE, + name: JobName.NotifyAlbumInvite, data: { id: '', recipientId: '42' }, }); }); @@ -242,7 +242,7 @@ describe(NotificationService.name, () => { describe('handleUserSignup', () => { it('should skip if user could not be found', async () => { - await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.Skipped); }); it('should be successful', async () => { @@ -250,9 +250,9 @@ describe(NotificationService.name, () => { mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' }); - await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.Success); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: expect.objectContaining({ subject: 'Welcome to Immich' }), }); }); @@ -260,14 +260,14 @@ describe(NotificationService.name, () 
=> { describe('handleAlbumInvite', () => { it('should skip if album could not be found', async () => { - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped); expect(mocks.user.get).not.toHaveBeenCalled(); }); it('should skip if recipient could not be found', async () => { mocks.album.getById.mockResolvedValue(albumStub.empty); - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped); expect(mocks.job.queue).not.toHaveBeenCalled(); }); @@ -277,13 +277,13 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: false, albumInvite: true } }, }, ], }); - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped); }); it('should skip if the recipient has email notifications for album invite disabled', async () => { @@ -292,13 +292,13 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: true, albumInvite: false } }, }, ], }); - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped); }); it('should send invite email', async () => { @@ -307,7 +307,7 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: true, 
albumInvite: true } }, }, ], @@ -315,9 +315,9 @@ describe(NotificationService.name, () => { mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' }); - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: expect.objectContaining({ subject: expect.stringContaining('You have been added to a shared album') }), }); }); @@ -328,7 +328,7 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: true, albumInvite: true } }, }, ], @@ -337,13 +337,13 @@ describe(NotificationService.name, () => { mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' }); mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]); - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith( albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, - AssetFileType.THUMBNAIL, + AssetFileType.Thumbnail, ); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: expect.objectContaining({ subject: expect.stringContaining('You have been added to a shared album'), imageAttachments: undefined, @@ -357,7 +357,7 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: true, albumInvite: true } }, }, ], @@ -365,16 +365,16 @@ describe(NotificationService.name, () 
=> { mocks.systemMetadata.get.mockResolvedValue({ server: {} }); mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' }); mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([ - { id: '1', type: AssetFileType.THUMBNAIL, path: 'path-to-thumb.jpg' }, + { id: '1', type: AssetFileType.Thumbnail, path: 'path-to-thumb.jpg' }, ]); - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith( albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, - AssetFileType.THUMBNAIL, + AssetFileType.Thumbnail, ); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: expect.objectContaining({ subject: expect.stringContaining('You have been added to a shared album'), imageAttachments: [{ filename: 'album-thumbnail.jpg', path: expect.anything(), cid: expect.anything() }], @@ -388,7 +388,7 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: true, albumInvite: true } }, }, ], @@ -397,13 +397,13 @@ describe(NotificationService.name, () => { mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' }); mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetStub.image.files[2]]); - await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith( albumStub.emptyWithValidThumbnail.albumThumbnailAssetId, - AssetFileType.THUMBNAIL, + AssetFileType.Thumbnail, ); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: 
expect.objectContaining({ subject: expect.stringContaining('You have been added to a shared album'), imageAttachments: [{ filename: 'album-thumbnail.ext', path: expect.anything(), cid: expect.anything() }], @@ -414,14 +414,14 @@ describe(NotificationService.name, () => { describe('handleAlbumUpdate', () => { it('should skip if album could not be found', async () => { - await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.Skipped); expect(mocks.user.get).not.toHaveBeenCalled(); }); it('should skip if owner could not be found', async () => { mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail); - await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.Skipped); expect(mocks.systemMetadata.get).not.toHaveBeenCalled(); }); @@ -448,7 +448,7 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: false, albumUpdate: true } }, }, ], @@ -470,7 +470,7 @@ describe(NotificationService.name, () => { ...userStub.user1, metadata: [ { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: { emailNotifications: { enabled: true, albumUpdate: false } }, }, ], @@ -500,9 +500,9 @@ describe(NotificationService.name, () => { it('should add new recipients for new images if job is already queued', async () => { await sut.onAlbumUpdate({ id: '1', recipientId: '2' } as INotifyAlbumUpdateJob); - expect(mocks.job.removeJob).toHaveBeenCalledWith(JobName.NOTIFY_ALBUM_UPDATE, '1/2'); + expect(mocks.job.removeJob).toHaveBeenCalledWith(JobName.NotifyAlbumUpdate, '1/2'); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.NOTIFY_ALBUM_UPDATE, 
+ name: JobName.NotifyAlbumUpdate, data: { id: '1', delay: 300_000, @@ -515,7 +515,7 @@ describe(NotificationService.name, () => { describe('handleSendEmail', () => { it('should skip if smtp notifications are disabled', async () => { mocks.systemMetadata.get.mockResolvedValue({ notifications: { smtp: { enabled: false } } }); - await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.Skipped); }); it('should send mail successfully', async () => { @@ -524,7 +524,7 @@ describe(NotificationService.name, () => { }); mocks.email.sendEmail.mockResolvedValue({ messageId: '', response: '' }); - await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.Success); expect(mocks.email.sendEmail).toHaveBeenCalledWith(expect.objectContaining({ replyTo: 'test@immich.app' })); }); @@ -534,7 +534,7 @@ describe(NotificationService.name, () => { }); mocks.email.sendEmail.mockResolvedValue({ messageId: '', response: '' }); - await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.Success); expect(mocks.email.sendEmail).toHaveBeenCalledWith(expect.objectContaining({ replyTo: 'demo@immich.app' })); }); }); diff --git a/server/src/services/notification.service.ts b/server/src/services/notification.service.ts index 80a20195a1..90a3b63ac5 100644 --- a/server/src/services/notification.service.ts +++ b/server/src/services/notification.service.ts @@ -39,19 +39,19 @@ export class NotificationService extends BaseService { } async updateAll(auth: AuthDto, dto: NotificationUpdateAllDto) { - await this.requireAccess({ auth, 
ids: dto.ids, permission: Permission.NOTIFICATION_UPDATE }); + await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NotificationUpdate }); await this.notificationRepository.updateAll(dto.ids, { readAt: dto.readAt, }); } async deleteAll(auth: AuthDto, dto: NotificationDeleteAllDto) { - await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NOTIFICATION_DELETE }); + await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NotificationDelete }); await this.notificationRepository.deleteAll(dto.ids); } async get(auth: AuthDto, id: string) { - await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_READ }); + await this.requireAccess({ auth, ids: [id], permission: Permission.NotificationRead }); const item = await this.notificationRepository.get(id); if (!item) { throw new BadRequestException('Notification not found'); @@ -60,7 +60,7 @@ export class NotificationService extends BaseService { } async update(auth: AuthDto, id: string, dto: NotificationUpdateDto) { - await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_UPDATE }); + await this.requireAccess({ auth, ids: [id], permission: Permission.NotificationUpdate }); const item = await this.notificationRepository.update(id, { readAt: dto.readAt, }); @@ -68,11 +68,11 @@ export class NotificationService extends BaseService { } async delete(auth: AuthDto, id: string) { - await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_DELETE }); + await this.requireAccess({ auth, ids: [id], permission: Permission.NotificationDelete }); await this.notificationRepository.delete(id); } - @OnJob({ name: JobName.NOTIFICATIONS_CLEANUP, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.NotificationsCleanup, queue: QueueName.BackgroundTask }) async onNotificationsCleanup() { await this.notificationRepository.cleanup(); } @@ -87,7 +87,7 @@ export class NotificationService extends BaseService { 
this.logger.error(`Unable to run job handler (${job.name}): ${error}`, error?.stack, JSON.stringify(job.data)); switch (job.name) { - case JobName.BACKUP_DATABASE: { + case JobName.BackupDatabase: { const errorMessage = error instanceof Error ? error.message : error; const item = await this.notificationRepository.create({ userId: admin.id, @@ -135,7 +135,7 @@ export class NotificationService extends BaseService { @OnEvent({ name: 'AssetShow' }) async onAssetShow({ assetId }: ArgOf<'AssetShow'>) { - await this.jobRepository.queue({ name: JobName.GENERATE_THUMBNAILS, data: { id: assetId, notify: true } }); + await this.jobRepository.queue({ name: JobName.GenerateThumbnails, data: { id: assetId, notify: true } }); } @OnEvent({ name: 'AssetTrash' }) @@ -193,22 +193,22 @@ export class NotificationService extends BaseService { @OnEvent({ name: 'UserSignup' }) async onUserSignup({ notify, id, tempPassword }: ArgOf<'UserSignup'>) { if (notify) { - await this.jobRepository.queue({ name: JobName.NOTIFY_SIGNUP, data: { id, tempPassword } }); + await this.jobRepository.queue({ name: JobName.NotifySignup, data: { id, tempPassword } }); } } @OnEvent({ name: 'AlbumUpdate' }) async onAlbumUpdate({ id, recipientId }: ArgOf<'AlbumUpdate'>) { - await this.jobRepository.removeJob(JobName.NOTIFY_ALBUM_UPDATE, `${id}/${recipientId}`); + await this.jobRepository.removeJob(JobName.NotifyAlbumUpdate, `${id}/${recipientId}`); await this.jobRepository.queue({ - name: JobName.NOTIFY_ALBUM_UPDATE, + name: JobName.NotifyAlbumUpdate, data: { id, recipientId, delay: NotificationService.albumUpdateEmailDelayMs }, }); } @OnEvent({ name: 'AlbumInvite' }) async onAlbumInvite({ id, userId }: ArgOf<'AlbumInvite'>) { - await this.jobRepository.queue({ name: JobName.NOTIFY_ALBUM_INVITE, data: { id, recipientId: userId } }); + await this.jobRepository.queue({ name: JobName.NotifyAlbumInvite, data: { id, recipientId: userId } }); } @OnEvent({ name: 'SessionDelete' }) @@ -313,11 +313,11 @@ export class 
NotificationService extends BaseService { return { name, html: templateResponse }; } - @OnJob({ name: JobName.NOTIFY_SIGNUP, queue: QueueName.NOTIFICATION }) - async handleUserSignup({ id, tempPassword }: JobOf) { + @OnJob({ name: JobName.NotifySignup, queue: QueueName.Notification }) + async handleUserSignup({ id, tempPassword }: JobOf) { const user = await this.userRepository.get(id, { withDeleted: false }); if (!user) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const { server, templates } = await this.getConfig({ withCache: true }); @@ -333,7 +333,7 @@ export class NotificationService extends BaseService { }); await this.jobRepository.queue({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: { to: user.email, subject: 'Welcome to Immich', @@ -342,25 +342,25 @@ export class NotificationService extends BaseService { }, }); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.NOTIFY_ALBUM_INVITE, queue: QueueName.NOTIFICATION }) - async handleAlbumInvite({ id, recipientId }: JobOf) { + @OnJob({ name: JobName.NotifyAlbumInvite, queue: QueueName.Notification }) + async handleAlbumInvite({ id, recipientId }: JobOf) { const album = await this.albumRepository.getById(id, { withAssets: false }); if (!album) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const recipient = await this.userRepository.get(recipientId, { withDeleted: false }); if (!recipient) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const { emailNotifications } = getPreferences(recipient.metadata); if (!emailNotifications.enabled || !emailNotifications.albumInvite) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const attachment = await this.getAlbumThumbnailAttachment(album); @@ -380,7 +380,7 @@ export class NotificationService extends BaseService { }); await this.jobRepository.queue({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: { to: recipient.email, subject: `You have been added to a shared 
album - ${album.albumName}`, @@ -390,20 +390,20 @@ export class NotificationService extends BaseService { }, }); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.NOTIFY_ALBUM_UPDATE, queue: QueueName.NOTIFICATION }) - async handleAlbumUpdate({ id, recipientId }: JobOf) { + @OnJob({ name: JobName.NotifyAlbumUpdate, queue: QueueName.Notification }) + async handleAlbumUpdate({ id, recipientId }: JobOf) { const album = await this.albumRepository.getById(id, { withAssets: false }); if (!album) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const owner = await this.userRepository.get(album.ownerId, { withDeleted: false }); if (!owner) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const attachment = await this.getAlbumThumbnailAttachment(album); @@ -412,13 +412,13 @@ export class NotificationService extends BaseService { const user = await this.userRepository.get(recipientId, { withDeleted: false }); if (!user) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const { emailNotifications } = getPreferences(user.metadata); if (!emailNotifications.enabled || !emailNotifications.albumUpdate) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const { html, text } = await this.emailRepository.renderEmail({ @@ -434,7 +434,7 @@ export class NotificationService extends BaseService { }); await this.jobRepository.queue({ - name: JobName.SEND_EMAIL, + name: JobName.SendMail, data: { to: user.email, subject: `New media has been added to an album - ${album.albumName}`, @@ -444,14 +444,14 @@ export class NotificationService extends BaseService { }, }); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.SEND_EMAIL, queue: QueueName.NOTIFICATION }) - async handleSendEmail(data: JobOf): Promise { + @OnJob({ name: JobName.SendMail, queue: QueueName.Notification }) + async handleSendEmail(data: JobOf): Promise { const { notifications } = await this.getConfig({ withCache: false }); 
if (!notifications.smtp.enabled) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const { to, subject, html, text: plain } = data; @@ -468,7 +468,7 @@ export class NotificationService extends BaseService { this.logger.log(`Sent mail with id: ${response.messageId} status: ${response.response}`); - return JobStatus.SUCCESS; + return JobStatus.Success; } private async getAlbumThumbnailAttachment(album: { @@ -480,7 +480,7 @@ export class NotificationService extends BaseService { const albumThumbnailFiles = await this.assetJobRepository.getAlbumThumbnailFiles( album.albumThumbnailAssetId, - AssetFileType.THUMBNAIL, + AssetFileType.Thumbnail, ); if (albumThumbnailFiles.length !== 1) { diff --git a/server/src/services/partner.service.ts b/server/src/services/partner.service.ts index 3723634948..755b688397 100644 --- a/server/src/services/partner.service.ts +++ b/server/src/services/partner.service.ts @@ -40,7 +40,7 @@ export class PartnerService extends BaseService { } async update(auth: AuthDto, sharedById: string, dto: UpdatePartnerDto): Promise { - await this.requireAccess({ auth, permission: Permission.PARTNER_UPDATE, ids: [sharedById] }); + await this.requireAccess({ auth, permission: Permission.PartnerUpdate, ids: [sharedById] }); const partnerId: PartnerIds = { sharedById, sharedWithId: auth.user.id }; const entity = await this.partnerRepository.update(partnerId, { inTimeline: dto.inTimeline }); diff --git a/server/src/services/person.service.spec.ts b/server/src/services/person.service.spec.ts index d9df2225f4..8fbf8b20dd 100644 --- a/server/src/services/person.service.spec.ts +++ b/server/src/services/person.service.spec.ts @@ -182,7 +182,7 @@ describe(PersonService.name, () => { new ImmichFileResponse({ path: '/path/to/thumbnail.jpg', contentType: 'image/jpeg', - cacheControl: CacheControl.PRIVATE_WITHOUT_CACHE, + cacheControl: CacheControl.PrivateWithoutCache, }), ); 
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1'])); @@ -276,7 +276,7 @@ describe(PersonService.name, () => { }, ]); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.GENERATE_PERSON_THUMBNAIL, + name: JobName.GeneratePersonThumbnail, data: { id: 'person-1' }, }); expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1'])); @@ -337,7 +337,7 @@ describe(PersonService.name, () => { expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_PERSON_THUMBNAIL, + name: JobName.GeneratePersonThumbnail, data: { id: personStub.newThumbnail.id }, }, ]); @@ -346,7 +346,7 @@ describe(PersonService.name, () => { describe('handlePersonMigration', () => { it('should not move person files', async () => { - await expect(sut.handlePersonMigration(personStub.noName)).resolves.toBe(JobStatus.FAILED); + await expect(sut.handlePersonMigration(personStub.noName)).resolves.toBe(JobStatus.Failed); }); }); @@ -373,7 +373,7 @@ describe(PersonService.name, () => { await sut.createNewFeaturePhoto([personStub.newThumbnail.id]); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.GENERATE_PERSON_THUMBNAIL, + name: JobName.GeneratePersonThumbnail, data: { id: personStub.newThumbnail.id }, }, ]); @@ -447,7 +447,7 @@ describe(PersonService.name, () => { it('should skip if machine learning is disabled', async () => { mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled); - await expect(sut.handleQueueDetectFaces({})).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleQueueDetectFaces({})).resolves.toBe(JobStatus.Skipped); expect(mocks.job.queue).not.toHaveBeenCalled(); expect(mocks.job.queueAll).not.toHaveBeenCalled(); expect(mocks.systemMetadata.get).toHaveBeenCalled(); @@ -462,7 +462,7 @@ describe(PersonService.name, () => { expect(mocks.person.vacuum).not.toHaveBeenCalled(); 
expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACE_DETECTION, + name: JobName.FaceDetection, data: { id: assetStub.image.id }, }, ]); @@ -474,14 +474,14 @@ describe(PersonService.name, () => { await sut.handleQueueDetectFaces({ force: true }); - expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING }); + expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MachineLearning }); expect(mocks.person.delete).toHaveBeenCalledWith([personStub.withName.id]); expect(mocks.person.vacuum).toHaveBeenCalledWith({ reindexVectors: true }); expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath); expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACE_DETECTION, + name: JobName.FaceDetection, data: { id: assetStub.image.id }, }, ]); @@ -499,11 +499,11 @@ describe(PersonService.name, () => { expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(undefined); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACE_DETECTION, + name: JobName.FaceDetection, data: { id: assetStub.image.id }, }, ]); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.PERSON_CLEANUP }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.PersonCleanup }); }); it('should delete existing people and faces if forced', async () => { @@ -518,7 +518,7 @@ describe(PersonService.name, () => { expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACE_DETECTION, + name: JobName.FaceDetection, data: { id: assetStub.image.id }, }, ]); @@ -540,7 +540,7 @@ describe(PersonService.name, () => { }); mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled); - await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED); + await 
expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.Skipped); expect(mocks.job.queueAll).not.toHaveBeenCalled(); expect(mocks.systemMetadata.get).toHaveBeenCalled(); expect(mocks.systemMetadata.set).not.toHaveBeenCalled(); @@ -556,7 +556,7 @@ describe(PersonService.name, () => { delayed: 0, }); - await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.Skipped); expect(mocks.job.queueAll).not.toHaveBeenCalled(); expect(mocks.systemMetadata.set).not.toHaveBeenCalled(); }); @@ -577,15 +577,15 @@ describe(PersonService.name, () => { expect(mocks.person.getAllFaces).toHaveBeenCalledWith({ personId: null, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACIAL_RECOGNITION, + name: JobName.FacialRecognition, data: { id: faceStub.face1.id, deferred: false }, }, ]); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE, { + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState, { lastRun: expect.any(String), }); expect(mocks.person.vacuum).not.toHaveBeenCalled(); @@ -609,11 +609,11 @@ describe(PersonService.name, () => { expect(mocks.person.getAllFaces).toHaveBeenCalledWith(undefined); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACIAL_RECOGNITION, + name: JobName.FacialRecognition, data: { id: faceStub.face1.id, deferred: false }, }, ]); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE, { + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState, { lastRun: expect.any(String), }); expect(mocks.person.vacuum).toHaveBeenCalledWith({ reindexVectors: false }); @@ -637,19 +637,19 @@ describe(PersonService.name, () => { await sut.handleQueueRecognizeFaces({ force: 
false, nightly: true }); - expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE); + expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState); expect(mocks.person.getLatestFaceDate).toHaveBeenCalledOnce(); expect(mocks.person.getAllFaces).toHaveBeenCalledWith({ personId: null, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACIAL_RECOGNITION, + name: JobName.FacialRecognition, data: { id: faceStub.face1.id, deferred: false }, }, ]); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE, { + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState, { lastRun: expect.any(String), }); expect(mocks.person.vacuum).not.toHaveBeenCalled(); @@ -665,7 +665,7 @@ describe(PersonService.name, () => { await sut.handleQueueRecognizeFaces({ force: true, nightly: true }); - expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE); + expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState); expect(mocks.person.getLatestFaceDate).toHaveBeenCalledOnce(); expect(mocks.person.getAllFaces).not.toHaveBeenCalled(); expect(mocks.job.queueAll).not.toHaveBeenCalled(); @@ -690,10 +690,10 @@ describe(PersonService.name, () => { await sut.handleQueueRecognizeFaces({ force: true }); expect(mocks.person.deleteFaces).not.toHaveBeenCalled(); - expect(mocks.person.unassignFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING }); + expect(mocks.person.unassignFaces).toHaveBeenCalledWith({ sourceType: SourceType.MachineLearning }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.FACIAL_RECOGNITION, + name: JobName.FacialRecognition, data: { id: faceStub.face1.id, deferred: false }, }, ]); @@ -711,7 +711,7 @@ 
describe(PersonService.name, () => { it('should skip if machine learning is disabled', async () => { mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled); - await expect(sut.handleDetectFaces({ id: 'foo' })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleDetectFaces({ id: 'foo' })).resolves.toBe(JobStatus.Skipped); expect(mocks.asset.getByIds).not.toHaveBeenCalled(); expect(mocks.systemMetadata.get).toHaveBeenCalled(); }); @@ -754,8 +754,8 @@ describe(PersonService.name, () => { expect(mocks.person.refreshFaces).toHaveBeenCalledWith([face], [], [faceSearch]); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, - { name: JobName.FACIAL_RECOGNITION, data: { id: faceId } }, + { name: JobName.QueueFacialRecognition, data: { force: false } }, + { name: JobName.FacialRecognition, data: { id: faceId } }, ]); expect(mocks.person.reassignFace).not.toHaveBeenCalled(); expect(mocks.person.reassignFaces).not.toHaveBeenCalled(); @@ -790,8 +790,8 @@ describe(PersonService.name, () => { expect(mocks.person.refreshFaces).toHaveBeenCalledWith([face], [faceStub.primaryFace1.id], [faceSearch]); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, - { name: JobName.FACIAL_RECOGNITION, data: { id: faceId } }, + { name: JobName.QueueFacialRecognition, data: { force: false } }, + { name: JobName.FacialRecognition, data: { id: faceId } }, ]); expect(mocks.person.reassignFace).not.toHaveBeenCalled(); expect(mocks.person.reassignFaces).not.toHaveBeenCalled(); @@ -830,8 +830,8 @@ describe(PersonService.name, () => { expect(mocks.person.refreshFaces).toHaveBeenCalledWith([face], [], [faceSearch]); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, - { name: JobName.FACIAL_RECOGNITION, data: { id: faceId } }, + { name: JobName.QueueFacialRecognition, 
data: { force: false } }, + { name: JobName.FacialRecognition, data: { id: faceId } }, ]); expect(mocks.person.reassignFace).not.toHaveBeenCalled(); expect(mocks.person.reassignFaces).not.toHaveBeenCalled(); @@ -840,7 +840,7 @@ describe(PersonService.name, () => { describe('handleRecognizeFaces', () => { it('should fail if face does not exist', async () => { - expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.FAILED); + expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.Failed); expect(mocks.person.reassignFaces).not.toHaveBeenCalled(); expect(mocks.person.create).not.toHaveBeenCalled(); @@ -850,7 +850,7 @@ describe(PersonService.name, () => { const face = { ...faceStub.face1, asset: null }; mocks.person.getFaceForFacialRecognitionJob.mockResolvedValue(face); - expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.FAILED); + expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.Failed); expect(mocks.person.reassignFaces).not.toHaveBeenCalled(); expect(mocks.person.create).not.toHaveBeenCalled(); @@ -859,7 +859,7 @@ describe(PersonService.name, () => { it('should skip if face already has an assigned person', async () => { mocks.person.getFaceForFacialRecognitionJob.mockResolvedValue(faceStub.face1); - expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.SKIPPED); + expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.Skipped); expect(mocks.person.reassignFaces).not.toHaveBeenCalled(); expect(mocks.person.create).not.toHaveBeenCalled(); @@ -1008,7 +1008,7 @@ describe(PersonService.name, () => { await sut.handleRecognizeFaces({ id: faceStub.noPerson1.id }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.FACIAL_RECOGNITION, + name: JobName.FacialRecognition, data: { id: faceStub.noPerson1.id, deferred: true }, }); expect(mocks.search.searchFaces).toHaveBeenCalledTimes(1); @@ -1161,7 
+1161,7 @@ describe(PersonService.name, () => { id: faceStub.face1.id, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, person: mapPerson(personStub.withName), }); }); diff --git a/server/src/services/person.service.ts b/server/src/services/person.service.ts index af34e6eda9..d8925578d4 100644 --- a/server/src/services/person.service.ts +++ b/server/src/services/person.service.ts @@ -78,7 +78,7 @@ export class PersonService extends BaseService { } async reassignFaces(auth: AuthDto, personId: string, dto: AssetFaceUpdateDto): Promise { - await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [personId] }); + await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [personId] }); const person = await this.findOrFail(personId); const result: PersonResponseDto[] = []; const changeFeaturePhoto: string[] = []; @@ -86,7 +86,7 @@ export class PersonService extends BaseService { const faces = await this.personRepository.getFacesByIds([{ personId: data.personId, assetId: data.assetId }]); for (const face of faces) { - await this.requireAccess({ auth, permission: Permission.PERSON_CREATE, ids: [face.id] }); + await this.requireAccess({ auth, permission: Permission.PersonCreate, ids: [face.id] }); if (person.faceAssetId === null) { changeFeaturePhoto.push(person.id); } @@ -107,8 +107,8 @@ export class PersonService extends BaseService { } async reassignFacesById(auth: AuthDto, personId: string, dto: FaceDto): Promise { - await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [personId] }); - await this.requireAccess({ auth, permission: Permission.PERSON_CREATE, ids: [dto.id] }); + await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [personId] }); + await this.requireAccess({ auth, permission: Permission.PersonCreate, ids: [dto.id] }); const face = await this.personRepository.getFaceById(dto.id); const person = await 
this.findOrFail(personId); @@ -124,7 +124,7 @@ export class PersonService extends BaseService { } async getFacesById(auth: AuthDto, dto: FaceDto): Promise { - await this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [dto.id] }); + await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.id] }); const faces = await this.personRepository.getFaces(dto.id); return faces.map((asset) => mapFaces(asset, auth)); } @@ -140,7 +140,7 @@ export class PersonService extends BaseService { if (assetFace) { await this.personRepository.update({ id: personId, faceAssetId: assetFace.id }); - jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: personId } }); + jobs.push({ name: JobName.GeneratePersonThumbnail, data: { id: personId } }); } } @@ -148,17 +148,17 @@ export class PersonService extends BaseService { } async getById(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.PersonRead, ids: [id] }); return this.findOrFail(id).then(mapPerson); } async getStatistics(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.PersonRead, ids: [id] }); return this.personRepository.getStatistics(id); } async getThumbnail(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.PersonRead, ids: [id] }); const person = await this.personRepository.getById(id); if (!person || !person.thumbnailPath) { throw new NotFoundException(); @@ -167,7 +167,7 @@ export class PersonService extends BaseService { return new ImmichFileResponse({ path: person.thumbnailPath, contentType: mimeTypes.lookup(person.thumbnailPath), - cacheControl: CacheControl.PRIVATE_WITHOUT_CACHE, + cacheControl: 
CacheControl.PrivateWithoutCache, }); } @@ -185,13 +185,13 @@ export class PersonService extends BaseService { } async update(auth: AuthDto, id: string, dto: PersonUpdateDto): Promise { - await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [id] }); const { name, birthDate, isHidden, featureFaceAssetId: assetId, isFavorite, color } = dto; // TODO: set by faceId directly let faceId: string | undefined = undefined; if (assetId) { - await this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [assetId] }); + await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [assetId] }); const [face] = await this.personRepository.getFacesByIds([{ personId: id, assetId }]); if (!face) { throw new BadRequestException('Invalid assetId for feature face'); @@ -211,7 +211,7 @@ export class PersonService extends BaseService { }); if (assetId) { - await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } }); + await this.jobRepository.queue({ name: JobName.GeneratePersonThumbnail, data: { id } }); } return mapPerson(person); @@ -242,7 +242,7 @@ export class PersonService extends BaseService { } async deleteAll(auth: AuthDto, { ids }: BulkIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.PERSON_DELETE, ids }); + await this.requireAccess({ auth, permission: Permission.PersonDelete, ids }); const people = await this.personRepository.getForPeopleDelete(ids); await this.removeAllPeople(people); } @@ -254,22 +254,22 @@ export class PersonService extends BaseService { this.logger.debug(`Deleted ${people.length} people`); } - @OnJob({ name: JobName.PERSON_CLEANUP, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.PersonCleanup, queue: QueueName.BackgroundTask }) async handlePersonCleanup(): Promise { const people = await this.personRepository.getAllWithoutFaces(); await 
this.removeAllPeople(people); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.QUEUE_FACE_DETECTION, queue: QueueName.FACE_DETECTION }) - async handleQueueDetectFaces({ force }: JobOf): Promise { + @OnJob({ name: JobName.QueueFaceDetection, queue: QueueName.FaceDetection }) + async handleQueueDetectFaces({ force }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: false }); if (!isFacialRecognitionEnabled(machineLearning)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } if (force) { - await this.personRepository.deleteFaces({ sourceType: SourceType.MACHINE_LEARNING }); + await this.personRepository.deleteFaces({ sourceType: SourceType.MachineLearning }); await this.handlePersonCleanup(); await this.personRepository.vacuum({ reindexVectors: true }); } @@ -277,7 +277,7 @@ export class PersonService extends BaseService { let jobs: JobItem[] = []; const assets = this.assetJobRepository.streamForDetectFacesJob(force); for await (const asset of assets) { - jobs.push({ name: JobName.FACE_DETECTION, data: { id: asset.id } }); + jobs.push({ name: JobName.FaceDetection, data: { id: asset.id } }); if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) { await this.jobRepository.queueAll(jobs); @@ -288,27 +288,27 @@ export class PersonService extends BaseService { await this.jobRepository.queueAll(jobs); if (force === undefined) { - await this.jobRepository.queue({ name: JobName.PERSON_CLEANUP }); + await this.jobRepository.queue({ name: JobName.PersonCleanup }); } - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.FACE_DETECTION, queue: QueueName.FACE_DETECTION }) - async handleDetectFaces({ id }: JobOf): Promise { + @OnJob({ name: JobName.FaceDetection, queue: QueueName.FaceDetection }) + async handleDetectFaces({ id }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: true }); if (!isFacialRecognitionEnabled(machineLearning)) { - return 
JobStatus.SKIPPED; + return JobStatus.Skipped; } const asset = await this.assetJobRepository.getForDetectFacesJob(id); const previewFile = asset?.files[0]; if (!asset || asset.files.length !== 1 || !previewFile) { - return JobStatus.FAILED; + return JobStatus.Failed; } - if (asset.visibility === AssetVisibility.HIDDEN) { - return JobStatus.SKIPPED; + if (asset.visibility === AssetVisibility.Hidden) { + return JobStatus.Skipped; } const { imageHeight, imageWidth, faces } = await this.machineLearningRepository.detectFaces( @@ -323,7 +323,7 @@ export class PersonService extends BaseService { const mlFaceIds = new Set(); for (const face of asset.faces) { - if (face.sourceType === SourceType.MACHINE_LEARNING) { + if (face.sourceType === SourceType.MachineLearning) { mlFaceIds.add(face.id); } } @@ -368,15 +368,15 @@ export class PersonService extends BaseService { if (facesToAdd.length > 0) { this.logger.log(`Detected ${facesToAdd.length} new faces in asset ${id}`); - const jobs = facesToAdd.map((face) => ({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id } }) as const); - await this.jobRepository.queueAll([{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, ...jobs]); + const jobs = facesToAdd.map((face) => ({ name: JobName.FacialRecognition, data: { id: face.id } }) as const); + await this.jobRepository.queueAll([{ name: JobName.QueueFacialRecognition, data: { force: false } }, ...jobs]); } else if (embeddings.length > 0) { this.logger.log(`Added ${embeddings.length} face embeddings for asset ${id}`); } await this.assetRepository.upsertJobStatus({ assetId: asset.id, facesRecognizedAt: new Date() }); - return JobStatus.SUCCESS; + return JobStatus.Success; } private iou( @@ -396,50 +396,50 @@ export class PersonService extends BaseService { return intersection / union; } - @OnJob({ name: JobName.QUEUE_FACIAL_RECOGNITION, queue: QueueName.FACIAL_RECOGNITION }) - async handleQueueRecognizeFaces({ force, nightly }: JobOf): Promise { + @OnJob({ name: 
JobName.QueueFacialRecognition, queue: QueueName.FacialRecognition }) + async handleQueueRecognizeFaces({ force, nightly }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: false }); if (!isFacialRecognitionEnabled(machineLearning)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } - await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION); + await this.jobRepository.waitForQueueCompletion(QueueName.ThumbnailGeneration, QueueName.FaceDetection); if (nightly) { const [state, latestFaceDate] = await Promise.all([ - this.systemMetadataRepository.get(SystemMetadataKey.FACIAL_RECOGNITION_STATE), + this.systemMetadataRepository.get(SystemMetadataKey.FacialRecognitionState), this.personRepository.getLatestFaceDate(), ]); if (state?.lastRun && latestFaceDate && state.lastRun > latestFaceDate) { this.logger.debug('Skipping facial recognition nightly since no face has been added since the last run'); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } } - const { waiting } = await this.jobRepository.getJobCounts(QueueName.FACIAL_RECOGNITION); + const { waiting } = await this.jobRepository.getJobCounts(QueueName.FacialRecognition); if (force) { - await this.personRepository.unassignFaces({ sourceType: SourceType.MACHINE_LEARNING }); + await this.personRepository.unassignFaces({ sourceType: SourceType.MachineLearning }); await this.handlePersonCleanup(); await this.personRepository.vacuum({ reindexVectors: false }); } else if (waiting) { this.logger.debug( `Skipping facial recognition queueing because ${waiting} job${waiting > 1 ? 's are' : ' is'} already queued`, ); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } - await this.databaseRepository.prewarm(VectorIndex.FACE); + await this.databaseRepository.prewarm(VectorIndex.Face); const lastRun = new Date().toISOString(); const facePagination = this.personRepository.getAllFaces( - force ? 
undefined : { personId: null, sourceType: SourceType.MACHINE_LEARNING }, + force ? undefined : { personId: null, sourceType: SourceType.MachineLearning }, ); - let jobs: { name: JobName.FACIAL_RECOGNITION; data: { id: string; deferred: false } }[] = []; + let jobs: { name: JobName.FacialRecognition; data: { id: string; deferred: false } }[] = []; for await (const face of facePagination) { - jobs.push({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id, deferred: false } }); + jobs.push({ name: JobName.FacialRecognition, data: { id: face.id, deferred: false } }); if (jobs.length === JOBS_ASSET_PAGINATION_SIZE) { await this.jobRepository.queueAll(jobs); @@ -449,37 +449,37 @@ export class PersonService extends BaseService { await this.jobRepository.queueAll(jobs); - await this.systemMetadataRepository.set(SystemMetadataKey.FACIAL_RECOGNITION_STATE, { lastRun }); + await this.systemMetadataRepository.set(SystemMetadataKey.FacialRecognitionState, { lastRun }); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.FACIAL_RECOGNITION, queue: QueueName.FACIAL_RECOGNITION }) - async handleRecognizeFaces({ id, deferred }: JobOf): Promise { + @OnJob({ name: JobName.FacialRecognition, queue: QueueName.FacialRecognition }) + async handleRecognizeFaces({ id, deferred }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: true }); if (!isFacialRecognitionEnabled(machineLearning)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const face = await this.personRepository.getFaceForFacialRecognitionJob(id); if (!face || !face.asset) { this.logger.warn(`Face ${id} not found`); - return JobStatus.FAILED; + return JobStatus.Failed; } - if (face.sourceType !== SourceType.MACHINE_LEARNING) { + if (face.sourceType !== SourceType.MachineLearning) { this.logger.warn(`Skipping face ${id} due to source ${face.sourceType}`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } if (!face.faceSearch?.embedding) { 
this.logger.warn(`Face ${id} does not have an embedding`); - return JobStatus.FAILED; + return JobStatus.Failed; } if (face.personId) { this.logger.debug(`Face ${id} already has a person assigned`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const matches = await this.searchRepository.searchFaces({ @@ -493,18 +493,18 @@ export class PersonService extends BaseService { // `matches` also includes the face itself if (machineLearning.facialRecognition.minFaces > 1 && matches.length <= 1) { this.logger.debug(`Face ${id} only matched the face itself, skipping`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } this.logger.debug(`Face ${id} has ${matches.length} matches`); const isCore = matches.length >= machineLearning.facialRecognition.minFaces && - face.asset.visibility === AssetVisibility.TIMELINE; + face.asset.visibility === AssetVisibility.Timeline; if (!isCore && !deferred) { this.logger.debug(`Deferring non-core face ${id} for later processing`); - await this.jobRepository.queue({ name: JobName.FACIAL_RECOGNITION, data: { id, deferred: true } }); - return JobStatus.SKIPPED; + await this.jobRepository.queue({ name: JobName.FacialRecognition, data: { id, deferred: true } }); + return JobStatus.Skipped; } let personId = matches.find((match) => match.personId)?.personId; @@ -526,7 +526,7 @@ export class PersonService extends BaseService { if (isCore && !personId) { this.logger.log(`Creating new person for face ${id}`); const newPerson = await this.personRepository.create({ ownerId: face.asset.ownerId, faceAssetId: face.id }); - await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } }); + await this.jobRepository.queue({ name: JobName.GeneratePersonThumbnail, data: { id: newPerson.id } }); personId = newPerson.id; } @@ -535,19 +535,19 @@ export class PersonService extends BaseService { await this.personRepository.reassignFaces({ faceIds: [id], newPersonId: personId }); } - return JobStatus.SUCCESS; 
+ return JobStatus.Success; } - @OnJob({ name: JobName.MIGRATE_PERSON, queue: QueueName.MIGRATION }) - async handlePersonMigration({ id }: JobOf): Promise { + @OnJob({ name: JobName.MigratePerson, queue: QueueName.Migration }) + async handlePersonMigration({ id }: JobOf): Promise { const person = await this.personRepository.getById(id); if (!person) { - return JobStatus.FAILED; + return JobStatus.Failed; } - await this.storageCore.movePersonFile(person, PersonPathType.FACE); + await this.storageCore.movePersonFile(person, PersonPathType.Face); - return JobStatus.SUCCESS; + return JobStatus.Success; } async mergePerson(auth: AuthDto, id: string, dto: MergePersonDto): Promise { @@ -556,7 +556,7 @@ export class PersonService extends BaseService { throw new BadRequestException('Cannot merge a person into themselves'); } - await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [id] }); let primaryPerson = await this.findOrFail(id); const primaryName = primaryPerson.name || primaryPerson.id; @@ -564,7 +564,7 @@ export class PersonService extends BaseService { const allowedIds = await this.checkAccess({ auth, - permission: Permission.PERSON_MERGE, + permission: Permission.PersonMerge, ids: mergeIds, }); @@ -623,8 +623,8 @@ export class PersonService extends BaseService { // TODO return a asset face response async createFace(auth: AuthDto, dto: AssetFaceCreateDto): Promise { await Promise.all([ - this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [dto.assetId] }), - this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [dto.personId] }), + this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.assetId] }), + this.requireAccess({ auth, permission: Permission.PersonRead, ids: [dto.personId] }), ]); await this.personRepository.createAssetFace({ @@ -636,12 +636,12 @@ export class PersonService extends BaseService { 
boundingBoxX2: dto.x + dto.width, boundingBoxY1: dto.y, boundingBoxY2: dto.y + dto.height, - sourceType: SourceType.MANUAL, + sourceType: SourceType.Manual, }); } async deleteFace(auth: AuthDto, id: string, dto: AssetFaceDeleteDto): Promise { - await this.requireAccess({ auth, permission: Permission.FACE_DELETE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.FaceDelete, ids: [id] }); return dto.force ? this.personRepository.deleteAssetFace(id) : this.personRepository.softDeleteAssetFaces(id); } diff --git a/server/src/services/search.service.ts b/server/src/services/search.service.ts index a10c01e8d3..1c75c4a434 100644 --- a/server/src/services/search.service.ts +++ b/server/src/services/search.service.ts @@ -46,7 +46,7 @@ export class SearchService extends BaseService { } async searchMetadata(auth: AuthDto, dto: MetadataSearchDto): Promise { - if (dto.visibility === AssetVisibility.LOCKED) { + if (dto.visibility === AssetVisibility.Locked) { requireElevatedPermission(auth); } @@ -65,7 +65,7 @@ export class SearchService extends BaseService { ...dto, checksum, userIds, - orderDirection: dto.order ?? AssetOrder.DESC, + orderDirection: dto.order ?? 
AssetOrder.Desc, }, ); @@ -82,7 +82,7 @@ export class SearchService extends BaseService { } async searchRandom(auth: AuthDto, dto: RandomSearchDto): Promise { - if (dto.visibility === AssetVisibility.LOCKED) { + if (dto.visibility === AssetVisibility.Locked) { requireElevatedPermission(auth); } @@ -92,7 +92,7 @@ export class SearchService extends BaseService { } async searchSmart(auth: AuthDto, dto: SmartSearchDto): Promise { - if (dto.visibility === AssetVisibility.LOCKED) { + if (dto.visibility === AssetVisibility.Locked) { requireElevatedPermission(auth); } diff --git a/server/src/services/server.service.spec.ts b/server/src/services/server.service.spec.ts index 05ebda6a94..0ddf3d69b1 100644 --- a/server/src/services/server.service.spec.ts +++ b/server/src/services/server.service.spec.ts @@ -256,7 +256,7 @@ describe(ServerService.name, () => { const license = { licenseKey: 'IMSV-license-key', activationKey: 'activation-key' }; await sut.setLicense(license); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.LICENSE, expect.any(Object)); + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.License, expect.any(Object)); }); it('should not save license if invalid', async () => { diff --git a/server/src/services/server.service.ts b/server/src/services/server.service.ts index 5ad93b40ef..dae484cce8 100644 --- a/server/src/services/server.service.ts +++ b/server/src/services/server.service.ts @@ -27,7 +27,7 @@ export class ServerService extends BaseService { async onBootstrap(): Promise { const featureFlags = await this.getFeatures(); if (featureFlags.configFile) { - await this.systemMetadataRepository.set(SystemMetadataKey.ADMIN_ONBOARDING, { + await this.systemMetadataRepository.set(SystemMetadataKey.AdminOnboarding, { isOnboarded: true, }); } @@ -38,7 +38,7 @@ export class ServerService extends BaseService { const version = `v${serverVersion.toString()}`; const { buildMetadata } = this.configRepository.getEnv(); const 
buildVersions = await this.serverInfoRepository.getBuildVersions(); - const licensed = await this.systemMetadataRepository.get(SystemMetadataKey.LICENSE); + const licensed = await this.systemMetadataRepository.get(SystemMetadataKey.License); return { version, @@ -60,7 +60,7 @@ export class ServerService extends BaseService { } async getStorage(): Promise { - const libraryBase = StorageCore.getBaseFolder(StorageFolder.LIBRARY); + const libraryBase = StorageCore.getBaseFolder(StorageFolder.Library); const diskInfo = await this.storageRepository.checkDiskUsage(libraryBase); const usagePercentage = (((diskInfo.total - diskInfo.free) / diskInfo.total) * 100).toFixed(2); @@ -111,7 +111,7 @@ export class ServerService extends BaseService { async getSystemConfig(): Promise { const config = await this.getConfig({ withCache: false }); const isInitialized = await this.userRepository.hasAdmin(); - const onboarding = await this.systemMetadataRepository.get(SystemMetadataKey.ADMIN_ONBOARDING); + const onboarding = await this.systemMetadataRepository.get(SystemMetadataKey.AdminOnboarding); return { loginPageMessage: config.server.loginPageMessage, @@ -163,11 +163,11 @@ export class ServerService extends BaseService { } async deleteLicense(): Promise { - await this.systemMetadataRepository.delete(SystemMetadataKey.LICENSE); + await this.systemMetadataRepository.delete(SystemMetadataKey.License); } async getLicense(): Promise { - const license = await this.systemMetadataRepository.get(SystemMetadataKey.LICENSE); + const license = await this.systemMetadataRepository.get(SystemMetadataKey.License); if (!license) { throw new NotFoundException(); } @@ -186,7 +186,7 @@ export class ServerService extends BaseService { const licenseData = { ...dto, activatedAt: new Date() }; - await this.systemMetadataRepository.set(SystemMetadataKey.LICENSE, licenseData); + await this.systemMetadataRepository.set(SystemMetadataKey.License, licenseData); return licenseData; } diff --git 
a/server/src/services/session.service.spec.ts b/server/src/services/session.service.spec.ts index 7ac338da80..3cbad28389 100644 --- a/server/src/services/session.service.spec.ts +++ b/server/src/services/session.service.spec.ts @@ -19,7 +19,7 @@ describe('SessionService', () => { describe('handleCleanup', () => { it('should clean sessions', async () => { mocks.session.cleanup.mockResolvedValue([]); - await expect(sut.handleCleanup()).resolves.toEqual(JobStatus.SUCCESS); + await expect(sut.handleCleanup()).resolves.toEqual(JobStatus.Success); }); }); diff --git a/server/src/services/session.service.ts b/server/src/services/session.service.ts index 198e380c53..b7bb5cb6fa 100644 --- a/server/src/services/session.service.ts +++ b/server/src/services/session.service.ts @@ -14,7 +14,7 @@ import { BaseService } from 'src/services/base.service'; @Injectable() export class SessionService extends BaseService { - @OnJob({ name: JobName.CLEAN_OLD_SESSION_TOKENS, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.CleanOldSessionTokens, queue: QueueName.BackgroundTask }) async handleCleanup(): Promise { const sessions = await this.sessionRepository.cleanup(); for (const session of sessions) { @@ -23,7 +23,7 @@ export class SessionService extends BaseService { this.logger.log(`Deleted ${sessions.length} expired session tokens`); - return JobStatus.SUCCESS; + return JobStatus.Success; } async create(auth: AuthDto, dto: SessionCreateDto): Promise { @@ -51,7 +51,7 @@ export class SessionService extends BaseService { } async update(auth: AuthDto, id: string, dto: SessionUpdateDto): Promise { - await this.requireAccess({ auth, permission: Permission.SESSION_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.SessionUpdate, ids: [id] }); if (Object.values(dto).filter((prop) => prop !== undefined).length === 0) { throw new BadRequestException('No fields to update'); @@ -65,12 +65,12 @@ export class SessionService extends BaseService { } async 
delete(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.AUTH_DEVICE_DELETE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.AuthDeviceDelete, ids: [id] }); await this.sessionRepository.delete(id); } async lock(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.SESSION_LOCK, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.SessionLock, ids: [id] }); await this.sessionRepository.update(id, { pinExpiresAt: null }); } diff --git a/server/src/services/shared-link.service.spec.ts b/server/src/services/shared-link.service.spec.ts index b3b4c4b1cf..8e09580d55 100644 --- a/server/src/services/shared-link.service.spec.ts +++ b/server/src/services/shared-link.service.spec.ts @@ -95,26 +95,26 @@ describe(SharedLinkService.name, () => { describe('create', () => { it('should not allow an album shared link without an albumId', async () => { - await expect(sut.create(authStub.admin, { type: SharedLinkType.ALBUM, assetIds: [] })).rejects.toBeInstanceOf( + await expect(sut.create(authStub.admin, { type: SharedLinkType.Album, assetIds: [] })).rejects.toBeInstanceOf( BadRequestException, ); }); it('should not allow non-owners to create album shared links', async () => { await expect( - sut.create(authStub.admin, { type: SharedLinkType.ALBUM, assetIds: [], albumId: 'album-1' }), + sut.create(authStub.admin, { type: SharedLinkType.Album, assetIds: [], albumId: 'album-1' }), ).rejects.toBeInstanceOf(BadRequestException); }); it('should not allow individual shared links with no assets', async () => { await expect( - sut.create(authStub.admin, { type: SharedLinkType.INDIVIDUAL, assetIds: [] }), + sut.create(authStub.admin, { type: SharedLinkType.Individual, assetIds: [] }), ).rejects.toBeInstanceOf(BadRequestException); }); it('should require asset ownership to make an individual shared link', async () => { await expect( - sut.create(authStub.admin, { 
type: SharedLinkType.INDIVIDUAL, assetIds: ['asset-1'] }), + sut.create(authStub.admin, { type: SharedLinkType.Individual, assetIds: ['asset-1'] }), ).rejects.toBeInstanceOf(BadRequestException); }); @@ -122,14 +122,14 @@ describe(SharedLinkService.name, () => { mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.oneAsset.id])); mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.valid); - await sut.create(authStub.admin, { type: SharedLinkType.ALBUM, albumId: albumStub.oneAsset.id }); + await sut.create(authStub.admin, { type: SharedLinkType.Album, albumId: albumStub.oneAsset.id }); expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith( authStub.admin.user.id, new Set([albumStub.oneAsset.id]), ); expect(mocks.sharedLink.create).toHaveBeenCalledWith({ - type: SharedLinkType.ALBUM, + type: SharedLinkType.Album, userId: authStub.admin.user.id, albumId: albumStub.oneAsset.id, allowDownload: true, @@ -146,7 +146,7 @@ describe(SharedLinkService.name, () => { mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual); await sut.create(authStub.admin, { - type: SharedLinkType.INDIVIDUAL, + type: SharedLinkType.Individual, assetIds: [assetStub.image.id], showMetadata: true, allowDownload: true, @@ -159,7 +159,7 @@ describe(SharedLinkService.name, () => { false, ); expect(mocks.sharedLink.create).toHaveBeenCalledWith({ - type: SharedLinkType.INDIVIDUAL, + type: SharedLinkType.Individual, userId: authStub.admin.user.id, albumId: null, allowDownload: true, @@ -177,7 +177,7 @@ describe(SharedLinkService.name, () => { mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual); await sut.create(authStub.admin, { - type: SharedLinkType.INDIVIDUAL, + type: SharedLinkType.Individual, assetIds: [assetStub.image.id], showMetadata: false, allowDownload: true, @@ -190,7 +190,7 @@ describe(SharedLinkService.name, () => { false, ); expect(mocks.sharedLink.create).toHaveBeenCalledWith({ - type: SharedLinkType.INDIVIDUAL, + type: 
SharedLinkType.Individual, userId: authStub.admin.user.id, albumId: null, allowDownload: false, diff --git a/server/src/services/shared-link.service.ts b/server/src/services/shared-link.service.ts index c70b31a3a1..9f8e238c43 100644 --- a/server/src/services/shared-link.service.ts +++ b/server/src/services/shared-link.service.ts @@ -45,20 +45,20 @@ export class SharedLinkService extends BaseService { async create(auth: AuthDto, dto: SharedLinkCreateDto): Promise { switch (dto.type) { - case SharedLinkType.ALBUM: { + case SharedLinkType.Album: { if (!dto.albumId) { throw new BadRequestException('Invalid albumId'); } - await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [dto.albumId] }); + await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [dto.albumId] }); break; } - case SharedLinkType.INDIVIDUAL: { + case SharedLinkType.Individual: { if (!dto.assetIds || dto.assetIds.length === 0) { throw new BadRequestException('Invalid assetIds'); } - await this.requireAccess({ auth, permission: Permission.ASSET_SHARE, ids: dto.assetIds }); + await this.requireAccess({ auth, permission: Permission.AssetShare, ids: dto.assetIds }); break; } @@ -113,7 +113,7 @@ export class SharedLinkService extends BaseService { async addAssets(auth: AuthDto, id: string, dto: AssetIdsDto): Promise { const sharedLink = await this.findOrFail(auth.user.id, id); - if (sharedLink.type !== SharedLinkType.INDIVIDUAL) { + if (sharedLink.type !== SharedLinkType.Individual) { throw new BadRequestException('Invalid shared link type'); } @@ -121,7 +121,7 @@ export class SharedLinkService extends BaseService { const notPresentAssetIds = dto.assetIds.filter((assetId) => !existingAssetIds.has(assetId)); const allowedAssetIds = await this.checkAccess({ auth, - permission: Permission.ASSET_SHARE, + permission: Permission.AssetShare, ids: notPresentAssetIds, }); @@ -153,7 +153,7 @@ export class SharedLinkService extends BaseService { async removeAssets(auth: AuthDto, id: 
string, dto: AssetIdsDto): Promise { const sharedLink = await this.findOrFail(auth.user.id, id); - if (sharedLink.type !== SharedLinkType.INDIVIDUAL) { + if (sharedLink.type !== SharedLinkType.Individual) { throw new BadRequestException('Invalid shared link type'); } diff --git a/server/src/services/smart-info.service.spec.ts b/server/src/services/smart-info.service.spec.ts index a6529fa623..edd9f4663a 100644 --- a/server/src/services/smart-info.service.spec.ts +++ b/server/src/services/smart-info.service.spec.ts @@ -14,7 +14,7 @@ describe(SmartInfoService.name, () => { ({ sut, mocks } = newTestService(SmartInfoService)); mocks.asset.getByIds.mockResolvedValue([assetStub.image]); - mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES); + mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices); }); it('should work', () => { @@ -160,7 +160,7 @@ describe(SmartInfoService.name, () => { await sut.handleQueueEncodeClip({ force: false }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }, + { name: JobName.SmartSearch, data: { id: assetStub.image.id } }, ]); expect(mocks.assetJob.streamForEncodeClip).toHaveBeenCalledWith(false); expect(mocks.database.setDimensionSize).not.toHaveBeenCalled(); @@ -172,7 +172,7 @@ describe(SmartInfoService.name, () => { await sut.handleQueueEncodeClip({ force: true }); expect(mocks.job.queueAll).toHaveBeenCalledWith([ - { name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }, + { name: JobName.SmartSearch, data: { id: assetStub.image.id } }, ]); expect(mocks.assetJob.streamForEncodeClip).toHaveBeenCalledWith(true); expect(mocks.database.setDimensionSize).toHaveBeenCalledExactlyOnceWith(512); @@ -183,7 +183,7 @@ describe(SmartInfoService.name, () => { it('should do nothing if machine learning is disabled', async () => { mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled); - expect(await sut.handleEncodeClip({ 
id: '123' })).toEqual(JobStatus.SKIPPED); + expect(await sut.handleEncodeClip({ id: '123' })).toEqual(JobStatus.Skipped); expect(mocks.asset.getByIds).not.toHaveBeenCalled(); expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled(); @@ -192,7 +192,7 @@ describe(SmartInfoService.name, () => { it('should skip assets without a resize path', async () => { mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.noResizePath, files: [] }); - expect(await sut.handleEncodeClip({ id: assetStub.noResizePath.id })).toEqual(JobStatus.FAILED); + expect(await sut.handleEncodeClip({ id: assetStub.noResizePath.id })).toEqual(JobStatus.Failed); expect(mocks.search.upsert).not.toHaveBeenCalled(); expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled(); @@ -202,7 +202,7 @@ describe(SmartInfoService.name, () => { mocks.machineLearning.encodeImage.mockResolvedValue('[0.01, 0.02, 0.03]'); mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] }); - expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.SUCCESS); + expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Success); expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith( ['http://immich-machine-learning:3003'], @@ -218,7 +218,7 @@ describe(SmartInfoService.name, () => { files: [assetStub.image.files[1]], }); - expect(await sut.handleEncodeClip({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.SKIPPED); + expect(await sut.handleEncodeClip({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.Skipped); expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled(); expect(mocks.search.upsert).not.toHaveBeenCalled(); @@ -227,7 +227,7 @@ describe(SmartInfoService.name, () => { it('should fail if asset could not be found', async () => { mocks.assetJob.getForClipEncoding.mockResolvedValue(void 0); - expect(await sut.handleEncodeClip({ id: assetStub.image.id 
})).toEqual(JobStatus.FAILED); + expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Failed); expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled(); expect(mocks.search.upsert).not.toHaveBeenCalled(); @@ -238,7 +238,7 @@ describe(SmartInfoService.name, () => { mocks.database.isBusy.mockReturnValue(true); mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] }); - expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.SUCCESS); + expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Success); expect(mocks.database.wait).toHaveBeenCalledWith(512); expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith( diff --git a/server/src/services/smart-info.service.ts b/server/src/services/smart-info.service.ts index d6e30c6d86..5ce47593f4 100644 --- a/server/src/services/smart-info.service.ts +++ b/server/src/services/smart-info.service.ts @@ -10,12 +10,12 @@ import { getCLIPModelInfo, isSmartSearchEnabled } from 'src/utils/misc'; @Injectable() export class SmartInfoService extends BaseService { - @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.MICROSERVICES] }) + @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] }) async onConfigInit({ newConfig }: ArgOf<'ConfigInit'>) { await this.init(newConfig); } - @OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.MICROSERVICES], server: true }) + @OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.Microservices], server: true }) async onConfigUpdate({ oldConfig, newConfig }: ArgOf<'ConfigUpdate'>) { await this.init(newConfig, oldConfig); } @@ -64,11 +64,11 @@ export class SmartInfoService extends BaseService { }); } - @OnJob({ name: JobName.QUEUE_SMART_SEARCH, queue: QueueName.SMART_SEARCH }) - async handleQueueEncodeClip({ force }: JobOf): Promise { + @OnJob({ name: JobName.QueueSmartSearch, queue: QueueName.SmartSearch }) + async 
handleQueueEncodeClip({ force }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: false }); if (!isSmartSearchEnabled(machineLearning)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } if (force) { @@ -80,7 +80,7 @@ export class SmartInfoService extends BaseService { let queue: JobItem[] = []; const assets = this.assetJobRepository.streamForEncodeClip(force); for await (const asset of assets) { - queue.push({ name: JobName.SMART_SEARCH, data: { id: asset.id } }); + queue.push({ name: JobName.SmartSearch, data: { id: asset.id } }); if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) { await this.jobRepository.queueAll(queue); queue = []; @@ -89,23 +89,23 @@ export class SmartInfoService extends BaseService { await this.jobRepository.queueAll(queue); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.SMART_SEARCH, queue: QueueName.SMART_SEARCH }) - async handleEncodeClip({ id }: JobOf): Promise { + @OnJob({ name: JobName.SmartSearch, queue: QueueName.SmartSearch }) + async handleEncodeClip({ id }: JobOf): Promise { const { machineLearning } = await this.getConfig({ withCache: true }); if (!isSmartSearchEnabled(machineLearning)) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const asset = await this.assetJobRepository.getForClipEncoding(id); if (!asset || asset.files.length !== 1) { - return JobStatus.FAILED; + return JobStatus.Failed; } - if (asset.visibility === AssetVisibility.HIDDEN) { - return JobStatus.SKIPPED; + if (asset.visibility === AssetVisibility.Hidden) { + return JobStatus.Skipped; } const embedding = await this.machineLearningRepository.encodeImage( @@ -122,11 +122,11 @@ export class SmartInfoService extends BaseService { const newConfig = await this.getConfig({ withCache: true }); if (machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) { // Skip the job if the the model has changed since the embedding was generated. 
- return JobStatus.SKIPPED; + return JobStatus.Skipped; } await this.searchRepository.upsert(asset.id, embedding); - return JobStatus.SUCCESS; + return JobStatus.Success; } } diff --git a/server/src/services/stack.service.ts b/server/src/services/stack.service.ts index b2ac47274f..18600abd12 100644 --- a/server/src/services/stack.service.ts +++ b/server/src/services/stack.service.ts @@ -17,7 +17,7 @@ export class StackService extends BaseService { } async create(auth: AuthDto, dto: StackCreateDto): Promise { - await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: dto.assetIds }); + await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: dto.assetIds }); const stack = await this.stackRepository.create({ ownerId: auth.user.id }, dto.assetIds); @@ -27,13 +27,13 @@ export class StackService extends BaseService { } async get(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.STACK_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.StackRead, ids: [id] }); const stack = await this.findOrFail(id); return mapStack(stack, { auth }); } async update(auth: AuthDto, id: string, dto: StackUpdateDto): Promise { - await this.requireAccess({ auth, permission: Permission.STACK_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.StackUpdate, ids: [id] }); const stack = await this.findOrFail(id); if (dto.primaryAssetId && !stack.assets.some(({ id }) => id === dto.primaryAssetId)) { throw new BadRequestException('Primary asset must be in the stack'); @@ -47,13 +47,13 @@ export class StackService extends BaseService { } async delete(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.STACK_DELETE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.StackDelete, ids: [id] }); await this.stackRepository.delete(id); await this.eventRepository.emit('StackDelete', { stackId: id, userId: 
auth.user.id }); } async deleteAll(auth: AuthDto, dto: BulkIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.STACK_DELETE, ids: dto.ids }); + await this.requireAccess({ auth, permission: Permission.StackDelete, ids: dto.ids }); await this.stackRepository.deleteAll(dto.ids); await this.eventRepository.emit('StackDeleteAll', { stackIds: dto.ids, userId: auth.user.id }); } diff --git a/server/src/services/storage-template.service.spec.ts b/server/src/services/storage-template.service.spec.ts index 9c4fe02f3e..2751651dbf 100644 --- a/server/src/services/storage-template.service.spec.ts +++ b/server/src/services/storage-template.service.spec.ts @@ -96,7 +96,7 @@ describe(StorageTemplateService.name, () => { it('should skip when storage template is disabled', async () => { mocks.systemMetadata.get.mockResolvedValue({ storageTemplate: { enabled: false } }); - await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.Skipped); expect(mocks.asset.getByIds).not.toHaveBeenCalled(); expect(mocks.storage.checkFileExists).not.toHaveBeenCalled(); @@ -119,7 +119,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValueOnce({ id: '123', entityId: stillAsset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: stillAsset.originalPath, newPath: newStillPicturePath, }); @@ -127,12 +127,12 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValueOnce({ id: '124', entityId: motionAsset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: motionAsset.originalPath, newPath: newMotionPicturePath, }); - await expect(sut.handleMigrationSingle({ id: stillAsset.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleMigrationSingle({ id: stillAsset.id })).resolves.toBe(JobStatus.Success); 
expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(2); expect(mocks.asset.update).toHaveBeenCalledWith({ id: stillAsset.id, originalPath: newStillPicturePath }); @@ -152,13 +152,13 @@ describe(StorageTemplateService.name, () => { mocks.assetJob.getForStorageTemplateJob.mockResolvedValueOnce(asset); mocks.album.getByAssetId.mockResolvedValueOnce([album]); - expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS); + expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: asset.id, newPath: `upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/${album.albumName}/${asset.originalFileName}`, oldPath: asset.originalPath, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, }); }); @@ -172,14 +172,14 @@ describe(StorageTemplateService.name, () => { mocks.user.get.mockResolvedValue(user); mocks.assetJob.getForStorageTemplateJob.mockResolvedValueOnce(asset); - expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS); + expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success); const month = (asset.fileCreatedAt.getMonth() + 1).toString().padStart(2, '0'); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: asset.id, newPath: `upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/other/${month}/${asset.originalFileName}`, oldPath: asset.originalPath, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, }); }); @@ -206,14 +206,14 @@ describe(StorageTemplateService.name, () => { }, ]); - expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS); + expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success); const month = (asset.fileCreatedAt.getMonth() + 1).toString().padStart(2, '0'); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: asset.id, newPath: 
`upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/${month} - ${album.albumName}/${asset.originalFileName}`, oldPath: asset.originalPath, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, }); }); @@ -229,14 +229,14 @@ describe(StorageTemplateService.name, () => { mocks.user.get.mockResolvedValue(user); mocks.assetJob.getForStorageTemplateJob.mockResolvedValueOnce(asset); - expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS); + expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success); const month = (asset.fileCreatedAt.getMonth() + 1).toString().padStart(2, '0'); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: asset.id, newPath: `upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/${month}/${asset.originalFileName}`, oldPath: asset.originalPath, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, }); }); @@ -251,7 +251,7 @@ describe(StorageTemplateService.name, () => { mocks.move.getByEntity.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: asset.originalPath, newPath: previousFailedNewPath, }); @@ -259,12 +259,12 @@ describe(StorageTemplateService.name, () => { mocks.move.update.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: asset.originalPath, newPath, }); - await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(asset.id); expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(3); @@ -293,7 +293,7 @@ describe(StorageTemplateService.name, () => { mocks.move.getByEntity.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: 
AssetPathType.Original, oldPath: asset.originalPath, newPath: previousFailedNewPath, }); @@ -301,12 +301,12 @@ describe(StorageTemplateService.name, () => { mocks.move.update.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: previousFailedNewPath, newPath, }); - await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(asset.id); expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(3); @@ -328,19 +328,19 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: testAsset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: testAsset.originalPath, newPath, }); - await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(testAsset.id); expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(1); expect(mocks.storage.stat).toHaveBeenCalledWith(newPath); expect(mocks.move.create).toHaveBeenCalledWith({ entityId: testAsset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: testAsset.originalPath, newPath, }); @@ -370,7 +370,7 @@ describe(StorageTemplateService.name, () => { mocks.move.getByEntity.mockResolvedValue({ id: '123', entityId: testAsset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: testAsset.originalPath, newPath: previousFailedNewPath, }); @@ -378,12 +378,12 @@ describe(StorageTemplateService.name, () => { mocks.move.update.mockResolvedValue({ id: '123', entityId: testAsset.id, - pathType: AssetPathType.ORIGINAL, + 
pathType: AssetPathType.Original, oldPath: previousFailedNewPath, newPath, }); - await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.Success); expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(testAsset.id); expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(3); @@ -417,7 +417,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath, newPath, }); @@ -472,7 +472,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: assetStub.image.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: assetStub.image.originalPath, newPath, }); @@ -492,7 +492,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: asset.originalPath, newPath: `upload/library/${user.storageLabel}/2023/2023-02-23/${asset.originalFileName}`, }); @@ -520,7 +520,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath, newPath, }); @@ -559,7 +559,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: asset.originalPath, newPath: `upload/library/user-id/2022/2022-06-19/${asset.originalFileName}`, }); @@ -592,7 +592,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: 'move-123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: 
AssetPathType.Original, oldPath: asset.originalPath, newPath: '', }); @@ -622,7 +622,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.heic`, newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.heic`, }); @@ -648,7 +648,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.HEIC`, newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.heic`, }); @@ -674,7 +674,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.JPEG`, newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.jpg`, }); @@ -700,7 +700,7 @@ describe(StorageTemplateService.name, () => { mocks.move.create.mockResolvedValue({ id: '123', entityId: asset.id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.JPG`, newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.jpg`, }); diff --git a/server/src/services/storage-template.service.ts b/server/src/services/storage-template.service.ts index a286d518d6..6086d62809 100644 --- a/server/src/services/storage-template.service.ts +++ b/server/src/services/storage-template.service.ts @@ -97,7 +97,7 @@ export class StorageTemplateService extends BaseService { asset: { fileCreatedAt: new Date(), originalPath: '/upload/test/IMG_123.jpg', - type: AssetType.IMAGE, + type: AssetType.Image, id: 'd587e44b-f8c0-4832-9ba3-43268bbf5d4e', } as StorageAsset, filename: 'IMG_123', 
@@ -118,20 +118,20 @@ export class StorageTemplateService extends BaseService { @OnEvent({ name: 'AssetMetadataExtracted' }) async onAssetMetadataExtracted({ source, assetId }: ArgOf<'AssetMetadataExtracted'>) { - await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { source, id: assetId } }); + await this.jobRepository.queue({ name: JobName.StorageTemplateMigrationSingle, data: { source, id: assetId } }); } - @OnJob({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, queue: QueueName.STORAGE_TEMPLATE_MIGRATION }) - async handleMigrationSingle({ id }: JobOf): Promise { + @OnJob({ name: JobName.StorageTemplateMigrationSingle, queue: QueueName.StorageTemplateMigration }) + async handleMigrationSingle({ id }: JobOf): Promise { const config = await this.getConfig({ withCache: true }); const storageTemplateEnabled = config.storageTemplate.enabled; if (!storageTemplateEnabled) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } const asset = await this.assetJobRepository.getForStorageTemplateJob(id); if (!asset) { - return JobStatus.FAILED; + return JobStatus.Failed; } const user = await this.userRepository.get(asset.ownerId, {}); @@ -143,22 +143,22 @@ export class StorageTemplateService extends BaseService { if (asset.livePhotoVideoId) { const livePhotoVideo = await this.assetJobRepository.getForStorageTemplateJob(asset.livePhotoVideoId); if (!livePhotoVideo) { - return JobStatus.FAILED; + return JobStatus.Failed; } const motionFilename = getLivePhotoMotionFilename(filename, livePhotoVideo.originalPath); await this.moveAsset(livePhotoVideo, { storageLabel, filename: motionFilename }); } - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.STORAGE_TEMPLATE_MIGRATION, queue: QueueName.STORAGE_TEMPLATE_MIGRATION }) + @OnJob({ name: JobName.StorageTemplateMigration, queue: QueueName.StorageTemplateMigration }) async handleMigration(): Promise { this.logger.log('Starting storage template migration'); 
const { storageTemplate } = await this.getConfig({ withCache: true }); const { enabled } = storageTemplate; if (!enabled) { this.logger.log('Storage template migration disabled, skipping'); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } await this.moveRepository.cleanMoveHistory(); @@ -174,12 +174,12 @@ export class StorageTemplateService extends BaseService { } this.logger.debug('Cleaning up empty directories...'); - const libraryFolder = StorageCore.getBaseFolder(StorageFolder.LIBRARY); + const libraryFolder = StorageCore.getBaseFolder(StorageFolder.Library); await this.storageRepository.removeEmptyDirs(libraryFolder); this.logger.log('Finished storage template migration'); - return JobStatus.SUCCESS; + return JobStatus.Success; } @OnEvent({ name: 'AssetDelete' }) @@ -208,7 +208,7 @@ export class StorageTemplateService extends BaseService { try { await this.storageCore.moveFile({ entityId: id, - pathType: AssetPathType.ORIGINAL, + pathType: AssetPathType.Original, oldPath, newPath, assetInfo: { sizeInBytes: fileSizeInByte, checksum }, @@ -216,7 +216,7 @@ export class StorageTemplateService extends BaseService { if (sidecarPath) { await this.storageCore.moveFile({ entityId: id, - pathType: AssetPathType.SIDECAR, + pathType: AssetPathType.Sidecar, oldPath: sidecarPath, newPath: `${newPath}.xmp`, }); @@ -357,8 +357,8 @@ export class StorageTemplateService extends BaseService { const substitutions: Record = { filename, ext: extension, - filetype: asset.type == AssetType.IMAGE ? 'IMG' : 'VID', - filetypefull: asset.type == AssetType.IMAGE ? 'IMAGE' : 'VIDEO', + filetype: asset.type == AssetType.Image ? 'IMG' : 'VID', + filetypefull: asset.type == AssetType.Image ? 
'IMAGE' : 'VIDEO', assetId: asset.id, assetIdShort: asset.id.slice(-12), //just throw into the root if it doesn't belong to an album diff --git a/server/src/services/storage.service.spec.ts b/server/src/services/storage.service.spec.ts index 2d28489fae..0e051f2642 100644 --- a/server/src/services/storage.service.spec.ts +++ b/server/src/services/storage.service.spec.ts @@ -22,7 +22,7 @@ describe(StorageService.name, () => { await expect(sut.onBootstrap()).resolves.toBeUndefined(); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, { + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SystemFlags, { mountChecks: { backups: true, 'encoded-video': true, @@ -60,7 +60,7 @@ describe(StorageService.name, () => { await expect(sut.onBootstrap()).resolves.toBeUndefined(); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, { + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SystemFlags, { mountChecks: { backups: true, 'encoded-video': true, diff --git a/server/src/services/storage.service.ts b/server/src/services/storage.service.ts index e9ca10f08a..d8804665da 100644 --- a/server/src/services/storage.service.ts +++ b/server/src/services/storage.service.ts @@ -17,7 +17,7 @@ export class StorageService extends BaseService { await this.databaseRepository.withLock(DatabaseLock.SystemFileMounts, async () => { const flags = - (await this.systemMetadataRepository.get(SystemMetadataKey.SYSTEM_FLAGS)) || + (await this.systemMetadataRepository.get(SystemMetadataKey.SystemFlags)) || ({ mountChecks: {} } as SystemFlags); if (!flags.mountChecks) { @@ -46,7 +46,7 @@ export class StorageService extends BaseService { } if (updated) { - await this.systemMetadataRepository.set(SystemMetadataKey.SYSTEM_FLAGS, flags); + await this.systemMetadataRepository.set(SystemMetadataKey.SystemFlags, flags); this.logger.log('Successfully enabled system mount folders checks'); } @@ -62,8 
+62,8 @@ export class StorageService extends BaseService { }); } - @OnJob({ name: JobName.DELETE_FILES, queue: QueueName.BACKGROUND_TASK }) - async handleDeleteFiles(job: JobOf): Promise { + @OnJob({ name: JobName.DeleteFiles, queue: QueueName.BackgroundTask }) + async handleDeleteFiles(job: JobOf): Promise { const { files } = job; // TODO: one job per file @@ -79,7 +79,7 @@ export class StorageService extends BaseService { } } - return JobStatus.SUCCESS; + return JobStatus.Success; } private async verifyReadAccess(folder: StorageFolder) { diff --git a/server/src/services/sync.service.ts b/server/src/services/sync.service.ts index 9779498d70..4463ab0d76 100644 --- a/server/src/services/sync.service.ts +++ b/server/src/services/sync.service.ts @@ -640,7 +640,7 @@ export class SyncService extends BaseService { async getFullSync(auth: AuthDto, dto: AssetFullSyncDto): Promise { // mobile implementation is faster if this is a single id const userId = dto.userId || auth.user.id; - await this.requireAccess({ auth, permission: Permission.TIMELINE_READ, ids: [userId] }); + await this.requireAccess({ auth, permission: Permission.TimelineRead, ids: [userId] }); const assets = await this.assetRepository.getAllForUserFullSync({ ownerId: userId, updatedUntil: dto.updatedUntil, @@ -664,7 +664,7 @@ export class SyncService extends BaseService { return FULL_SYNC; } - await this.requireAccess({ auth, permission: Permission.TIMELINE_READ, ids: dto.userIds }); + await this.requireAccess({ auth, permission: Permission.TimelineRead, ids: dto.userIds }); const limit = 10_000; const upserted = await this.assetRepository.getChangedDeltaSync({ limit, updatedAfter: dto.updatedAfter, userIds }); @@ -676,8 +676,8 @@ export class SyncService extends BaseService { const deleted = await this.auditRepository.getAfter(dto.updatedAfter, { userIds, - entityType: EntityType.ASSET, - action: DatabaseAction.DELETE, + entityType: EntityType.Asset, + action: DatabaseAction.Delete, }); const result = { @@ 
-686,7 +686,7 @@ export class SyncService extends BaseService { // do not return archived assets for partner users .filter( (a) => - a.ownerId === auth.user.id || (a.ownerId !== auth.user.id && a.visibility === AssetVisibility.TIMELINE), + a.ownerId === auth.user.id || (a.ownerId !== auth.user.id && a.visibility === AssetVisibility.Timeline), ) .map((a) => mapAsset(a, { diff --git a/server/src/services/system-config.service.spec.ts b/server/src/services/system-config.service.spec.ts index 582c50ed8a..20127bab15 100644 --- a/server/src/services/system-config.service.spec.ts +++ b/server/src/services/system-config.service.spec.ts @@ -9,7 +9,7 @@ import { OAuthTokenEndpointAuthMethod, QueueName, ToneMapping, - TranscodeHWAccel, + TranscodeHardwareAcceleration, TranscodePolicy, VideoCodec, VideoContainer, @@ -28,17 +28,17 @@ const partialConfig = { const updatedConfig = Object.freeze({ job: { - [QueueName.BACKGROUND_TASK]: { concurrency: 5 }, - [QueueName.SMART_SEARCH]: { concurrency: 2 }, - [QueueName.METADATA_EXTRACTION]: { concurrency: 5 }, - [QueueName.FACE_DETECTION]: { concurrency: 2 }, - [QueueName.SEARCH]: { concurrency: 5 }, - [QueueName.SIDECAR]: { concurrency: 5 }, - [QueueName.LIBRARY]: { concurrency: 5 }, - [QueueName.MIGRATION]: { concurrency: 5 }, - [QueueName.THUMBNAIL_GENERATION]: { concurrency: 3 }, - [QueueName.VIDEO_CONVERSION]: { concurrency: 1 }, - [QueueName.NOTIFICATION]: { concurrency: 5 }, + [QueueName.BackgroundTask]: { concurrency: 5 }, + [QueueName.SmartSearch]: { concurrency: 2 }, + [QueueName.MetadataExtraction]: { concurrency: 5 }, + [QueueName.FaceDetection]: { concurrency: 2 }, + [QueueName.Search]: { concurrency: 5 }, + [QueueName.Sidecar]: { concurrency: 5 }, + [QueueName.Library]: { concurrency: 5 }, + [QueueName.Migration]: { concurrency: 5 }, + [QueueName.ThumbnailGeneration]: { concurrency: 3 }, + [QueueName.VideoConversion]: { concurrency: 1 }, + [QueueName.Notification]: { concurrency: 5 }, }, backup: { database: { @@ -51,28 
+51,28 @@ const updatedConfig = Object.freeze({ crf: 30, threads: 0, preset: 'ultrafast', - targetAudioCodec: AudioCodec.AAC, - acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS, AudioCodec.PCMS16LE], + targetAudioCodec: AudioCodec.Aac, + acceptedAudioCodecs: [AudioCodec.Aac, AudioCodec.Mp3, AudioCodec.LibOpus, AudioCodec.PcmS16le], targetResolution: '720', targetVideoCodec: VideoCodec.H264, acceptedVideoCodecs: [VideoCodec.H264], - acceptedContainers: [VideoContainer.MOV, VideoContainer.OGG, VideoContainer.WEBM], + acceptedContainers: [VideoContainer.Mov, VideoContainer.Ogg, VideoContainer.Webm], maxBitrate: '0', bframes: -1, refs: 0, gopSize: 0, temporalAQ: false, - cqMode: CQMode.AUTO, + cqMode: CQMode.Auto, twoPass: false, preferredHwDevice: 'auto', - transcode: TranscodePolicy.REQUIRED, - accel: TranscodeHWAccel.DISABLED, + transcode: TranscodePolicy.Required, + accel: TranscodeHardwareAcceleration.Disabled, accelDecode: false, - tonemap: ToneMapping.HABLE, + tonemap: ToneMapping.Hable, }, logging: { enabled: true, - level: LogLevel.LOG, + level: LogLevel.Log, }, metadata: { faces: { @@ -128,7 +128,7 @@ const updatedConfig = Object.freeze({ scope: 'openid email profile', signingAlgorithm: 'RS256', profileSigningAlgorithm: 'none', - tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.CLIENT_SECRET_POST, + tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.ClientSecretPost, timeout: 30_000, storageLabelClaim: 'preferred_username', storageQuotaClaim: 'immich_quota', @@ -150,15 +150,15 @@ const updatedConfig = Object.freeze({ image: { thumbnail: { size: 250, - format: ImageFormat.WEBP, + format: ImageFormat.Webp, quality: 80, }, preview: { size: 1440, - format: ImageFormat.JPEG, + format: ImageFormat.Jpeg, quality: 80, }, - fullsize: { enabled: false, format: ImageFormat.JPEG, quality: 80 }, + fullsize: { enabled: false, format: ImageFormat.Jpeg, quality: 80 }, colorspace: Colorspace.P3, extractEmbedded: false, }, diff --git 
a/server/src/services/system-metadata.service.spec.ts b/server/src/services/system-metadata.service.spec.ts index a8d6c0cdcc..f5bdcde7b4 100644 --- a/server/src/services/system-metadata.service.spec.ts +++ b/server/src/services/system-metadata.service.spec.ts @@ -30,12 +30,12 @@ describe(SystemMetadataService.name, () => { describe('updateAdminOnboarding', () => { it('should update isOnboarded to true', async () => { await expect(sut.updateAdminOnboarding({ isOnboarded: true })).resolves.toBeUndefined(); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.ADMIN_ONBOARDING, { isOnboarded: true }); + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.AdminOnboarding, { isOnboarded: true }); }); it('should update isOnboarded to false', async () => { await expect(sut.updateAdminOnboarding({ isOnboarded: false })).resolves.toBeUndefined(); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.ADMIN_ONBOARDING, { isOnboarded: false }); + expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.AdminOnboarding, { isOnboarded: false }); }); }); diff --git a/server/src/services/system-metadata.service.ts b/server/src/services/system-metadata.service.ts index 750e6b1d0b..30af715379 100644 --- a/server/src/services/system-metadata.service.ts +++ b/server/src/services/system-metadata.service.ts @@ -11,23 +11,23 @@ import { BaseService } from 'src/services/base.service'; @Injectable() export class SystemMetadataService extends BaseService { async getAdminOnboarding(): Promise { - const value = await this.systemMetadataRepository.get(SystemMetadataKey.ADMIN_ONBOARDING); + const value = await this.systemMetadataRepository.get(SystemMetadataKey.AdminOnboarding); return { isOnboarded: false, ...value }; } async updateAdminOnboarding(dto: AdminOnboardingUpdateDto): Promise { - await this.systemMetadataRepository.set(SystemMetadataKey.ADMIN_ONBOARDING, { + await 
this.systemMetadataRepository.set(SystemMetadataKey.AdminOnboarding, { isOnboarded: dto.isOnboarded, }); } async getReverseGeocodingState(): Promise { - const value = await this.systemMetadataRepository.get(SystemMetadataKey.REVERSE_GEOCODING_STATE); + const value = await this.systemMetadataRepository.get(SystemMetadataKey.ReverseGeocodingState); return { lastUpdate: null, lastImportFileName: null, ...value }; } async getVersionCheckState(): Promise { - const value = await this.systemMetadataRepository.get(SystemMetadataKey.VERSION_CHECK_STATE); + const value = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState); return { checkedAt: null, releaseVersion: null, ...value }; } } diff --git a/server/src/services/tag.service.spec.ts b/server/src/services/tag.service.spec.ts index 70507ab433..6699c61970 100644 --- a/server/src/services/tag.service.spec.ts +++ b/server/src/services/tag.service.spec.ts @@ -278,7 +278,7 @@ describe(TagService.name, () => { it('should delete empty tags', async () => { mocks.tag.deleteEmptyTags.mockResolvedValue(); - await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success); expect(mocks.tag.deleteEmptyTags).toHaveBeenCalled(); }); diff --git a/server/src/services/tag.service.ts b/server/src/services/tag.service.ts index e975fc3980..2fae4b55d0 100644 --- a/server/src/services/tag.service.ts +++ b/server/src/services/tag.service.ts @@ -26,7 +26,7 @@ export class TagService extends BaseService { } async get(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.TAG_READ, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.TagRead, ids: [id] }); const tag = await this.findOrFail(id); return mapTag(tag); } @@ -34,7 +34,7 @@ export class TagService extends BaseService { async create(auth: AuthDto, dto: TagCreateDto) { let parent; if (dto.parentId) { - await this.requireAccess({ 
auth, permission: Permission.TAG_READ, ids: [dto.parentId] }); + await this.requireAccess({ auth, permission: Permission.TagRead, ids: [dto.parentId] }); parent = await this.tagRepository.get(dto.parentId); if (!parent) { throw new BadRequestException('Tag not found'); @@ -55,7 +55,7 @@ export class TagService extends BaseService { } async update(auth: AuthDto, id: string, dto: TagUpdateDto): Promise { - await this.requireAccess({ auth, permission: Permission.TAG_UPDATE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.TagUpdate, ids: [id] }); const { color } = dto; const tag = await this.tagRepository.update(id, { color }); @@ -68,7 +68,7 @@ export class TagService extends BaseService { } async remove(auth: AuthDto, id: string): Promise { - await this.requireAccess({ auth, permission: Permission.TAG_DELETE, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.TagDelete, ids: [id] }); // TODO sync tag changes for affected assets @@ -77,8 +77,8 @@ export class TagService extends BaseService { async bulkTagAssets(auth: AuthDto, dto: TagBulkAssetsDto): Promise { const [tagIds, assetIds] = await Promise.all([ - this.checkAccess({ auth, permission: Permission.TAG_ASSET, ids: dto.tagIds }), - this.checkAccess({ auth, permission: Permission.ASSET_UPDATE, ids: dto.assetIds }), + this.checkAccess({ auth, permission: Permission.TagAsset, ids: dto.tagIds }), + this.checkAccess({ auth, permission: Permission.AssetUpdate, ids: dto.assetIds }), ]); const items: Insertable[] = []; @@ -97,7 +97,7 @@ export class TagService extends BaseService { } async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.TAG_ASSET, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.TagAsset, ids: [id] }); const results = await addAssets( auth, @@ -115,12 +115,12 @@ export class TagService extends BaseService { } async removeAssets(auth: AuthDto, id: string, dto: 
BulkIdsDto): Promise { - await this.requireAccess({ auth, permission: Permission.TAG_ASSET, ids: [id] }); + await this.requireAccess({ auth, permission: Permission.TagAsset, ids: [id] }); const results = await removeAssets( auth, { access: this.accessRepository, bulk: this.tagRepository }, - { parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.TAG_DELETE }, + { parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.TagDelete }, ); for (const { id: assetId, success } of results) { @@ -132,10 +132,10 @@ export class TagService extends BaseService { return results; } - @OnJob({ name: JobName.TAG_CLEANUP, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.TagCleanup, queue: QueueName.BackgroundTask }) async handleTagCleanup() { await this.tagRepository.deleteEmptyTags(); - return JobStatus.SUCCESS; + return JobStatus.Success; } private async findOrFail(id: string) { diff --git a/server/src/services/timeline.service.spec.ts b/server/src/services/timeline.service.spec.ts index 1669b1eac7..11df30a7d4 100644 --- a/server/src/services/timeline.service.spec.ts +++ b/server/src/services/timeline.service.spec.ts @@ -49,7 +49,7 @@ describe(TimelineService.name, () => { await expect( sut.getTimeBucket(authStub.admin, { timeBucket: 'bucket', - visibility: AssetVisibility.ARCHIVE, + visibility: AssetVisibility.Archive, userId: authStub.admin.user.id, }), ).resolves.toEqual(json); @@ -57,7 +57,7 @@ describe(TimelineService.name, () => { 'bucket', expect.objectContaining({ timeBucket: 'bucket', - visibility: AssetVisibility.ARCHIVE, + visibility: AssetVisibility.Archive, userIds: [authStub.admin.user.id], }), ); @@ -71,14 +71,14 @@ describe(TimelineService.name, () => { await expect( sut.getTimeBucket(authStub.admin, { timeBucket: 'bucket', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, userId: authStub.admin.user.id, withPartners: true, }), ).resolves.toEqual(json); 
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', { timeBucket: 'bucket', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, withPartners: true, userIds: [authStub.admin.user.id], }); @@ -126,7 +126,7 @@ describe(TimelineService.name, () => { await expect( sut.getTimeBucket(authStub.admin, { timeBucket: 'bucket', - visibility: AssetVisibility.ARCHIVE, + visibility: AssetVisibility.Archive, withPartners: true, userId: authStub.admin.user.id, }), diff --git a/server/src/services/timeline.service.ts b/server/src/services/timeline.service.ts index abd536a97e..d8cac3a205 100644 --- a/server/src/services/timeline.service.ts +++ b/server/src/services/timeline.service.ts @@ -45,29 +45,29 @@ export class TimelineService extends BaseService { } private async timeBucketChecks(auth: AuthDto, dto: TimeBucketDto) { - if (dto.visibility === AssetVisibility.LOCKED) { + if (dto.visibility === AssetVisibility.Locked) { requireElevatedPermission(auth); } if (dto.albumId) { - await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [dto.albumId] }); + await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [dto.albumId] }); } else { dto.userId = dto.userId || auth.user.id; } if (dto.userId) { - await this.requireAccess({ auth, permission: Permission.TIMELINE_READ, ids: [dto.userId] }); - if (dto.visibility === AssetVisibility.ARCHIVE) { - await this.requireAccess({ auth, permission: Permission.ARCHIVE_READ, ids: [dto.userId] }); + await this.requireAccess({ auth, permission: Permission.TimelineRead, ids: [dto.userId] }); + if (dto.visibility === AssetVisibility.Archive) { + await this.requireAccess({ auth, permission: Permission.ArchiveRead, ids: [dto.userId] }); } } if (dto.tagId) { - await this.requireAccess({ auth, permission: Permission.TAG_READ, ids: [dto.tagId] }); + await this.requireAccess({ auth, permission: Permission.TagRead, ids: [dto.tagId] }); } if (dto.withPartners) { - const requestedArchived = 
dto.visibility === AssetVisibility.ARCHIVE || dto.visibility === undefined; + const requestedArchived = dto.visibility === AssetVisibility.Archive || dto.visibility === undefined; const requestedFavorite = dto.isFavorite === true || dto.isFavorite === false; const requestedTrash = dto.isTrashed === true; diff --git a/server/src/services/trash.service.spec.ts b/server/src/services/trash.service.spec.ts index b3bee90815..f742ce8757 100644 --- a/server/src/services/trash.service.spec.ts +++ b/server/src/services/trash.service.spec.ts @@ -77,24 +77,24 @@ describe(TrashService.name, () => { mocks.trash.empty.mockResolvedValue(1); await expect(sut.empty(authStub.user1)).resolves.toEqual({ count: 1 }); expect(mocks.trash.empty).toHaveBeenCalledWith('user-id'); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_TRASH_EMPTY, data: {} }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueTrashEmpty, data: {} }); }); }); describe('onAssetsDelete', () => { it('should queue the empty trash job', async () => { await expect(sut.onAssetsDelete()).resolves.toBeUndefined(); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_TRASH_EMPTY, data: {} }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueTrashEmpty, data: {} }); }); }); describe('handleQueueEmptyTrash', () => { it('should queue asset delete jobs', async () => { mocks.trash.getDeletedIds.mockReturnValue(makeAssetIdStream(1)); - await expect(sut.handleQueueEmptyTrash()).resolves.toEqual(JobStatus.SUCCESS); + await expect(sut.handleQueueEmptyTrash()).resolves.toEqual(JobStatus.Success); expect(mocks.job.queueAll).toHaveBeenCalledWith([ { - name: JobName.ASSET_DELETION, + name: JobName.AssetDeletion, data: { id: 'asset-1', deleteOnDisk: true }, }, ]); diff --git a/server/src/services/trash.service.ts b/server/src/services/trash.service.ts index a7447ab890..858feacd39 100644 --- a/server/src/services/trash.service.ts +++ 
b/server/src/services/trash.service.ts @@ -15,7 +15,7 @@ export class TrashService extends BaseService { return { count: 0 }; } - await this.requireAccess({ auth, permission: Permission.ASSET_DELETE, ids }); + await this.requireAccess({ auth, permission: Permission.AssetDelete, ids }); await this.trashRepository.restoreAll(ids); await this.eventRepository.emit('AssetRestoreAll', { assetIds: ids, userId: auth.user.id }); @@ -35,17 +35,17 @@ export class TrashService extends BaseService { async empty(auth: AuthDto): Promise { const count = await this.trashRepository.empty(auth.user.id); if (count > 0) { - await this.jobRepository.queue({ name: JobName.QUEUE_TRASH_EMPTY, data: {} }); + await this.jobRepository.queue({ name: JobName.QueueTrashEmpty, data: {} }); } return { count }; } @OnEvent({ name: 'AssetDeleteAll' }) async onAssetsDelete() { - await this.jobRepository.queue({ name: JobName.QUEUE_TRASH_EMPTY, data: {} }); + await this.jobRepository.queue({ name: JobName.QueueTrashEmpty, data: {} }); } - @OnJob({ name: JobName.QUEUE_TRASH_EMPTY, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.QueueTrashEmpty, queue: QueueName.BackgroundTask }) async handleQueueEmptyTrash() { const assets = this.trashRepository.getDeletedIds(); @@ -67,14 +67,14 @@ export class TrashService extends BaseService { this.logger.log(`Queued ${count} asset(s) for deletion from the trash`); - return JobStatus.SUCCESS; + return JobStatus.Success; } private async handleBatch(ids: string[]) { this.logger.debug(`Queueing ${ids.length} asset(s) for deletion from the trash`); await this.jobRepository.queueAll( ids.map((assetId) => ({ - name: JobName.ASSET_DELETION, + name: JobName.AssetDeletion, data: { id: assetId, deleteOnDisk: true, diff --git a/server/src/services/user-admin.service.spec.ts b/server/src/services/user-admin.service.spec.ts index 85cbb8238a..8854ade8e3 100644 --- a/server/src/services/user-admin.service.spec.ts +++ b/server/src/services/user-admin.service.spec.ts @@ 
-140,7 +140,7 @@ describe(UserAdminService.name, () => { await expect(sut.delete(authStub.admin, userStub.user1.id, {})).resolves.toEqual(mapUserAdmin(userStub.user1)); expect(mocks.user.update).toHaveBeenCalledWith(userStub.user1.id, { - status: UserStatus.DELETED, + status: UserStatus.Deleted, deletedAt: expect.any(Date), }); }); @@ -154,11 +154,11 @@ describe(UserAdminService.name, () => { ); expect(mocks.user.update).toHaveBeenCalledWith(userStub.user1.id, { - status: UserStatus.REMOVING, + status: UserStatus.Removing, deletedAt: expect.any(Date), }); expect(mocks.job.queue).toHaveBeenCalledWith({ - name: JobName.USER_DELETION, + name: JobName.UserDeletion, data: { id: userStub.user1.id, force: true }, }); }); diff --git a/server/src/services/user-admin.service.ts b/server/src/services/user-admin.service.ts index 180471bb44..91e21b6bb5 100644 --- a/server/src/services/user-admin.service.ts +++ b/server/src/services/user-admin.service.ts @@ -100,11 +100,11 @@ export class UserAdminService extends BaseService { await this.albumRepository.softDeleteAll(id); - const status = force ? UserStatus.REMOVING : UserStatus.DELETED; + const status = force ? 
UserStatus.Removing : UserStatus.Deleted; const user = await this.userRepository.update(id, { status, deletedAt: new Date() }); if (force) { - await this.jobRepository.queue({ name: JobName.USER_DELETION, data: { id: user.id, force } }); + await this.jobRepository.queue({ name: JobName.UserDeletion, data: { id: user.id, force } }); } return mapUserAdmin(user); @@ -134,7 +134,7 @@ export class UserAdminService extends BaseService { const newPreferences = mergePreferences(getPreferences(metadata), dto); await this.userRepository.upsertMetadata(id, { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: getPreferencesPartial(newPreferences), }); diff --git a/server/src/services/user.service.spec.ts b/server/src/services/user.service.spec.ts index 4ee92fc3d3..6ae6bc85b6 100644 --- a/server/src/services/user.service.spec.ts +++ b/server/src/services/user.service.spec.ts @@ -122,7 +122,7 @@ describe(UserService.name, () => { await sut.createProfileImage(authStub.admin, file); - expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DELETE_FILES, data: { files } }]]); + expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DeleteFiles, data: { files } }]]); }); it('should not delete the profile image if it has not been set', async () => { @@ -156,7 +156,7 @@ describe(UserService.name, () => { await sut.deleteProfileImage(authStub.admin); - expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DELETE_FILES, data: { files } }]]); + expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DeleteFiles, data: { files } }]]); }); }); @@ -185,7 +185,7 @@ describe(UserService.name, () => { new ImmichFileResponse({ path: '/path/to/profile.jpg', contentType: 'image/jpeg', - cacheControl: CacheControl.NONE, + cacheControl: CacheControl.None, }), ); @@ -211,7 +211,7 @@ describe(UserService.name, () => { await sut.handleUserDeleteCheck(); expect(mocks.user.getDeletedAfter).toHaveBeenCalled(); - 
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.USER_DELETION, data: { id: user.id } }]); + expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.UserDeletion, data: { id: user.id } }]); }); }); @@ -266,7 +266,7 @@ describe(UserService.name, () => { await sut.setLicense(authStub.user1, license); expect(mocks.user.upsertMetadata).toHaveBeenCalledWith(authStub.user1.user.id, { - key: UserMetadataKey.LICENSE, + key: UserMetadataKey.License, value: expect.any(Object), }); }); @@ -279,7 +279,7 @@ describe(UserService.name, () => { await sut.setLicense(authStub.user1, license); expect(mocks.user.upsertMetadata).toHaveBeenCalledWith(authStub.user1.user.id, { - key: UserMetadataKey.LICENSE, + key: UserMetadataKey.License, value: expect.any(Object), }); }); diff --git a/server/src/services/user.service.ts b/server/src/services/user.service.ts index 78f49fd7ae..4a65e4ab9f 100644 --- a/server/src/services/user.service.ts +++ b/server/src/services/user.service.ts @@ -78,7 +78,7 @@ export class UserService extends BaseService { const updated = mergePreferences(getPreferences(metadata), dto); await this.userRepository.upsertMetadata(auth.user.id, { - key: UserMetadataKey.PREFERENCES, + key: UserMetadataKey.Preferences, value: getPreferencesPartial(updated), }); @@ -99,7 +99,7 @@ export class UserService extends BaseService { }); if (oldpath !== '') { - await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [oldpath] } }); + await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [oldpath] } }); } return { @@ -115,7 +115,7 @@ export class UserService extends BaseService { throw new BadRequestException("Can't delete a missing profile Image"); } await this.userRepository.update(auth.user.id, { profileImagePath: '', profileChangedAt: new Date() }); - await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [user.profileImagePath] } }); + await this.jobRepository.queue({ name: JobName.DeleteFiles, 
data: { files: [user.profileImagePath] } }); } async getProfileImage(id: string): Promise { @@ -127,7 +127,7 @@ export class UserService extends BaseService { return new ImmichFileResponse({ path: user.profileImagePath, contentType: 'image/jpeg', - cacheControl: CacheControl.NONE, + cacheControl: CacheControl.None, }); } @@ -135,7 +135,7 @@ export class UserService extends BaseService { const metadata = await this.userRepository.getMetadata(auth.user.id); const license = metadata.find( - (item): item is UserMetadataItem => item.key === UserMetadataKey.LICENSE, + (item): item is UserMetadataItem => item.key === UserMetadataKey.License, ); if (!license) { throw new NotFoundException(); @@ -144,7 +144,7 @@ export class UserService extends BaseService { } async deleteLicense({ user }: AuthDto): Promise { - await this.userRepository.deleteMetadata(user.id, UserMetadataKey.LICENSE); + await this.userRepository.deleteMetadata(user.id, UserMetadataKey.License); } async setLicense(auth: AuthDto, license: LicenseKeyDto): Promise { @@ -173,7 +173,7 @@ export class UserService extends BaseService { const activatedAt = new Date(); await this.userRepository.upsertMetadata(auth.user.id, { - key: UserMetadataKey.LICENSE, + key: UserMetadataKey.License, value: { ...license, activatedAt: activatedAt.toISOString() }, }); @@ -184,7 +184,7 @@ export class UserService extends BaseService { const metadata = await this.userRepository.getMetadata(auth.user.id); const onboardingData = metadata.find( - (item): item is UserMetadataItem => item.key === UserMetadataKey.ONBOARDING, + (item): item is UserMetadataItem => item.key === UserMetadataKey.Onboarding, )?.value; if (!onboardingData) { @@ -197,12 +197,12 @@ export class UserService extends BaseService { } async deleteOnboarding({ user }: AuthDto): Promise { - await this.userRepository.deleteMetadata(user.id, UserMetadataKey.ONBOARDING); + await this.userRepository.deleteMetadata(user.id, UserMetadataKey.Onboarding); } async 
setOnboarding(auth: AuthDto, onboarding: OnboardingDto): Promise { await this.userRepository.upsertMetadata(auth.user.id, { - key: UserMetadataKey.ONBOARDING, + key: UserMetadataKey.Onboarding, value: { isOnboarded: onboarding.isOnboarded, }, @@ -213,42 +213,42 @@ export class UserService extends BaseService { }; } - @OnJob({ name: JobName.USER_SYNC_USAGE, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.UserSyncUsage, queue: QueueName.BackgroundTask }) async handleUserSyncUsage(): Promise { await this.userRepository.syncUsage(); - return JobStatus.SUCCESS; + return JobStatus.Success; } - @OnJob({ name: JobName.USER_DELETE_CHECK, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.UserDeleteCheck, queue: QueueName.BackgroundTask }) async handleUserDeleteCheck(): Promise { const config = await this.getConfig({ withCache: false }); const users = await this.userRepository.getDeletedAfter(DateTime.now().minus({ days: config.user.deleteDelay })); - await this.jobRepository.queueAll(users.map((user) => ({ name: JobName.USER_DELETION, data: { id: user.id } }))); - return JobStatus.SUCCESS; + await this.jobRepository.queueAll(users.map((user) => ({ name: JobName.UserDeletion, data: { id: user.id } }))); + return JobStatus.Success; } - @OnJob({ name: JobName.USER_DELETION, queue: QueueName.BACKGROUND_TASK }) - async handleUserDelete({ id, force }: JobOf): Promise { + @OnJob({ name: JobName.UserDeletion, queue: QueueName.BackgroundTask }) + async handleUserDelete({ id, force }: JobOf): Promise { const config = await this.getConfig({ withCache: false }); const user = await this.userRepository.get(id, { withDeleted: true }); if (!user) { - return JobStatus.FAILED; + return JobStatus.Failed; } // just for extra protection here if (!force && !this.isReadyForDeletion(user, config.user.deleteDelay)) { this.logger.warn(`Skipped user that was not ready for deletion: id=${id}`); - return JobStatus.SKIPPED; + return JobStatus.Skipped; } this.logger.log(`Deleting 
user: ${user.id}`); const folders = [ StorageCore.getLibraryFolder(user), - StorageCore.getFolderLocation(StorageFolder.UPLOAD, user.id), - StorageCore.getFolderLocation(StorageFolder.PROFILE, user.id), - StorageCore.getFolderLocation(StorageFolder.THUMBNAILS, user.id), - StorageCore.getFolderLocation(StorageFolder.ENCODED_VIDEO, user.id), + StorageCore.getFolderLocation(StorageFolder.Upload, user.id), + StorageCore.getFolderLocation(StorageFolder.Profile, user.id), + StorageCore.getFolderLocation(StorageFolder.Thumbnails, user.id), + StorageCore.getFolderLocation(StorageFolder.EncodedVideo, user.id), ]; for (const folder of folders) { @@ -260,7 +260,7 @@ export class UserService extends BaseService { await this.albumRepository.deleteAll(user.id); await this.userRepository.delete(user, true); - return JobStatus.SUCCESS; + return JobStatus.Success; } private isReadyForDeletion(user: { id: string; deletedAt?: Date | null }, deleteDelay: number): boolean { diff --git a/server/src/services/version.service.spec.ts b/server/src/services/version.service.spec.ts index a83d9f85b6..73794275ea 100644 --- a/server/src/services/version.service.spec.ts +++ b/server/src/services/version.service.spec.ts @@ -72,18 +72,18 @@ describe(VersionService.name, () => { describe('handQueueVersionCheck', () => { it('should queue a version check job', async () => { await expect(sut.handleQueueVersionCheck()).resolves.toBeUndefined(); - expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.VERSION_CHECK, data: {} }); + expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.VersionCheck, data: {} }); }); }); describe('handVersionCheck', () => { beforeEach(() => { - mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: ImmichEnvironment.PRODUCTION })); + mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: ImmichEnvironment.Production })); }); it('should not run in dev mode', async () => { - mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: 
ImmichEnvironment.DEVELOPMENT })); - await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SKIPPED); + mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: ImmichEnvironment.Development })); + await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped); }); it('should not run if the last check was < 60 minutes ago', async () => { @@ -91,12 +91,12 @@ describe(VersionService.name, () => { checkedAt: DateTime.utc().minus({ minutes: 5 }).toISO(), releaseVersion: '1.0.0', }); - await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SKIPPED); + await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped); }); it('should not run if version check is disabled', async () => { mocks.systemMetadata.get.mockResolvedValue({ newVersionCheck: { enabled: false } }); - await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SKIPPED); + await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped); }); it('should run if it has been > 60 minutes', async () => { @@ -105,7 +105,7 @@ describe(VersionService.name, () => { checkedAt: DateTime.utc().minus({ minutes: 65 }).toISO(), releaseVersion: '1.0.0', }); - await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SUCCESS); + await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Success); expect(mocks.systemMetadata.set).toHaveBeenCalled(); expect(mocks.logger.log).toHaveBeenCalled(); expect(mocks.event.clientBroadcast).toHaveBeenCalled(); @@ -113,8 +113,8 @@ describe(VersionService.name, () => { it('should not notify if the version is equal', async () => { mocks.serverInfo.getGitHubRelease.mockResolvedValue(mockRelease(serverVersion.toString())); - await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SUCCESS); - expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.VERSION_CHECK_STATE, { + await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Success); + 
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.VersionCheckState, { checkedAt: expect.any(String), releaseVersion: serverVersion.toString(), }); @@ -123,7 +123,7 @@ describe(VersionService.name, () => { it('should handle a github error', async () => { mocks.serverInfo.getGitHubRelease.mockRejectedValue(new Error('GitHub is down')); - await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.FAILED); + await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Failed); expect(mocks.systemMetadata.set).not.toHaveBeenCalled(); expect(mocks.event.clientBroadcast).not.toHaveBeenCalled(); expect(mocks.logger.warn).toHaveBeenCalled(); diff --git a/server/src/services/version.service.ts b/server/src/services/version.service.ts index 51d31b623f..5d2f87c2bf 100644 --- a/server/src/services/version.service.ts +++ b/server/src/services/version.service.ts @@ -41,7 +41,7 @@ export class VersionService extends BaseService { const needsNewMemories = semver.lt(previousVersion, '1.129.0'); if (needsNewMemories) { - await this.jobRepository.queue({ name: JobName.MEMORIES_CREATE }); + await this.jobRepository.queue({ name: JobName.MemoriesCreate }); } } }); @@ -56,31 +56,31 @@ export class VersionService extends BaseService { } async handleQueueVersionCheck() { - await this.jobRepository.queue({ name: JobName.VERSION_CHECK, data: {} }); + await this.jobRepository.queue({ name: JobName.VersionCheck, data: {} }); } - @OnJob({ name: JobName.VERSION_CHECK, queue: QueueName.BACKGROUND_TASK }) + @OnJob({ name: JobName.VersionCheck, queue: QueueName.BackgroundTask }) async handleVersionCheck(): Promise { try { this.logger.debug('Running version check'); const { environment } = this.configRepository.getEnv(); - if (environment === ImmichEnvironment.DEVELOPMENT) { - return JobStatus.SKIPPED; + if (environment === ImmichEnvironment.Development) { + return JobStatus.Skipped; } const { newVersionCheck } = await this.getConfig({ withCache: true }); if 
(!newVersionCheck.enabled) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } - const versionCheck = await this.systemMetadataRepository.get(SystemMetadataKey.VERSION_CHECK_STATE); + const versionCheck = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState); if (versionCheck?.checkedAt) { const lastUpdate = DateTime.fromISO(versionCheck.checkedAt); const elapsedTime = DateTime.now().diff(lastUpdate).as('minutes'); // check once per hour (max) if (elapsedTime < 60) { - return JobStatus.SKIPPED; + return JobStatus.Skipped; } } @@ -88,7 +88,7 @@ export class VersionService extends BaseService { await this.serverInfoRepository.getGitHubRelease(); const metadata: VersionCheckMetadata = { checkedAt: DateTime.utc().toISO(), releaseVersion }; - await this.systemMetadataRepository.set(SystemMetadataKey.VERSION_CHECK_STATE, metadata); + await this.systemMetadataRepository.set(SystemMetadataKey.VersionCheckState, metadata); if (semver.gt(releaseVersion, serverVersion)) { this.logger.log(`Found ${releaseVersion}, released at ${new Date(publishedAt).toLocaleString()}`); @@ -96,16 +96,16 @@ export class VersionService extends BaseService { } } catch (error: Error | any) { this.logger.warn(`Unable to run version check: ${error}`, error?.stack); - return JobStatus.FAILED; + return JobStatus.Failed; } - return JobStatus.SUCCESS; + return JobStatus.Success; } @OnEvent({ name: 'WebsocketConnect' }) async onWebsocketConnection({ userId }: ArgOf<'WebsocketConnect'>) { this.eventRepository.clientSend('on_server_version', userId, serverVersion); - const metadata = await this.systemMetadataRepository.get(SystemMetadataKey.VERSION_CHECK_STATE); + const metadata = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState); if (metadata) { this.eventRepository.clientSend('on_new_release', userId, asNotification(metadata)); } diff --git a/server/src/types.ts b/server/src/types.ts index 6776604078..6873613c30 100644 --- a/server/src/types.ts 
+++ b/server/src/types.ts @@ -161,10 +161,10 @@ export interface VideoInterfaces { export type ConcurrentQueueName = Exclude< QueueName, - | QueueName.STORAGE_TEMPLATE_MIGRATION - | QueueName.FACIAL_RECOGNITION - | QueueName.DUPLICATE_DETECTION - | QueueName.BACKUP_DATABASE + | QueueName.StorageTemplateMigration + | QueueName.FacialRecognition + | QueueName.DuplicateDetection + | QueueName.BackupDatabase >; export type Jobs = { [K in JobItem['name']]: (JobItem & { name: K })['data'] }; @@ -273,96 +273,96 @@ export interface QueueStatus { export type JobItem = // Backups - | { name: JobName.BACKUP_DATABASE; data?: IBaseJob } + | { name: JobName.BackupDatabase; data?: IBaseJob } // Transcoding - | { name: JobName.QUEUE_VIDEO_CONVERSION; data: IBaseJob } - | { name: JobName.VIDEO_CONVERSION; data: IEntityJob } + | { name: JobName.QueueVideoConversion; data: IBaseJob } + | { name: JobName.VideoConversion; data: IEntityJob } // Thumbnails - | { name: JobName.QUEUE_GENERATE_THUMBNAILS; data: IBaseJob } - | { name: JobName.GENERATE_THUMBNAILS; data: IEntityJob } + | { name: JobName.QueueGenerateThumbnails; data: IBaseJob } + | { name: JobName.GenerateThumbnails; data: IEntityJob } // User - | { name: JobName.USER_DELETE_CHECK; data?: IBaseJob } - | { name: JobName.USER_DELETION; data: IEntityJob } - | { name: JobName.USER_SYNC_USAGE; data?: IBaseJob } + | { name: JobName.UserDeleteCheck; data?: IBaseJob } + | { name: JobName.UserDeletion; data: IEntityJob } + | { name: JobName.UserSyncUsage; data?: IBaseJob } // Storage Template - | { name: JobName.STORAGE_TEMPLATE_MIGRATION; data?: IBaseJob } - | { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE; data: IEntityJob } + | { name: JobName.StorageTemplateMigration; data?: IBaseJob } + | { name: JobName.StorageTemplateMigrationSingle; data: IEntityJob } // Migration - | { name: JobName.QUEUE_MIGRATION; data?: IBaseJob } - | { name: JobName.MIGRATE_ASSET; data: IEntityJob } - | { name: JobName.MIGRATE_PERSON; data: IEntityJob 
} + | { name: JobName.QueueMigration; data?: IBaseJob } + | { name: JobName.MigrateAsset; data: IEntityJob } + | { name: JobName.MigratePerson; data: IEntityJob } // Metadata Extraction - | { name: JobName.QUEUE_METADATA_EXTRACTION; data: IBaseJob } - | { name: JobName.METADATA_EXTRACTION; data: IEntityJob } + | { name: JobName.QueueMetadataExtraction; data: IBaseJob } + | { name: JobName.MetadataExtraction; data: IEntityJob } // Notifications - | { name: JobName.NOTIFICATIONS_CLEANUP; data?: IBaseJob } + | { name: JobName.NotificationsCleanup; data?: IBaseJob } // Sidecar Scanning - | { name: JobName.QUEUE_SIDECAR; data: IBaseJob } - | { name: JobName.SIDECAR_DISCOVERY; data: IEntityJob } - | { name: JobName.SIDECAR_SYNC; data: IEntityJob } - | { name: JobName.SIDECAR_WRITE; data: ISidecarWriteJob } + | { name: JobName.QueueSidecar; data: IBaseJob } + | { name: JobName.SidecarDiscovery; data: IEntityJob } + | { name: JobName.SidecarSync; data: IEntityJob } + | { name: JobName.SidecarWrite; data: ISidecarWriteJob } // Facial Recognition - | { name: JobName.QUEUE_FACE_DETECTION; data: IBaseJob } - | { name: JobName.FACE_DETECTION; data: IEntityJob } - | { name: JobName.QUEUE_FACIAL_RECOGNITION; data: INightlyJob } - | { name: JobName.FACIAL_RECOGNITION; data: IDeferrableJob } - | { name: JobName.GENERATE_PERSON_THUMBNAIL; data: IEntityJob } + | { name: JobName.QueueFaceDetection; data: IBaseJob } + | { name: JobName.FaceDetection; data: IEntityJob } + | { name: JobName.QueueFacialRecognition; data: INightlyJob } + | { name: JobName.FacialRecognition; data: IDeferrableJob } + | { name: JobName.GeneratePersonThumbnail; data: IEntityJob } // Smart Search - | { name: JobName.QUEUE_SMART_SEARCH; data: IBaseJob } - | { name: JobName.SMART_SEARCH; data: IEntityJob } - | { name: JobName.QUEUE_TRASH_EMPTY; data?: IBaseJob } + | { name: JobName.QueueSmartSearch; data: IBaseJob } + | { name: JobName.SmartSearch; data: IEntityJob } + | { name: JobName.QueueTrashEmpty; data?: 
IBaseJob } // Duplicate Detection - | { name: JobName.QUEUE_DUPLICATE_DETECTION; data: IBaseJob } - | { name: JobName.DUPLICATE_DETECTION; data: IEntityJob } + | { name: JobName.QueueDuplicateDetection; data: IBaseJob } + | { name: JobName.DuplicateDetection; data: IEntityJob } // Memories - | { name: JobName.MEMORIES_CLEANUP; data?: IBaseJob } - | { name: JobName.MEMORIES_CREATE; data?: IBaseJob } + | { name: JobName.MemoriesCleanup; data?: IBaseJob } + | { name: JobName.MemoriesCreate; data?: IBaseJob } // Filesystem - | { name: JobName.DELETE_FILES; data: IDeleteFilesJob } + | { name: JobName.DeleteFiles; data: IDeleteFilesJob } // Cleanup - | { name: JobName.CLEAN_OLD_AUDIT_LOGS; data?: IBaseJob } - | { name: JobName.CLEAN_OLD_SESSION_TOKENS; data?: IBaseJob } + | { name: JobName.CleanOldAuditLogs; data?: IBaseJob } + | { name: JobName.CleanOldSessionTokens; data?: IBaseJob } // Tags - | { name: JobName.TAG_CLEANUP; data?: IBaseJob } + | { name: JobName.TagCleanup; data?: IBaseJob } // Asset Deletion - | { name: JobName.PERSON_CLEANUP; data?: IBaseJob } - | { name: JobName.ASSET_DELETION; data: IAssetDeleteJob } - | { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob } + | { name: JobName.PersonCleanup; data?: IBaseJob } + | { name: JobName.AssetDeletion; data: IAssetDeleteJob } + | { name: JobName.AssetDeletionCheck; data?: IBaseJob } // Library Management - | { name: JobName.LIBRARY_SYNC_FILES; data: ILibraryFileJob } - | { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob } - | { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob } - | { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryBulkIdsJob } - | { name: JobName.LIBRARY_ASSET_REMOVAL; data: ILibraryFileJob } - | { name: JobName.LIBRARY_DELETE; data: IEntityJob } - | { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data?: IBaseJob } - | { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob } + | { name: JobName.LibrarySyncFiles; data: ILibraryFileJob } + | { name: JobName.LibraryQueueSyncFiles; 
data: IEntityJob } + | { name: JobName.LibraryQueueSyncAssets; data: IEntityJob } + | { name: JobName.LibrarySyncAssets; data: ILibraryBulkIdsJob } + | { name: JobName.LibraryAssetRemoval; data: ILibraryFileJob } + | { name: JobName.LibraryDelete; data: IEntityJob } + | { name: JobName.LibraryQueueScanAll; data?: IBaseJob } + | { name: JobName.LibraryQueueCleanup; data: IBaseJob } // Notification - | { name: JobName.SEND_EMAIL; data: IEmailJob } - | { name: JobName.NOTIFY_ALBUM_INVITE; data: INotifyAlbumInviteJob } - | { name: JobName.NOTIFY_ALBUM_UPDATE; data: INotifyAlbumUpdateJob } - | { name: JobName.NOTIFY_SIGNUP; data: INotifySignupJob } + | { name: JobName.SendMail; data: IEmailJob } + | { name: JobName.NotifyAlbumInvite; data: INotifyAlbumInviteJob } + | { name: JobName.NotifyAlbumUpdate; data: INotifyAlbumUpdateJob } + | { name: JobName.NotifySignup; data: INotifySignupJob } // Version check - | { name: JobName.VERSION_CHECK; data: IBaseJob }; + | { name: JobName.VersionCheck; data: IBaseJob }; export type VectorExtension = (typeof VECTOR_EXTENSIONS)[number]; @@ -442,7 +442,7 @@ export type StorageAsset = { export type OnThisDayData = { year: number }; export interface MemoryData { - [MemoryType.ON_THIS_DAY]: OnThisDayData; + [MemoryType.OnThisDay]: OnThisDayData; } export type VersionCheckMetadata = { checkedAt: string; releaseVersion: string }; @@ -453,14 +453,14 @@ export type MemoriesState = { }; export interface SystemMetadata extends Record> { - [SystemMetadataKey.ADMIN_ONBOARDING]: { isOnboarded: boolean }; - [SystemMetadataKey.FACIAL_RECOGNITION_STATE]: { lastRun?: string }; - [SystemMetadataKey.LICENSE]: { licenseKey: string; activationKey: string; activatedAt: Date }; - [SystemMetadataKey.REVERSE_GEOCODING_STATE]: { lastUpdate?: string; lastImportFileName?: string }; - [SystemMetadataKey.SYSTEM_CONFIG]: DeepPartial; - [SystemMetadataKey.SYSTEM_FLAGS]: DeepPartial; - [SystemMetadataKey.VERSION_CHECK_STATE]: VersionCheckMetadata; - 
[SystemMetadataKey.MEMORIES_STATE]: MemoriesState; + [SystemMetadataKey.AdminOnboarding]: { isOnboarded: boolean }; + [SystemMetadataKey.FacialRecognitionState]: { lastRun?: string }; + [SystemMetadataKey.License]: { licenseKey: string; activationKey: string; activatedAt: Date }; + [SystemMetadataKey.ReverseGeocodingState]: { lastUpdate?: string; lastImportFileName?: string }; + [SystemMetadataKey.SystemConfig]: DeepPartial; + [SystemMetadataKey.SystemFlags]: DeepPartial; + [SystemMetadataKey.VersionCheckState]: VersionCheckMetadata; + [SystemMetadataKey.MemoriesState]: MemoriesState; } export type UserMetadataItem = { @@ -513,7 +513,7 @@ export interface UserPreferences { } export interface UserMetadata extends Record> { - [UserMetadataKey.PREFERENCES]: DeepPartial; - [UserMetadataKey.LICENSE]: { licenseKey: string; activationKey: string; activatedAt: string }; - [UserMetadataKey.ONBOARDING]: { isOnboarded: boolean }; + [UserMetadataKey.Preferences]: DeepPartial; + [UserMetadataKey.License]: { licenseKey: string; activationKey: string; activatedAt: string }; + [UserMetadataKey.Onboarding]: { isOnboarded: boolean }; } diff --git a/server/src/utils/access.ts b/server/src/utils/access.ts index b639643b6f..08ff81e840 100644 --- a/server/src/utils/access.ts +++ b/server/src/utils/access.ts @@ -11,7 +11,7 @@ export type GrantedRequest = { }; export const isGranted = ({ requested, current }: GrantedRequest) => { - if (current.includes(Permission.ALL)) { + if (current.includes(Permission.All)) { return true; } @@ -63,36 +63,36 @@ const checkSharedLinkAccess = async ( const sharedLinkId = sharedLink.id; switch (permission) { - case Permission.ASSET_READ: { + case Permission.AssetRead: { return await access.asset.checkSharedLinkAccess(sharedLinkId, ids); } - case Permission.ASSET_VIEW: { + case Permission.AssetView: { return await access.asset.checkSharedLinkAccess(sharedLinkId, ids); } - case Permission.ASSET_DOWNLOAD: { + case Permission.AssetDownload: { return 
sharedLink.allowDownload ? await access.asset.checkSharedLinkAccess(sharedLinkId, ids) : new Set(); } - case Permission.ASSET_UPLOAD: { + case Permission.AssetUpload: { return sharedLink.allowUpload ? ids : new Set(); } - case Permission.ASSET_SHARE: { + case Permission.AssetShare: { // TODO: fix this to not use sharedLink.userId for access control return await access.asset.checkOwnerAccess(sharedLink.userId, ids, false); } - case Permission.ALBUM_READ: { + case Permission.AlbumRead: { return await access.album.checkSharedLinkAccess(sharedLinkId, ids); } - case Permission.ALBUM_DOWNLOAD: { + case Permission.AlbumDownload: { return sharedLink.allowDownload ? await access.album.checkSharedLinkAccess(sharedLinkId, ids) : new Set(); } - case Permission.ALBUM_ADD_ASSET: { + case Permission.AlbumAddAsset: { return sharedLink.allowUpload ? await access.album.checkSharedLinkAccess(sharedLinkId, ids) : new Set(); } @@ -107,190 +107,190 @@ const checkOtherAccess = async (access: AccessRepository, request: OtherAccessRe switch (permission) { // uses album id - case Permission.ACTIVITY_CREATE: { + case Permission.ActivityCreate: { return await access.activity.checkCreateAccess(auth.user.id, ids); } // uses activity id - case Permission.ACTIVITY_DELETE: { + case Permission.ActivityDelete: { const isOwner = await access.activity.checkOwnerAccess(auth.user.id, ids); const isAlbumOwner = await access.activity.checkAlbumOwnerAccess(auth.user.id, setDifference(ids, isOwner)); return setUnion(isOwner, isAlbumOwner); } - case Permission.ASSET_READ: { + case Permission.AssetRead: { const isOwner = await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission); const isAlbum = await access.asset.checkAlbumAccess(auth.user.id, setDifference(ids, isOwner)); const isPartner = await access.asset.checkPartnerAccess(auth.user.id, setDifference(ids, isOwner, isAlbum)); return setUnion(isOwner, isAlbum, isPartner); } - case Permission.ASSET_SHARE: { + case 
Permission.AssetShare: { const isOwner = await access.asset.checkOwnerAccess(auth.user.id, ids, false); const isPartner = await access.asset.checkPartnerAccess(auth.user.id, setDifference(ids, isOwner)); return setUnion(isOwner, isPartner); } - case Permission.ASSET_VIEW: { + case Permission.AssetView: { const isOwner = await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission); const isAlbum = await access.asset.checkAlbumAccess(auth.user.id, setDifference(ids, isOwner)); const isPartner = await access.asset.checkPartnerAccess(auth.user.id, setDifference(ids, isOwner, isAlbum)); return setUnion(isOwner, isAlbum, isPartner); } - case Permission.ASSET_DOWNLOAD: { + case Permission.AssetDownload: { const isOwner = await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission); const isAlbum = await access.asset.checkAlbumAccess(auth.user.id, setDifference(ids, isOwner)); const isPartner = await access.asset.checkPartnerAccess(auth.user.id, setDifference(ids, isOwner, isAlbum)); return setUnion(isOwner, isAlbum, isPartner); } - case Permission.ASSET_UPDATE: { + case Permission.AssetUpdate: { return await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission); } - case Permission.ASSET_DELETE: { + case Permission.AssetDelete: { return await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission); } - case Permission.ALBUM_READ: { + case Permission.AlbumRead: { const isOwner = await access.album.checkOwnerAccess(auth.user.id, ids); const isShared = await access.album.checkSharedAlbumAccess( auth.user.id, setDifference(ids, isOwner), - AlbumUserRole.VIEWER, + AlbumUserRole.Viewer, ); return setUnion(isOwner, isShared); } - case Permission.ALBUM_ADD_ASSET: { + case Permission.AlbumAddAsset: { const isOwner = await access.album.checkOwnerAccess(auth.user.id, ids); const isShared = await access.album.checkSharedAlbumAccess( auth.user.id, 
setDifference(ids, isOwner), - AlbumUserRole.EDITOR, + AlbumUserRole.Editor, ); return setUnion(isOwner, isShared); } - case Permission.ALBUM_UPDATE: { + case Permission.AlbumUpdate: { return await access.album.checkOwnerAccess(auth.user.id, ids); } - case Permission.ALBUM_DELETE: { + case Permission.AlbumDelete: { return await access.album.checkOwnerAccess(auth.user.id, ids); } - case Permission.ALBUM_SHARE: { + case Permission.AlbumShare: { return await access.album.checkOwnerAccess(auth.user.id, ids); } - case Permission.ALBUM_DOWNLOAD: { + case Permission.AlbumDownload: { const isOwner = await access.album.checkOwnerAccess(auth.user.id, ids); const isShared = await access.album.checkSharedAlbumAccess( auth.user.id, setDifference(ids, isOwner), - AlbumUserRole.VIEWER, + AlbumUserRole.Viewer, ); return setUnion(isOwner, isShared); } - case Permission.ALBUM_REMOVE_ASSET: { + case Permission.AlbumRemoveAsset: { const isOwner = await access.album.checkOwnerAccess(auth.user.id, ids); const isShared = await access.album.checkSharedAlbumAccess( auth.user.id, setDifference(ids, isOwner), - AlbumUserRole.EDITOR, + AlbumUserRole.Editor, ); return setUnion(isOwner, isShared); } - case Permission.ASSET_UPLOAD: { + case Permission.AssetUpload: { return ids.has(auth.user.id) ? new Set([auth.user.id]) : new Set(); } - case Permission.ARCHIVE_READ: { + case Permission.ArchiveRead: { return ids.has(auth.user.id) ? 
new Set([auth.user.id]) : new Set(); } - case Permission.AUTH_DEVICE_DELETE: { + case Permission.AuthDeviceDelete: { return await access.authDevice.checkOwnerAccess(auth.user.id, ids); } - case Permission.FACE_DELETE: { + case Permission.FaceDelete: { return access.person.checkFaceOwnerAccess(auth.user.id, ids); } - case Permission.NOTIFICATION_READ: - case Permission.NOTIFICATION_UPDATE: - case Permission.NOTIFICATION_DELETE: { + case Permission.NotificationRead: + case Permission.NotificationUpdate: + case Permission.NotificationDelete: { return access.notification.checkOwnerAccess(auth.user.id, ids); } - case Permission.TAG_ASSET: - case Permission.TAG_READ: - case Permission.TAG_UPDATE: - case Permission.TAG_DELETE: { + case Permission.TagAsset: + case Permission.TagRead: + case Permission.TagUpdate: + case Permission.TagDelete: { return await access.tag.checkOwnerAccess(auth.user.id, ids); } - case Permission.TIMELINE_READ: { + case Permission.TimelineRead: { const isOwner = ids.has(auth.user.id) ? new Set([auth.user.id]) : new Set(); const isPartner = await access.timeline.checkPartnerAccess(auth.user.id, setDifference(ids, isOwner)); return setUnion(isOwner, isPartner); } - case Permission.TIMELINE_DOWNLOAD: { + case Permission.TimelineDownload: { return ids.has(auth.user.id) ? 
new Set([auth.user.id]) : new Set(); } - case Permission.MEMORY_READ: { + case Permission.MemoryRead: { return access.memory.checkOwnerAccess(auth.user.id, ids); } - case Permission.MEMORY_UPDATE: { + case Permission.MemoryUpdate: { return access.memory.checkOwnerAccess(auth.user.id, ids); } - case Permission.MEMORY_DELETE: { + case Permission.MemoryDelete: { return access.memory.checkOwnerAccess(auth.user.id, ids); } - case Permission.PERSON_CREATE: { + case Permission.PersonCreate: { return access.person.checkFaceOwnerAccess(auth.user.id, ids); } - case Permission.PERSON_READ: - case Permission.PERSON_UPDATE: - case Permission.PERSON_DELETE: - case Permission.PERSON_MERGE: { + case Permission.PersonRead: + case Permission.PersonUpdate: + case Permission.PersonDelete: + case Permission.PersonMerge: { return await access.person.checkOwnerAccess(auth.user.id, ids); } - case Permission.PERSON_REASSIGN: { + case Permission.PersonReassign: { return access.person.checkFaceOwnerAccess(auth.user.id, ids); } - case Permission.PARTNER_UPDATE: { + case Permission.PartnerUpdate: { return await access.partner.checkUpdateAccess(auth.user.id, ids); } - case Permission.SESSION_READ: - case Permission.SESSION_UPDATE: - case Permission.SESSION_DELETE: - case Permission.SESSION_LOCK: { + case Permission.SessionRead: + case Permission.SessionUpdate: + case Permission.SessionDelete: + case Permission.SessionLock: { return access.session.checkOwnerAccess(auth.user.id, ids); } - case Permission.STACK_READ: { + case Permission.StackRead: { return access.stack.checkOwnerAccess(auth.user.id, ids); } - case Permission.STACK_UPDATE: { + case Permission.StackUpdate: { return access.stack.checkOwnerAccess(auth.user.id, ids); } - case Permission.STACK_DELETE: { + case Permission.StackDelete: { return access.stack.checkOwnerAccess(auth.user.id, ids); } diff --git a/server/src/utils/asset.util.ts b/server/src/utils/asset.util.ts index 85bc6cd2e5..1b9e12c1cd 100644 --- 
a/server/src/utils/asset.util.ts +++ b/server/src/utils/asset.util.ts @@ -18,9 +18,9 @@ export const getAssetFile = (files: AssetFile[], type: AssetFileType | Generated }; export const getAssetFiles = (files: AssetFile[]) => ({ - fullsizeFile: getAssetFile(files, AssetFileType.FULLSIZE), - previewFile: getAssetFile(files, AssetFileType.PREVIEW), - thumbnailFile: getAssetFile(files, AssetFileType.THUMBNAIL), + fullsizeFile: getAssetFile(files, AssetFileType.FullSize), + previewFile: getAssetFile(files, AssetFileType.Preview), + thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail), }); export const addAssets = async ( @@ -33,7 +33,7 @@ export const addAssets = async ( const notPresentAssetIds = dto.assetIds.filter((id) => !existingAssetIds.has(id)); const allowedAssetIds = await checkAccess(access, { auth, - permission: Permission.ASSET_SHARE, + permission: Permission.AssetShare, ids: notPresentAssetIds, }); @@ -75,7 +75,7 @@ export const removeAssets = async ( const existingAssetIds = await bulk.getAssetIds(dto.parentId, dto.assetIds); const allowedAssetIds = canAlwaysRemove.has(dto.parentId) ? 
existingAssetIds - : await checkAccess(access, { auth, permission: Permission.ASSET_SHARE, ids: existingAssetIds }); + : await checkAccess(access, { auth, permission: Permission.AssetShare, ids: existingAssetIds }); const results: BulkIdResponseDto[] = []; for (const assetId of dto.assetIds) { @@ -143,15 +143,15 @@ export const onBeforeLink = async ( if (!motionAsset) { throw new BadRequestException('Live photo video not found'); } - if (motionAsset.type !== AssetType.VIDEO) { + if (motionAsset.type !== AssetType.Video) { throw new BadRequestException('Live photo video must be a video'); } if (motionAsset.ownerId !== userId) { throw new BadRequestException('Live photo video does not belong to the user'); } - if (motionAsset && motionAsset.visibility === AssetVisibility.TIMELINE) { - await assetRepository.update({ id: livePhotoVideoId, visibility: AssetVisibility.HIDDEN }); + if (motionAsset && motionAsset.visibility === AssetVisibility.Timeline) { + await assetRepository.update({ id: livePhotoVideoId, visibility: AssetVisibility.Hidden }); await eventRepository.emit('AssetHide', { assetId: motionAsset.id, userId }); } }; diff --git a/server/src/utils/config.ts b/server/src/utils/config.ts index bc1d2dae1b..a669af31cf 100644 --- a/server/src/utils/config.ts +++ b/server/src/utils/config.ts @@ -60,7 +60,7 @@ export const updateConfig = async (repos: RepoDeps, newConfig: SystemConfig): Pr _.set(partialConfig, property, newValue); } - await metadataRepo.set(SystemMetadataKey.SYSTEM_CONFIG, partialConfig); + await metadataRepo.set(SystemMetadataKey.SystemConfig, partialConfig); return getConfig(repos, { withCache: false }); }; @@ -83,7 +83,7 @@ const buildConfig = async (repos: RepoDeps) => { // load partial const partial = configFile ? 
await loadFromFile(repos, configFile) - : await metadataRepo.get(SystemMetadataKey.SYSTEM_CONFIG); + : await metadataRepo.get(SystemMetadataKey.SystemConfig); // merge with defaults const rawConfig = _.cloneDeep(defaults); diff --git a/server/src/utils/database.ts b/server/src/utils/database.ts index f23a3deb35..1ef9b8e926 100644 --- a/server/src/utils/database.ts +++ b/server/src/utils/database.ts @@ -154,7 +154,7 @@ export function toJson(qb: SelectQueryBuilder) { - return qb.where('asset.visibility', 'in', [sql.lit(AssetVisibility.ARCHIVE), sql.lit(AssetVisibility.TIMELINE)]); + return qb.where('asset.visibility', 'in', [sql.lit(AssetVisibility.Archive), sql.lit(AssetVisibility.Timeline)]); } // TODO come up with a better query that only selects the fields we need @@ -299,7 +299,7 @@ const joinDeduplicationPlugin = new DeduplicateJoinsPlugin(); export function searchAssetBuilder(kysely: Kysely, options: AssetSearchBuilderOptions) { options.withDeleted ||= !!(options.trashedAfter || options.trashedBefore || options.isOffline); - const visibility = options.visibility == null ? AssetVisibility.TIMELINE : options.visibility; + const visibility = options.visibility == null ? 
AssetVisibility.Timeline : options.visibility; return kysely .withPlugin(joinDeduplicationPlugin) @@ -399,7 +399,7 @@ type VectorIndexQueryOptions = { table: string; vectorExtension: VectorExtension export function vectorIndexQuery({ vectorExtension, table, indexName, lists }: VectorIndexQueryOptions): string { switch (vectorExtension) { - case DatabaseExtension.VECTORCHORD: { + case DatabaseExtension.VectorChord: { return ` CREATE INDEX IF NOT EXISTS ${indexName} ON ${table} USING vchordrq (embedding vector_cosine_ops) WITH (options = $$ residual_quantization = false @@ -410,7 +410,7 @@ export function vectorIndexQuery({ vectorExtension, table, indexName, lists }: V sampling_factor = 1024 $$)`; } - case DatabaseExtension.VECTORS: { + case DatabaseExtension.Vectors: { return ` CREATE INDEX IF NOT EXISTS ${indexName} ON ${table} USING vectors (embedding vector_cos_ops) WITH (options = $$ @@ -420,7 +420,7 @@ export function vectorIndexQuery({ vectorExtension, table, indexName, lists }: V ef_construction = 300 $$)`; } - case DatabaseExtension.VECTOR: { + case DatabaseExtension.Vector: { return ` CREATE INDEX IF NOT EXISTS ${indexName} ON ${table} USING hnsw (embedding vector_cosine_ops) diff --git a/server/src/utils/file.ts b/server/src/utils/file.ts index 716e0b1957..3e1a1b7f68 100644 --- a/server/src/utils/file.ts +++ b/server/src/utils/file.ts @@ -34,9 +34,9 @@ type SendFile = Parameters; type SendFileOptions = SendFile[1]; const cacheControlHeaders: Record = { - [CacheControl.PRIVATE_WITH_CACHE]: 'private, max-age=86400, no-transform', - [CacheControl.PRIVATE_WITHOUT_CACHE]: 'private, no-cache, no-transform', - [CacheControl.NONE]: null, // falsy value to prevent adding Cache-Control header + [CacheControl.PrivateWithCache]: 'private, max-age=86400, no-transform', + [CacheControl.PrivateWithoutCache]: 'private, no-cache, no-transform', + [CacheControl.None]: null, // falsy value to prevent adding Cache-Control header }; export const sendFile = async ( diff --git 
a/server/src/utils/media.ts b/server/src/utils/media.ts index b00eb652ef..e43ecba49f 100644 --- a/server/src/utils/media.ts +++ b/server/src/utils/media.ts @@ -1,5 +1,5 @@ import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto'; -import { CQMode, ToneMapping, TranscodeHWAccel, TranscodeTarget, VideoCodec } from 'src/enum'; +import { CQMode, ToneMapping, TranscodeHardwareAcceleration, TranscodeTarget, VideoCodec } from 'src/enum'; import { AudioStreamInfo, BitrateDistribution, @@ -16,7 +16,7 @@ export class BaseConfig implements VideoCodecSWConfig { protected constructor(protected config: SystemConfigFFmpegDto) {} static create(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces): VideoCodecSWConfig { - if (config.accel === TranscodeHWAccel.DISABLED) { + if (config.accel === TranscodeHardwareAcceleration.Disabled) { return this.getSWCodecConfig(config); } return this.getHWCodecConfig(config, interfaces); @@ -27,13 +27,13 @@ export class BaseConfig implements VideoCodecSWConfig { case VideoCodec.H264: { return new H264Config(config); } - case VideoCodec.HEVC: { + case VideoCodec.Hevc: { return new HEVCConfig(config); } - case VideoCodec.VP9: { + case VideoCodec.Vp9: { return new VP9Config(config); } - case VideoCodec.AV1: { + case VideoCodec.Av1: { return new AV1Config(config); } default: { @@ -45,25 +45,25 @@ export class BaseConfig implements VideoCodecSWConfig { private static getHWCodecConfig(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces) { let handler: VideoCodecHWConfig; switch (config.accel) { - case TranscodeHWAccel.NVENC: { + case TranscodeHardwareAcceleration.Nvenc: { handler = config.accelDecode ? new NvencHwDecodeConfig(config, interfaces) : new NvencSwDecodeConfig(config, interfaces); break; } - case TranscodeHWAccel.QSV: { + case TranscodeHardwareAcceleration.Qsv: { handler = config.accelDecode ? 
new QsvHwDecodeConfig(config, interfaces) : new QsvSwDecodeConfig(config, interfaces); break; } - case TranscodeHWAccel.VAAPI: { + case TranscodeHardwareAcceleration.Vaapi: { handler = config.accelDecode ? new VaapiHwDecodeConfig(config, interfaces) : new VaapiSwDecodeConfig(config, interfaces); break; } - case TranscodeHWAccel.RKMPP: { + case TranscodeHardwareAcceleration.Rkmpp: { handler = config.accelDecode ? new RkmppHwDecodeConfig(config, interfaces) : new RkmppSwDecodeConfig(config, interfaces); @@ -94,7 +94,7 @@ export class BaseConfig implements VideoCodecSWConfig { twoPass: this.eligibleForTwoPass(), progress: { frameCount: videoStream.frameCount, percentInterval: 5 }, } as TranscodeCommand; - if ([TranscodeTarget.ALL, TranscodeTarget.VIDEO].includes(target)) { + if ([TranscodeTarget.All, TranscodeTarget.Video].includes(target)) { const filters = this.getFilterOptions(videoStream); if (filters.length > 0) { options.outputOptions.push(`-vf ${filters.join(',')}`); @@ -116,8 +116,8 @@ export class BaseConfig implements VideoCodecSWConfig { } getBaseOutputOptions(target: TranscodeTarget, videoStream: VideoStreamInfo, audioStream?: AudioStreamInfo) { - const videoCodec = [TranscodeTarget.ALL, TranscodeTarget.VIDEO].includes(target) ? this.getVideoCodec() : 'copy'; - const audioCodec = [TranscodeTarget.ALL, TranscodeTarget.AUDIO].includes(target) ? this.getAudioCodec() : 'copy'; + const videoCodec = [TranscodeTarget.All, TranscodeTarget.Video].includes(target) ? this.getVideoCodec() : 'copy'; + const audioCodec = [TranscodeTarget.All, TranscodeTarget.Audio].includes(target) ? 
this.getAudioCodec() : 'copy'; const options = [ `-c:v ${videoCodec}`, @@ -146,7 +146,7 @@ export class BaseConfig implements VideoCodecSWConfig { } if ( - this.config.targetVideoCodec === VideoCodec.HEVC && + this.config.targetVideoCodec === VideoCodec.Hevc && (videoCodec !== 'copy' || videoStream.codecName === 'hevc') ) { options.push('-tag:v hvc1'); @@ -207,7 +207,7 @@ export class BaseConfig implements VideoCodecSWConfig { } eligibleForTwoPass() { - if (!this.config.twoPass || this.config.accel !== TranscodeHWAccel.DISABLED) { + if (!this.config.twoPass || this.config.accel !== TranscodeHardwareAcceleration.Disabled) { return false; } @@ -244,7 +244,7 @@ export class BaseConfig implements VideoCodecSWConfig { } shouldToneMap(videoStream: VideoStreamInfo) { - return videoStream.isHDR && this.config.tonemap !== ToneMapping.DISABLED; + return videoStream.isHDR && this.config.tonemap !== ToneMapping.Disabled; } getScaling(videoStream: VideoStreamInfo, mult = 2) { @@ -326,7 +326,7 @@ export class BaseConfig implements VideoCodecSWConfig { } useCQP() { - return this.config.cqMode === CQMode.CQP; + return this.config.cqMode === CQMode.Cqp; } } @@ -344,7 +344,7 @@ export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig { } getSupportedCodecs() { - return [VideoCodec.H264, VideoCodec.HEVC]; + return [VideoCodec.H264, VideoCodec.Hevc]; } validateDevices(devices: string[]) { @@ -526,7 +526,7 @@ export class NvencSwDecodeConfig extends BaseHWConfig { } getSupportedCodecs() { - return [VideoCodec.H264, VideoCodec.HEVC, VideoCodec.AV1]; + return [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Av1]; } getBaseInputOptions() { @@ -658,7 +658,7 @@ export class QsvSwDecodeConfig extends BaseHWConfig { getBaseOutputOptions(target: TranscodeTarget, videoStream: VideoStreamInfo, audioStream?: AudioStreamInfo) { const options = super.getBaseOutputOptions(target, videoStream, audioStream); // VP9 requires enabling low power mode 
https://git.ffmpeg.org/gitweb/ffmpeg.git/commit/33583803e107b6d532def0f9d949364b01b6ad5a - if (this.config.targetVideoCodec === VideoCodec.VP9) { + if (this.config.targetVideoCodec === VideoCodec.Vp9) { options.push('-low_power 1'); } return options; @@ -693,7 +693,7 @@ export class QsvSwDecodeConfig extends BaseHWConfig { } getSupportedCodecs() { - return [VideoCodec.H264, VideoCodec.HEVC, VideoCodec.VP9, VideoCodec.AV1]; + return [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Vp9, VideoCodec.Av1]; } // recommended from https://github.com/intel/media-delivery/blob/master/doc/benchmarks/intel-iris-xe-max-graphics/intel-iris-xe-max-graphics.md @@ -712,7 +712,7 @@ export class QsvSwDecodeConfig extends BaseHWConfig { } useCQP() { - return this.config.cqMode === CQMode.CQP || this.config.targetVideoCodec === VideoCodec.VP9; + return this.config.cqMode === CQMode.Cqp || this.config.targetVideoCodec === VideoCodec.Vp9; } getScaling(videoStream: VideoStreamInfo): string { @@ -802,7 +802,7 @@ export class VaapiSwDecodeConfig extends BaseHWConfig { const bitrates = this.getBitrateDistribution(); const options = []; - if (this.config.targetVideoCodec === VideoCodec.VP9) { + if (this.config.targetVideoCodec === VideoCodec.Vp9) { options.push('-bsf:v vp9_raw_reorder,vp9_superframe'); } @@ -824,11 +824,11 @@ export class VaapiSwDecodeConfig extends BaseHWConfig { } getSupportedCodecs() { - return [VideoCodec.H264, VideoCodec.HEVC, VideoCodec.VP9, VideoCodec.AV1]; + return [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Vp9, VideoCodec.Av1]; } useCQP() { - return this.config.cqMode !== CQMode.ICQ || this.config.targetVideoCodec === VideoCodec.VP9; + return this.config.cqMode !== CQMode.Icq || this.config.targetVideoCodec === VideoCodec.Vp9; } } @@ -900,7 +900,7 @@ export class RkmppSwDecodeConfig extends BaseHWConfig { // from ffmpeg_mpp help, commonly referred to as H264 level 5.1 return ['-level 51']; } - case VideoCodec.HEVC: { + case VideoCodec.Hevc: { // from ffmpeg_mpp help, 
commonly referred to as HEVC level 5.1 return ['-level 153']; } @@ -921,7 +921,7 @@ export class RkmppSwDecodeConfig extends BaseHWConfig { } getSupportedCodecs() { - return [VideoCodec.H264, VideoCodec.HEVC]; + return [VideoCodec.H264, VideoCodec.Hevc]; } getVideoCodec(): string { diff --git a/server/src/utils/mime-types.ts b/server/src/utils/mime-types.ts index 6aad418d9f..6b9392146d 100644 --- a/server/src/utils/mime-types.ts +++ b/server/src/utils/mime-types.ts @@ -129,11 +129,11 @@ export const mimeTypes = { assetType: (filename: string) => { const contentType = lookup(filename); if (contentType.startsWith('image/')) { - return AssetType.IMAGE; + return AssetType.Image; } else if (contentType.startsWith('video/')) { - return AssetType.VIDEO; + return AssetType.Video; } - return AssetType.OTHER; + return AssetType.Other; }, getSupportedFileExtensions: () => [...Object.keys(image), ...Object.keys(video)], }; diff --git a/server/src/utils/misc.ts b/server/src/utils/misc.ts index 742e98c1c3..3acb72b663 100644 --- a/server/src/utils/misc.ts +++ b/server/src/utils/misc.ts @@ -234,14 +234,14 @@ export const useSwagger = (app: INestApplication, { write }: { write: boolean }) scheme: 'Bearer', in: 'header', }) - .addCookieAuth(ImmichCookie.ACCESS_TOKEN) + .addCookieAuth(ImmichCookie.AccessToken) .addApiKey( { type: 'apiKey', in: 'header', - name: ImmichHeader.API_KEY, + name: ImmichHeader.ApiKey, }, - MetadataKey.API_KEY_SECURITY, + MetadataKey.ApiKeySecurity, ) .addServer('/api') .build(); diff --git a/server/src/utils/preferences.ts b/server/src/utils/preferences.ts index 9bd3dedd52..121bf2826d 100644 --- a/server/src/utils/preferences.ts +++ b/server/src/utils/preferences.ts @@ -8,7 +8,7 @@ import { getKeysDeep } from 'src/utils/misc'; const getDefaultPreferences = (): UserPreferences => { return { albums: { - defaultAssetOrder: AssetOrder.DESC, + defaultAssetOrder: AssetOrder.Desc, }, folders: { enabled: false, @@ -53,7 +53,7 @@ const getDefaultPreferences = (): 
UserPreferences => { export const getPreferences = (metadata: UserMetadataItem[]): UserPreferences => { const preferences = getDefaultPreferences(); - const item = metadata.find(({ key }) => key === UserMetadataKey.PREFERENCES); + const item = metadata.find(({ key }) => key === UserMetadataKey.Preferences); const partial = item?.value || {}; for (const property of getKeysDeep(partial)) { _.set(preferences, property, _.get(partial, property)); diff --git a/server/src/utils/response.ts b/server/src/utils/response.ts index a50e86a4ff..c5f51c385c 100644 --- a/server/src/utils/response.ts +++ b/server/src/utils/response.ts @@ -13,13 +13,13 @@ export const respondWithCookie = (res: Response, body: T, { isSecure, values }; const cookieOptions: Record = { - [ImmichCookie.AUTH_TYPE]: defaults, - [ImmichCookie.ACCESS_TOKEN]: defaults, - [ImmichCookie.OAUTH_STATE]: defaults, - [ImmichCookie.OAUTH_CODE_VERIFIER]: defaults, + [ImmichCookie.AuthType]: defaults, + [ImmichCookie.AccessToken]: defaults, + [ImmichCookie.OAuthState]: defaults, + [ImmichCookie.OAuthCodeVerifier]: defaults, // no httpOnly so that the client can know the auth state - [ImmichCookie.IS_AUTHENTICATED]: { ...defaults, httpOnly: false }, - [ImmichCookie.SHARED_LINK_TOKEN]: { ...defaults, maxAge: Duration.fromObject({ days: 1 }).toMillis() }, + [ImmichCookie.IsAuthenticated]: { ...defaults, httpOnly: false }, + [ImmichCookie.SharedLinkToken]: { ...defaults, maxAge: Duration.fromObject({ days: 1 }).toMillis() }, }; for (const { key, value } of values) { diff --git a/server/test/fixtures/album.stub.ts b/server/test/fixtures/album.stub.ts index fd6a8678a0..d36989bbcf 100644 --- a/server/test/fixtures/album.stub.ts +++ b/server/test/fixtures/album.stub.ts @@ -19,7 +19,7 @@ export const albumStub = { sharedLinks: [], albumUsers: [], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, updateId: '42', }), sharedWithUser: Object.freeze({ @@ -38,11 +38,11 @@ export const albumStub = { 
albumUsers: [ { user: userStub.user1, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, }, ], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, updateId: '42', }), sharedWithMultiple: Object.freeze({ @@ -61,15 +61,15 @@ export const albumStub = { albumUsers: [ { user: userStub.user1, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, }, { user: userStub.user2, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, }, ], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, updateId: '42', }), sharedWithAdmin: Object.freeze({ @@ -88,11 +88,11 @@ export const albumStub = { albumUsers: [ { user: userStub.admin, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, }, ], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, updateId: '42', }), oneAsset: Object.freeze({ @@ -110,7 +110,7 @@ export const albumStub = { sharedLinks: [], albumUsers: [], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, updateId: '42', }), twoAssets: Object.freeze({ @@ -128,7 +128,7 @@ export const albumStub = { sharedLinks: [], albumUsers: [], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, updateId: '42', }), emptyWithValidThumbnail: Object.freeze({ @@ -146,7 +146,7 @@ export const albumStub = { sharedLinks: [], albumUsers: [], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, updateId: '42', }), }; diff --git a/server/test/fixtures/asset.stub.ts b/server/test/fixtures/asset.stub.ts index aa38a520ee..991c5d2c4f 100644 --- a/server/test/fixtures/asset.stub.ts +++ b/server/test/fixtures/asset.stub.ts @@ -8,19 +8,19 @@ import { userStub } from 'test/fixtures/user.stub'; export const previewFile: AssetFile = { id: 'file-1', - type: AssetFileType.PREVIEW, + type: AssetFileType.Preview, path: '/uploads/user-id/thumbs/path.jpg', }; const thumbnailFile: AssetFile = { id: 'file-2', - type: AssetFileType.THUMBNAIL, + 
type: AssetFileType.Thumbnail, path: '/uploads/user-id/webp/path.ext', }; const fullsizeFile: AssetFile = { id: 'file-3', - type: AssetFileType.FULLSIZE, + type: AssetFileType.FullSize, path: '/uploads/user-id/fullsize/path.webp', }; @@ -44,7 +44,7 @@ export const assetStub = { id: 'asset-id', ownerId: 'user-id', livePhotoVideoId: null, - type: AssetType.IMAGE, + type: AssetType.Image, isExternal: false, checksum: Buffer.from('file hash'), timeZone: null, @@ -57,7 +57,7 @@ export const assetStub = { }), noResizePath: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, originalFileName: 'IMG_123.jpg', deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -68,7 +68,7 @@ export const assetStub = { originalPath: 'upload/library/IMG_123.jpg', files: [thumbnailFile], checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, createdAt: new Date('2023-02-23T05:06:29.716Z'), @@ -89,12 +89,12 @@ export const assetStub = { libraryId: null, stackId: null, updateId: '42', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), noWebpPath: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -104,7 +104,7 @@ export const assetStub = { originalPath: 'upload/library/IMG_456.jpg', files: [previewFile], checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, createdAt: new Date('2023-02-23T05:06:29.716Z'), @@ -128,12 +128,12 @@ export const assetStub = { libraryId: null, stackId: null, updateId: '42', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), 
noThumbhash: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -143,7 +143,7 @@ export const assetStub = { originalPath: '/original/path.ext', files, checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, thumbhash: null, encodedVideoPath: null, createdAt: new Date('2023-02-23T05:06:29.716Z'), @@ -164,12 +164,12 @@ export const assetStub = { libraryId: null, stackId: null, updateId: '42', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), primaryImage: Object.freeze({ id: 'primary-asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -179,7 +179,7 @@ export const assetStub = { originalPath: '/original/path.jpg', checksum: Buffer.from('file hash', 'utf8'), files, - type: AssetType.IMAGE, + type: AssetType.Image, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, createdAt: new Date('2023-02-23T05:06:29.716Z'), @@ -210,12 +210,12 @@ export const assetStub = { isOffline: false, updateId: '42', libraryId: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), image: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -225,7 +225,7 @@ export const assetStub = { originalPath: '/original/path.jpg', files, checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, createdAt: new Date('2023-02-23T05:06:29.716Z'), @@ -256,7 
+256,7 @@ export const assetStub = { projectionType: null, height: 3840, width: 2160, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), trashed: Object.freeze({ @@ -269,7 +269,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.jpg', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ -293,16 +293,16 @@ export const assetStub = { } as Exif, duplicateId: null, isOffline: false, - status: AssetStatus.TRASHED, + status: AssetStatus.Trashed, libraryId: null, stackId: null, updateId: '42', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), trashedOffline: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -311,7 +311,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.jpg', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ -338,11 +338,11 @@ export const assetStub = { isOffline: true, stackId: null, updateId: '42', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), archived: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -351,7 +351,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.jpg', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ 
-378,12 +378,12 @@ export const assetStub = { libraryId: null, stackId: null, updateId: '42', - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), external: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -392,7 +392,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/data/user1/photo.jpg', checksum: Buffer.from('path hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ -418,12 +418,12 @@ export const assetStub = { updateId: '42', stackId: null, stack: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), image1: Object.freeze({ id: 'asset-id-1', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -432,7 +432,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.ext', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ -458,12 +458,12 @@ export const assetStub = { stackId: null, libraryId: null, stack: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), imageFrom2015: Object.freeze({ id: 'asset-id-1', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2015-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2015-02-23T05:06:29.716Z'), @@ -473,7 +473,7 @@ export const assetStub = { originalPath: '/original/path.ext', files, checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: 
AssetType.Image, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, createdAt: new Date('2015-02-23T05:06:29.716Z'), @@ -494,12 +494,12 @@ export const assetStub = { deletedAt: null, duplicateId: null, isOffline: false, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), video: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, originalFileName: 'asset-id.ext', deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -509,7 +509,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.ext', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.VIDEO, + type: AssetType.Video, files: [previewFile], thumbhash: null, encodedVideoPath: null, @@ -535,15 +535,15 @@ export const assetStub = { updateId: '42', libraryId: null, stackId: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), livePhotoMotionAsset: Object.freeze({ - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, id: fileStub.livePhotoMotion.uuid, originalPath: fileStub.livePhotoMotion.originalPath, ownerId: authStub.user1.user.id, - type: AssetType.VIDEO, + type: AssetType.Video, fileModifiedAt: new Date('2022-06-19T23:41:36.910Z'), fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'), exifInfo: { @@ -551,15 +551,15 @@ export const assetStub = { timeZone: `America/New_York`, }, libraryId: null, - visibility: AssetVisibility.HIDDEN, + visibility: AssetVisibility.Hidden, } as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif }), livePhotoStillAsset: Object.freeze({ id: 'live-photo-still-asset', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, originalPath: fileStub.livePhotoStill.originalPath, ownerId: authStub.user1.user.id, - type: AssetType.IMAGE, + type: AssetType.Image, livePhotoVideoId: 'live-photo-motion-asset', fileModifiedAt: new Date('2022-06-19T23:41:36.910Z'), 
fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'), @@ -569,16 +569,16 @@ export const assetStub = { }, files, faces: [] as AssetFace[], - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, } as MapAsset & { faces: AssetFace[] }), livePhotoWithOriginalFileName: Object.freeze({ id: 'live-photo-still-asset', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, originalPath: fileStub.livePhotoStill.originalPath, originalFileName: fileStub.livePhotoStill.originalName, ownerId: authStub.user1.user.id, - type: AssetType.IMAGE, + type: AssetType.Image, livePhotoVideoId: 'live-photo-motion-asset', fileModifiedAt: new Date('2022-06-19T23:41:36.910Z'), fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'), @@ -588,12 +588,12 @@ export const assetStub = { }, libraryId: null, faces: [] as AssetFace[], - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, } as MapAsset & { faces: AssetFace[] }), withLocation: Object.freeze({ id: 'asset-with-favorite-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-22T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-22T05:06:29.716Z'), @@ -603,7 +603,7 @@ export const assetStub = { checksum: Buffer.from('file hash', 'utf8'), originalPath: '/original/path.ext', sidecarPath: null, - type: AssetType.IMAGE, + type: AssetType.Image, files: [previewFile], thumbhash: null, encodedVideoPath: null, @@ -633,12 +633,12 @@ export const assetStub = { duplicateId: null, isOffline: false, tags: [], - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), sidecar: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -648,7 +648,7 @@ export const assetStub = { originalPath: '/original/path.ext', thumbhash: null, 
checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files: [previewFile], encodedVideoPath: null, createdAt: new Date('2023-02-23T05:06:29.716Z'), @@ -669,12 +669,12 @@ export const assetStub = { updateId: 'foo', libraryId: null, stackId: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), sidecarWithoutExt: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -684,7 +684,7 @@ export const assetStub = { originalPath: '/original/path.ext', thumbhash: null, checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files: [previewFile], encodedVideoPath: null, createdAt: new Date('2023-02-23T05:06:29.716Z'), @@ -702,12 +702,12 @@ export const assetStub = { deletedAt: null, duplicateId: null, isOffline: false, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), hasEncodedVideo: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, originalFileName: 'asset-id.ext', deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -717,7 +717,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.ext', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.VIDEO, + type: AssetType.Video, files: [previewFile], thumbhash: null, encodedVideoPath: '/encoded/video/path.mp4', @@ -742,12 +742,12 @@ export const assetStub = { libraryId: null, stackId: null, stack: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), hasFileExtension: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), 
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -756,7 +756,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/data/user1/photo.jpg', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ -779,12 +779,12 @@ export const assetStub = { } as Exif, duplicateId: null, isOffline: false, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), imageDng: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -793,7 +793,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.dng', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ -820,12 +820,12 @@ export const assetStub = { updateId: '42', libraryId: null, stackId: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), imageHif: Object.freeze({ id: 'asset-id', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, deviceAssetId: 'device-asset-id', fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'), @@ -834,7 +834,7 @@ export const assetStub = { deviceId: 'device-id', originalPath: '/original/path.hif', checksum: Buffer.from('file hash', 'utf8'), - type: AssetType.IMAGE, + type: AssetType.Image, files, thumbhash: Buffer.from('blablabla', 'base64'), encodedVideoPath: null, @@ -861,6 +861,6 @@ export const assetStub = { updateId: '42', libraryId: null, stackId: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }), }; diff --git a/server/test/fixtures/face.stub.ts 
b/server/test/fixtures/face.stub.ts index fe5cbb9a56..beecf7c69e 100644 --- a/server/test/fixtures/face.stub.ts +++ b/server/test/fixtures/face.stub.ts @@ -20,7 +20,7 @@ export const faceStub = { boundingBoxY2: 1, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, faceSearch: { faceId: 'assetFaceId1', embedding: '[1, 2, 3, 4]' }, deletedAt: new Date(), }), @@ -36,7 +36,7 @@ export const faceStub = { boundingBoxY2: 1, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, faceSearch: { faceId: 'assetFaceId2', embedding: '[1, 2, 3, 4]' }, deletedAt: null, }), @@ -52,7 +52,7 @@ export const faceStub = { boundingBoxY2: 1, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, faceSearch: { faceId: 'assetFaceId3', embedding: '[1, 2, 3, 4]' }, deletedAt: null, }), @@ -68,7 +68,7 @@ export const faceStub = { boundingBoxY2: 1, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, faceSearch: { faceId: 'assetFaceId8', embedding: '[1, 2, 3, 4]' }, deletedAt: null, }), @@ -84,7 +84,7 @@ export const faceStub = { boundingBoxY2: 1, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, faceSearch: { faceId: 'assetFaceId9', embedding: '[1, 2, 3, 4]' }, deletedAt: null, }), @@ -100,7 +100,7 @@ export const faceStub = { boundingBoxY2: 200, imageHeight: 500, imageWidth: 400, - sourceType: SourceType.EXIF, + sourceType: SourceType.Exif, deletedAt: null, }), fromExif2: Object.freeze({ @@ -115,7 +115,7 @@ export const faceStub = { boundingBoxY2: 1, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.EXIF, + sourceType: SourceType.Exif, deletedAt: null, }), withBirthDate: Object.freeze({ @@ -130,7 +130,7 @@ export const faceStub = { 
boundingBoxY2: 1, imageHeight: 1024, imageWidth: 1024, - sourceType: SourceType.MACHINE_LEARNING, + sourceType: SourceType.MachineLearning, deletedAt: null, }), }; diff --git a/server/test/fixtures/person.stub.ts b/server/test/fixtures/person.stub.ts index 86f3bcde21..35a7a8ed7d 100644 --- a/server/test/fixtures/person.stub.ts +++ b/server/test/fixtures/person.stub.ts @@ -176,7 +176,7 @@ export const personThumbnailStub = { y2: 505, oldHeight: 2880, oldWidth: 2160, - type: AssetType.IMAGE, + type: AssetType.Image, originalPath: '/original/path.jpg', exifOrientation: '1', previewPath: previewFile.path, @@ -189,7 +189,7 @@ export const personThumbnailStub = { y2: 200, oldHeight: 500, oldWidth: 400, - type: AssetType.IMAGE, + type: AssetType.Image, originalPath: '/original/path.jpg', exifOrientation: '1', previewPath: previewFile.path, @@ -202,7 +202,7 @@ export const personThumbnailStub = { y2: 495, oldHeight: 500, oldWidth: 500, - type: AssetType.IMAGE, + type: AssetType.Image, originalPath: '/original/path.jpg', exifOrientation: '1', previewPath: previewFile.path, @@ -215,7 +215,7 @@ export const personThumbnailStub = { y2: 200, oldHeight: 500, oldWidth: 400, - type: AssetType.IMAGE, + type: AssetType.Image, originalPath: '/original/path.dng', exifOrientation: '1', previewPath: previewFile.path, @@ -228,7 +228,7 @@ export const personThumbnailStub = { y2: 251, oldHeight: 1440, oldWidth: 2162, - type: AssetType.IMAGE, + type: AssetType.Image, originalPath: '/original/path.jpg', exifOrientation: '1', previewPath: previewFile.path, @@ -241,7 +241,7 @@ export const personThumbnailStub = { y2: 152, oldHeight: 1440, oldWidth: 2162, - type: AssetType.IMAGE, + type: AssetType.Image, originalPath: '/original/path.jpg', exifOrientation: '1', previewPath: previewFile.path, @@ -254,7 +254,7 @@ export const personThumbnailStub = { y2: 200, oldHeight: 500, oldWidth: 400, - type: AssetType.VIDEO, + type: AssetType.Video, originalPath: '/original/path.mp4', exifOrientation: '1', 
previewPath: previewFile.path, diff --git a/server/test/fixtures/shared-link.stub.ts b/server/test/fixtures/shared-link.stub.ts index f3096280d9..47201a5b3b 100644 --- a/server/test/fixtures/shared-link.stub.ts +++ b/server/test/fixtures/shared-link.stub.ts @@ -49,7 +49,7 @@ const assetResponse: AssetResponseDto = { deviceAssetId: 'device_asset_id_1', ownerId: 'user_id_1', deviceId: 'device_id_1', - type: AssetType.VIDEO, + type: AssetType.Video, originalMimeType: 'image/jpeg', originalPath: 'fake_path/jpeg', originalFileName: 'asset_1.jpeg', @@ -70,12 +70,12 @@ const assetResponse: AssetResponseDto = { isTrashed: false, libraryId: 'library-id', hasMetadata: true, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }; const assetResponseWithoutMetadata = { id: 'id_1', - type: AssetType.VIDEO, + type: AssetType.Video, originalMimeType: 'image/jpeg', thumbhash: null, localDateTime: today, @@ -99,7 +99,7 @@ const albumResponse: AlbumResponseDto = { assets: [], assetCount: 1, isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, }; export const sharedLinkStub = { @@ -107,7 +107,7 @@ export const sharedLinkStub = { id: '123', userId: authStub.admin.user.id, key: sharedLinkBytes, - type: SharedLinkType.INDIVIDUAL, + type: SharedLinkType.Individual, createdAt: today, expiresAt: tomorrow, allowUpload: true, @@ -124,7 +124,7 @@ export const sharedLinkStub = { userId: authStub.admin.user.id, user: userStub.admin, key: sharedLinkBytes, - type: SharedLinkType.ALBUM, + type: SharedLinkType.Album, createdAt: today, expiresAt: tomorrow, allowUpload: true, @@ -141,7 +141,7 @@ export const sharedLinkStub = { userId: authStub.admin.user.id, user: userStub.admin, key: sharedLinkBytes, - type: SharedLinkType.ALBUM, + type: SharedLinkType.Album, createdAt: today, expiresAt: yesterday, allowUpload: true, @@ -157,7 +157,7 @@ export const sharedLinkStub = { id: '123', userId: authStub.admin.user.id, key: sharedLinkBytes, - type: 
SharedLinkType.ALBUM, + type: SharedLinkType.Album, createdAt: today, expiresAt: tomorrow, allowUpload: false, @@ -182,16 +182,16 @@ export const sharedLinkStub = { albumUsers: [], sharedLinks: [], isActivityEnabled: true, - order: AssetOrder.DESC, + order: AssetOrder.Desc, assets: [ { id: 'id_1', - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, owner: undefined as unknown as UserAdmin, ownerId: 'user_id_1', deviceAssetId: 'device_asset_id_1', deviceId: 'device_id_1', - type: AssetType.VIDEO, + type: AssetType.Video, originalPath: 'fake_path/jpeg', checksum: Buffer.from('file hash', 'utf8'), fileModifiedAt: today, @@ -251,7 +251,7 @@ export const sharedLinkStub = { updateId: '42', libraryId: null, stackId: null, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }, ], }, @@ -260,7 +260,7 @@ export const sharedLinkStub = { id: '123', userId: authStub.admin.user.id, key: sharedLinkBytes, - type: SharedLinkType.ALBUM, + type: SharedLinkType.Album, createdAt: today, expiresAt: tomorrow, allowUpload: true, @@ -286,7 +286,7 @@ export const sharedLinkResponseStub = { id: '123', key: sharedLinkBytes.toString('base64url'), showMetadata: true, - type: SharedLinkType.ALBUM, + type: SharedLinkType.Album, userId: 'admin_id', }), expired: Object.freeze({ @@ -301,14 +301,14 @@ export const sharedLinkResponseStub = { id: '123', key: sharedLinkBytes.toString('base64url'), showMetadata: true, - type: SharedLinkType.ALBUM, + type: SharedLinkType.Album, userId: 'admin_id', }), readonlyNoMetadata: Object.freeze({ id: '123', userId: 'admin_id', key: sharedLinkBytes.toString('base64url'), - type: SharedLinkType.ALBUM, + type: SharedLinkType.Album, createdAt: today, expiresAt: tomorrow, description: null, diff --git a/server/test/fixtures/user.stub.ts b/server/test/fixtures/user.stub.ts index 0db58e2eed..807da5197f 100644 --- a/server/test/fixtures/user.stub.ts +++ b/server/test/fixtures/user.stub.ts @@ -5,7 +5,7 @@ import { authStub } from 
'test/fixtures/auth.stub'; export const userStub = { admin: { ...authStub.admin.user, - status: UserStatus.ACTIVE, + status: UserStatus.Active, profileChangedAt: new Date('2021-01-01'), name: 'admin_name', id: 'admin_id', @@ -23,7 +23,7 @@ export const userStub = { }, user1: { ...authStub.user1.user, - status: UserStatus.ACTIVE, + status: UserStatus.Active, profileChangedAt: new Date('2021-01-01'), name: 'immich_name', storageLabel: null, @@ -40,7 +40,7 @@ export const userStub = { }, user2: { ...authStub.user2.user, - status: UserStatus.ACTIVE, + status: UserStatus.Active, profileChangedAt: new Date('2021-01-01'), metadata: [], name: 'immich_name', diff --git a/server/test/medium.factory.ts b/server/test/medium.factory.ts index 9c1032663f..4d13264fa2 100644 --- a/server/test/medium.factory.ts +++ b/server/test/medium.factory.ts @@ -182,7 +182,7 @@ export class MediumTestContext { } async newAlbumUser(dto: { albumId: string; userId: string; role?: AlbumUserRole }) { - const { albumId, userId, role = AlbumUserRole.EDITOR } = dto; + const { albumId, userId, role = AlbumUserRole.Editor } = dto; const result = await this.get(AlbumUserRepository).create({ albumsId: albumId, usersId: userId, role }); return { albumUser: { albumId, userId, role }, result }; } @@ -370,14 +370,14 @@ const assetInsert = (asset: Partial> = {}) => { deviceId: '', originalFileName: '', checksum: randomBytes(32), - type: AssetType.IMAGE, + type: AssetType.Image, originalPath: '/path/to/something.jpg', ownerId: '@immich.cloud', isFavorite: false, fileCreatedAt: now, fileModifiedAt: now, localDateTime: now, - visibility: AssetVisibility.TIMELINE, + visibility: AssetVisibility.Timeline, }; return { @@ -423,7 +423,7 @@ const assetFaceInsert = (assetFace: Partial & { assetId: string }) => imageHeight: assetFace.imageHeight ?? 10, imageWidth: assetFace.imageWidth ?? 10, personId: assetFace.personId ?? null, - sourceType: assetFace.sourceType ?? 
SourceType.MACHINE_LEARNING, + sourceType: assetFace.sourceType ?? SourceType.MachineLearning, }; return { @@ -516,7 +516,7 @@ const memoryInsert = (memory: Partial> = {}) => { createdAt: date, updatedAt: date, deletedAt: null, - type: MemoryType.ON_THIS_DAY, + type: MemoryType.OnThisDay, data: { year: 2025 }, showAt: null, hideAt: null, diff --git a/server/test/medium/specs/services/auth.service.spec.ts b/server/test/medium/specs/services/auth.service.spec.ts index f1d50ce841..14ea1451f2 100644 --- a/server/test/medium/specs/services/auth.service.spec.ts +++ b/server/test/medium/specs/services/auth.service.spec.ts @@ -102,7 +102,7 @@ describe(AuthService.name, () => { it('should logout', async () => { const { sut } = setup(); const auth = factory.auth(); - await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({ + await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({ successful: true, redirectUri: '/auth/login?autoLaunch=0', }); @@ -118,7 +118,7 @@ describe(AuthService.name, () => { eventRepo.emit.mockResolvedValue(); await expect(sessionRepo.get(session.id)).resolves.toEqual(expect.objectContaining({ id: session.id })); - await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({ + await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({ successful: true, redirectUri: '/auth/login?autoLaunch=0', }); diff --git a/server/test/medium/specs/services/memory.service.spec.ts b/server/test/medium/specs/services/memory.service.spec.ts index bdd06b4a3f..12df2f130e 100644 --- a/server/test/medium/specs/services/memory.service.spec.ts +++ b/server/test/medium/specs/services/memory.service.spec.ts @@ -45,7 +45,7 @@ describe(MemoryService.name, () => { const { user } = await ctx.newUser(); const auth = factory.auth({ user }); const dto = { - type: MemoryType.ON_THIS_DAY, + type: MemoryType.OnThisDay, data: { year: 2021 }, memoryAt: new Date(2021), }; @@ -70,7 +70,7 @@ describe(MemoryService.name, () => { const { asset: asset2 } 
= await ctx.newAsset({ ownerId: user.id }); const auth = factory.auth({ user }); const dto = { - type: MemoryType.ON_THIS_DAY, + type: MemoryType.OnThisDay, data: { year: 2021 }, memoryAt: new Date(2021), assetIds: [asset1.id, asset2.id], @@ -92,7 +92,7 @@ describe(MemoryService.name, () => { const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id }); const auth = factory.auth({ user: user1 }); const dto = { - type: MemoryType.ON_THIS_DAY, + type: MemoryType.OnThisDay, data: { year: 2021 }, memoryAt: new Date(2021), assetIds: [asset1.id, asset2.id], @@ -124,8 +124,8 @@ describe(MemoryService.name, () => { ctx.newExif({ assetId: asset.id, make: 'Canon' }), ctx.newJobStatus({ assetId: asset.id }), assetRepo.upsertFiles([ - { assetId: asset.id, type: AssetFileType.PREVIEW, path: '/path/to/preview.jpg' }, - { assetId: asset.id, type: AssetFileType.THUMBNAIL, path: '/path/to/thumbnail.jpg' }, + { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' }, + { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' }, ]), ]); @@ -178,8 +178,8 @@ describe(MemoryService.name, () => { ctx.newExif({ assetId: asset.id, make: 'Canon' }), ctx.newJobStatus({ assetId: asset.id }), assetRepo.upsertFiles([ - { assetId: asset.id, type: AssetFileType.PREVIEW, path: '/path/to/preview.jpg' }, - { assetId: asset.id, type: AssetFileType.THUMBNAIL, path: '/path/to/thumbnail.jpg' }, + { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' }, + { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' }, ]), ]); } diff --git a/server/test/medium/specs/services/timeline.service.spec.ts b/server/test/medium/specs/services/timeline.service.spec.ts index 6af936eb49..fa4a75e869 100644 --- a/server/test/medium/specs/services/timeline.service.spec.ts +++ b/server/test/medium/specs/services/timeline.service.spec.ts @@ -46,7 +46,7 @@ describe(TimelineService.name, () => { it('should return error if 
time bucket is requested with partners asset and archived', async () => { const { sut } = setup(); const auth = factory.auth(); - const response1 = sut.getTimeBuckets(auth, { withPartners: true, visibility: AssetVisibility.ARCHIVE }); + const response1 = sut.getTimeBuckets(auth, { withPartners: true, visibility: AssetVisibility.Archive }); await expect(response1).rejects.toBeInstanceOf(BadRequestException); await expect(response1).rejects.toThrow( 'withPartners is only supported for non-archived, non-trashed, non-favorited assets', diff --git a/server/test/medium/specs/services/user.service.spec.ts b/server/test/medium/specs/services/user.service.spec.ts index 535ac30aec..f5bddca196 100644 --- a/server/test/medium/specs/services/user.service.spec.ts +++ b/server/test/medium/specs/services/user.service.spec.ts @@ -16,7 +16,7 @@ import { getKyselyDB } from 'test/utils'; let defaultDatabase: Kysely; const setup = (db?: Kysely) => { - process.env.IMMICH_ENV = ImmichEnvironment.TESTING; + process.env.IMMICH_ENV = ImmichEnvironment.Testing; return newMediumService(UserService, { database: db || defaultDatabase, @@ -140,7 +140,7 @@ describe(UserService.name, () => { const { sut, ctx } = setup(); const jobMock = ctx.getMock(JobRepository); jobMock.queueAll.mockResolvedValue(void 0); - await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS); + await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success); expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]); }); @@ -149,10 +149,8 @@ describe(UserService.name, () => { const jobMock = ctx.getMock(JobRepository); const { user } = await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 60 }).toJSDate() }); jobMock.queueAll.mockResolvedValue(void 0); - await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS); - expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([ - { name: JobName.USER_DELETION, data: { id: user.id } }, - ]); + await 
expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success); + expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([{ name: JobName.UserDeletion, data: { id: user.id } }]); }); it('should skip a recently deleted user', async () => { @@ -160,7 +158,7 @@ describe(UserService.name, () => { const jobMock = ctx.getMock(JobRepository); await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 5 }).toJSDate() }); jobMock.queueAll.mockResolvedValue(void 0); - await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS); + await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success); expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]); }); @@ -172,7 +170,7 @@ describe(UserService.name, () => { const config = await sut.getConfig({ withCache: false }); config.user.deleteDelay = 30; await sut.updateConfig(config); - await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS); + await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success); expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]); }); }); diff --git a/server/test/medium/specs/services/version.service.spec.ts b/server/test/medium/specs/services/version.service.spec.ts index 9feda5b8c4..2d0c57667b 100644 --- a/server/test/medium/specs/services/version.service.spec.ts +++ b/server/test/medium/specs/services/version.service.spec.ts @@ -53,7 +53,7 @@ describe(VersionService.name, () => { await versionHistoryRepo.create({ version: 'v1.128.0' }); await sut.onBootstrap(); - expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.MEMORIES_CREATE }); + expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.MemoriesCreate }); }); it('should not queue memory creation when upgrading from 1.129.0', async () => { diff --git a/server/test/medium/specs/sync/sync-album-asset-exif.spec.ts b/server/test/medium/specs/sync/sync-album-asset-exif.spec.ts index 2c6b98e949..808a4785ce 100644 --- 
a/server/test/medium/specs/sync/sync-album-asset-exif.spec.ts +++ b/server/test/medium/specs/sync/sync-album-asset-exif.spec.ts @@ -25,7 +25,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => { await ctx.newExif({ assetId: asset.id, make: 'Canon' }); const { album } = await ctx.newAlbum({ ownerId: user2.id }); await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]); expect(response).toHaveLength(1); @@ -86,7 +86,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => { await ctx.newExif({ assetId: asset.id, make: 'Canon' }); const { album } = await ctx.newAlbum({ ownerId: user2.id }); await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.Editor }); const { session } = await ctx.newSession({ userId: user3.id }); const authUser3 = factory.auth({ session, user: user3 }); @@ -110,7 +110,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => { await ctx.newExif({ assetId: asset3User2.id, make: 'asset3User2' }); const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id }); await ctx.newAlbumAsset({ albumId: album1.id, assetId: asset2User2.id }); - await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]); expect(response).toHaveLength(1); @@ -134,7 +134,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => { ctx.newAlbumAsset({ albumId: album2.id, assetId }), ), ); - 
await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor }); // should backfill the album user const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]); diff --git a/server/test/medium/specs/sync/sync-album-asset.spec.ts b/server/test/medium/specs/sync/sync-album-asset.spec.ts index 41700d29d4..9a42c0f027 100644 --- a/server/test/medium/specs/sync/sync-album-asset.spec.ts +++ b/server/test/medium/specs/sync/sync-album-asset.spec.ts @@ -41,7 +41,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => { }); const { album } = await ctx.newAlbum({ ownerId: user2.id }); await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]); expect(response).toHaveLength(1); @@ -90,7 +90,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => { const { asset } = await ctx.newAsset({ ownerId: user3.id }); const { album } = await ctx.newAlbum({ ownerId: user2.id }); await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.Editor }); const { session } = await ctx.newSession({ userId: user3.id }); const authUser3 = factory.auth({ session, user: user3 }); @@ -111,7 +111,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => { await wait(2); const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id }); await ctx.newAlbumAsset({ albumId: album1.id, assetId: asset2User2.id }); - await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.EDITOR 
}); + await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]); expect(response).toHaveLength(1); @@ -135,7 +135,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => { ctx.newAlbumAsset({ albumId: album2.id, assetId }), ), ); - await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor }); // should backfill the album user const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]); diff --git a/server/test/medium/specs/sync/sync-album-to-asset.spec.ts b/server/test/medium/specs/sync/sync-album-to-asset.spec.ts index a0c1d413a0..ee529c5001 100644 --- a/server/test/medium/specs/sync/sync-album-to-asset.spec.ts +++ b/server/test/medium/specs/sync/sync-album-to-asset.spec.ts @@ -73,7 +73,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => { const { asset } = await ctx.newAsset({ ownerId: auth.user.id }); const { album } = await ctx.newAlbum({ ownerId: user2.id }); await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]); expect(response).toHaveLength(1); @@ -130,7 +130,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => { await ctx.syncAckAll(auth, response); // add user to backfill album - await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor }); // should backfill the album to asset relation const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]); diff 
--git a/server/test/medium/specs/sync/sync-album-user.spec.ts b/server/test/medium/specs/sync/sync-album-user.spec.ts index 798b3d607d..e3d8a21493 100644 --- a/server/test/medium/specs/sync/sync-album-user.spec.ts +++ b/server/test/medium/specs/sync/sync-album-user.spec.ts @@ -22,7 +22,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { const { auth, ctx } = await setup(); const { album } = await ctx.newAlbum({ ownerId: auth.user.id }); const { user } = await ctx.newUser(); - const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.EDITOR }); + const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor }); await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([ { @@ -42,7 +42,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { const { auth, ctx } = await setup(); const { user: user1 } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: auth.user.id }); - const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.EDITOR }); + const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); expect(response).toHaveLength(1); @@ -67,13 +67,13 @@ describe(SyncRequestType.AlbumUsersV1, () => { const albumUserRepo = ctx.get(AlbumUserRepository); const { user: user1 } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: auth.user.id }); - const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.EDITOR }); + const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); await ctx.syncAckAll(auth, response); await expect(ctx.syncStream(auth, 
[SyncRequestType.AlbumUsersV1])).resolves.toEqual([]); - await albumUserRepo.update({ albumsId: album.id, usersId: user1.id }, { role: AlbumUserRole.VIEWER }); + await albumUserRepo.update({ albumsId: album.id, usersId: user1.id }, { role: AlbumUserRole.Viewer }); const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); expect(newResponse).toHaveLength(1); expect(newResponse).toEqual([ @@ -81,7 +81,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { ack: expect.any(String), data: expect.objectContaining({ albumId: albumUser.albumId, - role: AlbumUserRole.VIEWER, + role: AlbumUserRole.Viewer, userId: albumUser.userId, }), type: SyncEntityType.AlbumUserV1, @@ -97,7 +97,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { const albumUserRepo = ctx.get(AlbumUserRepository); const { user: user1 } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: auth.user.id }); - const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.EDITOR }); + const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); expect(response).toHaveLength(1); @@ -130,7 +130,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); @@ -157,8 +157,8 @@ describe(SyncRequestType.AlbumUsersV1, () => { const { user: owner } = await ctx.newUser(); const { user: user } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: owner.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); - await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: 
album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); + await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); expect(response).toHaveLength(2); @@ -166,14 +166,14 @@ describe(SyncRequestType.AlbumUsersV1, () => { await ctx.syncAckAll(auth, response); await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]); - await albumUserRepo.update({ albumsId: album.id, usersId: user.id }, { role: AlbumUserRole.VIEWER }); + await albumUserRepo.update({ albumsId: album.id, usersId: user.id }, { role: AlbumUserRole.Viewer }); const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); expect(newResponse).toEqual([ { ack: expect.any(String), data: expect.objectContaining({ albumId: album.id, - role: AlbumUserRole.VIEWER, + role: AlbumUserRole.Viewer, userId: user.id, }), type: SyncEntityType.AlbumUserV1, @@ -190,8 +190,8 @@ describe(SyncRequestType.AlbumUsersV1, () => { const { user: owner } = await ctx.newUser(); const { user: user } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: owner.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); - await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); + await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); expect(response).toHaveLength(2); @@ -223,13 +223,13 @@ describe(SyncRequestType.AlbumUsersV1, () => { const { album: album1 } = await ctx.newAlbum({ ownerId: user1.id }); const { album: album2 } = await ctx.newAlbum({ ownerId: user1.id }); // backfill album user - await ctx.newAlbumUser({ albumId: album1.id, userId: user1.id, role: 
AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album1.id, userId: user1.id, role: AlbumUserRole.Editor }); await wait(2); // initial album user - await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor }); await wait(2); // post checkpoint album user - await ctx.newAlbumUser({ albumId: album1.id, userId: user2.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album1.id, userId: user2.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); expect(response).toHaveLength(1); @@ -238,7 +238,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { ack: expect.any(String), data: expect.objectContaining({ albumId: album2.id, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, userId: auth.user.id, }), type: SyncEntityType.AlbumUserV1, @@ -248,7 +248,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { // ack initial user await ctx.syncAckAll(auth, response); // get access to the backfill album user - await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor }); // should backfill the album user const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]); @@ -257,7 +257,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { ack: expect.any(String), data: expect.objectContaining({ albumId: album1.id, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, userId: user1.id, }), type: SyncEntityType.AlbumUserBackfillV1, @@ -271,7 +271,7 @@ describe(SyncRequestType.AlbumUsersV1, () => { ack: expect.any(String), data: expect.objectContaining({ albumId: album1.id, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, userId: user2.id, }), type: SyncEntityType.AlbumUserV1, @@ -280,7 +280,7 @@ 
describe(SyncRequestType.AlbumUsersV1, () => { ack: expect.any(String), data: expect.objectContaining({ albumId: album1.id, - role: AlbumUserRole.EDITOR, + role: AlbumUserRole.Editor, userId: auth.user.id, }), type: SyncEntityType.AlbumUserV1, diff --git a/server/test/medium/specs/sync/sync-album.spec.ts b/server/test/medium/specs/sync/sync-album.spec.ts index 83e9f12651..9f44e617e3 100644 --- a/server/test/medium/specs/sync/sync-album.spec.ts +++ b/server/test/medium/specs/sync/sync-album.spec.ts @@ -101,7 +101,7 @@ describe(SyncRequestType.AlbumsV1, () => { const { auth, ctx } = await setup(); const { user: user2 } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: user2.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]); expect(response).toHaveLength(1); @@ -121,7 +121,7 @@ describe(SyncRequestType.AlbumsV1, () => { const { auth, ctx } = await setup(); const { user: user2 } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: user2.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]); expect(response).toHaveLength(1); @@ -153,7 +153,7 @@ describe(SyncRequestType.AlbumsV1, () => { ]); await ctx.syncAckAll(auth, response); - await ctx.newAlbumUser({ userId: auth.user.id, albumId: user2Album.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ userId: auth.user.id, albumId: user2Album.id, role: AlbumUserRole.Editor }); const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]); expect(newResponse).toHaveLength(1); @@ -174,7 +174,7 @@ 
describe(SyncRequestType.AlbumsV1, () => { const albumRepo = ctx.get(AlbumRepository); const { user: user2 } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: user2.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]); expect(response).toHaveLength(1); @@ -202,7 +202,7 @@ describe(SyncRequestType.AlbumsV1, () => { const albumUserRepo = ctx.get(AlbumUserRepository); const { user: user2 } = await ctx.newUser(); const { album } = await ctx.newAlbum({ ownerId: user2.id }); - await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.EDITOR }); + await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor }); const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]); expect(response).toHaveLength(1); diff --git a/server/test/medium/specs/sync/sync-user-metadata.spec.ts b/server/test/medium/specs/sync/sync-user-metadata.spec.ts index bb4a500a60..7cd53e76e3 100644 --- a/server/test/medium/specs/sync/sync-user-metadata.spec.ts +++ b/server/test/medium/specs/sync/sync-user-metadata.spec.ts @@ -22,7 +22,7 @@ describe(SyncEntityType.UserMetadataV1, () => { const { auth, user, ctx } = await setup(); const userRepo = ctx.get(UserRepository); - await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.ONBOARDING, value: { isOnboarded: true } }); + await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } }); const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]); expect(response).toHaveLength(1); @@ -30,7 +30,7 @@ describe(SyncEntityType.UserMetadataV1, () => { { ack: expect.any(String), data: { - key: UserMetadataKey.ONBOARDING, + key: UserMetadataKey.Onboarding, userId: user.id, value: { 
isOnboarded: true }, }, @@ -46,7 +46,7 @@ describe(SyncEntityType.UserMetadataV1, () => { const { auth, user, ctx } = await setup(); const userRepo = ctx.get(UserRepository); - await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.ONBOARDING, value: { isOnboarded: true } }); + await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } }); const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]); expect(response).toHaveLength(1); @@ -54,7 +54,7 @@ describe(SyncEntityType.UserMetadataV1, () => { { ack: expect.any(String), data: { - key: UserMetadataKey.ONBOARDING, + key: UserMetadataKey.Onboarding, userId: user.id, value: { isOnboarded: true }, }, @@ -64,14 +64,14 @@ describe(SyncEntityType.UserMetadataV1, () => { await ctx.syncAckAll(auth, response); - await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.ONBOARDING, value: { isOnboarded: false } }); + await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: false } }); const updatedResponse = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]); expect(updatedResponse).toEqual([ { ack: expect.any(String), data: { - key: UserMetadataKey.ONBOARDING, + key: UserMetadataKey.Onboarding, userId: user.id, value: { isOnboarded: false }, }, @@ -89,7 +89,7 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => { const { auth, user, ctx } = await setup(); const userRepo = ctx.get(UserRepository); - await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.ONBOARDING, value: { isOnboarded: true } }); + await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } }); const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]); expect(response).toHaveLength(1); @@ -97,7 +97,7 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => { { ack: expect.any(String), data: { - key: UserMetadataKey.ONBOARDING, + key: UserMetadataKey.Onboarding, 
userId: user.id, value: { isOnboarded: true }, }, @@ -107,14 +107,14 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => { await ctx.syncAckAll(auth, response); - await userRepo.deleteMetadata(auth.user.id, UserMetadataKey.ONBOARDING); + await userRepo.deleteMetadata(auth.user.id, UserMetadataKey.Onboarding); await expect(ctx.syncStream(auth, [SyncRequestType.UserMetadataV1])).resolves.toEqual([ { ack: expect.any(String), data: { userId: user.id, - key: UserMetadataKey.ONBOARDING, + key: UserMetadataKey.Onboarding, }, type: 'UserMetadataDeleteV1', }, diff --git a/server/test/repositories/config.repository.mock.ts b/server/test/repositories/config.repository.mock.ts index 460dfeef57..e31e1a3348 100644 --- a/server/test/repositories/config.repository.mock.ts +++ b/server/test/repositories/config.repository.mock.ts @@ -5,7 +5,7 @@ import { Mocked, vitest } from 'vitest'; const envData: EnvData = { port: 2283, - environment: ImmichEnvironment.PRODUCTION, + environment: ImmichEnvironment.Production, buildMetadata: {}, bull: { @@ -31,7 +31,7 @@ const envData: EnvData = { }, skipMigrations: false, - vectorExtension: DatabaseExtension.VECTORS, + vectorExtension: DatabaseExtension.Vectors, }, licensePublicKey: { @@ -84,7 +84,7 @@ const envData: EnvData = { metrics: new Set(), }, - workers: [ImmichWorker.API, ImmichWorker.MICROSERVICES], + workers: [ImmichWorker.Api, ImmichWorker.Microservices], noColor: false, }; @@ -93,7 +93,7 @@ export const mockEnvData = (config: Partial) => ({ ...envData, ...confi export const newConfigRepositoryMock = (): Mocked> => { return { getEnv: vitest.fn().mockReturnValue(mockEnvData({})), - getWorker: vitest.fn().mockReturnValue(ImmichWorker.API), + getWorker: vitest.fn().mockReturnValue(ImmichWorker.Api), isDev: vitest.fn().mockReturnValue(false), }; }; diff --git a/server/test/small.factory.ts b/server/test/small.factory.ts index 7b0ebeb86b..6929924c3e 100644 --- a/server/test/small.factory.ts +++ b/server/test/small.factory.ts @@ -88,7 
+88,7 @@ const authSharedLinkFactory = (sharedLink: Partial = {}) => { const authApiKeyFactory = (apiKey: Partial = {}) => ({ id: newUuid(), - permissions: [Permission.ALL], + permissions: [Permission.All], ...apiKey, }); @@ -154,7 +154,7 @@ const userFactory = (user: Partial = {}) => ({ profileChangedAt: newDate(), metadata: [ { - key: UserMetadataKey.ONBOARDING, + key: UserMetadataKey.Onboarding, value: 'true', }, ] as UserMetadataItem[], @@ -178,7 +178,7 @@ const userAdminFactory = (user: Partial = {}) => { oauthId = '', quotaSizeInBytes = null, quotaUsageInBytes = 0, - status = UserStatus.ACTIVE, + status = UserStatus.Active, metadata = [], } = user; return { @@ -208,7 +208,7 @@ const assetFactory = (asset: Partial = {}) => ({ updatedAt: newDate(), deletedAt: null, updateId: newUuidV7(), - status: AssetStatus.ACTIVE, + status: AssetStatus.Active, checksum: newSha1(), deviceAssetId: '', deviceId: '', @@ -229,8 +229,8 @@ const assetFactory = (asset: Partial = {}) => ({ sidecarPath: null, stackId: null, thumbhash: null, - type: AssetType.IMAGE, - visibility: AssetVisibility.TIMELINE, + type: AssetType.Image, + visibility: AssetVisibility.Timeline, ...asset, }); @@ -258,7 +258,7 @@ const apiKeyFactory = (apiKey: Partial = {}) => ({ updatedAt: newDate(), updateId: newUuidV7(), name: 'Api Key', - permissions: [Permission.ALL], + permissions: [Permission.All], ...apiKey, }); @@ -284,7 +284,7 @@ const memoryFactory = (memory: Partial = {}) => ({ updateId: newUuidV7(), deletedAt: null, ownerId: newUuid(), - type: MemoryType.ON_THIS_DAY, + type: MemoryType.OnThisDay, data: { year: 2024 } as OnThisDayData, isSaved: false, memoryAt: newDate(),