diff --git a/server/src/app.module.ts b/server/src/app.module.ts index b02d869a1e..3cc0446306 100644 --- a/server/src/app.module.ts +++ b/server/src/app.module.ts @@ -1,8 +1,7 @@ import { BullModule } from '@nestjs/bullmq'; import { Inject, Module, OnModuleDestroy, OnModuleInit, ValidationPipe } from '@nestjs/common'; -import { APP_FILTER, APP_GUARD, APP_INTERCEPTOR, APP_PIPE, ModuleRef } from '@nestjs/core'; +import { APP_FILTER, APP_GUARD, APP_INTERCEPTOR, APP_PIPE } from '@nestjs/core'; import { ScheduleModule, SchedulerRegistry } from '@nestjs/schedule'; -import { TypeOrmModule } from '@nestjs/typeorm'; import { PostgresJSDialect } from 'kysely-postgres-js'; import { ClsModule } from 'nestjs-cls'; import { KyselyModule } from 'nestjs-kysely'; @@ -11,7 +10,6 @@ import postgres from 'postgres'; import { commands } from 'src/commands'; import { IWorker } from 'src/constants'; import { controllers } from 'src/controllers'; -import { entities } from 'src/entities'; import { ImmichWorker } from 'src/enum'; import { AuthGuard } from 'src/middleware/auth.guard'; import { ErrorInterceptor } from 'src/middleware/error.interceptor'; @@ -27,7 +25,6 @@ import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemet import { services } from 'src/services'; import { AuthService } from 'src/services/auth.service'; import { CliService } from 'src/services/cli.service'; -import { DatabaseService } from 'src/services/database.service'; const common = [...repositories, ...services, GlobalExceptionFilter]; @@ -48,18 +45,6 @@ const imports = [ BullModule.registerQueue(...bull.queues), ClsModule.forRoot(cls.config), OpenTelemetryModule.forRoot(otel), - TypeOrmModule.forRootAsync({ - inject: [ModuleRef], - useFactory: (moduleRef: ModuleRef) => { - return { - ...database.config.typeorm, - poolErrorHandler: (error) => { - moduleRef.get(DatabaseService, { strict: false }).handleConnectionError(error); - }, - }; - }, - }), - TypeOrmModule.forFeature(entities), 
KyselyModule.forRoot({ dialect: new PostgresJSDialect({ postgres: postgres(database.config.kysely) }), log(event) { diff --git a/server/src/bin/sync-sql.ts b/server/src/bin/sync-sql.ts index a9f5d72ec9..61c19c02fb 100644 --- a/server/src/bin/sync-sql.ts +++ b/server/src/bin/sync-sql.ts @@ -3,7 +3,6 @@ import { INestApplication } from '@nestjs/common'; import { Reflector } from '@nestjs/core'; import { SchedulerRegistry } from '@nestjs/schedule'; import { Test } from '@nestjs/testing'; -import { TypeOrmModule } from '@nestjs/typeorm'; import { ClassConstructor } from 'class-transformer'; import { PostgresJSDialect } from 'kysely-postgres-js'; import { ClsModule } from 'nestjs-cls'; @@ -14,15 +13,13 @@ import { join } from 'node:path'; import postgres from 'postgres'; import { format } from 'sql-formatter'; import { GENERATE_SQL_KEY, GenerateSqlQueries } from 'src/decorators'; -import { entities } from 'src/entities'; import { repositories } from 'src/repositories'; import { AccessRepository } from 'src/repositories/access.repository'; import { ConfigRepository } from 'src/repositories/config.repository'; import { LoggingRepository } from 'src/repositories/logging.repository'; import { AuthService } from 'src/services/auth.service'; -import { Logger } from 'typeorm'; -export class SqlLogger implements Logger { +export class SqlLogger { queries: string[] = []; errors: Array<{ error: string | Error; query: string }> = []; @@ -38,11 +35,6 @@ export class SqlLogger implements Logger { logQueryError(error: string | Error, query: string) { this.errors.push({ error, query }); } - - logQuerySlow() {} - logSchemaBuild() {} - logMigration() {} - log() {} } const reflector = new Reflector(); @@ -94,13 +86,6 @@ class SqlGenerator { }, }), ClsModule.forRoot(cls.config), - TypeOrmModule.forRoot({ - ...database.config.typeorm, - entities, - logging: ['query'], - logger: this.sqlLogger, - }), - TypeOrmModule.forFeature(entities), OpenTelemetryModule.forRoot(otel), ], providers: 
[...repositories, AuthService, SchedulerRegistry], diff --git a/server/src/constants.ts b/server/src/constants.ts index 20ce7dd497..3e946578ab 100644 --- a/server/src/constants.ts +++ b/server/src/constants.ts @@ -7,10 +7,6 @@ export const POSTGRES_VERSION_RANGE = '>=14.0.0'; export const VECTORS_VERSION_RANGE = '>=0.2 <0.4'; export const VECTOR_VERSION_RANGE = '>=0.5 <1'; -export const ASSET_FILE_CONFLICT_KEYS = ['assetId', 'type'] as const; -export const EXIF_CONFLICT_KEYS = ['assetId'] as const; -export const JOB_STATUS_CONFLICT_KEYS = ['assetId'] as const; - export const NEXT_RELEASE = 'NEXT_RELEASE'; export const LIFECYCLE_EXTENSION = 'x-immich-lifecycle'; export const DEPRECATED_IN_PREFIX = 'This property was deprecated in '; diff --git a/server/src/db.d.ts b/server/src/db.d.ts index 4c75562ba1..4617ddf707 100644 --- a/server/src/db.d.ts +++ b/server/src/db.d.ts @@ -4,7 +4,7 @@ */ import type { ColumnType } from 'kysely'; -import { Permission, SyncEntityType } from 'src/enum'; +import { AssetType, Permission, SyncEntityType } from 'src/enum'; export type ArrayType = ArrayTypeImpl extends (infer U)[] ? 
U[] : ArrayTypeImpl; @@ -145,7 +145,7 @@ export interface Assets { stackId: string | null; status: Generated; thumbhash: Buffer | null; - type: string; + type: AssetType; updatedAt: Generated; updateId: Generated; } diff --git a/server/src/entities/index.ts b/server/src/entities/index.ts deleted file mode 100644 index a1df269c09..0000000000 --- a/server/src/entities/index.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { ActivityEntity } from 'src/entities/activity.entity'; -import { AlbumUserEntity } from 'src/entities/album-user.entity'; -import { AlbumEntity } from 'src/entities/album.entity'; -import { APIKeyEntity } from 'src/entities/api-key.entity'; -import { AssetFaceEntity } from 'src/entities/asset-face.entity'; -import { AssetFileEntity } from 'src/entities/asset-files.entity'; -import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity'; -import { AssetEntity } from 'src/entities/asset.entity'; -import { AuditEntity } from 'src/entities/audit.entity'; -import { ExifEntity } from 'src/entities/exif.entity'; -import { FaceSearchEntity } from 'src/entities/face-search.entity'; -import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity'; -import { LibraryEntity } from 'src/entities/library.entity'; -import { MemoryEntity } from 'src/entities/memory.entity'; -import { MoveEntity } from 'src/entities/move.entity'; -import { NaturalEarthCountriesEntity } from 'src/entities/natural-earth-countries.entity'; -import { PartnerEntity } from 'src/entities/partner.entity'; -import { PersonEntity } from 'src/entities/person.entity'; -import { SessionEntity } from 'src/entities/session.entity'; -import { SharedLinkEntity } from 'src/entities/shared-link.entity'; -import { SmartSearchEntity } from 'src/entities/smart-search.entity'; -import { StackEntity } from 'src/entities/stack.entity'; -import { SessionSyncCheckpointEntity } from 'src/entities/sync-checkpoint.entity'; -import { SystemMetadataEntity } from 
'src/entities/system-metadata.entity'; -import { TagEntity } from 'src/entities/tag.entity'; -import { UserAuditEntity } from 'src/entities/user-audit.entity'; -import { UserMetadataEntity } from 'src/entities/user-metadata.entity'; -import { UserEntity } from 'src/entities/user.entity'; -import { VersionHistoryEntity } from 'src/entities/version-history.entity'; - -export const entities = [ - ActivityEntity, - AlbumEntity, - AlbumUserEntity, - APIKeyEntity, - AssetEntity, - AssetFaceEntity, - AssetFileEntity, - AssetJobStatusEntity, - AuditEntity, - ExifEntity, - FaceSearchEntity, - GeodataPlacesEntity, - NaturalEarthCountriesEntity, - MemoryEntity, - MoveEntity, - PartnerEntity, - PersonEntity, - SessionSyncCheckpointEntity, - SharedLinkEntity, - SmartSearchEntity, - StackEntity, - SystemMetadataEntity, - TagEntity, - UserEntity, - UserAuditEntity, - UserMetadataEntity, - SessionEntity, - LibraryEntity, - VersionHistoryEntity, -]; diff --git a/server/src/migrations/1645130759468-CreateUserTable.ts b/server/src/migrations/1645130759468-CreateUserTable.ts index 6e3d427dd2..1aedfb67d4 100644 --- a/server/src/migrations/1645130759468-CreateUserTable.ts +++ b/server/src/migrations/1645130759468-CreateUserTable.ts @@ -2,6 +2,7 @@ import { MigrationInterface, QueryRunner } from 'typeorm'; export class CreateUserTable1645130759468 implements MigrationInterface { public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS "uuid-ossp"`); await queryRunner.query(` create table if not exists users ( diff --git a/server/src/queries/database.repository.sql b/server/src/queries/database.repository.sql new file mode 100644 index 0000000000..8c87a7470f --- /dev/null +++ b/server/src/queries/database.repository.sql @@ -0,0 +1,21 @@ +-- NOTE: This file is auto generated by ./sql-generator + +-- DatabaseRepository.getExtensionVersion +SELECT + default_version as "availableVersion", + installed_version as "installedVersion" +FROM + 
pg_available_extensions +WHERE + name = $1 + +-- DatabaseRepository.getPostgresVersion +SHOW server_version + +-- DatabaseRepository.shouldReindex +SELECT + idx_status +FROM + pg_vector_index_stat +WHERE + indexname = $1 diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index 93931edb32..3c05e12b6f 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -2,7 +2,6 @@ import { Injectable } from '@nestjs/common'; import { Insertable, Kysely, UpdateResult, Updateable, sql } from 'kysely'; import { isEmpty, isUndefined, omitBy } from 'lodash'; import { InjectKysely } from 'nestjs-kysely'; -import { ASSET_FILE_CONFLICT_KEYS, EXIF_CONFLICT_KEYS, JOB_STATUS_CONFLICT_KEYS } from 'src/constants'; import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db'; import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators'; import { @@ -24,7 +23,7 @@ import { } from 'src/entities/asset.entity'; import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum'; import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository'; -import { anyUuid, asUuid, mapUpsertColumns, unnest } from 'src/utils/database'; +import { anyUuid, asUuid, removeUndefinedKeys, unnest } from 'src/utils/database'; import { globToSqlPattern } from 'src/utils/misc'; import { Paginated, PaginationOptions, paginationHelper } from 'src/utils/pagination'; @@ -162,7 +161,41 @@ export class AssetRepository { .insertInto('exif') .values(value) .onConflict((oc) => - oc.columns(EXIF_CONFLICT_KEYS).doUpdateSet(() => mapUpsertColumns('exif', value, EXIF_CONFLICT_KEYS)), + oc.column('assetId').doUpdateSet((eb) => + removeUndefinedKeys( + { + description: eb.ref('excluded.description'), + exifImageWidth: eb.ref('excluded.exifImageWidth'), + exifImageHeight: eb.ref('excluded.exifImageHeight'), + fileSizeInByte: 
eb.ref('excluded.fileSizeInByte'), + orientation: eb.ref('excluded.orientation'), + dateTimeOriginal: eb.ref('excluded.dateTimeOriginal'), + modifyDate: eb.ref('excluded.modifyDate'), + timeZone: eb.ref('excluded.timeZone'), + latitude: eb.ref('excluded.latitude'), + longitude: eb.ref('excluded.longitude'), + projectionType: eb.ref('excluded.projectionType'), + city: eb.ref('excluded.city'), + livePhotoCID: eb.ref('excluded.livePhotoCID'), + autoStackId: eb.ref('excluded.autoStackId'), + state: eb.ref('excluded.state'), + country: eb.ref('excluded.country'), + make: eb.ref('excluded.make'), + model: eb.ref('excluded.model'), + lensModel: eb.ref('excluded.lensModel'), + fNumber: eb.ref('excluded.fNumber'), + focalLength: eb.ref('excluded.focalLength'), + iso: eb.ref('excluded.iso'), + exposureTime: eb.ref('excluded.exposureTime'), + profileDescription: eb.ref('excluded.profileDescription'), + colorspace: eb.ref('excluded.colorspace'), + bitsPerSample: eb.ref('excluded.bitsPerSample'), + rating: eb.ref('excluded.rating'), + fps: eb.ref('excluded.fps'), + }, + value, + ), + ), ) .execute(); } @@ -177,9 +210,18 @@ export class AssetRepository { .insertInto('asset_job_status') .values(values) .onConflict((oc) => - oc - .columns(JOB_STATUS_CONFLICT_KEYS) - .doUpdateSet(() => mapUpsertColumns('asset_job_status', values[0], JOB_STATUS_CONFLICT_KEYS)), + oc.column('assetId').doUpdateSet((eb) => + removeUndefinedKeys( + { + duplicatesDetectedAt: eb.ref('excluded.duplicatesDetectedAt'), + facesRecognizedAt: eb.ref('excluded.facesRecognizedAt'), + metadataExtractedAt: eb.ref('excluded.metadataExtractedAt'), + previewAt: eb.ref('excluded.previewAt'), + thumbnailAt: eb.ref('excluded.thumbnailAt'), + }, + values[0], + ), + ), ) .execute(); } @@ -936,9 +978,9 @@ export class AssetRepository { .insertInto('asset_files') .values(value) .onConflict((oc) => - oc - .columns(ASSET_FILE_CONFLICT_KEYS) - .doUpdateSet(() => mapUpsertColumns('asset_files', value, ASSET_FILE_CONFLICT_KEYS)), 
+ oc.columns(['assetId', 'type']).doUpdateSet((eb) => ({ + path: eb.ref('excluded.path'), + })), ) .execute(); } @@ -953,9 +995,9 @@ export class AssetRepository { .insertInto('asset_files') .values(values) .onConflict((oc) => - oc - .columns(ASSET_FILE_CONFLICT_KEYS) - .doUpdateSet(() => mapUpsertColumns('asset_files', values[0], ASSET_FILE_CONFLICT_KEYS)), + oc.columns(['assetId', 'type']).doUpdateSet((eb) => ({ + path: eb.ref('excluded.path'), + })), ) .execute(); } diff --git a/server/src/repositories/database.repository.ts b/server/src/repositories/database.repository.ts index ef97147c61..557dba3605 100644 --- a/server/src/repositories/database.repository.ts +++ b/server/src/repositories/database.repository.ts @@ -1,18 +1,17 @@ import { Injectable } from '@nestjs/common'; -import { InjectDataSource } from '@nestjs/typeorm'; import AsyncLock from 'async-lock'; -import { Kysely, sql } from 'kysely'; +import { Kysely, sql, Transaction } from 'kysely'; import { InjectKysely } from 'nestjs-kysely'; import semver from 'semver'; import { EXTENSION_NAMES, POSTGRES_VERSION_RANGE, VECTOR_VERSION_RANGE, VECTORS_VERSION_RANGE } from 'src/constants'; import { DB } from 'src/db'; +import { GenerateSql } from 'src/decorators'; import { DatabaseExtension, DatabaseLock, VectorIndex } from 'src/enum'; import { ConfigRepository } from 'src/repositories/config.repository'; import { LoggingRepository } from 'src/repositories/logging.repository'; import { ExtensionVersion, VectorExtension, VectorUpdateResult } from 'src/types'; -import { UPSERT_COLUMNS } from 'src/utils/database'; import { isValidInteger } from 'src/validation'; -import { DataSource, EntityManager, EntityMetadata, QueryRunner } from 'typeorm'; +import { DataSource } from 'typeorm'; @Injectable() export class DatabaseRepository { @@ -21,9 +20,8 @@ export class DatabaseRepository { constructor( @InjectKysely() private db: Kysely, - @InjectDataSource() private dataSource: DataSource, private logger: LoggingRepository, 
- configRepository: ConfigRepository, + private configRepository: ConfigRepository, ) { this.vectorExtension = configRepository.getEnv().database.vectorExtension; this.logger.setContext(DatabaseRepository.name); @@ -33,43 +31,24 @@ export class DatabaseRepository { await this.db.destroy(); } - init() { - for (const metadata of this.dataSource.entityMetadatas) { - const table = metadata.tableName as keyof DB; - UPSERT_COLUMNS[table] = this.getUpsertColumns(metadata); - } - } - - async reconnect() { - try { - if (this.dataSource.isInitialized) { - await this.dataSource.destroy(); - } - const { isInitialized } = await this.dataSource.initialize(); - return isInitialized; - } catch (error) { - this.logger.error(`Database connection failed: ${error}`); - return false; - } - } - + @GenerateSql({ params: [DatabaseExtension.VECTORS] }) async getExtensionVersion(extension: DatabaseExtension): Promise { - const [res]: ExtensionVersion[] = await this.dataSource.query( - `SELECT default_version as "availableVersion", installed_version as "installedVersion" + const { rows } = await sql` + SELECT default_version as "availableVersion", installed_version as "installedVersion" FROM pg_available_extensions - WHERE name = $1`, - [extension], - ); - return res ?? { availableVersion: null, installedVersion: null }; + WHERE name = ${extension} + `.execute(this.db); + return rows[0] ?? { availableVersion: null, installedVersion: null }; } getExtensionVersionRange(extension: VectorExtension): string { return extension === DatabaseExtension.VECTORS ? 
VECTORS_VERSION_RANGE : VECTOR_VERSION_RANGE; } + @GenerateSql() async getPostgresVersion(): Promise { - const [{ server_version: version }] = await this.dataSource.query(`SHOW server_version`); - return version; + const { rows } = await sql<{ server_version: string }>`SHOW server_version`.execute(this.db); + return rows[0].server_version; } getPostgresVersionRange(): string { @@ -77,7 +56,7 @@ export class DatabaseRepository { } async createExtension(extension: DatabaseExtension): Promise { - await this.dataSource.query(`CREATE EXTENSION IF NOT EXISTS ${extension}`); + await sql`CREATE EXTENSION IF NOT EXISTS ${sql.raw(extension)}`.execute(this.db); } async updateVectorExtension(extension: VectorExtension, targetVersion?: string): Promise { @@ -93,23 +72,23 @@ export class DatabaseRepository { const isVectors = extension === DatabaseExtension.VECTORS; let restartRequired = false; - await this.dataSource.manager.transaction(async (manager) => { - await this.setSearchPath(manager); + await this.db.transaction().execute(async (tx) => { + await this.setSearchPath(tx); if (isVectors && installedVersion === '0.1.1') { - await this.setExtVersion(manager, DatabaseExtension.VECTORS, '0.1.11'); + await this.setExtVersion(tx, DatabaseExtension.VECTORS, '0.1.11'); } const isSchemaUpgrade = semver.satisfies(installedVersion, '0.1.1 || 0.1.11'); if (isSchemaUpgrade && isVectors) { - await this.updateVectorsSchema(manager); + await this.updateVectorsSchema(tx); } - await manager.query(`ALTER EXTENSION ${extension} UPDATE TO '${targetVersion}'`); + await sql`ALTER EXTENSION ${sql.raw(extension)} UPDATE TO ${sql.lit(targetVersion)}`.execute(tx); const diff = semver.diff(installedVersion, targetVersion); if (isVectors && diff && ['minor', 'major'].includes(diff)) { - await manager.query('SELECT pgvectors_upgrade()'); + await sql`SELECT pgvectors_upgrade()`.execute(tx); restartRequired = true; } else { await this.reindex(VectorIndex.CLIP); @@ -122,7 +101,7 @@ export class 
DatabaseRepository { async reindex(index: VectorIndex): Promise { try { - await this.dataSource.query(`REINDEX INDEX ${index}`); + await sql`REINDEX INDEX ${sql.raw(index)}`.execute(this.db); } catch (error) { if (this.vectorExtension !== DatabaseExtension.VECTORS) { throw error; @@ -131,29 +110,34 @@ export class DatabaseRepository { const table = await this.getIndexTable(index); const dimSize = await this.getDimSize(table); - await this.dataSource.manager.transaction(async (manager) => { - await this.setSearchPath(manager); - await manager.query(`DROP INDEX IF EXISTS ${index}`); - await manager.query(`ALTER TABLE ${table} ALTER COLUMN embedding SET DATA TYPE real[]`); - await manager.query(`ALTER TABLE ${table} ALTER COLUMN embedding SET DATA TYPE vector(${dimSize})`); - await manager.query(`SET vectors.pgvector_compatibility=on`); - await manager.query(` - CREATE INDEX IF NOT EXISTS ${index} ON ${table} + await this.db.transaction().execute(async (tx) => { + await this.setSearchPath(tx); + await sql`DROP INDEX IF EXISTS ${sql.raw(index)}`.execute(tx); + await sql`ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding SET DATA TYPE real[]`.execute(tx); + await sql`ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding SET DATA TYPE vector(${sql.raw(String(dimSize))})`.execute( + tx, + ); + await sql`SET vectors.pgvector_compatibility=on`.execute(tx); + await sql` + CREATE INDEX IF NOT EXISTS ${sql.raw(index)} ON ${sql.raw(table)} USING hnsw (embedding vector_cosine_ops) - WITH (ef_construction = 300, m = 16)`); + WITH (ef_construction = 300, m = 16) + `.execute(tx); }); } } + @GenerateSql({ params: [VectorIndex.CLIP] }) async shouldReindex(name: VectorIndex): Promise { if (this.vectorExtension !== DatabaseExtension.VECTORS) { return false; } try { - const query = `SELECT idx_status FROM pg_vector_index_stat WHERE indexname = $1`; - const res = await this.dataSource.query(query, [name]); - return res[0]?.['idx_status'] === 'UPGRADE'; + const { rows } = await sql<{ + 
idx_status: string; + }>`SELECT idx_status FROM pg_vector_index_stat WHERE indexname = ${name}`.execute(this.db); + return rows[0]?.idx_status === 'UPGRADE'; } catch (error) { const message: string = (error as any).message; if (message.includes('index is not existing')) { @@ -165,44 +149,45 @@ export class DatabaseRepository { } } - private async setSearchPath(manager: EntityManager): Promise { - await manager.query(`SET search_path TO "$user", public, vectors`); + private async setSearchPath(tx: Transaction): Promise { + await sql`SET search_path TO "$user", public, vectors`.execute(tx); } - private async setExtVersion(manager: EntityManager, extName: DatabaseExtension, version: string): Promise { - const query = `UPDATE pg_catalog.pg_extension SET extversion = $1 WHERE extname = $2`; - await manager.query(query, [version, extName]); + private async setExtVersion(tx: Transaction, extName: DatabaseExtension, version: string): Promise { + await sql`UPDATE pg_catalog.pg_extension SET extversion = ${version} WHERE extname = ${extName}`.execute(tx); } private async getIndexTable(index: VectorIndex): Promise { - const tableQuery = `SELECT relname FROM pg_stat_all_indexes WHERE indexrelname = $1`; - const [res]: { relname: string | null }[] = await this.dataSource.manager.query(tableQuery, [index]); - const table = res?.relname; + const { rows } = await sql<{ + relname: string | null; + }>`SELECT relname FROM pg_stat_all_indexes WHERE indexrelname = ${index}`.execute(this.db); + const table = rows[0]?.relname; if (!table) { throw new Error(`Could not find table for index ${index}`); } return table; } - private async updateVectorsSchema(manager: EntityManager): Promise { + private async updateVectorsSchema(tx: Transaction): Promise { const extension = DatabaseExtension.VECTORS; - await manager.query(`CREATE SCHEMA IF NOT EXISTS ${extension}`); - await manager.query('UPDATE pg_catalog.pg_extension SET extrelocatable = true WHERE extname = $1', [extension]); - await 
manager.query('ALTER EXTENSION vectors SET SCHEMA vectors'); -    await manager.query('UPDATE pg_catalog.pg_extension SET extrelocatable = false WHERE extname = $1', [extension]); +    await sql`CREATE SCHEMA IF NOT EXISTS ${sql.raw(extension)}`.execute(tx); +    await sql`UPDATE pg_catalog.pg_extension SET extrelocatable = true WHERE extname = ${extension}`.execute(tx); +    await sql`ALTER EXTENSION vectors SET SCHEMA vectors`.execute(tx); +    await sql`UPDATE pg_catalog.pg_extension SET extrelocatable = false WHERE extname = ${extension}`.execute(tx);   }    private async getDimSize(table: string, column = 'embedding'): Promise {
-    const res = await this.dataSource.query(` +    const { rows } = await sql<{ dimsize: number }>`       SELECT atttypmod as dimsize       FROM pg_attribute f         JOIN pg_class c ON c.oid = f.attrelid       WHERE c.relkind = 'r'::char         AND f.attnum > 0 -        AND c.relname = '${table}' -        AND f.attname = '${column}'`); +        AND c.relname = ${table} +        AND f.attname = ${column} +    `.execute(this.db);  -    const dimSize = res[0]['dimsize']; +    const dimSize = rows[0]?.dimsize;     if (!isValidInteger(dimSize, { min: 1, max: 2 ** 16 })) {       throw new Error(`Could not retrieve dimension size`); @@ -210,31 +195,32 @@ }    async runMigrations(options?: { transaction?: 'all' | 'none' | 'each' }): Promise { -    await this.dataSource.runMigrations(options); +    const { database } = this.configRepository.getEnv(); +    const dataSource = new DataSource(database.config.typeorm); + +    await dataSource.initialize(); +    await dataSource.runMigrations(options); +    await dataSource.destroy();   }    async withLock(lock: DatabaseLock, callback: () => Promise): Promise {     let res;     await this.asyncLock.acquire(DatabaseLock[lock], async () => { -      const queryRunner = this.dataSource.createQueryRunner(); -      try { -        await this.acquireLock(lock, queryRunner); -        res = await callback(); -      } finally { +      await 
this.acquireLock(lock, connection); + res = await callback(); } finally { - await queryRunner.release(); + await this.releaseLock(lock, connection); } - } + }); }); return res as R; } - async tryLock(lock: DatabaseLock): Promise { - const queryRunner = this.dataSource.createQueryRunner(); - return await this.acquireTryLock(lock, queryRunner); + tryLock(lock: DatabaseLock): Promise { + return this.db.connection().execute(async (connection) => this.acquireTryLock(lock, connection)); } isBusy(lock: DatabaseLock): boolean { @@ -245,22 +231,18 @@ export class DatabaseRepository { await this.asyncLock.acquire(DatabaseLock[lock], () => {}); } - private async acquireLock(lock: DatabaseLock, queryRunner: QueryRunner): Promise { - return queryRunner.query('SELECT pg_advisory_lock($1)', [lock]); + private async acquireLock(lock: DatabaseLock, connection: Kysely): Promise { + await sql`SELECT pg_advisory_lock(${lock})`.execute(connection); } - private async acquireTryLock(lock: DatabaseLock, queryRunner: QueryRunner): Promise { - const lockResult = await queryRunner.query('SELECT pg_try_advisory_lock($1)', [lock]); - return lockResult[0].pg_try_advisory_lock; + private async acquireTryLock(lock: DatabaseLock, connection: Kysely): Promise { + const { rows } = await sql<{ + pg_try_advisory_lock: boolean; + }>`SELECT pg_try_advisory_lock(${lock})`.execute(connection); + return rows[0].pg_try_advisory_lock; } - private async releaseLock(lock: DatabaseLock, queryRunner: QueryRunner): Promise { - return queryRunner.query('SELECT pg_advisory_unlock($1)', [lock]); - } - - private getUpsertColumns(metadata: EntityMetadata) { - return Object.fromEntries( - metadata.ownColumns.map((column) => [column.propertyName, sql`excluded.${sql.ref(column.propertyName)}`]), - ) as any; + private async releaseLock(lock: DatabaseLock, connection: Kysely): Promise { + await sql`SELECT pg_advisory_unlock(${lock})`.execute(connection); } } diff --git a/server/src/repositories/person.repository.ts 
b/server/src/repositories/person.repository.ts index 2ac2a59cf1..d5855d3b91 100644 --- a/server/src/repositories/person.repository.ts +++ b/server/src/repositories/person.repository.ts @@ -7,7 +7,7 @@ import { ChunkedArray, DummyValue, GenerateSql } from 'src/decorators'; import { AssetFaceEntity } from 'src/entities/asset-face.entity'; import { PersonEntity } from 'src/entities/person.entity'; import { SourceType } from 'src/enum'; -import { mapUpsertColumns } from 'src/utils/database'; +import { removeUndefinedKeys } from 'src/utils/database'; import { Paginated, PaginationOptions } from 'src/utils/pagination'; import { FindOptionsRelations } from 'typeorm'; @@ -417,7 +417,22 @@ export class PersonRepository { await this.db .insertInto('person') .values(people) - .onConflict((oc) => oc.column('id').doUpdateSet(() => mapUpsertColumns('person', people[0], ['id']))) + .onConflict((oc) => + oc.column('id').doUpdateSet((eb) => + removeUndefinedKeys( + { + name: eb.ref('excluded.name'), + birthDate: eb.ref('excluded.birthDate'), + thumbnailPath: eb.ref('excluded.thumbnailPath'), + faceAssetId: eb.ref('excluded.faceAssetId'), + isHidden: eb.ref('excluded.isHidden'), + isFavorite: eb.ref('excluded.isFavorite'), + color: eb.ref('excluded.color'), + }, + people[0], + ), + ), + ) .execute(); } diff --git a/server/src/repositories/search.repository.ts b/server/src/repositories/search.repository.ts index 46f38db55f..e2e389f47c 100644 --- a/server/src/repositories/search.repository.ts +++ b/server/src/repositories/search.repository.ts @@ -404,7 +404,7 @@ export class SearchRepository { .where('assets.ownerId', '=', anyUuid(userIds)) .where('assets.isVisible', '=', true) .where('assets.isArchived', '=', false) - .where('assets.type', '=', 'IMAGE') + .where('assets.type', '=', AssetType.IMAGE) .where('assets.deletedAt', 'is', null) .orderBy('city') .limit(1); @@ -421,7 +421,7 @@ export class SearchRepository { .where('assets.ownerId', '=', anyUuid(userIds)) 
.where('assets.isVisible', '=', true) .where('assets.isArchived', '=', false) - .where('assets.type', '=', 'IMAGE') + .where('assets.type', '=', AssetType.IMAGE) .where('assets.deletedAt', 'is', null) .whereRef('exif.city', '>', 'cte.city') .orderBy('city') diff --git a/server/src/repositories/user.repository.ts b/server/src/repositories/user.repository.ts index 302f868971..1387828be6 100644 --- a/server/src/repositories/user.repository.ts +++ b/server/src/repositories/user.repository.ts @@ -5,7 +5,7 @@ import { DB, UserMetadata as DbUserMetadata, Users } from 'src/db'; import { DummyValue, GenerateSql } from 'src/decorators'; import { UserMetadata, UserMetadataItem } from 'src/entities/user-metadata.entity'; import { UserEntity, withMetadata } from 'src/entities/user.entity'; -import { UserStatus } from 'src/enum'; +import { AssetType, UserStatus } from 'src/enum'; import { asUuid } from 'src/utils/database'; const columns = [ @@ -209,11 +209,11 @@ export class UserRepository { .select((eb) => [ eb.fn .countAll() - .filterWhere((eb) => eb.and([eb('assets.type', '=', 'IMAGE'), eb('assets.isVisible', '=', true)])) + .filterWhere((eb) => eb.and([eb('assets.type', '=', AssetType.IMAGE), eb('assets.isVisible', '=', true)])) .as('photos'), eb.fn .countAll() - .filterWhere((eb) => eb.and([eb('assets.type', '=', 'VIDEO'), eb('assets.isVisible', '=', true)])) + .filterWhere((eb) => eb.and([eb('assets.type', '=', AssetType.VIDEO), eb('assets.isVisible', '=', true)])) .as('videos'), eb.fn .coalesce(eb.fn.sum('exif.fileSizeInByte').filterWhere('assets.libraryId', 'is', null), eb.lit(0)) @@ -222,7 +222,9 @@ export class UserRepository { .coalesce( eb.fn .sum('exif.fileSizeInByte') - .filterWhere((eb) => eb.and([eb('assets.libraryId', 'is', null), eb('assets.type', '=', 'IMAGE')])), + .filterWhere((eb) => + eb.and([eb('assets.libraryId', 'is', null), eb('assets.type', '=', AssetType.IMAGE)]), + ), eb.lit(0), ) .as('usagePhotos'), @@ -230,7 +232,9 @@ export class UserRepository 
{ .coalesce( eb.fn .sum('exif.fileSizeInByte') - .filterWhere((eb) => eb.and([eb('assets.libraryId', 'is', null), eb('assets.type', '=', 'VIDEO')])), + .filterWhere((eb) => + eb.and([eb('assets.libraryId', 'is', null), eb('assets.type', '=', AssetType.VIDEO)]), + ), eb.lit(0), ) .as('usageVideos'), diff --git a/server/src/services/database.service.spec.ts b/server/src/services/database.service.spec.ts index 1314d5327e..4e45ec3ae0 100644 --- a/server/src/services/database.service.spec.ts +++ b/server/src/services/database.service.spec.ts @@ -384,50 +384,4 @@ describe(DatabaseService.name, () => { expect(mocks.database.runMigrations).not.toHaveBeenCalled(); }); }); - - describe('handleConnectionError', () => { - beforeAll(() => { - vi.useFakeTimers(); - }); - - afterAll(() => { - vi.useRealTimers(); - }); - - it('should not override interval', () => { - sut.handleConnectionError(new Error('Error')); - expect(mocks.logger.error).toHaveBeenCalled(); - - sut.handleConnectionError(new Error('foo')); - expect(mocks.logger.error).toHaveBeenCalledTimes(1); - }); - - it('should reconnect when interval elapses', async () => { - mocks.database.reconnect.mockResolvedValue(true); - - sut.handleConnectionError(new Error('error')); - await vi.advanceTimersByTimeAsync(5000); - - expect(mocks.database.reconnect).toHaveBeenCalledTimes(1); - expect(mocks.logger.log).toHaveBeenCalledWith('Database reconnected'); - - await vi.advanceTimersByTimeAsync(5000); - expect(mocks.database.reconnect).toHaveBeenCalledTimes(1); - }); - - it('should try again when reconnection fails', async () => { - mocks.database.reconnect.mockResolvedValueOnce(false); - - sut.handleConnectionError(new Error('error')); - await vi.advanceTimersByTimeAsync(5000); - - expect(mocks.database.reconnect).toHaveBeenCalledTimes(1); - expect(mocks.logger.warn).toHaveBeenCalledWith(expect.stringContaining('Database connection failed')); - - mocks.database.reconnect.mockResolvedValueOnce(true); - await 
vi.advanceTimersByTimeAsync(5000); - expect(mocks.database.reconnect).toHaveBeenCalledTimes(2); - expect(mocks.logger.log).toHaveBeenCalledWith('Database reconnected'); - }); - }); }); diff --git a/server/src/services/database.service.ts b/server/src/services/database.service.ts index 36f4c09177..d71dc25104 100644 --- a/server/src/services/database.service.ts +++ b/server/src/services/database.service.ts @@ -1,5 +1,4 @@ import { Injectable } from '@nestjs/common'; -import { Duration } from 'luxon'; import semver from 'semver'; import { EXTENSION_NAMES } from 'src/constants'; import { OnEvent } from 'src/decorators'; @@ -54,12 +53,8 @@ const messages = { If ${name} ${installedVersion} is compatible with Immich, please ensure the Postgres instance has this available.`, }; -const RETRY_DURATION = Duration.fromObject({ seconds: 5 }); - @Injectable() export class DatabaseService extends BaseService { - private reconnection?: NodeJS.Timeout; - @OnEvent({ name: 'app.bootstrap', priority: BootstrapEventPriority.DatabaseService }) async onBootstrap() { const version = await this.databaseRepository.getPostgresVersion(); @@ -108,30 +103,9 @@ export class DatabaseService extends BaseService { if (!database.skipMigrations) { await this.databaseRepository.runMigrations(); } - this.databaseRepository.init(); }); } - handleConnectionError(error: Error) { - if (this.reconnection) { - return; - } - - this.logger.error(`Database disconnected: ${error}`); - this.reconnection = setInterval(() => void this.reconnect(), RETRY_DURATION.toMillis()); - } - - private async reconnect() { - const isConnected = await this.databaseRepository.reconnect(); - if (isConnected) { - this.logger.log('Database reconnected'); - clearInterval(this.reconnection); - delete this.reconnection; - } else { - this.logger.warn(`Database connection failed, retrying in ${RETRY_DURATION.toHuman()}`); - } - } - private async createExtension(extension: DatabaseExtension) { try { await 
this.databaseRepository.createExtension(extension); diff --git a/server/src/utils/database.ts b/server/src/utils/database.ts index c879fa4274..456165063c 100644 --- a/server/src/utils/database.ts +++ b/server/src/utils/database.ts @@ -1,6 +1,4 @@ -import { Expression, RawBuilder, sql, ValueExpression } from 'kysely'; -import { InsertObject } from 'node_modules/kysely/dist/cjs'; -import { DB } from 'src/db'; +import { Expression, sql } from 'kysely'; import { Between, LessThanOrEqual, MoreThanOrEqual } from 'typeorm'; /** @@ -17,27 +15,6 @@ export function OptionalBetween(from?: T, to?: T) { } } -// populated by the database repository at bootstrap -export const UPSERT_COLUMNS = {} as { [T in keyof DB]: { [K in keyof DB[T]]: RawBuilder } }; - -/** Generates the columns for an upsert statement, excluding the conflict keys. - * Assumes that all entries have the same keys. */ -export function mapUpsertColumns( - table: T, - entry: InsertObject, - conflictKeys: readonly (keyof DB[T])[], -) { - const columns = UPSERT_COLUMNS[table] as { [K in keyof DB[T]]: RawBuilder }; - const upsertColumns: Partial>> = {}; - for (const entryColumn in entry) { - if (!conflictKeys.includes(entryColumn as keyof DB[T])) { - upsertColumns[entryColumn as keyof typeof entry] = columns[entryColumn as keyof DB[T]]; - } - } - - return upsertColumns as Expand>>; -} - export const asUuid = (id: string | Expression) => sql`${id}::uuid`; export const anyUuid = (ids: string[]) => sql`any(${`{${ids}}`}::uuid[])`; @@ -46,6 +23,16 @@ export const asVector = (embedding: number[]) => sql`${`[${embedding}]`} export const unnest = (array: string[]) => sql>`unnest(array[${sql.join(array)}]::text[])`; +export const removeUndefinedKeys = (update: T, template: unknown) => { + for (const key in update) { + if ((template as T)[key] === undefined) { + delete update[key]; + } + } + + return update; +}; + /** * Mainly for type debugging to make VS Code display a more useful tooltip. 
* Source: https://stackoverflow.com/a/69288824 diff --git a/server/test/medium/globalSetup.ts b/server/test/medium/globalSetup.ts index c6a37148c4..3c25142073 100644 --- a/server/test/medium/globalSetup.ts +++ b/server/test/medium/globalSetup.ts @@ -53,7 +53,6 @@ const globalSetup = async () => { // @ts-expect-error const dataSource = new DataSource(config); await dataSource.initialize(); - await dataSource.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"'); await dataSource.runMigrations(); await dataSource.destroy(); }; diff --git a/server/test/repositories/database.repository.mock.ts b/server/test/repositories/database.repository.mock.ts index fe954c725b..eeedf682de 100644 --- a/server/test/repositories/database.repository.mock.ts +++ b/server/test/repositories/database.repository.mock.ts @@ -4,9 +4,7 @@ import { Mocked, vitest } from 'vitest'; export const newDatabaseRepositoryMock = (): Mocked> => { return { - init: vitest.fn(), shutdown: vitest.fn(), - reconnect: vitest.fn(), getExtensionVersion: vitest.fn(), getExtensionVersionRange: vitest.fn(), getPostgresVersion: vitest.fn().mockResolvedValue('14.10 (Debian 14.10-1.pgdg120+1)'),