feat: upgrade kysely (#17630)

* feat: upgrade kysely

* chore: pr feedback
Jason Rasmussen 2025-04-15 13:26:56 -04:00 committed by GitHub
parent 270d178a2e
commit b710ad36f3
16 changed files with 474 additions and 478 deletions

server/package-lock.json (generated)
View File

@ -43,7 +43,7 @@
"ioredis": "^5.3.2",
"joi": "^17.10.0",
"js-yaml": "^4.1.0",
"kysely": "^0.27.3",
"kysely": "^0.28.0",
"kysely-postgres-js": "^2.0.0",
"lodash": "^4.17.21",
"luxon": "^3.4.2",
@ -105,7 +105,6 @@
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^57.0.0",
"globals": "^16.0.0",
"kysely-codegen": "^0.18.0",
"mock-fs": "^5.2.0",
"node-addon-api": "^8.3.0",
"patch-package": "^8.0.0",
@ -8460,16 +8459,6 @@
"license": "Apache-2.0",
"peer": true
},
"node_modules/diff": {
"version": "3.5.0",
"resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz",
"integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/discontinuous-range": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz",
@ -8680,22 +8669,6 @@
"url": "https://dotenvx.com"
}
},
"node_modules/dotenv-expand": {
"version": "12.0.1",
"resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-12.0.1.tgz",
"integrity": "sha512-LaKRbou8gt0RNID/9RoI+J2rvXsBRPMV7p+ElHlPhcSARbCPDYcYG2s1TIzAfWv4YSgyY5taidWzzs31lNV3yQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"dotenv": "^16.4.5"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
@ -8877,16 +8850,6 @@
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/env-paths": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz",
"integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/error-ex": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
@ -10132,101 +10095,6 @@
"node": ">= 0.4"
}
},
"node_modules/git-diff": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/git-diff/-/git-diff-2.0.6.tgz",
"integrity": "sha512-/Iu4prUrydE3Pb3lCBMbcSNIf81tgGt0W1ZwknnyF62t3tHmtiJTRj0f+1ZIhp3+Rh0ktz1pJVoa7ZXUCskivA==",
"dev": true,
"license": "ISC",
"dependencies": {
"chalk": "^2.3.2",
"diff": "^3.5.0",
"loglevel": "^1.6.1",
"shelljs": "^0.8.1",
"shelljs.exec": "^1.1.7"
},
"engines": {
"node": ">= 4.8.0"
}
},
"node_modules/git-diff/node_modules/ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^1.9.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/git-diff/node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/git-diff/node_modules/color-convert": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-name": "1.1.3"
}
},
"node_modules/git-diff/node_modules/color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
"dev": true,
"license": "MIT"
},
"node_modules/git-diff/node_modules/escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/git-diff/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/git-diff/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/glob": {
"version": "11.0.1",
"resolved": "https://registry.npmjs.org/glob/-/glob-11.0.1.tgz",
@ -10745,16 +10613,6 @@
"node": ">=8"
}
},
"node_modules/interpret": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz",
"integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.10"
}
},
"node_modules/ioredis": {
"version": "5.6.1",
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.6.1.tgz",
@ -11293,104 +11151,12 @@
}
},
"node_modules/kysely": {
"version": "0.27.6",
"resolved": "https://registry.npmjs.org/kysely/-/kysely-0.27.6.tgz",
"integrity": "sha512-FIyV/64EkKhJmjgC0g2hygpBv5RNWVPyNCqSAD7eTCv6eFWNIi4PN1UvdSJGicN/o35bnevgis4Y0UDC0qi8jQ==",
"version": "0.28.0",
"resolved": "https://registry.npmjs.org/kysely/-/kysely-0.28.0.tgz",
"integrity": "sha512-hq8VcLy57Ww7oPTTVEOrT9ml+g8ehbbmEUkHmW4Xtubu+NHdKZi6SH6egmD4cjDhn3b/0s0h/6AjdPayOTJhNw==",
"license": "MIT",
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/kysely-codegen": {
"version": "0.18.3",
"resolved": "https://registry.npmjs.org/kysely-codegen/-/kysely-codegen-0.18.3.tgz",
"integrity": "sha512-u2PFL1i8kaD+Jhcd5aIGPLgDqNriWvfWKtA7+kkvR2zZxr3DkdvT/B3nJWObZ/uj+GeONq0TChvf6mf6RqqWuA==",
"dev": true,
"license": "MIT",
"dependencies": {
"chalk": "4.1.2",
"cosmiconfig": "^9.0.0",
"dotenv": "^16.4.7",
"dotenv-expand": "^12.0.1",
"git-diff": "^2.0.6",
"micromatch": "^4.0.8",
"minimist": "^1.2.8",
"pluralize": "^8.0.0",
"zod": "^3.24.2"
},
"bin": {
"kysely-codegen": "dist/cli/bin.js"
},
"peerDependencies": {
"@libsql/kysely-libsql": "^0.3.0 || ^0.4.1",
"@tediousjs/connection-string": "^0.5.0",
"better-sqlite3": ">=7.6.2",
"kysely": "^0.27.0",
"kysely-bun-sqlite": "^0.3.2",
"kysely-bun-worker": "^0.5.3",
"mysql2": "^2.3.3 || ^3.0.0",
"pg": "^8.8.0",
"tarn": "^3.0.0",
"tedious": "^18.0.0"
},
"peerDependenciesMeta": {
"@libsql/kysely-libsql": {
"optional": true
},
"@tediousjs/connection-string": {
"optional": true
},
"better-sqlite3": {
"optional": true
},
"kysely": {
"optional": false
},
"kysely-bun-sqlite": {
"optional": true
},
"kysely-bun-worker": {
"optional": true
},
"mysql2": {
"optional": true
},
"pg": {
"optional": true
},
"tarn": {
"optional": true
},
"tedious": {
"optional": true
}
}
},
"node_modules/kysely-codegen/node_modules/cosmiconfig": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz",
"integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==",
"dev": true,
"license": "MIT",
"dependencies": {
"env-paths": "^2.2.1",
"import-fresh": "^3.3.0",
"js-yaml": "^4.1.0",
"parse-json": "^5.2.0"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/d-fischer"
},
"peerDependencies": {
"typescript": ">=4.9.5"
},
"peerDependenciesMeta": {
"typescript": {
"optional": true
}
"node": ">=18.0.0"
}
},
"node_modules/kysely-postgres-js": {
@ -11582,20 +11348,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/loglevel": {
"version": "1.9.2",
"resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.9.2.tgz",
"integrity": "sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6.0"
},
"funding": {
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/loglevel"
}
},
"node_modules/long": {
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/long/-/long-5.3.1.tgz",
@ -14032,18 +13784,6 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/rechoir": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz",
"integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==",
"dev": true,
"dependencies": {
"resolve": "^1.1.6"
},
"engines": {
"node": ">= 0.10"
}
},
"node_modules/redis-errors": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
@ -14645,56 +14385,6 @@
"node": ">=8"
}
},
"node_modules/shelljs": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz",
"integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"glob": "^7.0.0",
"interpret": "^1.0.0",
"rechoir": "^0.6.2"
},
"bin": {
"shjs": "bin/shjs"
},
"engines": {
"node": ">=4"
}
},
"node_modules/shelljs.exec": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/shelljs.exec/-/shelljs.exec-1.1.8.tgz",
"integrity": "sha512-vFILCw+lzUtiwBAHV8/Ex8JsFjelFMdhONIsgKNLgTzeRckp2AOYRQtHJE/9LhNvdMmE27AGtzWx0+DHpwIwSw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">= 4.0.0"
}
},
"node_modules/shelljs/node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"dev": true,
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/shimmer": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz",
@ -18010,16 +17700,6 @@
"engines": {
"node": ">= 14"
}
},
"node_modules/zod": {
"version": "3.24.2",
"resolved": "https://registry.npmjs.org/zod/-/zod-3.24.2.tgz",
"integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
}
}
}

View File

@ -69,7 +69,7 @@
"ioredis": "^5.3.2",
"joi": "^17.10.0",
"js-yaml": "^4.1.0",
"kysely": "^0.27.3",
"kysely": "^0.28.0",
"kysely-postgres-js": "^2.0.0",
"lodash": "^4.17.21",
"luxon": "^3.4.2",
@ -131,7 +131,6 @@
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^57.0.0",
"globals": "^16.0.0",
"kysely-codegen": "^0.18.0",
"mock-fs": "^5.2.0",
"node-addon-api": "^8.3.0",
"patch-package": "^8.0.0",

View File

@ -2,11 +2,9 @@ import { BullModule } from '@nestjs/bullmq';
import { Inject, Module, OnModuleDestroy, OnModuleInit, ValidationPipe } from '@nestjs/common';
import { APP_FILTER, APP_GUARD, APP_INTERCEPTOR, APP_PIPE } from '@nestjs/core';
import { ScheduleModule, SchedulerRegistry } from '@nestjs/schedule';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { ClsModule } from 'nestjs-cls';
import { KyselyModule } from 'nestjs-kysely';
import { OpenTelemetryModule } from 'nestjs-otel';
import postgres from 'postgres';
import { commands } from 'src/commands';
import { IWorker } from 'src/constants';
import { controllers } from 'src/controllers';
@ -25,6 +23,7 @@ import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemet
import { services } from 'src/services';
import { AuthService } from 'src/services/auth.service';
import { CliService } from 'src/services/cli.service';
import { getKyselyConfig } from 'src/utils/database';
const common = [...repositories, ...services, GlobalExceptionFilter];
@ -45,19 +44,7 @@ const imports = [
BullModule.registerQueue(...bull.queues),
ClsModule.forRoot(cls.config),
OpenTelemetryModule.forRoot(otel),
KyselyModule.forRoot({
dialect: new PostgresJSDialect({ postgres: postgres(database.config.kysely) }),
log(event) {
if (event.level === 'error') {
console.error('Query failed :', {
durationMs: event.queryDurationMillis,
error: event.error,
sql: event.query.sql,
params: event.query.parameters,
});
}
},
}),
KyselyModule.forRoot(getKyselyConfig(database.config.kysely)),
];
class BaseModule implements OnModuleInit, OnModuleDestroy {

View File

@ -2,7 +2,6 @@
process.env.DB_URL = process.env.DB_URL || 'postgres://postgres:postgres@localhost:5432/immich';
import { Kysely } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { writeFileSync } from 'node:fs';
import { basename, dirname, extname, join } from 'node:path';
import postgres from 'postgres';
@ -11,6 +10,7 @@ import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import 'src/schema';
import { schemaDiff, schemaFromCode, schemaFromDatabase } from 'src/sql-tools';
import { getKyselyConfig } from 'src/utils/database';
const main = async () => {
const command = process.argv[2];
@ -52,19 +52,7 @@ const run = async (only?: 'kysely' | 'typeorm') => {
const configRepository = new ConfigRepository();
const { database } = configRepository.getEnv();
const logger = new LoggingRepository(undefined, configRepository);
const db = new Kysely<any>({
dialect: new PostgresJSDialect({ postgres: postgres(database.config.kysely) }),
log(event) {
if (event.level === 'error') {
console.error('Query failed :', {
durationMs: event.queryDurationMillis,
error: event.error,
sql: event.query.sql,
params: event.query.parameters,
});
}
},
});
const db = new Kysely<any>(getKyselyConfig(database.config.kysely));
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
await databaseRepository.runMigrations({ only });

View File

@ -4,13 +4,11 @@ import { Reflector } from '@nestjs/core';
import { SchedulerRegistry } from '@nestjs/schedule';
import { Test } from '@nestjs/testing';
import { ClassConstructor } from 'class-transformer';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { ClsModule } from 'nestjs-cls';
import { KyselyModule } from 'nestjs-kysely';
import { OpenTelemetryModule } from 'nestjs-otel';
import { mkdir, rm, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import postgres from 'postgres';
import { format } from 'sql-formatter';
import { GENERATE_SQL_KEY, GenerateSqlQueries } from 'src/decorators';
import { repositories } from 'src/repositories';
@ -18,6 +16,11 @@ import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { AuthService } from 'src/services/auth.service';
import { getKyselyConfig } from 'src/utils/database';
const handleError = (label: string, error: Error | any) => {
console.error(`${label} error: ${error}`);
};
export class SqlLogger {
queries: string[] = [];
@ -75,7 +78,7 @@ class SqlGenerator {
const moduleFixture = await Test.createTestingModule({
imports: [
KyselyModule.forRoot({
dialect: new PostgresJSDialect({ postgres: postgres(database.config.kysely) }),
...getKyselyConfig(database.config.kysely),
log: (event) => {
if (event.level === 'query') {
this.sqlLogger.logQuery(event.query.sql);
@ -135,7 +138,7 @@ class SqlGenerator {
queries.push({ params: [] });
}
for (const { name, params } of queries) {
for (const { name, params, stream } of queries) {
let queryLabel = `${label}.${key}`;
if (name) {
queryLabel += ` (${name})`;
@ -143,8 +146,19 @@ class SqlGenerator {
this.sqlLogger.clear();
// errors still generate sql, which is all we care about
await target.apply(instance, params).catch((error: Error) => console.error(`${queryLabel} error: ${error}`));
if (stream) {
try {
const result: AsyncIterableIterator<unknown> = target.apply(instance, params);
for await (const _ of result) {
break;
}
} catch (error) {
handleError(queryLabel, error);
}
} else {
// errors still generate sql, which is all we care about
await target.apply(instance, params).catch((error: Error) => handleError(queryLabel, error));
}
if (this.sqlLogger.queries.length === 0) {
console.warn(`No queries recorded for ${queryLabel}`);
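
For queries flagged with stream: true, the generator gets back an async iterator rather than a promise, and it only needs the dialect to emit the SQL once, so it pulls a single row and breaks out of the loop. A minimal sketch of that pattern, using a stand-in generator in place of the real target.apply(instance, params):

// Stand-in for a Kysely .stream() call; the dialect logs the SQL as soon as the first row is pulled.
async function* fakeStreamQuery(): AsyncIterableIterator<{ id: string }> {
  yield { id: '1' };
  yield { id: '2' };
}

const captureSqlFromStream = async () => {
  for await (const _ of fakeStreamQuery()) {
    break; // one iteration is enough; only the generated SQL matters, not the rows
  }
};

void captureSqlFromStream();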

View File

@ -123,6 +123,7 @@ export const GENERATE_SQL_KEY = 'generate-sql-key';
export interface GenerateSqlQueries {
name?: string;
params: unknown[];
stream?: boolean;
}
export const Telemetry = (options: { enabled?: boolean }) =>

View File

@ -58,3 +58,53 @@ where
"assets"."id" = $1::uuid
limit
$2
-- AssetJobRepository.getForStorageTemplateJob
select
"assets"."id",
"assets"."ownerId",
"assets"."type",
"assets"."checksum",
"assets"."originalPath",
"assets"."isExternal",
"assets"."sidecarPath",
"assets"."originalFileName",
"assets"."livePhotoVideoId",
"assets"."fileCreatedAt",
"exif"."timeZone",
"exif"."fileSizeInByte"
from
"assets"
inner join "exif" on "assets"."id" = "exif"."assetId"
where
"assets"."deletedAt" is null
and "assets"."id" = $1
-- AssetJobRepository.streamForStorageTemplateJob
select
"assets"."id",
"assets"."ownerId",
"assets"."type",
"assets"."checksum",
"assets"."originalPath",
"assets"."isExternal",
"assets"."sidecarPath",
"assets"."originalFileName",
"assets"."livePhotoVideoId",
"assets"."fileCreatedAt",
"exif"."timeZone",
"exif"."fileSizeInByte"
from
"assets"
inner join "exif" on "assets"."id" = "exif"."assetId"
where
"assets"."deletedAt" is null
-- AssetJobRepository.streamForDeletedJob
select
"id",
"isOffline"
from
"assets"
where
"assets"."deletedAt" <= $1

View File

@ -0,0 +1,248 @@
-- NOTE: This file is auto generated by ./sql-generator
-- SyncRepository.getCheckpoints
select
"type",
"ack"
from
"session_sync_checkpoints"
where
"sessionId" = $1
-- SyncRepository.deleteCheckpoints
delete from "session_sync_checkpoints"
where
"sessionId" = $1
-- SyncRepository.getUserUpserts
select
"id",
"name",
"email",
"deletedAt",
"updateId"
from
"users"
where
"updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getUserDeletes
select
"id",
"userId"
from
"users_audit"
where
"deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getPartnerUpserts
select
"sharedById",
"sharedWithId",
"inTimeline",
"updateId"
from
"partners"
where
(
"sharedById" = $1
or "sharedWithId" = $2
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getPartnerDeletes
select
"id",
"sharedById",
"sharedWithId"
from
"partners_audit"
where
(
"sharedById" = $1
or "sharedWithId" = $2
)
and "deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getAssetUpserts
select
"id",
"ownerId",
"thumbhash",
"checksum",
"fileCreatedAt",
"fileModifiedAt",
"localDateTime",
"type",
"deletedAt",
"isFavorite",
"isVisible",
"updateId"
from
"assets"
where
"ownerId" = $1
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getPartnerAssetsUpserts
select
"id",
"ownerId",
"thumbhash",
"checksum",
"fileCreatedAt",
"fileModifiedAt",
"localDateTime",
"type",
"deletedAt",
"isFavorite",
"isVisible",
"updateId"
from
"assets"
where
"ownerId" in (
select
"sharedById"
from
"partners"
where
"sharedWithId" = $1
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getAssetDeletes
select
"id",
"assetId"
from
"assets_audit"
where
"ownerId" = $1
and "deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getPartnerAssetDeletes
select
"id",
"assetId"
from
"assets_audit"
where
"ownerId" in (
select
"sharedById"
from
"partners"
where
"sharedWithId" = $1
)
and "deletedAt" < now() - interval '1 millisecond'
order by
"id" asc
-- SyncRepository.getAssetExifsUpserts
select
"exif"."assetId",
"exif"."description",
"exif"."exifImageWidth",
"exif"."exifImageHeight",
"exif"."fileSizeInByte",
"exif"."orientation",
"exif"."dateTimeOriginal",
"exif"."modifyDate",
"exif"."timeZone",
"exif"."latitude",
"exif"."longitude",
"exif"."projectionType",
"exif"."city",
"exif"."state",
"exif"."country",
"exif"."make",
"exif"."model",
"exif"."lensModel",
"exif"."fNumber",
"exif"."focalLength",
"exif"."iso",
"exif"."exposureTime",
"exif"."profileDescription",
"exif"."rating",
"exif"."fps",
"exif"."updateId"
from
"exif"
where
"assetId" in (
select
"id"
from
"assets"
where
"ownerId" = $1
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc
-- SyncRepository.getPartnerAssetExifsUpserts
select
"exif"."assetId",
"exif"."description",
"exif"."exifImageWidth",
"exif"."exifImageHeight",
"exif"."fileSizeInByte",
"exif"."orientation",
"exif"."dateTimeOriginal",
"exif"."modifyDate",
"exif"."timeZone",
"exif"."latitude",
"exif"."longitude",
"exif"."projectionType",
"exif"."city",
"exif"."state",
"exif"."country",
"exif"."make",
"exif"."model",
"exif"."lensModel",
"exif"."fNumber",
"exif"."focalLength",
"exif"."iso",
"exif"."exposureTime",
"exif"."profileDescription",
"exif"."rating",
"exif"."fps",
"exif"."updateId"
from
"exif"
where
"assetId" in (
select
"id"
from
"assets"
where
"ownerId" in (
select
"sharedById"
from
"partners"
where
"sharedWithId" = $1
)
)
and "updatedAt" < now() - interval '1 millisecond'
order by
"updateId" asc

View File

@ -75,16 +75,19 @@ export class AssetJobRepository {
.where('assets.deletedAt', 'is', null);
}
@GenerateSql({ params: [DummyValue.UUID] })
getForStorageTemplateJob(id: string): Promise<StorageAsset | undefined> {
return this.storageTemplateAssetQuery().where('assets.id', '=', id).executeTakeFirst() as Promise<
StorageAsset | undefined
>;
}
@GenerateSql({ params: [], stream: true })
streamForStorageTemplateJob() {
return this.storageTemplateAssetQuery().stream() as AsyncIterableIterator<StorageAsset>;
}
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForDeletedJob(trashedBefore: Date) {
return this.db
.selectFrom('assets')
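
streamForStorageTemplateJob and streamForDeletedJob return Kysely async iterators instead of fully materialized arrays, so callers can process assets row by row. A hedged sketch of a consumer — the surrounding function and structural type are illustrative; only the repository method and the selected columns come from this diff:

type StorageAssetStream = {
  streamForStorageTemplateJob(): AsyncIterableIterator<{ id: string; originalPath: string }>;
};

// Illustrative consumer: rows arrive from the cursor one at a time instead of as a buffered array.
const logStorageTemplateAssets = async (repo: StorageAssetStream) => {
  for await (const asset of repo.streamForStorageTemplateJob()) {
    console.log(asset.id, asset.originalPath);
  }
};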

View File

@ -9,7 +9,6 @@ import { CLS_ID, ClsModuleOptions } from 'nestjs-cls';
import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces';
import { join, resolve } from 'node:path';
import { parse } from 'pg-connection-string';
import { Notice } from 'postgres';
import { citiesFile, excludePaths, IWorker } from 'src/constants';
import { Telemetry } from 'src/decorators';
import { EnvDto } from 'src/dtos/env.dto';
@ -23,23 +22,10 @@ import {
QueueName,
} from 'src/enum';
import { DatabaseConnectionParams, VectorExtension } from 'src/types';
import { isValidSsl, PostgresConnectionConfig } from 'src/utils/database';
import { setDifference } from 'src/utils/set';
import { PostgresConnectionOptions } from 'typeorm/driver/postgres/PostgresConnectionOptions.js';
type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
type PostgresConnectionConfig = {
host?: string;
password?: string;
user?: string;
port?: number;
database?: string;
client_encoding?: string;
ssl?: Ssl;
application_name?: string;
fallback_application_name?: string;
options?: string;
};
export interface EnvData {
host?: string;
port: number;
@ -144,9 +130,6 @@ const asSet = <T>(value: string | undefined, defaults: T[]) => {
return new Set(values.length === 0 ? defaults : (values as T[]));
};
const isValidSsl = (ssl?: string | boolean | object): ssl is Ssl =>
typeof ssl !== 'string' || ssl === 'require' || ssl === 'allow' || ssl === 'prefer' || ssl === 'verify-full';
const getEnv = (): EnvData => {
const dto = plainToInstance(EnvDto, process.env);
const errors = validateSync(dto);
@ -233,33 +216,6 @@ const getEnv = (): EnvData => {
};
}
const driverOptions = {
...parsedOptions,
onnotice: (notice: Notice) => {
if (notice['severity'] !== 'NOTICE') {
console.warn('Postgres notice:', notice);
}
},
max: 10,
types: {
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
parse: (x: string) => new Date(x),
},
bigint: {
to: 20,
from: [20, 1700],
parse: (value: string) => Number.parseInt(value),
serialize: (value: number) => value.toString(),
},
},
connection: {
TimeZone: 'UTC',
},
};
return {
host: dto.IMMICH_HOST,
port: dto.IMMICH_PORT || 2283,
@ -325,7 +281,7 @@ const getEnv = (): EnvData => {
parseInt8: true,
...(databaseUrl ? { connectionType: 'url', url: databaseUrl } : parts),
},
kysely: driverOptions,
kysely: parsedOptions,
},
skipMigrations: dto.DB_SKIP_MIGRATIONS ?? false,

View File

@ -1,5 +1,5 @@
import { Injectable } from '@nestjs/common';
import { Kysely, OrderByDirectionExpression, sql } from 'kysely';
import { Kysely, OrderByDirection, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { randomUUID } from 'node:crypto';
import { DB } from 'src/db';
@ -223,7 +223,7 @@ export class SearchRepository {
],
})
async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions): Paginated<AssetEntity> {
const orderDirection = (options.orderDirection?.toLowerCase() || 'desc') as OrderByDirectionExpression;
const orderDirection = (options.orderDirection?.toLowerCase() || 'desc') as OrderByDirection;
const items = await searchAssetBuilder(this.db, options)
.orderBy('assets.fileCreatedAt', orderDirection)
.limit(pagination.size + 1)

View File

@ -3,6 +3,7 @@ import { Insertable, Kysely, SelectQueryBuilder, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DB, SessionSyncCheckpoints } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { SyncEntityType } from 'src/enum';
import { SyncAck } from 'src/types';
@ -13,6 +14,7 @@ type upsertTables = 'users' | 'partners' | 'assets' | 'exif';
export class SyncRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID] })
getCheckpoints(sessionId: string) {
return this.db
.selectFrom('session_sync_checkpoints')
@ -33,6 +35,7 @@ export class SyncRepository {
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
deleteCheckpoints(sessionId: string, types?: SyncEntityType[]) {
return this.db
.deleteFrom('session_sync_checkpoints')
@ -41,6 +44,7 @@ export class SyncRepository {
.execute();
}
@GenerateSql({ params: [], stream: true })
getUserUpserts(ack?: SyncAck) {
return this.db
.selectFrom('users')
@ -49,6 +53,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [], stream: true })
getUserDeletes(ack?: SyncAck) {
return this.db
.selectFrom('users_audit')
@ -57,6 +62,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerUpserts(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('partners')
@ -66,6 +72,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerDeletes(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('partners_audit')
@ -75,6 +82,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getAssetUpserts(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('assets')
@ -84,6 +92,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetsUpserts(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('assets')
@ -95,6 +104,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getAssetDeletes(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('assets_audit')
@ -105,6 +115,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetDeletes(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('assets_audit')
@ -116,6 +127,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getAssetExifsUpserts(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('exif')
@ -125,6 +137,7 @@ export class SyncRepository {
.stream();
}
@GenerateSql({ params: [DummyValue.UUID], stream: true })
getPartnerAssetExifsUpserts(userId: string, ack?: SyncAck) {
return this.db
.selectFrom('exif')
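
Each of these sync queries streams its rows, and the new @GenerateSql({ ..., stream: true }) metadata is what lets the SQL generator above record them in the generated queries file. A hedged sketch of a consumer that tracks the newest updateId while rows stream in — the function and structural type are illustrative only:

type PartnerUpsertStream = {
  getPartnerUpserts(userId: string): AsyncIterableIterator<{ updateId: string }>;
};

// Illustrative consumer: remember the latest updateId as rows arrive, e.g. for a later checkpoint.
const trackPartnerUpserts = async (repo: PartnerUpsertStream, userId: string) => {
  let lastUpdateId: string | undefined;
  for await (const row of repo.getPartnerUpserts(userId)) {
    lastUpdateId = row.updateId;
  }
  return lastUpdateId;
};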

View File

@ -1,4 +1,77 @@
import { Expression, ExpressionBuilder, ExpressionWrapper, Nullable, Selectable, Simplify, sql } from 'kysely';
import {
Expression,
ExpressionBuilder,
ExpressionWrapper,
KyselyConfig,
Nullable,
Selectable,
Simplify,
sql,
} from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import postgres, { Notice } from 'postgres';
type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
export type PostgresConnectionConfig = {
host?: string;
password?: string;
user?: string;
port?: number;
database?: string;
max?: number;
client_encoding?: string;
ssl?: Ssl;
application_name?: string;
fallback_application_name?: string;
options?: string;
};
export const isValidSsl = (ssl?: string | boolean | object): ssl is Ssl =>
typeof ssl !== 'string' || ssl === 'require' || ssl === 'allow' || ssl === 'prefer' || ssl === 'verify-full';
export const getKyselyConfig = (options: PostgresConnectionConfig): KyselyConfig => {
return {
dialect: new PostgresJSDialect({
postgres: postgres({
onnotice: (notice: Notice) => {
if (notice['severity'] !== 'NOTICE') {
console.warn('Postgres notice:', notice);
}
},
max: 10,
types: {
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
parse: (x: string) => new Date(x),
},
bigint: {
to: 20,
from: [20, 1700],
parse: (value: string) => Number.parseInt(value),
serialize: (value: number) => value.toString(),
},
},
connection: {
TimeZone: 'UTC',
},
...options,
}),
}),
log(event) {
if (event.level === 'error') {
console.error('Query failed :', {
durationMs: event.queryDurationMillis,
error: event.error,
sql: event.query.sql,
params: event.query.parameters,
});
}
},
};
};
export const asUuid = (id: string | Expression<string>) => sql<string>`${id}::uuid`;
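
getKyselyConfig now centralizes the postgres.js driver defaults and the Kysely error logger that were previously duplicated in app.module.ts, the migration runner, the config repository, and the test utilities. A minimal usage sketch, assuming local placeholder connection values:

import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { getKyselyConfig } from 'src/utils/database';

// Connection values below are placeholders; getKyselyConfig layers them over the shared
// postgres.js defaults (notice filtering, date/bigint parsers, UTC timezone, pool size).
export const db = new Kysely<DB>(
  getKyselyConfig({ host: 'localhost', port: 5432, user: 'postgres', password: 'postgres', database: 'immich' }),
);

Callers that need different logging, such as the SQL generator above, spread the returned config and override log, which is why the helper returns a plain KyselyConfig object rather than constructing the Kysely instance itself.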

View File

@ -1,9 +1,8 @@
import { FileMigrationProvider, Kysely, Migrator } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { mkdir, readdir } from 'node:fs/promises';
import { join } from 'node:path';
import { parse } from 'pg-connection-string';
import postgres, { Notice } from 'postgres';
import { getKyselyConfig } from 'src/utils/database';
import { GenericContainer, Wait } from 'testcontainers';
import { DataSource } from 'typeorm';
@ -78,36 +77,7 @@ const globalSetup = async () => {
database: parsed.database ?? undefined,
};
const driverOptions = {
...parsedOptions,
onnotice: (notice: Notice) => {
if (notice['severity'] !== 'NOTICE') {
console.warn('Postgres notice:', notice);
}
},
max: 10,
types: {
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
parse: (x: string) => new Date(x),
},
bigint: {
to: 20,
from: [20],
parse: (value: string) => Number.parseInt(value),
serialize: (value: number) => value.toString(),
},
},
connection: {
TimeZone: 'UTC',
},
};
const db = new Kysely({
dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, max: 1, database: 'postgres' }) }),
});
const db = new Kysely(getKyselyConfig(parsedOptions));
// TODO just call `databaseRepository.migrate()` (probably have to wait until TypeOrm is gone)
const migrator = new Migrator({

View File

@ -0,0 +1,46 @@
import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { AssetRepository } from 'src/repositories/asset.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { AssetService } from 'src/services/asset.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
describe(AssetService.name, () => {
let defaultDatabase: Kysely<DB>;
let assetRepo: AssetRepository;
let userRepo: UserRepository;
const createSut = (db?: Kysely<DB>) => {
return newMediumService(AssetService, {
database: db || defaultDatabase,
repos: {
asset: 'real',
},
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
assetRepo = new AssetRepository(defaultDatabase);
userRepo = new UserRepository(defaultDatabase);
});
describe('getStatistics', () => {
it('should return stats as numbers, not strings', async () => {
const { sut } = createSut();
const user = mediumFactory.userInsert();
const asset = mediumFactory.assetInsert({ ownerId: user.id });
await userRepo.create(user);
await assetRepo.create(asset);
await assetRepo.upsertExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const auth = factory.auth({ user: { id: user.id } });
await expect(sut.getStatistics(auth, {})).resolves.toEqual({ images: 1, total: 1, videos: 0 });
});
});
});

View File

@ -1,11 +1,9 @@
import { ClassConstructor } from 'class-transformer';
import { Kysely, sql } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { ChildProcessWithoutNullStreams } from 'node:child_process';
import { Writable } from 'node:stream';
import { parse } from 'pg-connection-string';
import { PNG } from 'pngjs';
import postgres, { Notice } from 'postgres';
import { DB } from 'src/db';
import { AccessRepository } from 'src/repositories/access.repository';
import { ActivityRepository } from 'src/repositories/activity.repository';
@ -51,6 +49,7 @@ import { VersionHistoryRepository } from 'src/repositories/version-history.repos
import { ViewRepository } from 'src/repositories/view-repository';
import { BaseService } from 'src/services/base.service';
import { RepositoryInterface } from 'src/types';
import { getKyselyConfig } from 'src/utils/database';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newConfigRepositoryMock } from 'test/repositories/config.repository.mock';
@ -305,44 +304,13 @@ export const getKyselyDB = async (suffix?: string): Promise<Kysely<DB>> => {
database: parsed.database ?? undefined,
};
const driverOptions = {
...parsedOptions,
onnotice: (notice: Notice) => {
if (notice['severity'] !== 'NOTICE') {
console.warn('Postgres notice:', notice);
}
},
max: 10,
types: {
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
parse: (x: string) => new Date(x),
},
bigint: {
to: 20,
from: [20],
parse: (value: string) => Number.parseInt(value),
serialize: (value: number) => value.toString(),
},
},
connection: {
TimeZone: 'UTC',
},
};
const kysely = new Kysely<DB>({
dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, max: 1, database: 'postgres' }) }),
});
const kysely = new Kysely<DB>(getKyselyConfig({ ...parsedOptions, max: 1, database: 'postgres' }));
const randomSuffix = Math.random().toString(36).slice(2, 7);
const dbName = `immich_${suffix ?? randomSuffix}`;
await sql.raw(`CREATE DATABASE ${dbName} WITH TEMPLATE immich OWNER postgres;`).execute(kysely);
return new Kysely<DB>({
dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, database: dbName }) }),
});
return new Kysely<DB>(getKyselyConfig({ ...parsedOptions, database: dbName }));
};
export const newRandomImage = () => {