Mirror of https://github.com/immich-app/immich.git, synced 2025-05-24 01:12:58 -04:00
feat: medium tests for user and sync service (#16304)
Co-authored-by: Zack Pollard <zackpollard@ymail.com>
Parent: ae61ea7984
Commit: 7c851893b4

.github/workflows/test.yml (vendored): 21 changed lines
@@ -246,25 +246,30 @@ jobs:
         run: npm run check
         if: ${{ !cancelled() }}

-  medium-tests-server:
+  server-medium-tests:
     name: Medium Tests (Server)
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
-    runs-on: mich
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: ./server

     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-        with:
-          submodules: 'recursive'

-      - name: Production build
-        if: ${{ !cancelled() }}
-        run: docker compose -f e2e/docker-compose.yml build
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: './server/.nvmrc'
+
+      - name: Run npm install
+        run: npm ci

       - name: Run medium tests
+        run: npm run test:medium
         if: ${{ !cancelled() }}
-        run: make test-medium

   e2e-tests-server-cli:
     name: End-to-End Tests (Server & CLI)
server/package-lock.json (generated): 12 changed lines
@@ -77,6 +77,7 @@
         "@nestjs/testing": "^11.0.4",
         "@swc/core": "^1.4.14",
         "@testcontainers/postgresql": "^10.2.1",
+        "@testcontainers/redis": "^10.18.0",
         "@types/archiver": "^6.0.0",
         "@types/async-lock": "^1.4.2",
         "@types/bcrypt": "^5.0.0",
@@ -113,6 +114,7 @@
         "rimraf": "^6.0.0",
         "source-map-support": "^0.5.21",
         "sql-formatter": "^15.0.0",
+        "testcontainers": "^10.18.0",
         "tsconfig-paths": "^4.2.0",
         "typescript": "^5.3.3",
         "unplugin-swc": "^1.4.5",
@@ -5619,6 +5621,16 @@
         "testcontainers": "^10.18.0"
       }
     },
+    "node_modules/@testcontainers/redis": {
+      "version": "10.18.0",
+      "resolved": "https://registry.npmjs.org/@testcontainers/redis/-/redis-10.18.0.tgz",
+      "integrity": "sha512-ZRIemaCl7C6ozC6D3PdR7BBfD3roT+EHX3ATIopUCXdemhQ/0gNaCNwt4Zq8akxkf8TvgnJkK/t6+Itm01FcVQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "testcontainers": "^10.18.0"
+      }
+    },
     "node_modules/@turf/boolean-point-in-polygon": {
       "version": "7.1.0",
       "resolved": "https://registry.npmjs.org/@turf/boolean-point-in-polygon/-/boolean-point-in-polygon-7.1.0.tgz",
@@ -18,9 +18,9 @@
     "check": "tsc --noEmit",
     "check:code": "npm run format && npm run lint && npm run check",
     "check:all": "npm run check:code && npm run test:cov",
-    "test": "vitest",
-    "test:cov": "vitest --coverage",
-    "test:medium": "vitest --config vitest.config.medium.mjs",
+    "test": "vitest --config test/vitest.config.mjs",
+    "test:cov": "vitest --config test/vitest.config.mjs --coverage",
+    "test:medium": "vitest --config test/vitest.config.medium.mjs",
     "typeorm": "typeorm",
     "lifecycle": "node ./dist/utils/lifecycle.js",
     "typeorm:migrations:create": "typeorm migration:create",
@@ -103,6 +103,7 @@
     "@nestjs/testing": "^11.0.4",
     "@swc/core": "^1.4.14",
     "@testcontainers/postgresql": "^10.2.1",
+    "@testcontainers/redis": "^10.18.0",
     "@types/archiver": "^6.0.0",
     "@types/async-lock": "^1.4.2",
     "@types/bcrypt": "^5.0.0",
@@ -139,6 +140,7 @@
     "rimraf": "^6.0.0",
     "source-map-support": "^0.5.21",
     "sql-formatter": "^15.0.0",
+    "testcontainers": "^10.18.0",
     "tsconfig-paths": "^4.2.0",
     "typescript": "^5.3.3",
     "unplugin-swc": "^1.4.5",
@@ -157,6 +157,6 @@ export function mapUserAdmin(entity: UserEntity): UserAdminResponseDto {
     quotaSizeInBytes: entity.quotaSizeInBytes,
     quotaUsageInBytes: entity.quotaUsageInBytes,
     status: entity.status,
-    license: license ?? null,
+    license: license ? { ...license, activatedAt: new Date(license?.activatedAt) } : null,
   };
 }
@@ -115,5 +115,5 @@ export const getDefaultPreferences = (user: { email: string }): UserPreferences

 export interface UserMetadata extends Record<UserMetadataKey, Record<string, any>> {
   [UserMetadataKey.PREFERENCES]: DeepPartial<UserPreferences>;
-  [UserMetadataKey.LICENSE]: { licenseKey: string; activationKey: string; activatedAt: Date };
+  [UserMetadataKey.LICENSE]: { licenseKey: string; activationKey: string; activatedAt: string };
 }
@@ -189,7 +189,7 @@ export class UserRepository {
     await this.db.deleteFrom('user_metadata').where('userId', '=', id).where('key', '=', key).execute();
   }

-  delete(user: UserEntity, hard?: boolean): Promise<UserEntity> {
+  delete(user: { id: string }, hard?: boolean): Promise<UserEntity> {
     return hard
       ? (this.db.deleteFrom('users').where('id', '=', user.id).execute() as unknown as Promise<UserEntity>)
       : (this.db
@@ -140,7 +140,7 @@ export class UserService extends BaseService {
     if (!license) {
       throw new NotFoundException();
     }
-    return license.value;
+    return { ...license.value, activatedAt: new Date(license.value.activatedAt) };
   }

   async deleteLicense({ user }: AuthDto): Promise<void> {
@@ -170,17 +170,14 @@ export class UserService extends BaseService {
       throw new BadRequestException('Invalid license key');
     }

-    const licenseData = {
-      ...license,
-      activatedAt: new Date(),
-    };
+    const activatedAt = new Date();

     await this.userRepository.upsertMetadata(auth.user.id, {
       key: UserMetadataKey.LICENSE,
-      value: licenseData,
+      value: { ...license, activatedAt: activatedAt.toISOString() },
     });

-    return licenseData;
+    return { ...license, activatedAt };
   }

   @OnJob({ name: JobName.USER_SYNC_USAGE, queue: QueueName.BACKGROUND_TASK })
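Taken together, the hunks above switch the persisted license activation date from a Date to an ISO string (in the UserMetadata type and the upsertMetadata value) and convert it back to a Date wherever it is returned (getLicense, mapUserAdmin). A condensed sketch of that round trip; the type names and helper functions here are assumptions for illustration, only the conversion logic is taken from the diff:

// Sketch of the activatedAt round trip these hunks implement (names are illustrative).
type StoredLicense = { licenseKey: string; activationKey: string; activatedAt: string }; // persisted in user_metadata
type ApiLicense = { licenseKey: string; activationKey: string; activatedAt: Date }; // returned to callers

// setLicense path: capture the timestamp once, persist it as an ISO string.
const toStored = (license: { licenseKey: string; activationKey: string }, activatedAt = new Date()): StoredLicense => ({
  ...license,
  activatedAt: activatedAt.toISOString(),
});

// getLicense / mapUserAdmin path: revive the stored string into a Date before returning it.
const toApi = (stored: StoredLicense): ApiLicense => ({
  ...stored,
  activatedAt: new Date(stored.activatedAt),
});

const stored = toStored({ licenseKey: 'example-license-key', activationKey: 'example-activation-key' });
console.log(toApi(stored).activatedAt.toISOString() === stored.activatedAt); // true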
server/test/factory.ts (new file): 169 lines
@@ -0,0 +1,169 @@
import { Insertable, Kysely } from 'kysely';
import { randomBytes, randomUUID } from 'node:crypto';
import { Writable } from 'node:stream';
import { Assets, DB, Sessions, Users } from 'src/db';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetType } from 'src/enum';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { UserRepository } from 'src/repositories/user.repository';

class CustomWritable extends Writable {
  private data = '';

  _write(chunk: any, encoding: string, callback: () => void) {
    this.data += chunk.toString();
    callback();
  }

  getResponse() {
    const result = this.data;
    return result
      .split('\n')
      .filter((x) => x.length > 0)
      .map((x) => JSON.parse(x));
  }
}

type Asset = Insertable<Assets>;
type User = Partial<Insertable<Users>>;
type Session = Omit<Insertable<Sessions>, 'token'> & { token?: string };

export const newUuid = () => randomUUID() as string;

export class TestFactory {
  private assets: Asset[] = [];
  private sessions: Session[] = [];
  private users: User[] = [];

  private constructor(private context: TestContext) {}

  static create(context: TestContext) {
    return new TestFactory(context);
  }

  static stream() {
    return new CustomWritable();
  }

  static asset(asset: Asset) {
    const assetId = asset.id || newUuid();
    const defaults: Insertable<Assets> = {
      deviceAssetId: '',
      deviceId: '',
      originalFileName: '',
      checksum: randomBytes(32),
      type: AssetType.IMAGE,
      originalPath: '/path/to/something.jpg',
      ownerId: '@immich.cloud',
      isVisible: true,
    };

    return {
      ...defaults,
      ...asset,
      id: assetId,
    };
  }

  static auth(auth: { user: User; session?: Session }) {
    return auth as AuthDto;
  }

  static user(user: User = {}) {
    const userId = user.id || newUuid();
    const defaults: Insertable<Users> = {
      email: `${userId}@immich.cloud`,
      name: `User ${userId}`,
      deletedAt: null,
    };

    return {
      ...defaults,
      ...user,
      id: userId,
    };
  }

  static session(session: Session) {
    const id = session.id || newUuid();
    const defaults = {
      token: randomBytes(36).toString('base64url'),
    };

    return {
      ...defaults,
      ...session,
      id,
    };
  }

  withAsset(asset: Asset) {
    this.assets.push(asset);
    return this;
  }

  withSession(session: Session) {
    this.sessions.push(session);
    return this;
  }

  withUser(user: User = {}) {
    this.users.push(user);
    return this;
  }

  async create() {
    for (const asset of this.assets) {
      await this.context.createAsset(asset);
    }

    for (const user of this.users) {
      await this.context.createUser(user);
    }

    for (const session of this.sessions) {
      await this.context.createSession(session);
    }

    return this.context;
  }
}

export class TestContext {
  userRepository: UserRepository;
  assetRepository: AssetRepository;
  albumRepository: AlbumRepository;
  sessionRepository: SessionRepository;
  syncRepository: SyncRepository;

  private constructor(private db: Kysely<DB>) {
    this.userRepository = new UserRepository(this.db);
    this.assetRepository = new AssetRepository(this.db);
    this.albumRepository = new AlbumRepository(this.db);
    this.sessionRepository = new SessionRepository(this.db);
    this.syncRepository = new SyncRepository(this.db);
  }

  static from(db: Kysely<DB>) {
    return new TestContext(db).getFactory();
  }

  getFactory() {
    return TestFactory.create(this);
  }

  createUser(user: User = {}) {
    return this.userRepository.create(TestFactory.user(user));
  }

  createAsset(asset: Asset) {
    return this.assetRepository.create(TestFactory.asset(asset));
  }

  createSession(session: Session) {
    return this.sessionRepository.create(TestFactory.session(session));
  }
}
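The new medium specs below drive this factory through TestContext.from(db); a minimal usage sketch, assuming the getKyselyDB helper that this commit adds to test/utils.ts further down:

// Minimal usage sketch of the factory above (mirrors the setup() helper in the sync spec below).
import { TestContext, TestFactory } from 'test/factory';
import { getKyselyDB } from 'test/utils';

const user = TestFactory.user(); // insertable row with a generated id and matching email/name
const session = TestFactory.session({ userId: user.id });

const db = await getKyselyDB(); // fresh database for this suite
const context = await TestContext.from(db) // TestFactory bound to a TestContext
  .withUser(user)
  .withSession(session)
  .create(); // persists the queued rows and returns the TestContext

// context.userRepository, context.sessionRepository, context.syncRepository, ... are real,
// Postgres-backed repositories that a service under test can use directly.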
server/test/medium/globalSetup.ts (new file): 61 lines
@@ -0,0 +1,61 @@
import { GenericContainer, Wait } from 'testcontainers';
import { DataSource } from 'typeorm';

const globalSetup = async () => {
  const postgres = await new GenericContainer('tensorchord/pgvecto-rs:pg14-v0.2.0')
    .withExposedPorts(5432)
    .withEnvironment({
      POSTGRES_PASSWORD: 'postgres',
      POSTGRES_USER: 'postgres',
      POSTGRES_DB: 'immich',
    })
    .withCommand([
      'postgres',
      '-c',
      'shared_preload_libraries=vectors.so',
      '-c',
      'search_path="$$user", public, vectors',
      '-c',
      'max_wal_size=2GB',
      '-c',
      'shared_buffers=512MB',
      '-c',
      'fsync=off',
      '-c',
      'full_page_writes=off',
      '-c',
      'synchronous_commit=off',
    ])
    .withWaitStrategy(Wait.forAll([Wait.forLogMessage('database system is ready to accept connections', 2)]))
    .start();

  const postgresPort = postgres.getMappedPort(5432);
  const postgresUrl = `postgres://postgres:postgres@localhost:${postgresPort}/immich`;
  process.env.IMMICH_TEST_POSTGRES_URL = postgresUrl;

  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-expect-error
  const modules = import.meta.glob('/src/migrations/*.ts', { eager: true });

  const config = {
    type: 'postgres' as const,
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-expect-error
    migrations: Object.values(modules).map((module) => Object.values(module)[0]),
    migrationsRun: false,
    synchronize: false,
    connectTimeoutMS: 10_000, // 10 seconds
    parseInt8: true,
    url: postgresUrl,
  };

  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-expect-error
  const dataSource = new DataSource(config);
  await dataSource.initialize();
  await dataSource.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
  await dataSource.runMigrations();
  await dataSource.destroy();
};

export default globalSetup;
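Between this global setup and the getKyselyDB helper added to test/utils.ts later in the commit, each call site gets its own migrated database cloned from the immich template, which is what allows the suites to run concurrently. A sketch of the flow, assuming the helpers from this commit:

// Sketch: per-suite databases on top of the container started above (condensed from
// globalSetup.ts and the getKyselyDB helper in test/utils.ts).
import { getKyselyDB } from 'test/utils';

// globalSetup has already started pgvecto.rs, run all migrations into the `immich` database,
// and exported its URL via IMMICH_TEST_POSTGRES_URL.
const dbA = await getKyselyDB(); // CREATE DATABASE immich_<random suffix> WITH TEMPLATE immich
const dbB = await getKyselyDB('sync'); // CREATE DATABASE immich_sync WITH TEMPLATE immich

// dbA and dbB are independent copies of the migrated schema, so specs using them
// cannot interfere with each other.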
server/test/medium/specs/sync.service.spec.ts (new file): 189 lines
@@ -0,0 +1,189 @@
import { AuthDto } from 'src/dtos/auth.dto';
import { SyncRequestType } from 'src/enum';
import { SyncService } from 'src/services/sync.service';
import { TestContext, TestFactory } from 'test/factory';
import { getKyselyDB, newTestService } from 'test/utils';

const setup = async () => {
  const user = TestFactory.user();
  const session = TestFactory.session({ userId: user.id });
  const auth = TestFactory.auth({ session, user });

  const db = await getKyselyDB();

  const context = await TestContext.from(db).withUser(user).withSession(session).create();

  const { sut } = newTestService(SyncService, context);

  const testSync = async (auth: AuthDto, types: SyncRequestType[]) => {
    const stream = TestFactory.stream();
    await sut.stream(auth, stream, { types });

    return stream.getResponse();
  };

  return {
    auth,
    context,
    sut,
    testSync,
  };
};

describe(SyncService.name, () => {
  describe.concurrent('users', () => {
    it('should detect and sync the first user', async () => {
      const { context, auth, sut, testSync } = await setup();

      const user = await context.userRepository.get(auth.user.id, { withDeleted: false });
      if (!user) {
        expect.fail('First user should exist');
      }

      const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
      expect(initialSyncResponse).toHaveLength(1);
      expect(initialSyncResponse).toEqual([
        {
          ack: expect.any(String),
          data: {
            deletedAt: user.deletedAt,
            email: user.email,
            id: user.id,
            name: user.name,
          },
          type: 'UserV1',
        },
      ]);

      const acks = [initialSyncResponse[0].ack];
      await sut.setAcks(auth, { acks });
      const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);

      expect(ackSyncResponse).toHaveLength(0);
    });

    it('should detect and sync a soft deleted user', async () => {
      const { auth, context, sut, testSync } = await setup();

      const deletedAt = new Date().toISOString();
      const deleted = await context.createUser({ deletedAt });

      const response = await testSync(auth, [SyncRequestType.UsersV1]);

      expect(response).toHaveLength(2);
      expect(response).toEqual(
        expect.arrayContaining([
          {
            ack: expect.any(String),
            data: {
              deletedAt: null,
              email: auth.user.email,
              id: auth.user.id,
              name: auth.user.name,
            },
            type: 'UserV1',
          },
          {
            ack: expect.any(String),
            data: {
              deletedAt,
              email: deleted.email,
              id: deleted.id,
              name: deleted.name,
            },
            type: 'UserV1',
          },
        ]),
      );

      const acks = [response[1].ack];
      await sut.setAcks(auth, { acks });
      const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);

      expect(ackSyncResponse).toHaveLength(0);
    });

    it('should detect and sync a deleted user', async () => {
      const { auth, context, sut, testSync } = await setup();

      const user = await context.createUser();
      await context.userRepository.delete({ id: user.id }, true);

      const response = await testSync(auth, [SyncRequestType.UsersV1]);

      expect(response).toHaveLength(2);
      expect(response).toEqual(
        expect.arrayContaining([
          {
            ack: expect.any(String),
            data: {
              userId: user.id,
            },
            type: 'UserDeleteV1',
          },
          {
            ack: expect.any(String),
            data: {
              deletedAt: null,
              email: auth.user.email,
              id: auth.user.id,
              name: auth.user.name,
            },
            type: 'UserV1',
          },
        ]),
      );

      const acks = response.map(({ ack }) => ack);
      await sut.setAcks(auth, { acks });
      const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);

      expect(ackSyncResponse).toHaveLength(0);
    });

    it('should sync a user and then an update to that same user', async () => {
      const { auth, context, sut, testSync } = await setup();

      const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);

      expect(initialSyncResponse).toHaveLength(1);
      expect(initialSyncResponse).toEqual(
        expect.arrayContaining([
          {
            ack: expect.any(String),
            data: {
              deletedAt: null,
              email: auth.user.email,
              id: auth.user.id,
              name: auth.user.name,
            },
            type: 'UserV1',
          },
        ]),
      );

      const acks = [initialSyncResponse[0].ack];
      await sut.setAcks(auth, { acks });

      const updated = await context.userRepository.update(auth.user.id, { name: 'new name' });

      const updatedSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);

      expect(updatedSyncResponse).toHaveLength(1);
      expect(updatedSyncResponse).toEqual(
        expect.arrayContaining([
          {
            ack: expect.any(String),
            data: {
              deletedAt: null,
              email: auth.user.email,
              id: auth.user.id,
              name: updated.name,
            },
            type: 'UserV1',
          },
        ]),
      );
    });
  });
});
server/test/medium/specs/user.service.spec.ts (new file): 116 lines
@@ -0,0 +1,116 @@
import { UserService } from 'src/services/user.service';
import { TestContext, TestFactory } from 'test/factory';
import { getKyselyDB, newTestService } from 'test/utils';

describe.concurrent(UserService.name, () => {
  let sut: UserService;
  let context: TestContext;

  beforeAll(async () => {
    const db = await getKyselyDB();
    context = await TestContext.from(db).withUser({ isAdmin: true }).create();
    ({ sut } = newTestService(UserService, context));
  });

  describe('create', () => {
    it('should create a user', async () => {
      const userDto = TestFactory.user();

      await expect(sut.createUser(userDto)).resolves.toEqual(
        expect.objectContaining({
          id: userDto.id,
          name: userDto.name,
          email: userDto.email,
        }),
      );
    });

    it('should reject user with duplicate email', async () => {
      const userDto = TestFactory.user();
      const userDto2 = TestFactory.user({ email: userDto.email });

      await sut.createUser(userDto);

      await expect(sut.createUser(userDto2)).rejects.toThrow('User exists');
    });

    it('should not return password', async () => {
      const user = await sut.createUser(TestFactory.user());

      expect((user as any).password).toBeUndefined();
    });
  });

  describe('get', () => {
    it('should get a user', async () => {
      const userDto = TestFactory.user();

      await context.createUser(userDto);

      await expect(sut.get(userDto.id)).resolves.toEqual(
        expect.objectContaining({
          id: userDto.id,
          name: userDto.name,
          email: userDto.email,
        }),
      );
    });

    it('should not return password', async () => {
      const { id } = await context.createUser();

      const user = await sut.get(id);

      expect((user as any).password).toBeUndefined();
    });
  });

  describe('updateMe', () => {
    it('should update a user', async () => {
      const userDto = TestFactory.user();
      const sessionDto = TestFactory.session({ userId: userDto.id });
      const authDto = TestFactory.auth({ user: userDto });

      const before = await context.createUser(userDto);
      await context.createSession(sessionDto);

      const newUserDto = TestFactory.user();

      const after = await sut.updateMe(authDto, { name: newUserDto.name, email: newUserDto.email });

      if (!before || !after) {
        expect.fail('User should be found');
      }

      expect(before.updatedAt).toBeDefined();
      expect(after.updatedAt).toBeDefined();
      expect(before.updatedAt).not.toEqual(after.updatedAt);
      expect(after).toEqual(expect.objectContaining({ name: newUserDto.name, email: newUserDto.email }));
    });
  });

  describe('setLicense', () => {
    const userLicense = {
      licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
      activationKey:
        'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
    };

    it('should set a license', async () => {
      const userDto = TestFactory.user();
      const sessionDto = TestFactory.session({ userId: userDto.id });
      const authDto = TestFactory.auth({ user: userDto });

      await context.getFactory().withUser(userDto).withSession(sessionDto).create();

      await expect(sut.getLicense(authDto)).rejects.toThrowError();

      const after = await sut.setLicense(authDto, userLicense);

      expect(after.licenseKey).toEqual(userLicense.licenseKey);
      expect(after.activationKey).toEqual(userLicense.activationKey);

      const getResponse = await sut.getLicense(authDto);
      expect(getResponse).toEqual(after);
    });
  });
});
@@ -1,6 +1,11 @@
+import { Kysely, sql } from 'kysely';
+import { PostgresJSDialect } from 'kysely-postgres-js';
 import { ChildProcessWithoutNullStreams } from 'node:child_process';
 import { Writable } from 'node:stream';
+import { parse } from 'pg-connection-string';
 import { PNG } from 'pngjs';
+import postgres, { Notice } from 'postgres';
+import { DB } from 'src/db';
 import { ImmichWorker } from 'src/enum';
 import { AccessRepository } from 'src/repositories/access.repository';
 import { ActivityRepository } from 'src/repositories/activity.repository';
@@ -90,6 +95,8 @@ import { Mocked, vitest } from 'vitest';
 type Overrides = {
   worker?: ImmichWorker;
   metadataRepository?: MetadataRepository;
+  syncRepository?: SyncRepository;
+  userRepository?: UserRepository;
 };
 type BaseServiceArgs = ConstructorParameters<typeof BaseService>;
 type Constructor<Type, Args extends Array<any>> = {
@@ -144,7 +151,7 @@ export const newTestService = <T extends BaseService>(
   Service: Constructor<T, BaseServiceArgs>,
   overrides?: Overrides,
 ) => {
-  const { metadataRepository } = overrides || {};
+  const { metadataRepository, userRepository, syncRepository } = overrides || {};

   const accessMock = newAccessRepositoryMock();
   const loggerMock = newLoggingRepositoryMock();
@@ -180,12 +187,12 @@ export const newTestService = <T extends BaseService>(
   const sharedLinkMock = newSharedLinkRepositoryMock();
   const stackMock = newStackRepositoryMock();
   const storageMock = newStorageRepositoryMock();
-  const syncMock = newSyncRepositoryMock();
+  const syncMock = (syncRepository || newSyncRepositoryMock()) as Mocked<RepositoryInterface<SyncRepository>>;
   const systemMock = newSystemMetadataRepositoryMock();
   const tagMock = newTagRepositoryMock();
   const telemetryMock = newTelemetryRepositoryMock();
   const trashMock = newTrashRepositoryMock();
-  const userMock = newUserRepositoryMock();
+  const userMock = (userRepository || newUserRepositoryMock()) as Mocked<RepositoryInterface<UserRepository>>;
   const versionHistoryMock = newVersionHistoryRepositoryMock();
   const viewMock = newViewRepositoryMock();

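With these two overrides in place, a TestContext can be passed directly as the overrides argument: it exposes userRepository and syncRepository properties under the expected names, so those two dependencies become real, while everything else keeps its mock. A short sketch of how the new specs use this, condensed from the spec files above:

// Sketch: real repositories for user and sync, mocks for everything else (condensed from
// the changed lines above and the new medium specs; not a complete listing of newTestService).
import { SyncService } from 'src/services/sync.service';
import { TestContext } from 'test/factory';
import { getKyselyDB, newTestService } from 'test/utils';

const db = await getKyselyDB();
const context = await TestContext.from(db).withUser({ isAdmin: true }).create();

// TestContext satisfies Overrides structurally: its userRepository/syncRepository are picked up
// by the destructuring in newTestService, so the service under test hits the real database for
// those two repositories while every other dependency stays mocked.
const { sut } = newTestService(SyncService, context);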
@@ -299,6 +306,57 @@ function* newPngFactory() {

 const pngFactory = newPngFactory();

+export const getKyselyDB = async (suffix?: string): Promise<Kysely<DB>> => {
+  const parsed = parse(process.env.IMMICH_TEST_POSTGRES_URL!);
+
+  const parsedOptions = {
+    ...parsed,
+    ssl: false,
+    host: parsed.host ?? undefined,
+    port: parsed.port ? Number(parsed.port) : undefined,
+    database: parsed.database ?? undefined,
+  };
+
+  const driverOptions = {
+    ...parsedOptions,
+    onnotice: (notice: Notice) => {
+      if (notice['severity'] !== 'NOTICE') {
+        console.warn('Postgres notice:', notice);
+      }
+    },
+    max: 10,
+    types: {
+      date: {
+        to: 1184,
+        from: [1082, 1114, 1184],
+        serialize: (x: Date | string) => (x instanceof Date ? x.toISOString() : x),
+        parse: (x: string) => new Date(x),
+      },
+      bigint: {
+        to: 20,
+        from: [20],
+        parse: (value: string) => Number.parseInt(value),
+        serialize: (value: number) => value.toString(),
+      },
+    },
+    connection: {
+      TimeZone: 'UTC',
+    },
+  };
+
+  const kysely = new Kysely<DB>({
+    dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, max: 1, database: 'postgres' }) }),
+  });
+  const randomSuffix = Math.random().toString(36).slice(2, 7);
+  const dbName = `immich_${suffix ?? randomSuffix}`;
+
+  await sql.raw(`CREATE DATABASE ${dbName} WITH TEMPLATE immich OWNER postgres;`).execute(kysely);
+
+  return new Kysely<DB>({
+    dialect: new PostgresJSDialect({ postgres: postgres({ ...driverOptions, database: dbName }) }),
+  });
+};
+
 export const newRandomImage = () => {
   const { value } = pngFactory.next();
   if (!value) {
@@ -7,6 +7,7 @@ export default defineConfig({
     root: './',
     globals: true,
     include: ['test/medium/**/*.spec.ts'],
+    globalSetup: ['test/medium/globalSetup.ts'],
     server: {
       deps: {
         fallbackCJS: true,