refactor: medium tests (#17599)

Jason Rasmussen 2025-04-15 08:53:14 -04:00 committed by GitHub
parent c5f087a3ca
commit f189c7b101
5 changed files with 625 additions and 132 deletions


@@ -0,0 +1,331 @@
import { ClassConstructor } from 'class-transformer';
import { Insertable, Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { AssetJobStatus, Assets, DB } from 'src/db';
import { AssetType } from 'src/enum';
import { ActivityRepository } from 'src/repositories/activity.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { UserTable } from 'src/schema/tables/user.table';
import { BaseService } from 'src/services/base.service';
import { RepositoryInterface } from 'src/types';
import { newUuid } from 'test/small.factory';
import { automock, ServiceOverrides } from 'test/utils';
import { Mocked } from 'vitest';
// type Repositories = Omit<ServiceOverrides, 'access' | 'telemetry'>;
type Repositories = {
activity: ActivityRepository;
album: AlbumRepository;
asset: AssetRepository;
config: ConfigRepository;
crypto: CryptoRepository;
database: DatabaseRepository;
job: JobRepository;
user: UserRepository;
logger: LoggingRepository;
memory: MemoryRepository;
partner: PartnerRepository;
session: SessionRepository;
systemMetadata: SystemMetadataRepository;
versionHistory: VersionHistoryRepository;
};
type RepositoryMocks = { [K in keyof Repositories]: Mocked<RepositoryInterface<Repositories[K]>> };
type RepositoryOptions = Partial<{ [K in keyof Repositories]: 'mock' | 'real' }>;
type ContextRepositoryMocks<R extends RepositoryOptions> = {
[K in keyof Repositories as R[K] extends 'mock' ? K : never]: Mocked<RepositoryInterface<Repositories[K]>>;
};
type ContextRepositories<R extends RepositoryOptions> = {
[K in keyof Repositories as R[K] extends 'real' ? K : never]: Repositories[K];
};
export type Context<R extends RepositoryOptions, S extends BaseService> = {
sut: S;
mocks: ContextRepositoryMocks<R>;
repos: ContextRepositories<R>;
getRepository<T extends keyof Repositories>(key: T): Repositories[T];
};
export const newMediumService = <R extends RepositoryOptions, S extends BaseService>(
Service: ClassConstructor<S>,
options: {
database: Kysely<DB>;
repos: R;
},
): Context<R, S> => {
const repos: Partial<Repositories> = {};
const mocks: Partial<RepositoryMocks> = {};
const loggerMock = getRepositoryMock('logger') as Mocked<LoggingRepository>;
loggerMock.setContext.mockImplementation(() => {});
repos.logger = loggerMock;
for (const [_key, type] of Object.entries(options.repos)) {
if (type === 'real') {
const key = _key as keyof Repositories;
repos[key] = getRepository(key, options.database) as any;
continue;
}
if (type === 'mock') {
const key = _key as keyof RepositoryMocks;
mocks[key] = getRepositoryMock(key) as any;
continue;
}
}
const makeRepository = <K extends keyof Repositories>(key: K) => {
return repos[key] || getRepository(key, options.database);
};
const deps = asDeps({ ...mocks, ...repos } as ServiceOverrides);
const sut = new Service(...deps);
return {
sut,
mocks,
repos,
getRepository: makeRepository,
} as Context<R, S>;
};
export const getRepository = <K extends keyof Repositories>(key: K, db: Kysely<DB>) => {
switch (key) {
case 'activity': {
return new ActivityRepository(db);
}
case 'asset': {
return new AssetRepository(db);
}
case 'config': {
return new ConfigRepository();
}
case 'crypto': {
return new CryptoRepository();
}
case 'database': {
const configRepo = new ConfigRepository();
return new DatabaseRepository(db, new LoggingRepository(undefined, configRepo), configRepo);
}
case 'logger': {
const configMock = { getEnv: () => ({ noColor: false }) };
return new LoggingRepository(undefined, configMock as ConfigRepository);
}
case 'memory': {
return new MemoryRepository(db);
}
case 'partner': {
return new PartnerRepository(db);
}
case 'session': {
return new SessionRepository(db);
}
case 'systemMetadata': {
return new SystemMetadataRepository(db);
}
case 'user': {
return new UserRepository(db);
}
case 'versionHistory': {
return new VersionHistoryRepository(db);
}
default: {
throw new Error(`Invalid repository key: ${key}`);
}
}
};
const getRepositoryMock = <K extends keyof Repositories>(key: K) => {
switch (key) {
case 'activity': {
return automock(ActivityRepository);
}
case 'album': {
return automock(AlbumRepository);
}
case 'asset': {
return automock(AssetRepository);
}
case 'config': {
return automock(ConfigRepository);
}
case 'crypto': {
return automock(CryptoRepository);
}
case 'database': {
return automock(DatabaseRepository, {
args: [undefined, { setContext: () => {} }, { getEnv: () => ({ database: { vectorExtension: '' } }) }],
});
}
case 'job': {
return automock(JobRepository, { args: [undefined, undefined, undefined, { setContext: () => {} }] });
}
case 'logger': {
const configMock = { getEnv: () => ({ noColor: false }) };
return automock(LoggingRepository, { args: [undefined, configMock], strict: false });
}
case 'memory': {
return automock(MemoryRepository);
}
case 'partner': {
return automock(PartnerRepository);
}
case 'session': {
return automock(SessionRepository);
}
case 'systemMetadata': {
return automock(SystemMetadataRepository);
}
case 'user': {
return automock(UserRepository);
}
case 'versionHistory': {
return automock(VersionHistoryRepository);
}
default: {
throw new Error(`Invalid repository key: ${key}`);
}
}
};
export const asDeps = (repositories: ServiceOverrides) => {
return [
repositories.logger || getRepositoryMock('logger'), // logger
repositories.access, // access
repositories.activity || getRepositoryMock('activity'),
repositories.album || getRepositoryMock('album'),
repositories.albumUser,
repositories.apiKey,
repositories.asset || getRepositoryMock('asset'),
repositories.audit,
repositories.config || getRepositoryMock('config'),
repositories.cron,
repositories.crypto || getRepositoryMock('crypto'),
repositories.database || getRepositoryMock('database'),
repositories.downloadRepository,
repositories.event,
repositories.job || getRepositoryMock('job'),
repositories.library,
repositories.machineLearning,
repositories.map,
repositories.media,
repositories.memory || getRepositoryMock('memory'),
repositories.metadata,
repositories.move,
repositories.notification,
repositories.oauth,
repositories.partner || getRepositoryMock('partner'),
repositories.person,
repositories.process,
repositories.search,
repositories.serverInfo,
repositories.session || getRepositoryMock('session'),
repositories.sharedLink,
repositories.stack,
repositories.storage,
repositories.sync,
repositories.systemMetadata || getRepositoryMock('systemMetadata'),
repositories.tag,
repositories.telemetry,
repositories.trash,
repositories.user,
repositories.versionHistory || getRepositoryMock('versionHistory'),
repositories.view,
];
};
const assetInsert = (asset: Partial<Insertable<Assets>> = {}) => {
const id = asset.id || newUuid();
const defaults: Insertable<Assets> = {
deviceAssetId: '',
deviceId: '',
originalFileName: '',
checksum: randomBytes(32),
type: AssetType.IMAGE,
originalPath: '/path/to/something.jpg',
ownerId: '@immich.cloud',
isVisible: true,
};
return {
...defaults,
...asset,
id,
};
};
const assetJobStatusInsert = (
job: Partial<Insertable<AssetJobStatus>> & { assetId: string },
): Insertable<AssetJobStatus> => {
const date = DateTime.now().minus({ days: 15 }).toISO();
const defaults: Omit<Insertable<AssetJobStatus>, 'assetId'> = {
duplicatesDetectedAt: date,
facesRecognizedAt: date,
metadataExtractedAt: date,
previewAt: date,
thumbnailAt: date,
};
return {
...defaults,
...job,
};
};
const userInsert = (user: Partial<Insertable<UserTable>> = {}) => {
const id = user.id || newUuid();
const defaults: Insertable<UserTable> = {
email: `${id}@immich.cloud`,
name: `User ${id}`,
deletedAt: null,
};
return { ...defaults, ...user, id };
};
export const mediumFactory = {
assetInsert,
assetJobStatusInsert,
userInsert,
};
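
A minimal usage sketch (not part of the diff): newMediumService builds a service under test from a mix of real and mocked repositories, mirroring the createSut helpers in the test files below. The repository choices, test names, and assertions here are illustrative assumptions only.

// Hypothetical medium test illustrating the factory above; it is a sketch, not part of this commit.
import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { MemoryService } from 'src/services/memory.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';

describe('newMediumService (usage sketch)', () => {
  let db: Kysely<DB>;

  beforeAll(async () => {
    db = await getKyselyDB();
  });

  it('wires real and mocked repositories into the service under test', async () => {
    const { sut, repos, mocks, getRepository } = newMediumService(MemoryService, {
      database: db,
      repos: { memory: 'real', user: 'real', job: 'mock' },
    });

    // Repositories marked 'real' are exposed on `repos`; 'mock' ones on `mocks`.
    const user = mediumFactory.userInsert();
    await repos.user.create(user);

    // getRepository() falls back to a real instance for keys not listed in `repos`.
    const assetRepo = getRepository('asset');
    expect(assetRepo).toBeDefined();
    expect(mocks.job.queue).toBeDefined();
    expect(sut).toBeInstanceOf(MemoryService);
  });
});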


@@ -1,67 +1,74 @@
import { TestContext, TestFactory } from 'test/factory';
import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { AssetRepository } from 'src/repositories/asset.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { partners_delete_audit } from 'src/schema/functions';
import { mediumFactory } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
describe('audit', () => {
let context: TestContext;
let defaultDatabase: Kysely<DB>;
let assetRepo: AssetRepository;
let userRepo: UserRepository;
let partnerRepo: PartnerRepository;
beforeAll(async () => {
const db = await getKyselyDB();
context = await TestContext.from(db).create();
defaultDatabase = await getKyselyDB();
assetRepo = new AssetRepository(defaultDatabase);
userRepo = new UserRepository(defaultDatabase);
partnerRepo = new PartnerRepository(defaultDatabase);
});
describe('partners_audit', () => {
describe(partners_delete_audit.name, () => {
it('should not cascade user deletes to partners_audit', async () => {
const user1 = TestFactory.user();
const user2 = TestFactory.user();
const user1 = mediumFactory.userInsert();
const user2 = mediumFactory.userInsert();
await context
.getFactory()
.withUser(user1)
.withUser(user2)
.withPartner({ sharedById: user1.id, sharedWithId: user2.id })
.create();
await context.user.delete(user1, true);
await Promise.all([userRepo.create(user1), userRepo.create(user2)]);
await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id });
await userRepo.delete(user1, true);
await expect(
context.db.selectFrom('partners_audit').select(['id']).where('sharedById', '=', user1.id).execute(),
defaultDatabase.selectFrom('partners_audit').select(['id']).where('sharedById', '=', user1.id).execute(),
).resolves.toHaveLength(0);
});
});
describe('assets_audit', () => {
it('should not cascade user deletes to assets_audit', async () => {
const user = TestFactory.user();
const asset = TestFactory.asset({ ownerId: user.id });
const user = mediumFactory.userInsert();
const asset = mediumFactory.assetInsert({ ownerId: user.id });
await context.getFactory().withUser(user).withAsset(asset).create();
await context.user.delete(user, true);
await userRepo.create(user);
await assetRepo.create(asset);
await userRepo.delete(user, true);
await expect(
context.db.selectFrom('assets_audit').select(['id']).where('assetId', '=', asset.id).execute(),
defaultDatabase.selectFrom('assets_audit').select(['id']).where('assetId', '=', asset.id).execute(),
).resolves.toHaveLength(0);
});
});
describe('exif', () => {
it('should automatically set updatedAt and updateId when the row is updated', async () => {
const user = TestFactory.user();
const asset = TestFactory.asset({ ownerId: user.id });
const exif = { assetId: asset.id, make: 'Canon' };
const user = mediumFactory.userInsert();
const asset = mediumFactory.assetInsert({ ownerId: user.id });
await context.getFactory().withUser(user).withAsset(asset).create();
await context.asset.upsertExif(exif);
await userRepo.create(user);
await assetRepo.create(asset);
await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' });
const before = await context.db
const before = await defaultDatabase
.selectFrom('exif')
.select(['updatedAt', 'updateId'])
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow();
await context.asset.upsertExif({ assetId: asset.id, make: 'Canon 2' });
await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon 2' });
const after = await context.db
const after = await defaultDatabase
.selectFrom('exif')
.select(['updatedAt', 'updateId'])
.where('assetId', '=', asset.id)


@@ -0,0 +1,142 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { DB } from 'src/db';
import { AssetFileType } from 'src/enum';
import { UserRepository } from 'src/repositories/user.repository';
import { MemoryService } from 'src/services/memory.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
describe(MemoryService.name, () => {
let defaultDatabase: Kysely<DB>;
const createSut = (db?: Kysely<DB>) => {
return newMediumService(MemoryService, {
database: db || defaultDatabase,
repos: {
asset: 'real',
memory: 'real',
user: 'real',
systemMetadata: 'real',
partner: 'real',
},
});
};
beforeEach(async () => {
defaultDatabase = await getKyselyDB();
const userRepo = new UserRepository(defaultDatabase);
const admin = mediumFactory.userInsert({ isAdmin: true });
await userRepo.create(admin);
});
describe('onMemoryCreate', () => {
it('should work on an empty database', async () => {
const { sut } = createSut();
await expect(sut.onMemoriesCreate()).resolves.not.toThrow();
});
it('should create a memory from an asset', async () => {
const { sut, repos, getRepository } = createSut();
const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' });
const user = mediumFactory.userInsert();
const asset = mediumFactory.assetInsert({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
const jobStatus = mediumFactory.assetJobStatusInsert({ assetId: asset.id });
const userRepo = getRepository('user');
const assetRepo = getRepository('asset');
await userRepo.create(user);
await assetRepo.create(asset);
await Promise.all([
assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }),
assetRepo.upsertFiles([
{ assetId: asset.id, type: AssetFileType.PREVIEW, path: '/path/to/preview.jpg' },
{ assetId: asset.id, type: AssetFileType.THUMBNAIL, path: '/path/to/thumbnail.jpg' },
]),
assetRepo.upsertJobStatus(jobStatus),
]);
vi.setSystemTime(now.toJSDate());
await sut.onMemoriesCreate();
const memories = await repos.memory.search(user.id, {});
expect(memories.length).toBe(1);
expect(memories[0]).toEqual(
expect.objectContaining({
id: expect.any(String),
createdAt: expect.any(Date),
memoryAt: expect.any(Date),
updatedAt: expect.any(Date),
deletedAt: null,
ownerId: user.id,
assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
isSaved: false,
showAt: now.startOf('day').toJSDate(),
hideAt: now.endOf('day').toJSDate(),
seenAt: null,
type: 'on_this_day',
data: { year: 2024 },
}),
);
});
it('should not generate a memory twice for the same day', async () => {
const { sut, repos, getRepository } = createSut();
const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' });
const assetRepo = getRepository('asset');
const memoryRepo = getRepository('memory');
const user = mediumFactory.userInsert();
await repos.user.create(user);
for (const dto of [
{
ownerId: user.id,
localDateTime: now.minus({ year: 1 }).plus({ days: 3 }).toISO(),
},
{
ownerId: user.id,
localDateTime: now.minus({ year: 1 }).plus({ days: 4 }).toISO(),
},
{
ownerId: user.id,
localDateTime: now.minus({ year: 1 }).plus({ days: 5 }).toISO(),
},
]) {
const asset = mediumFactory.assetInsert(dto);
await assetRepo.create(asset);
await Promise.all([
assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }),
assetRepo.upsertJobStatus(mediumFactory.assetJobStatusInsert({ assetId: asset.id })),
assetRepo.upsertFiles([
{ assetId: asset.id, type: AssetFileType.PREVIEW, path: '/path/to/preview.jpg' },
{ assetId: asset.id, type: AssetFileType.THUMBNAIL, path: '/path/to/thumbnail.jpg' },
]),
]);
}
vi.setSystemTime(now.toJSDate());
await sut.onMemoriesCreate();
const memories = await memoryRepo.search(user.id, {});
expect(memories.length).toBe(1);
await sut.onMemoriesCreate();
const memoriesAfter = await memoryRepo.search(user.id, {});
expect(memoriesAfter.length).toBe(1);
});
});
describe('onMemoriesCleanup', () => {
it('should run without error', async () => {
const { sut } = createSut();
await expect(sut.onMemoriesCleanup()).resolves.not.toThrow();
});
});
});


@@ -1,51 +1,60 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { DB } from 'src/db';
import { JobName, JobStatus } from 'src/enum';
import { ImmichEnvironment, JobName, JobStatus } from 'src/enum';
import { UserService } from 'src/services/user.service';
import { TestContext, TestFactory } from 'test/factory';
import { getKyselyDB, newTestService, ServiceMocks } from 'test/utils';
const setup = async (db: Kysely<DB>) => {
const context = await TestContext.from(db).withUser({ isAdmin: true }).create();
const { sut, mocks } = newTestService(UserService, context);
return { sut, mocks, context };
};
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
describe(UserService.name, () => {
let sut: UserService;
let context: TestContext;
let mocks: ServiceMocks;
let defaultDatabase: Kysely<DB>;
const createSut = (db?: Kysely<DB>) => {
process.env.IMMICH_ENV = ImmichEnvironment.TESTING;
return newMediumService(UserService, {
database: db || defaultDatabase,
repos: {
user: 'real',
crypto: 'real',
config: 'real',
job: 'mock',
systemMetadata: 'real',
},
});
};
beforeAll(async () => {
({ sut, context, mocks } = await setup(await getKyselyDB()));
defaultDatabase = await getKyselyDB();
const { repos } = createSut();
await repos.user.create({ isAdmin: true, email: 'admin@immich.cloud' });
});
describe('create', () => {
it('should create a user', async () => {
const userDto = TestFactory.user();
const { sut } = createSut();
const user = mediumFactory.userInsert();
await expect(sut.createUser(userDto)).resolves.toEqual(
expect.objectContaining({
id: userDto.id,
name: userDto.name,
email: userDto.email,
}),
await expect(sut.createUser({ name: user.name, email: user.email })).resolves.toEqual(
expect.objectContaining({ name: user.name, email: user.email }),
);
});
it('should reject user with duplicate email', async () => {
const userDto = TestFactory.user();
const userDto2 = TestFactory.user({ email: userDto.email });
const { sut } = createSut();
await sut.createUser(userDto);
const user = mediumFactory.userInsert();
await expect(sut.createUser(userDto2)).rejects.toThrow('User exists');
await expect(sut.createUser({ email: user.email })).resolves.toMatchObject({ email: user.email });
await expect(sut.createUser({ email: user.email })).rejects.toThrow('User exists');
});
it('should not return password', async () => {
const user = await sut.createUser(TestFactory.user());
const { sut } = createSut();
const dto = mediumFactory.userInsert({ password: 'password' });
const user = await sut.createUser({ email: dto.email, password: 'password' });
expect((user as any).password).toBeUndefined();
});
@@ -53,79 +62,72 @@ describe(UserService.name, () => {
describe('get', () => {
it('should get a user', async () => {
const userDto = TestFactory.user();
const { sut, repos } = createSut();
const user = mediumFactory.userInsert();
await context.createUser(userDto);
await repos.user.create(user);
await expect(sut.get(userDto.id)).resolves.toEqual(
await expect(sut.get(user.id)).resolves.toEqual(
expect.objectContaining({
id: userDto.id,
name: userDto.name,
email: userDto.email,
id: user.id,
name: user.name,
email: user.email,
}),
);
});
it('should not return password', async () => {
const { id } = await context.createUser();
const { sut, repos } = createSut();
const user = mediumFactory.userInsert();
const user = await sut.get(id);
await repos.user.create(user);
expect((user as any).password).toBeUndefined();
const result = await sut.get(user.id);
expect((result as any).password).toBeUndefined();
});
});
describe('updateMe', () => {
it('should update a user', async () => {
const userDto = TestFactory.user();
const sessionDto = TestFactory.session({ userId: userDto.id });
const authDto = TestFactory.auth({ user: userDto });
const { sut, repos: repositories } = createSut();
const before = await context.createUser(userDto);
await context.createSession(sessionDto);
const newUserDto = TestFactory.user();
const after = await sut.updateMe(authDto, { name: newUserDto.name, email: newUserDto.email });
if (!before || !after) {
expect.fail('User should be found');
}
const before = await repositories.user.create(mediumFactory.userInsert());
const auth = factory.auth({ user: { id: before.id } });
const after = await sut.updateMe(auth, { name: `${before.name} Updated` });
expect(before.updatedAt).toBeDefined();
expect(after.updatedAt).toBeDefined();
expect(before.updatedAt).not.toEqual(after.updatedAt);
expect(after).toEqual(expect.objectContaining({ name: newUserDto.name, email: newUserDto.email }));
});
});
describe('setLicense', () => {
const userLicense = {
licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
activationKey:
'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
};
it('should set a license', async () => {
const userDto = TestFactory.user();
const sessionDto = TestFactory.session({ userId: userDto.id });
const authDto = TestFactory.auth({ user: userDto });
const license = {
licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
activationKey:
'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
};
const { sut, repos } = createSut();
const user = mediumFactory.userInsert();
await repos.user.create(user);
const auth = factory.auth({ user: { id: user.id } });
await context.getFactory().withUser(userDto).withSession(sessionDto).create();
await expect(sut.getLicense(auth)).rejects.toThrowError();
const after = await sut.setLicense(auth, license);
await expect(sut.getLicense(authDto)).rejects.toThrowError();
expect(after.licenseKey).toEqual(license.licenseKey);
expect(after.activationKey).toEqual(license.activationKey);
const after = await sut.setLicense(authDto, userLicense);
expect(after.licenseKey).toEqual(userLicense.licenseKey);
expect(after.activationKey).toEqual(userLicense.activationKey);
const getResponse = await sut.getLicense(authDto);
const getResponse = await sut.getLicense(auth);
expect(getResponse).toEqual(after);
});
});
describe.sequential('handleUserDeleteCheck', () => {
beforeEach(async () => {
const { sut } = createSut();
// These tests specifically have to be sequential otherwise we hit race conditions with config changes applying in incorrect tests
const config = await sut.getConfig({ withCache: false });
config.user.deleteDelay = 7;
@@ -133,16 +135,19 @@ describe(UserService.name, () => {
});
it('should work when there are no deleted users', async () => {
const { sut, mocks } = createSut();
mocks.job.queueAll.mockResolvedValue(void 0);
await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS);
expect(mocks.job.queueAll).toHaveBeenCalledExactlyOnceWith([]);
});
it('should work when there is a user to delete', async () => {
const { sut, context, mocks } = await setup(await getKyselyDB());
const user = TestFactory.user({ deletedAt: DateTime.now().minus({ days: 60 }).toJSDate() });
await context.createUser(user);
const { sut, repos, mocks } = createSut(await getKyselyDB());
mocks.job.queueAll.mockResolvedValue(void 0);
const user = mediumFactory.userInsert({ deletedAt: DateTime.now().minus({ days: 60 }).toJSDate() });
await repos.user.create(user);
await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS);
@@ -152,10 +157,10 @@ describe(UserService.name, () => {
});
it('should skip a recently deleted user', async () => {
const { sut, context, mocks } = await setup(await getKyselyDB());
const user = TestFactory.user({ deletedAt: DateTime.now().minus({ days: 5 }).toJSDate() });
await context.createUser(user);
const { sut, repos, mocks } = createSut(await getKyselyDB());
mocks.job.queueAll.mockResolvedValue(void 0);
const user = mediumFactory.userInsert({ deletedAt: DateTime.now().minus({ days: 5 }).toJSDate() });
await repos.user.create(user);
await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS);
@@ -163,10 +168,10 @@ describe(UserService.name, () => {
});
it('should respect a custom user delete delay', async () => {
const db = await getKyselyDB();
const { sut, context, mocks } = await setup(db);
const user = TestFactory.user({ deletedAt: DateTime.now().minus({ days: 25 }).toJSDate() });
await context.createUser(user);
const { sut, repos, mocks } = createSut(await getKyselyDB());
mocks.job.queueAll.mockResolvedValue(void 0);
const user = mediumFactory.userInsert({ deletedAt: DateTime.now().minus({ days: 25 }).toJSDate() });
await repos.user.create(user);
const config = await sut.getConfig({ withCache: false });
config.user.deleteDelay = 30;


@@ -1,32 +1,39 @@
import { Kysely } from 'kysely';
import { serverVersion } from 'src/constants';
import { DB } from 'src/db';
import { JobName } from 'src/enum';
import { VersionService } from 'src/services/version.service';
import { TestContext } from 'test/factory';
import { getKyselyDB, newTestService } from 'test/utils';
const setup = async () => {
const db = await getKyselyDB();
const context = await TestContext.from(db).create();
const { sut, mocks } = newTestService(VersionService, context);
return {
context,
sut,
jobMock: mocks.job,
};
};
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
describe(VersionService.name, () => {
describe.concurrent('onBootstrap', () => {
it('record the current version on startup', async () => {
const { context, sut } = await setup();
let defaultDatabase: Kysely<DB>;
const itemsBefore = await context.versionHistory.getAll();
const setup = (db?: Kysely<DB>) => {
return newMediumService(VersionService, {
database: db || defaultDatabase,
repos: {
job: 'mock',
database: 'real',
versionHistory: 'real',
},
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe('onBootstrap', () => {
it('record the current version on startup', async () => {
const { sut, repos } = setup();
const itemsBefore = await repos.versionHistory.getAll();
expect(itemsBefore).toHaveLength(0);
await sut.onBootstrap();
const itemsAfter = await context.versionHistory.getAll();
const itemsAfter = await repos.versionHistory.getAll();
expect(itemsAfter).toHaveLength(1);
expect(itemsAfter[0]).toEqual({
createdAt: expect.any(Date),
@@ -36,21 +43,22 @@ describe(VersionService.name, () => {
});
it('should queue memory creation when upgrading from 1.128.0', async () => {
const { context, jobMock, sut } = await setup();
const { sut, repos, mocks } = setup();
mocks.job.queue.mockResolvedValue(void 0);
await context.versionHistory.create({ version: 'v1.128.0' });
await repos.versionHistory.create({ version: 'v1.128.0' });
await sut.onBootstrap();
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.MEMORIES_CREATE });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.MEMORIES_CREATE });
});
it('should not queue memory creation when upgrading from 1.129.0', async () => {
const { context, jobMock, sut } = await setup();
const { sut, repos, mocks } = setup();
await context.versionHistory.create({ version: 'v1.129.0' });
await repos.versionHistory.create({ version: 'v1.129.0' });
await sut.onBootstrap();
expect(jobMock.queue).not.toHaveBeenCalled();
expect(mocks.job.queue).not.toHaveBeenCalled();
});
});
});