refactor: duplicate queries (#19136)

Jason Rasmussen 2025-06-12 14:23:02 -04:00 committed by GitHub
parent 144cc8ab6d
commit 5cd186d3d4
14 changed files with 280 additions and 270 deletions
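
This refactor moves the duplicate-detection queries out of AssetRepository and SearchRepository into a new DuplicateRepository. As a reading aid for the diff below, here is the call-site migration in sketch form (the query bodies are unchanged; only their home and option names move):

// Call sites before this commit:
//   assetRepository.getDuplicates(userId)
//   searchRepository.searchDuplicates({ assetId, embedding, maxDistance, type, userIds })
//   assetRepository.updateDuplicates({ targetDuplicateId, assetIds, duplicateIds })
//
// Call sites after this commit:
//   duplicateRepository.getAll(userId)
//   duplicateRepository.search({ assetId, embedding, maxDistance, type, userIds })
//   duplicateRepository.merge({ targetId, assetIds, sourceIds })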


@@ -185,16 +185,6 @@ set
where
"id" = any ($2::uuid[])
-- AssetRepository.updateDuplicates
update "assets"
set
"duplicateId" = $1
where
(
"duplicateId" = any ($2::uuid[])
or "id" = any ($3::uuid[])
)
-- AssetRepository.getByChecksum
select
"assets".*
@@ -349,66 +339,6 @@ select
from
"agg"
-- AssetRepository.getDuplicates
with
"duplicates" as (
select
"assets"."duplicateId",
json_agg(
"asset"
order by
"assets"."localDateTime" asc
) as "assets"
from
"assets"
left join lateral (
select
"assets".*,
"exif" as "exifInfo"
from
"exif"
where
"exif"."assetId" = "assets"."id"
) as "asset" on true
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = $1::uuid
and "assets"."duplicateId" is not null
and "assets"."deletedAt" is null
and "assets"."stackId" is null
group by
"assets"."duplicateId"
),
"unique" as (
select
"duplicateId"
from
"duplicates"
where
json_array_length("assets") = $2
),
"removed_unique" as (
update "assets"
set
"duplicateId" = $3
from
"unique"
where
"assets"."duplicateId" = "unique"."duplicateId"
)
select
*
from
"duplicates"
where
not exists (
select
from
"unique"
where
"unique"."duplicateId" = "duplicates"."duplicateId"
)
-- AssetRepository.getAssetIdByCity
with
"cities" as (


@@ -0,0 +1,103 @@
-- NOTE: This file is auto generated by ./sql-generator
-- DuplicateRepository.getAll
with
"duplicates" as (
select
"assets"."duplicateId",
json_agg(
"asset"
order by
"assets"."localDateTime" asc
) as "assets"
from
"assets"
left join lateral (
select
"assets".*,
"exif" as "exifInfo"
from
"exif"
where
"exif"."assetId" = "assets"."id"
) as "asset" on true
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = $1::uuid
and "assets"."duplicateId" is not null
and "assets"."deletedAt" is null
and "assets"."stackId" is null
group by
"assets"."duplicateId"
),
"unique" as (
select
"duplicateId"
from
"duplicates"
where
json_array_length("assets") = $2
),
"removed_unique" as (
update "assets"
set
"duplicateId" = $3
from
"unique"
where
"assets"."duplicateId" = "unique"."duplicateId"
)
select
*
from
"duplicates"
where
not exists (
select
from
"unique"
where
"unique"."duplicateId" = "duplicates"."duplicateId"
)
-- DuplicateRepository.search
begin
set
local vchordrq.probes = 1
with
"cte" as (
select
"assets"."id" as "assetId",
"assets"."duplicateId",
smart_search.embedding <=> $1 as "distance"
from
"assets"
inner join "smart_search" on "assets"."id" = "smart_search"."assetId"
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = any ($2::uuid[])
and "assets"."deletedAt" is null
and "assets"."type" = $3
and "assets"."id" != $4::uuid
and "assets"."stackId" is null
order by
"distance"
limit
$5
)
select
*
from
"cte"
where
"cte"."distance" <= $6
commit
-- DuplicateRepository.merge
update "assets"
set
"duplicateId" = $1
where
(
"duplicateId" = any ($2::uuid[])
or "id" = any ($3::uuid[])
)
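
A note on the generated search query above: the begin/commit wrapper exists because `set local` only lasts until the end of the enclosing transaction, so the repository pins the vector-index probe count and runs the ANN query in a single transaction. A condensed sketch of that pattern, assuming a Kysely `db` handle and a precomputed `embedding` string (the full version is in the DuplicateRepository below):

const rows = await db.transaction().execute(async (trx) => {
  // Scoped to this transaction only; reverts automatically at commit.
  await sql`set local vchordrq.probes = 1`.execute(trx);
  return trx
    .selectFrom('smart_search')
    .select(sql<number>`smart_search.embedding <=> ${embedding}`.as('distance'))
    .orderBy('distance')
    .limit(64)
    .execute();
});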


@@ -102,39 +102,6 @@ offset
$8
commit
-- SearchRepository.searchDuplicates
begin
set
local vchordrq.probes = 1
with
"cte" as (
select
"assets"."id" as "assetId",
"assets"."duplicateId",
smart_search.embedding <=> $1 as "distance"
from
"assets"
inner join "smart_search" on "assets"."id" = "smart_search"."assetId"
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = any ($2::uuid[])
and "assets"."deletedAt" is null
and "assets"."type" = $3
and "assets"."id" != $4::uuid
and "assets"."stackId" is null
order by
"distance"
limit
$5
)
select
*
from
"cte"
where
"cte"."distance" <= $6
commit
-- SearchRepository.searchFaces
begin
set


@@ -5,7 +5,6 @@ import { InjectKysely } from 'nestjs-kysely';
import { Stack } from 'src/database';
import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import {
anyUuid,
@@ -29,13 +28,13 @@ import { globToSqlPattern } from 'src/utils/misc';
export type AssetStats = Record<AssetType, number>;
export interface AssetStatsOptions {
interface AssetStatsOptions {
isFavorite?: boolean;
isTrashed?: boolean;
visibility?: AssetVisibility;
}
export interface LivePhotoSearchOptions {
interface LivePhotoSearchOptions {
ownerId: string;
libraryId?: string | null;
livePhotoCID: string;
@@ -43,16 +42,12 @@ export interface LivePhotoSearchOptions {
type: AssetType;
}
export enum WithProperty {
SIDECAR = 'sidecar',
}
export enum TimeBucketSize {
DAY = 'DAY',
MONTH = 'MONTH',
}
export interface AssetBuilderOptions {
interface AssetBuilderOptions {
isFavorite?: boolean;
isTrashed?: boolean;
isDuplicate?: boolean;
@@ -81,43 +76,31 @@ export interface MonthDay {
month: number;
}
export interface AssetExploreFieldOptions {
interface AssetExploreFieldOptions {
maxFields: number;
minAssetsPerField: number;
}
export interface AssetFullSyncOptions {
interface AssetFullSyncOptions {
ownerId: string;
lastId?: string;
updatedUntil: Date;
limit: number;
}
export interface AssetDeltaSyncOptions {
interface AssetDeltaSyncOptions {
userIds: string[];
updatedAfter: Date;
limit: number;
}
export interface AssetUpdateDuplicateOptions {
targetDuplicateId: string | null;
assetIds: string[];
duplicateIds: string[];
}
export interface UpsertFileOptions {
assetId: string;
type: AssetFileType;
path: string;
}
export interface AssetGetByChecksumOptions {
interface AssetGetByChecksumOptions {
ownerId: string;
checksum: Buffer;
libraryId?: string;
}
export interface GetByIdsRelations {
interface GetByIdsRelations {
exifInfo?: boolean;
faces?: { person?: boolean; withDeleted?: boolean };
files?: boolean;
@@ -128,16 +111,6 @@ export interface GetByIdsRelations {
tags?: boolean;
}
export interface DuplicateGroup {
duplicateId: string;
assets: MapAsset[];
}
export interface DayOfYearAssets {
yearsAgo: number;
assets: MapAsset[];
}
@Injectable()
export class AssetRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@@ -418,19 +391,6 @@ export class AssetRepository {
await this.db.updateTable('assets').set(options).where('libraryId', '=', asUuid(libraryId)).execute();
}
@GenerateSql({
params: [{ targetDuplicateId: DummyValue.UUID, duplicateIds: [DummyValue.UUID], assetIds: [DummyValue.UUID] }],
})
async updateDuplicates(options: AssetUpdateDuplicateOptions): Promise<void> {
await this.db
.updateTable('assets')
.set({ duplicateId: options.targetDuplicateId })
.where((eb) =>
eb.or([eb('duplicateId', '=', anyUuid(options.duplicateIds)), eb('id', '=', anyUuid(options.assetIds))]),
)
.execute();
}
async update(asset: Updateable<Assets> & { id: string }) {
const value = omitBy(asset, isUndefined);
delete value.id;
@@ -696,58 +656,6 @@ export class AssetRepository {
return query.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.UUID] })
getDuplicates(userId: string) {
return (
this.db
.with('duplicates', (qb) =>
qb
.selectFrom('assets')
.$call(withDefaultVisibility)
.leftJoinLateral(
(qb) =>
qb
.selectFrom('exif')
.selectAll('assets')
.select((eb) => eb.table('exif').as('exifInfo'))
.whereRef('exif.assetId', '=', 'assets.id')
.as('asset'),
(join) => join.onTrue(),
)
.select('assets.duplicateId')
.select((eb) =>
eb.fn.jsonAgg('asset').orderBy('assets.localDateTime', 'asc').$castTo<MapAsset[]>().as('assets'),
)
.where('assets.ownerId', '=', asUuid(userId))
.where('assets.duplicateId', 'is not', null)
.$narrowType<{ duplicateId: NotNull }>()
.where('assets.deletedAt', 'is', null)
.where('assets.stackId', 'is', null)
.groupBy('assets.duplicateId'),
)
.with('unique', (qb) =>
qb
.selectFrom('duplicates')
.select('duplicateId')
.where((eb) => eb(eb.fn('json_array_length', ['assets']), '=', 1)),
)
.with('removed_unique', (qb) =>
qb
.updateTable('assets')
.set({ duplicateId: null })
.from('unique')
.whereRef('assets.duplicateId', '=', 'unique.duplicateId'),
)
.selectFrom('duplicates')
.selectAll()
// TODO: compare with filtering by json_array_length > 1
.where(({ not, exists }) =>
not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
)
.execute()
);
}
@GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
async getAssetIdByCity(ownerId: string, { minAssetsPerField, maxFields }: AssetExploreFieldOptions) {
const items = await this.db


@@ -0,0 +1,133 @@
import { Injectable } from '@nestjs/common';
import { Kysely, NotNull, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, VectorIndex } from 'src/enum';
import { probes } from 'src/repositories/database.repository';
import { anyUuid, asUuid, withDefaultVisibility } from 'src/utils/database';
interface DuplicateSearch {
assetId: string;
embedding: string;
maxDistance: number;
type: AssetType;
userIds: string[];
}
interface DuplicateMergeOptions {
targetId: string | null;
assetIds: string[];
sourceIds: string[];
}
@Injectable()
export class DuplicateRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID] })
getAll(userId: string) {
return (
this.db
.with('duplicates', (qb) =>
qb
.selectFrom('assets')
.$call(withDefaultVisibility)
.leftJoinLateral(
(qb) =>
qb
.selectFrom('exif')
.selectAll('assets')
.select((eb) => eb.table('exif').as('exifInfo'))
.whereRef('exif.assetId', '=', 'assets.id')
.as('asset'),
(join) => join.onTrue(),
)
.select('assets.duplicateId')
.select((eb) =>
eb.fn.jsonAgg('asset').orderBy('assets.localDateTime', 'asc').$castTo<MapAsset[]>().as('assets'),
)
.where('assets.ownerId', '=', asUuid(userId))
.where('assets.duplicateId', 'is not', null)
.$narrowType<{ duplicateId: NotNull }>()
.where('assets.deletedAt', 'is', null)
.where('assets.stackId', 'is', null)
.groupBy('assets.duplicateId'),
)
.with('unique', (qb) =>
qb
.selectFrom('duplicates')
.select('duplicateId')
.where((eb) => eb(eb.fn('json_array_length', ['assets']), '=', 1)),
)
.with('removed_unique', (qb) =>
qb
.updateTable('assets')
.set({ duplicateId: null })
.from('unique')
.whereRef('assets.duplicateId', '=', 'unique.duplicateId'),
)
.selectFrom('duplicates')
.selectAll()
// TODO: compare with filtering by json_array_length > 1
.where(({ not, exists }) =>
not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
)
.execute()
);
}
@GenerateSql({
params: [
{
assetId: DummyValue.UUID,
embedding: DummyValue.VECTOR,
maxDistance: 0.6,
type: AssetType.IMAGE,
userIds: [DummyValue.UUID],
},
],
})
search({ assetId, embedding, maxDistance, type, userIds }: DuplicateSearch) {
return this.db.transaction().execute(async (trx) => {
await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx);
return await trx
.with('cte', (qb) =>
qb
.selectFrom('assets')
.$call(withDefaultVisibility)
.select([
'assets.id as assetId',
'assets.duplicateId',
sql<number>`smart_search.embedding <=> ${embedding}`.as('distance'),
])
.innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
.where('assets.ownerId', '=', anyUuid(userIds))
.where('assets.deletedAt', 'is', null)
.where('assets.type', '=', type)
.where('assets.id', '!=', asUuid(assetId))
.where('assets.stackId', 'is', null)
.orderBy('distance')
.limit(64),
)
.selectFrom('cte')
.selectAll()
.where('cte.distance', '<=', maxDistance as number)
.execute();
});
}
@GenerateSql({
params: [{ targetId: DummyValue.UUID, sourceIds: [DummyValue.UUID], assetIds: [DummyValue.UUID] }],
})
async merge(options: DuplicateMergeOptions): Promise<void> {
await this.db
.updateTable('assets')
.set({ duplicateId: options.targetId })
.where((eb) =>
eb.or([eb('duplicateId', '=', anyUuid(options.sourceIds)), eb('id', '=', anyUuid(options.assetIds))]),
)
.execute();
}
}
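
For orientation, a hedged usage sketch of the new API; `duplicateRepository`, `userId`, `asset`, and `targetId` are assumed to be in scope, with result shapes taken from this diff:

const groups = await duplicateRepository.getAll(userId);
// -> Array<{ duplicateId: string; assets: MapAsset[] }>, one entry per duplicate group

const matches = await duplicateRepository.search({
  assetId: asset.id,
  embedding: asset.embedding,
  maxDistance: 0.01,
  type: AssetType.IMAGE,
  userIds: [asset.ownerId],
});
// -> Array<{ assetId: string; duplicateId: string | null; distance: number }>

await duplicateRepository.merge({
  targetId, // the duplicateId every affected asset should end up with
  assetIds: [asset.id, ...matches.map(({ assetId }) => assetId)],
  sourceIds: [], // duplicateIds of existing groups being folded into targetId
});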


@@ -11,6 +11,7 @@ import { CronRepository } from 'src/repositories/cron.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
@@ -56,6 +57,7 @@ export const repositories = [
CryptoRepository,
DatabaseRepository,
DownloadRepository,
DuplicateRepository,
EmailRepository,
EventRepository,
JobRepository,


@@ -7,7 +7,7 @@ import { DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetStatus, AssetType, AssetVisibility, VectorIndex } from 'src/enum';
import { probes } from 'src/repositories/database.repository';
import { anyUuid, asUuid, searchAssetBuilder, withDefaultVisibility } from 'src/utils/database';
import { anyUuid, searchAssetBuilder } from 'src/utils/database';
import { paginationHelper } from 'src/utils/pagination';
import { isValidInteger } from 'src/validation';
@@ -135,14 +135,6 @@ export interface FaceEmbeddingSearch extends SearchEmbeddingOptions {
minBirthDate?: Date | null;
}
export interface AssetDuplicateSearch {
assetId: string;
embedding: string;
maxDistance: number;
type: AssetType;
userIds: string[];
}
export interface FaceSearchResult {
distance: number;
id: string;
@@ -275,46 +267,6 @@ export class SearchRepository {
});
}
@GenerateSql({
params: [
{
assetId: DummyValue.UUID,
embedding: DummyValue.VECTOR,
maxDistance: 0.6,
type: AssetType.IMAGE,
userIds: [DummyValue.UUID],
},
],
})
searchDuplicates({ assetId, embedding, maxDistance, type, userIds }: AssetDuplicateSearch) {
return this.db.transaction().execute(async (trx) => {
await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx);
return await trx
.with('cte', (qb) =>
qb
.selectFrom('assets')
.$call(withDefaultVisibility)
.select([
'assets.id as assetId',
'assets.duplicateId',
sql<number>`smart_search.embedding <=> ${embedding}`.as('distance'),
])
.innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
.where('assets.ownerId', '=', anyUuid(userIds))
.where('assets.deletedAt', 'is', null)
.where('assets.type', '=', type)
.where('assets.id', '!=', asUuid(assetId))
.where('assets.stackId', 'is', null)
.orderBy('distance')
.limit(64),
)
.selectFrom('cte')
.selectAll()
.where('cte.distance', '<=', maxDistance as number)
.execute();
});
}
@GenerateSql({
params: [
{


@@ -18,6 +18,7 @@ import { CronRepository } from 'src/repositories/cron.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
@@ -71,6 +72,7 @@ export class BaseService {
protected cryptoRepository: CryptoRepository,
protected databaseRepository: DatabaseRepository,
protected downloadRepository: DownloadRepository,
protected duplicateRepository: DuplicateRepository,
protected emailRepository: EmailRepository,
protected eventRepository: EventRepository,
protected jobRepository: JobRepository,


@@ -38,7 +38,7 @@ describe(SearchService.name, () => {
describe('getDuplicates', () => {
it('should get duplicates', async () => {
mocks.asset.getDuplicates.mockResolvedValue([
mocks.duplicateRepository.getAll.mockResolvedValue([
{
duplicateId: 'duplicate-id',
assets: [assetStub.image, assetStub.image],
@@ -218,25 +218,26 @@ describe(SearchService.name, () => {
it('should search for duplicates and update asset with duplicateId', async () => {
mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue(hasEmbedding);
mocks.search.searchDuplicates.mockResolvedValue([
mocks.duplicateRepository.search.mockResolvedValue([
{ assetId: assetStub.image.id, distance: 0.01, duplicateId: null },
]);
mocks.duplicateRepository.merge.mockResolvedValue();
const expectedAssetIds = [assetStub.image.id, hasEmbedding.id];
const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id });
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.search.searchDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({
assetId: hasEmbedding.id,
embedding: hasEmbedding.embedding,
maxDistance: 0.01,
type: hasEmbedding.type,
userIds: [hasEmbedding.ownerId],
});
expect(mocks.asset.updateDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.merge).toHaveBeenCalledWith({
assetIds: expectedAssetIds,
targetDuplicateId: expect.any(String),
duplicateIds: [],
targetId: expect.any(String),
sourceIds: [],
});
expect(mocks.asset.upsertJobStatus).toHaveBeenCalledWith(
...expectedAssetIds.map((assetId) => ({ assetId, duplicatesDetectedAt: expect.any(Date) })),
@@ -246,23 +247,24 @@ it('should use existing duplicate ID among matched duplicates', async () => {
it('should use existing duplicate ID among matched duplicates', async () => {
const duplicateId = hasDupe.duplicateId;
mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue(hasEmbedding);
mocks.search.searchDuplicates.mockResolvedValue([{ assetId: hasDupe.id, distance: 0.01, duplicateId }]);
mocks.duplicateRepository.search.mockResolvedValue([{ assetId: hasDupe.id, distance: 0.01, duplicateId }]);
mocks.duplicateRepository.merge.mockResolvedValue();
const expectedAssetIds = [hasEmbedding.id];
const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id });
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.search.searchDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({
assetId: hasEmbedding.id,
embedding: hasEmbedding.embedding,
maxDistance: 0.01,
type: hasEmbedding.type,
userIds: [hasEmbedding.ownerId],
});
expect(mocks.asset.updateDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.merge).toHaveBeenCalledWith({
assetIds: expectedAssetIds,
targetDuplicateId: duplicateId,
duplicateIds: [],
targetId: duplicateId,
sourceIds: [],
});
expect(mocks.asset.upsertJobStatus).toHaveBeenCalledWith(
...expectedAssetIds.map((assetId) => ({ assetId, duplicatesDetectedAt: expect.any(Date) })),
@@ -271,7 +273,7 @@ it('should remove duplicateId if no duplicates found and asset has duplicateId', async () => {
it('should remove duplicateId if no duplicates found and asset has duplicateId', async () => {
mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue(hasDupe);
mocks.search.searchDuplicates.mockResolvedValue([]);
mocks.duplicateRepository.search.mockResolvedValue([]);
const result = await sut.handleSearchDuplicates({ id: hasDupe.id });


@@ -13,7 +13,7 @@ import { isDuplicateDetectionEnabled } from 'src/utils/misc';
@Injectable()
export class DuplicateService extends BaseService {
async getDuplicates(auth: AuthDto): Promise<DuplicateResponseDto[]> {
const duplicates = await this.assetRepository.getDuplicates(auth.user.id);
const duplicates = await this.duplicateRepository.getAll(auth.user.id);
return duplicates.map(({ duplicateId, assets }) => ({
duplicateId,
assets: assets.map((asset) => mapAsset(asset, { auth })),
@@ -74,7 +74,7 @@ export class DuplicateService extends BaseService {
return JobStatus.FAILED;
}
const duplicateAssets = await this.searchRepository.searchDuplicates({
const duplicateAssets = await this.duplicateRepository.search({
assetId: asset.id,
embedding: asset.embedding,
maxDistance: machineLearning.duplicateDetection.maxDistance,
@@ -117,7 +117,11 @@
.map((duplicate) => duplicate.assetId);
assetIdsToUpdate.push(asset.id);
await this.assetRepository.updateDuplicates({ targetDuplicateId, assetIds: assetIdsToUpdate, duplicateIds });
await this.duplicateRepository.merge({
targetId: targetDuplicateId,
assetIds: assetIdsToUpdate,
sourceIds: duplicateIds,
});
return assetIdsToUpdate;
}
}
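
The spec changes earlier in this diff pin down the merge semantics: when a match already belongs to a duplicate group, its duplicateId is reused as targetId; otherwise the service generates a fresh id. A worked illustration with hypothetical ids:

// Asset A (no duplicateId) matches asset B, which already has duplicateId 'dup-1'.
// The service reuses 'dup-1' and assigns it to both assets:
await duplicateRepository.merge({
  targetId: 'dup-1',
  assetIds: ['asset-A', 'asset-B'],
  sourceIds: [], // no whole groups are folded in this case
});
// After this call, any asset whose id is in assetIds, or whose current
// duplicateId is in sourceIds, has duplicateId = 'dup-1'.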


@@ -24,7 +24,6 @@ import {
VideoCodec,
VideoContainer,
} from 'src/enum';
import { UpsertFileOptions } from 'src/repositories/asset.repository';
import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import {
@@ -42,6 +41,11 @@ import { getAssetFiles } from 'src/utils/asset.util';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
import { clamp, isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
interface UpsertFileOptions {
assetId: string;
type: AssetFileType;
path: string;
}
@Injectable()
export class MediaService extends BaseService {


@@ -390,6 +390,7 @@ export const asDeps = (repositories: ServiceOverrides) => {
repositories.crypto || getRepositoryMock('crypto'),
repositories.database || getRepositoryMock('database'),
repositories.downloadRepository,
repositories.duplicateRepository,
repositories.email || getRepositoryMock('email'),
repositories.event,
repositories.job || getRepositoryMock('job'),


@@ -22,7 +22,6 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getLivePhotoCount: vitest.fn(),
getLibraryAssetCount: vitest.fn(),
updateAll: vitest.fn(),
updateDuplicates: vitest.fn(),
getByLibraryIdAndOriginalPath: vitest.fn(),
deleteAll: vitest.fn(),
update: vitest.fn(),
@@ -34,7 +33,6 @@
getAssetIdByCity: vitest.fn(),
getAllForUserFullSync: vitest.fn(),
getChangedDeltaSync: vitest.fn(),
getDuplicates: vitest.fn(),
upsertFile: vitest.fn(),
upsertFiles: vitest.fn(),
deleteFiles: vitest.fn(),


@@ -24,6 +24,7 @@ import { CronRepository } from 'src/repositories/cron.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
@@ -193,6 +194,7 @@ export type ServiceOverrides = {
crypto: CryptoRepository;
database: DatabaseRepository;
downloadRepository: DownloadRepository;
duplicateRepository: DuplicateRepository;
email: EmailRepository;
event: EventRepository;
job: JobRepository;
@@ -260,6 +262,7 @@ export const newTestService = <T extends BaseService>(
config: newConfigRepositoryMock(),
database: newDatabaseRepositoryMock(),
downloadRepository: automock(DownloadRepository, { strict: false }),
duplicateRepository: automock(DuplicateRepository),
email: automock(EmailRepository, { args: [loggerMock] }),
// eslint-disable-next-line no-sparse-arrays
event: automock(EventRepository, { args: [, , loggerMock], strict: false }),
@ -311,6 +314,7 @@ export const newTestService = <T extends BaseService>(
overrides.crypto || (mocks.crypto as As<CryptoRepository>),
overrides.database || (mocks.database as As<DatabaseRepository>),
overrides.downloadRepository || (mocks.downloadRepository as As<DownloadRepository>),
overrides.duplicateRepository || (mocks.duplicateRepository as As<DuplicateRepository>),
overrides.email || (mocks.email as As<EmailRepository>),
overrides.event || (mocks.event as As<EventRepository>),
overrides.job || (mocks.job as As<JobRepository>),