feat(server): library refresh go brrr (#14456)

* feat: brr

---------
Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
This commit is contained in:
Jonathan Jogenfors 2025-03-06 16:00:18 +01:00 committed by GitHub
parent bc61497461
commit 3af26ee94a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 855 additions and 531 deletions

View File

@ -37,7 +37,7 @@ To validate that Immich can reach your external library, start a shell inside th
### Exclusion Patterns ### Exclusion Patterns
By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library. Under the hood, Immich uses the [glob](https://www.npmjs.com/package/glob) package to match patterns, so please refer to [their documentation](https://github.com/isaacs/node-glob#glob-primer) to see what patterns are supported. By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library.
Some basic examples: Some basic examples:
@ -48,7 +48,11 @@ Some basic examples:
Special characters such as @ should be escaped, for instance: Special characters such as @ should be escaped, for instance:
- `**/\@eadir/**` will exclude all files in any directory named `@eadir` - `**/\@eaDir/**` will exclude all files in any directory named `@eaDir`
:::info
Internally, Immich uses the [glob](https://www.npmjs.com/package/glob) package to process exclusion patterns, and sometimes those patterns are translated into [Postgres LIKE patterns](https://www.postgresql.org/docs/current/functions-matching.html). The intention is to support basic folder exclusions but we recommend against advanced usage since those can't reliably be translated to the Postgres syntax. Please refer to the [glob documentation](https://github.com/isaacs/node-glob#glob-primer) for a basic overview on glob patterns.
:::
### Automatic watching (EXPERIMENTAL) ### Automatic watching (EXPERIMENTAL)

View File

@ -490,7 +490,7 @@ describe('/libraries', () => {
utils.removeImageFile(`${testAssetDir}/temp/reimport/asset.jpg`); utils.removeImageFile(`${testAssetDir}/temp/reimport/asset.jpg`);
}); });
it('should not reimport unmodified files', async () => { it('should not reimport a file with unchanged timestamp', async () => {
const library = await utils.createLibrary(admin.accessToken, { const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId, ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/reimport`], importPaths: [`${testAssetDirInternal}/temp/reimport`],
@ -933,6 +933,8 @@ describe('/libraries', () => {
const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`);
await utils.scan(admin.accessToken, library.id); await utils.scan(admin.accessToken, library.id);
@ -963,6 +965,58 @@ describe('/libraries', () => {
} }
}); });
it('should set a trashed offline asset to online but keep it in trash', async () => {
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
await utils.scan(admin.accessToken, library.id);
const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
await utils.deleteAssets(admin.accessToken, [assets.items[0].id]);
{
const trashedAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
expect(trashedAsset.isTrashed).toBe(true);
}
utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`);
await utils.scan(admin.accessToken, library.id);
const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
expect(offlineAsset.isTrashed).toBe(true);
expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`);
expect(offlineAsset.isOffline).toBe(true);
{
const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true });
expect(assets.count).toBe(1);
}
utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`);
await utils.scan(admin.accessToken, library.id);
const backOnlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
expect(backOnlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`);
expect(backOnlineAsset.isOffline).toBe(false);
expect(backOnlineAsset.isTrashed).toBe(true);
{
const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true });
expect(assets.count).toBe(1);
}
});
it('should not set an offline asset to online if its file exists, is not covered by an exclusion pattern, but is outside of all import paths', async () => { it('should not set an offline asset to online if its file exists, is not covered by an exclusion pattern, but is outside of all import paths', async () => {
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
@ -1024,16 +1078,17 @@ describe('/libraries', () => {
await utils.scan(admin.accessToken, library.id); await utils.scan(admin.accessToken, library.id);
const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); {
const { assets: assetsBefore } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
expect(assetsBefore.count).toBe(1);
}
utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`);
await utils.scan(admin.accessToken, library.id); await utils.scan(admin.accessToken, library.id);
{ const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true });
const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); expect(assets.count).toBe(1);
expect(assets.count).toBe(1);
}
const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);

View File

@ -57,6 +57,7 @@ export default [
'unicorn/no-thenable': 'off', 'unicorn/no-thenable': 'off',
'unicorn/import-style': 'off', 'unicorn/import-style': 'off',
'unicorn/prefer-structured-clone': 'off', 'unicorn/prefer-structured-clone': 'off',
'unicorn/no-for-loop': 'off',
'@typescript-eslint/await-thenable': 'error', '@typescript-eslint/await-thenable': 'error',
'@typescript-eslint/no-misused-promises': 'error', '@typescript-eslint/no-misused-promises': 'error',
'require-await': 'off', 'require-await': 'off',

View File

@ -469,8 +469,9 @@ export enum JobName {
// library management // library management
LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files', LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files',
LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets', LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets',
LIBRARY_SYNC_FILE = 'library-sync-file', LIBRARY_SYNC_FILES = 'library-sync-files',
LIBRARY_SYNC_ASSET = 'library-sync-asset', LIBRARY_SYNC_ASSETS = 'library-sync-assets',
LIBRARY_ASSET_REMOVAL = 'handle-library-file-deletion',
LIBRARY_DELETE = 'library-delete', LIBRARY_DELETE = 'library-delete',
LIBRARY_QUEUE_SCAN_ALL = 'library-queue-scan-all', LIBRARY_QUEUE_SCAN_ALL = 'library-queue-scan-all',
LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup', LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup',

View File

@ -1,5 +1,5 @@
import { Injectable } from '@nestjs/common'; import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, Updateable, sql } from 'kysely'; import { Insertable, Kysely, UpdateResult, Updateable, sql } from 'kysely';
import { isEmpty, isUndefined, omitBy } from 'lodash'; import { isEmpty, isUndefined, omitBy } from 'lodash';
import { InjectKysely } from 'nestjs-kysely'; import { InjectKysely } from 'nestjs-kysely';
import { ASSET_FILE_CONFLICT_KEYS, EXIF_CONFLICT_KEYS, JOB_STATUS_CONFLICT_KEYS } from 'src/constants'; import { ASSET_FILE_CONFLICT_KEYS, EXIF_CONFLICT_KEYS, JOB_STATUS_CONFLICT_KEYS } from 'src/constants';
@ -24,7 +24,8 @@ import {
} from 'src/entities/asset.entity'; } from 'src/entities/asset.entity';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum'; import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository'; import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import { anyUuid, asUuid, mapUpsertColumns } from 'src/utils/database'; import { anyUuid, asUuid, mapUpsertColumns, unnest } from 'src/utils/database';
import { globToSqlPattern } from 'src/utils/misc';
import { Paginated, PaginationOptions, paginationHelper } from 'src/utils/pagination'; import { Paginated, PaginationOptions, paginationHelper } from 'src/utils/pagination';
export type AssetStats = Record<AssetType, number>; export type AssetStats = Record<AssetType, number>;
@ -191,6 +192,10 @@ export class AssetRepository {
.executeTakeFirst() as any as Promise<AssetEntityPlaceholder>; .executeTakeFirst() as any as Promise<AssetEntityPlaceholder>;
} }
createAll(assets: Insertable<Assets>[]): Promise<AssetEntity[]> {
return this.db.insertInto('assets').values(assets).returningAll().execute() as any as Promise<AssetEntity[]>;
}
@GenerateSql({ params: [DummyValue.UUID, { day: 1, month: 1 }] }) @GenerateSql({ params: [DummyValue.UUID, { day: 1, month: 1 }] })
getByDayOfYear(ownerIds: string[], { day, month }: MonthDay) { getByDayOfYear(ownerIds: string[], { day, month }: MonthDay) {
return this.db return this.db
@ -384,6 +389,17 @@ export class AssetRepository {
return paginationHelper(items as any as AssetEntity[], pagination.take); return paginationHelper(items as any as AssetEntity[], pagination.take);
} }
async getAllInLibrary(pagination: PaginationOptions, libraryId: string): Paginated<AssetEntity> {
const builder = this.db
.selectFrom('assets')
.select('id')
.where('libraryId', '=', asUuid(libraryId))
.limit(pagination.take + 1)
.offset(pagination.skip ?? 0);
const items = await builder.execute();
return paginationHelper(items as any as AssetEntity[], pagination.take);
}
/** /**
* Get assets by device's Id on the database * Get assets by device's Id on the database
* @param ownerId * @param ownerId
@ -470,6 +486,10 @@ export class AssetRepository {
await this.db.updateTable('assets').set(options).where('id', '=', anyUuid(ids)).execute(); await this.db.updateTable('assets').set(options).where('id', '=', anyUuid(ids)).execute();
} }
async updateByLibraryId(libraryId: string, options: Updateable<Assets>): Promise<void> {
await this.db.updateTable('assets').set(options).where('libraryId', '=', asUuid(libraryId)).execute();
}
@GenerateSql({ @GenerateSql({
params: [{ targetDuplicateId: DummyValue.UUID, duplicateIds: [DummyValue.UUID], assetIds: [DummyValue.UUID] }], params: [{ targetDuplicateId: DummyValue.UUID, duplicateIds: [DummyValue.UUID], assetIds: [DummyValue.UUID] }],
}) })
@ -939,4 +959,64 @@ export class AssetRepository {
) )
.execute(); .execute();
} }
@GenerateSql({
params: [{ libraryId: DummyValue.UUID, importPaths: [DummyValue.STRING], exclusionPatterns: [DummyValue.STRING] }],
})
async detectOfflineExternalAssets(
libraryId: string,
importPaths: string[],
exclusionPatterns: string[],
): Promise<UpdateResult> {
const paths = importPaths.map((importPath) => `${importPath}%`);
const exclusions = exclusionPatterns.map((pattern) => globToSqlPattern(pattern));
return this.db
.updateTable('assets')
.set({
isOffline: true,
deletedAt: new Date(),
})
.where('isOffline', '=', false)
.where('isExternal', '=', true)
.where('libraryId', '=', asUuid(libraryId))
.where((eb) =>
eb.or([eb('originalPath', 'not like', paths.join('|')), eb('originalPath', 'like', exclusions.join('|'))]),
)
.executeTakeFirstOrThrow();
}
@GenerateSql({
params: [{ libraryId: DummyValue.UUID, paths: [DummyValue.STRING] }],
})
async filterNewExternalAssetPaths(libraryId: string, paths: string[]): Promise<string[]> {
const result = await this.db
.selectFrom(unnest(paths).as('path'))
.select('path')
.where((eb) =>
eb.not(
eb.exists(
this.db
.selectFrom('assets')
.select('originalPath')
.whereRef('assets.originalPath', '=', eb.ref('path'))
.where('libraryId', '=', asUuid(libraryId))
.where('isExternal', '=', true),
),
),
)
.execute();
return result.map((row) => row.path as string);
}
async getLibraryAssetCount(libraryId: string): Promise<number> {
const { count } = await this.db
.selectFrom('assets')
.select((eb) => eb.fn.countAll().as('count'))
.where('libraryId', '=', asUuid(libraryId))
.executeTakeFirstOrThrow();
return Number(count);
}
} }

View File

@ -26,6 +26,13 @@ const userColumns = [
'users.profileChangedAt', 'users.profileChangedAt',
] as const; ] as const;
export enum AssetSyncResult {
DO_NOTHING,
UPDATE,
OFFLINE,
CHECK_OFFLINE,
}
const withOwner = (eb: ExpressionBuilder<DB, 'libraries'>) => { const withOwner = (eb: ExpressionBuilder<DB, 'libraries'>) => {
return jsonObjectFrom(eb.selectFrom('users').whereRef('users.id', '=', 'libraries.ownerId').select(userColumns)).as( return jsonObjectFrom(eb.selectFrom('users').whereRef('users.id', '=', 'libraries.ownerId').select(userColumns)).as(
'owner', 'owner',

View File

@ -6,7 +6,7 @@ import { mapLibrary } from 'src/dtos/library.dto';
import { UserEntity } from 'src/entities/user.entity'; import { UserEntity } from 'src/entities/user.entity';
import { AssetType, ImmichWorker, JobName, JobStatus } from 'src/enum'; import { AssetType, ImmichWorker, JobName, JobStatus } from 'src/enum';
import { LibraryService } from 'src/services/library.service'; import { LibraryService } from 'src/services/library.service';
import { ILibraryAssetJob, ILibraryFileJob } from 'src/types'; import { ILibraryBulkIdsJob, ILibraryFileJob } from 'src/types';
import { assetStub } from 'test/fixtures/asset.stub'; import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub'; import { authStub } from 'test/fixtures/auth.stub';
import { libraryStub } from 'test/fixtures/library.stub'; import { libraryStub } from 'test/fixtures/library.stub';
@ -22,10 +22,11 @@ async function* mockWalk() {
describe(LibraryService.name, () => { describe(LibraryService.name, () => {
let sut: LibraryService; let sut: LibraryService;
let mocks: ServiceMocks; let mocks: ServiceMocks;
beforeEach(() => { beforeEach(() => {
({ sut, mocks } = newTestService(LibraryService)); ({ sut, mocks } = newTestService(LibraryService, {}));
mocks.database.tryLock.mockResolvedValue(true); mocks.database.tryLock.mockResolvedValue(true);
mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES); mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
@ -152,21 +153,74 @@ describe(LibraryService.name, () => {
describe('handleQueueSyncFiles', () => { describe('handleQueueSyncFiles', () => {
it('should queue refresh of a new asset', async () => { it('should queue refresh of a new asset', async () => {
mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1); mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
mocks.storage.walk.mockImplementation(mockWalk); mocks.storage.walk.mockImplementation(mockWalk);
mocks.storage.stat.mockResolvedValue({ isDirectory: () => true } as Stats);
mocks.storage.checkFileExists.mockResolvedValue(true);
mocks.asset.filterNewExternalAssetPaths.mockResolvedValue(['/data/user1/photo.jpg']);
await sut.handleQueueSyncFiles({ id: libraryStub.externalLibrary1.id }); await sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id });
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queue).toHaveBeenCalledWith({
{ name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LIBRARY_SYNC_FILE, data: {
data: { libraryId: libraryStub.externalLibraryWithImportPaths1.id,
id: libraryStub.externalLibrary1.id, paths: ['/data/user1/photo.jpg'],
ownerId: libraryStub.externalLibrary1.owner.id, progressCounter: 1,
assetPath: '/data/user1/photo.jpg',
},
}, },
]); });
});
it("should fail when library can't be found", async () => {
await expect(sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id })).resolves.toBe(
JobStatus.SKIPPED,
);
});
it('should ignore import paths that do not exist', async () => {
mocks.storage.stat.mockImplementation((path): Promise<Stats> => {
if (path === libraryStub.externalLibraryWithImportPaths1.importPaths[0]) {
const error = { code: 'ENOENT' } as any;
throw error;
}
return Promise.resolve({
isDirectory: () => true,
} as Stats);
});
mocks.storage.checkFileExists.mockResolvedValue(true);
mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
await sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id });
expect(mocks.storage.walk).toHaveBeenCalledWith({
pathsToCrawl: [libraryStub.externalLibraryWithImportPaths1.importPaths[1]],
exclusionPatterns: [],
includeHidden: false,
take: JOBS_LIBRARY_PAGINATION_SIZE,
});
});
});
describe('handleQueueSyncFiles', () => {
it('should queue refresh of a new asset', async () => {
mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
mocks.storage.walk.mockImplementation(mockWalk);
mocks.storage.stat.mockResolvedValue({ isDirectory: () => true } as Stats);
mocks.storage.checkFileExists.mockResolvedValue(true);
mocks.asset.filterNewExternalAssetPaths.mockResolvedValue(['/data/user1/photo.jpg']);
await sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_SYNC_FILES,
data: {
libraryId: libraryStub.externalLibraryWithImportPaths1.id,
paths: ['/data/user1/photo.jpg'],
progressCounter: 1,
},
});
}); });
it("should fail when library can't be found", async () => { it("should fail when library can't be found", async () => {
@ -199,142 +253,226 @@ describe(LibraryService.name, () => {
}); });
}); });
describe('handleQueueRemoveDeleted', () => { describe('handleQueueSyncAssets', () => {
it('should queue online check of existing assets', async () => { it('should call the offline check', async () => {
mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1); mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
mocks.storage.walk.mockImplementation(async function* generator() {}); mocks.storage.walk.mockImplementation(async function* generator() {});
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });
await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id }); const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id });
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(response).toBe(JobStatus.SUCCESS);
{ expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith(
name: JobName.LIBRARY_SYNC_ASSET, libraryStub.externalLibrary1.id,
data: { libraryStub.externalLibrary1.importPaths,
id: assetStub.external.id, libraryStub.externalLibrary1.exclusionPatterns,
importPaths: libraryStub.externalLibrary1.importPaths, );
exclusionPatterns: [],
},
},
]);
}); });
it("should fail when library can't be found", async () => { it('should skip an empty library', async () => {
mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
mocks.storage.walk.mockImplementation(async function* generator() {});
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
mocks.asset.getLibraryAssetCount.mockResolvedValue(0);
mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });
const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id });
expect(response).toBe(JobStatus.SUCCESS);
expect(mocks.asset.detectOfflineExternalAssets).not.toHaveBeenCalled();
});
it('should queue asset sync', async () => {
mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
mocks.storage.walk.mockImplementation(async function* generator() {});
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(0) });
mocks.asset.getAllInLibrary.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibraryWithImportPaths1.id });
expect(mocks.job.queue).toBeCalledWith({
name: JobName.LIBRARY_SYNC_ASSETS,
data: {
libraryId: libraryStub.externalLibraryWithImportPaths1.id,
importPaths: libraryStub.externalLibraryWithImportPaths1.importPaths,
exclusionPatterns: libraryStub.externalLibraryWithImportPaths1.exclusionPatterns,
assetIds: [assetStub.external.id],
progressCounter: 1,
totalAssets: 1,
},
});
expect(response).toBe(JobStatus.SUCCESS);
expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith(
libraryStub.externalLibraryWithImportPaths1.id,
libraryStub.externalLibraryWithImportPaths1.importPaths,
libraryStub.externalLibraryWithImportPaths1.exclusionPatterns,
);
});
it("should fail if library can't be found", async () => {
await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SKIPPED); await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SKIPPED);
}); });
}); });
describe('handleSyncAsset', () => { describe('handleSyncAssets', () => {
it('should skip missing assets', async () => {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: [],
};
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.asset.remove).not.toHaveBeenCalled();
});
it('should offline assets no longer on disk', async () => { it('should offline assets no longer on disk', async () => {
const mockAssetJob: ILibraryAssetJob = { const mockAssetJob: ILibraryBulkIdsJob = {
id: assetStub.external.id, assetIds: [assetStub.external.id],
libraryId: libraryStub.externalLibrary1.id,
importPaths: ['/'], importPaths: ['/'],
exclusionPatterns: [], exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
}; };
mocks.asset.getById.mockResolvedValue(assetStub.external); mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory')); mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory'));
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], { expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true, isOffline: true,
deletedAt: expect.any(Date), deletedAt: expect.anything(),
}); });
}); });
it('should offline assets matching an exclusion pattern', async () => { it('should set assets deleted from disk as offline', async () => {
const mockAssetJob: ILibraryAssetJob = { const mockAssetJob: ILibraryBulkIdsJob = {
id: assetStub.external.id, assetIds: [assetStub.external.id],
importPaths: ['/'], libraryId: libraryStub.externalLibrary1.id,
exclusionPatterns: ['**/user1/**'],
};
mocks.asset.getById.mockResolvedValue(assetStub.external);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true,
deletedAt: expect.any(Date),
});
});
it('should set assets outside of import paths as offline', async () => {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/data/user2'], importPaths: ['/data/user2'],
exclusionPatterns: [], exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
}; };
mocks.asset.getById.mockResolvedValue(assetStub.external); mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
mocks.storage.checkFileExists.mockResolvedValue(true); mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], { expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true, isOffline: true,
deletedAt: expect.any(Date), deletedAt: expect.anything(),
}); });
}); });
it('should do nothing with online assets', async () => { it('should do nothing with offline assets deleted from disk', async () => {
const mockAssetJob: ILibraryAssetJob = { const mockAssetJob: ILibraryBulkIdsJob = {
id: assetStub.external.id, assetIds: [assetStub.trashedOffline.id],
importPaths: ['/'], libraryId: libraryStub.externalLibrary1.id,
importPaths: ['/data/user2'],
exclusionPatterns: [], exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
}; };
mocks.asset.getById.mockResolvedValue(assetStub.external); mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).not.toHaveBeenCalled(); expect(mocks.asset.updateAll).not.toHaveBeenCalled();
}); });
it('should un-trash an asset previously marked as offline', async () => { it('should un-trash an asset previously marked as offline', async () => {
const mockAssetJob: ILibraryAssetJob = { const mockAssetJob: ILibraryBulkIdsJob = {
id: assetStub.external.id, assetIds: [assetStub.trashedOffline.id],
importPaths: ['/'], libraryId: libraryStub.externalLibrary1.id,
importPaths: ['/original/'],
exclusionPatterns: [], exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
}; };
mocks.asset.getById.mockResolvedValue(assetStub.trashedOffline); mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats); mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.trashedOffline.id], { expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
deletedAt: null,
fileModifiedAt: assetStub.trashedOffline.fileModifiedAt,
isOffline: false, isOffline: false,
originalFileName: 'path.jpg', deletedAt: null,
}); });
}); });
it('should not touch fileCreatedAt when un-trashing an asset previously marked as offline', async () => { it('should do nothing with offline asset if covered by exclusion pattern', async () => {
const mockAssetJob: ILibraryAssetJob = { const mockAssetJob: ILibraryBulkIdsJob = {
id: assetStub.external.id, assetIds: [assetStub.trashedOffline.id],
importPaths: ['/'], libraryId: libraryStub.externalLibrary1.id,
exclusionPatterns: [], importPaths: ['/original/'],
exclusionPatterns: ['**/path.jpg'],
totalAssets: 1,
progressCounter: 0,
}; };
mocks.asset.getById.mockResolvedValue(assetStub.trashedOffline); mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
});
it('should do nothing with offline asset if not in import path', async () => {
const mockAssetJob: ILibraryBulkIdsJob = {
assetIds: [assetStub.trashedOffline.id],
libraryId: libraryStub.externalLibrary1.id,
importPaths: ['/import/'],
exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
});
it('should do nothing with unchanged online assets', async () => {
const mockAssetJob: ILibraryBulkIdsJob = {
assetIds: [assetStub.external.id],
libraryId: libraryStub.externalLibrary1.id,
importPaths: ['/'],
exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
});
it('should not touch fileCreatedAt when un-trashing an asset previously marked as offline', async () => {
const mockAssetJob: ILibraryBulkIdsJob = {
assetIds: [assetStub.trashedOffline.id],
libraryId: libraryStub.externalLibrary1.id,
importPaths: ['/'],
exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
};
mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats); mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.updateAll).toHaveBeenCalledWith( expect(mocks.asset.updateAll).toHaveBeenCalledWith(
[assetStub.trashedOffline.id], [assetStub.trashedOffline.id],
@ -343,30 +481,41 @@ describe(LibraryService.name, () => {
}), }),
); );
}); });
});
it('should update file when mtime has changed', async () => { it('should update with online assets that have changed', async () => {
const mockAssetJob: ILibraryAssetJob = { const mockAssetJob: ILibraryBulkIdsJob = {
id: assetStub.external.id, assetIds: [assetStub.external.id],
importPaths: ['/'], libraryId: libraryStub.externalLibrary1.id,
exclusionPatterns: [], importPaths: ['/'],
}; exclusionPatterns: [],
totalAssets: 1,
progressCounter: 0,
};
const newMTime = new Date(); if (assetStub.external.fileModifiedAt == null) {
mocks.asset.getById.mockResolvedValue(assetStub.external); throw new Error('fileModifiedAt is null');
mocks.storage.stat.mockResolvedValue({ mtime: newMTime } as Stats); }
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); const mtime = new Date(assetStub.external.fileModifiedAt.getDate() + 1);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], { mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
fileModifiedAt: newMTime, mocks.storage.stat.mockResolvedValue({ mtime } as Stats);
isOffline: false,
originalFileName: 'photo.jpg', await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
deletedAt: null,
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_DISCOVERY,
data: {
id: assetStub.external.id,
source: 'upload',
},
},
]);
}); });
}); });
describe('handleSyncFile', () => { describe('handleSyncFiles', () => {
let mockUser: UserEntity; let mockUser: UserEntity;
beforeEach(() => { beforeEach(() => {
@ -381,187 +530,57 @@ describe(LibraryService.name, () => {
it('should import a new asset', async () => { it('should import a new asset', async () => {
const mockLibraryJob: ILibraryFileJob = { const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id, libraryId: libraryStub.externalLibrary1.id,
ownerId: mockUser.id, paths: ['/data/user1/photo.jpg'],
assetPath: '/data/user1/photo.jpg',
}; };
mocks.asset.create.mockResolvedValue(assetStub.image); mocks.asset.createAll.mockResolvedValue([assetStub.image]);
mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1); mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS); await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.create.mock.calls).toEqual([ expect(mocks.asset.createAll.mock.calls).toEqual([
[ [
{ [
ownerId: mockUser.id, expect.objectContaining({
libraryId: libraryStub.externalLibrary1.id, ownerId: mockUser.id,
checksum: expect.any(Buffer), libraryId: libraryStub.externalLibrary1.id,
originalPath: '/data/user1/photo.jpg', originalPath: '/data/user1/photo.jpg',
deviceAssetId: expect.any(String), deviceId: 'Library Import',
deviceId: 'Library Import', type: AssetType.IMAGE,
fileCreatedAt: expect.any(Date), originalFileName: 'photo.jpg',
fileModifiedAt: expect.any(Date), isExternal: true,
localDateTime: expect.any(Date), }),
type: AssetType.IMAGE, ],
originalFileName: 'photo.jpg',
isExternal: true,
},
], ],
]); ]);
expect(mocks.job.queue.mock.calls).toEqual([ expect(mocks.job.queueAll.mock.calls).toEqual([
[ [
{ [
name: JobName.SIDECAR_DISCOVERY, {
data: { name: JobName.SIDECAR_DISCOVERY,
id: assetStub.image.id, data: {
source: 'upload', id: assetStub.image.id,
source: 'upload',
},
}, },
}, ],
],
]);
});
it('should import a new video', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/video.mp4',
};
mocks.asset.create.mockResolvedValue(assetStub.video);
mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.create.mock.calls).toEqual([
[
{
ownerId: mockUser.id,
libraryId: libraryStub.externalLibrary1.id,
checksum: expect.any(Buffer),
originalPath: '/data/user1/video.mp4',
deviceAssetId: expect.any(String),
deviceId: 'Library Import',
fileCreatedAt: expect.any(Date),
fileModifiedAt: expect.any(Date),
localDateTime: expect.any(Date),
type: AssetType.VIDEO,
originalFileName: 'video.mp4',
isExternal: true,
},
],
]);
expect(mocks.job.queue.mock.calls).toEqual([
[
{
name: JobName.SIDECAR_DISCOVERY,
data: {
id: assetStub.image.id,
source: 'upload',
},
},
], ],
]); ]);
}); });
it('should not import an asset to a soft deleted library', async () => { it('should not import an asset to a soft deleted library', async () => {
const mockLibraryJob: ILibraryFileJob = { const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id, libraryId: libraryStub.externalLibrary1.id,
ownerId: mockUser.id, paths: ['/data/user1/photo.jpg'],
assetPath: '/data/user1/photo.jpg',
}; };
mocks.asset.create.mockResolvedValue(assetStub.image);
mocks.library.get.mockResolvedValue({ ...libraryStub.externalLibrary1, deletedAt: new Date() }); mocks.library.get.mockResolvedValue({ ...libraryStub.externalLibrary1, deletedAt: new Date() });
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED); await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
expect(mocks.asset.create.mock.calls).toEqual([]); expect(mocks.asset.createAll.mock.calls).toEqual([]);
});
it('should not refresh a file whose mtime matches existing asset', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: assetStub.hasFileExtension.originalPath,
};
mocks.storage.stat.mockResolvedValue({
size: 100,
mtime: assetStub.hasFileExtension.fileModifiedAt,
ctime: new Date('2023-01-01'),
} as Stats);
mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.hasFileExtension);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.job.queue).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
});
it('should skip existing asset', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/photo.jpg',
};
mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
});
it('should not refresh an asset trashed by user', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: assetStub.hasFileExtension.originalPath,
};
mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.trashed);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.job.queue).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
});
it('should fail when the file could not be read', async () => {
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: userStub.admin.id,
assetPath: '/data/user1/photo.jpg',
};
mocks.asset.create.mockResolvedValue(assetStub.image);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
expect(mocks.library.get).not.toHaveBeenCalled();
expect(mocks.asset.create).not.toHaveBeenCalled();
});
it('should skip if the file could not be found', async () => {
const error = new Error('File not found') as any;
error.code = 'ENOENT';
mocks.storage.stat.mockRejectedValue(error);
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: userStub.admin.id,
assetPath: '/data/user1/photo.jpg',
};
mocks.asset.create.mockResolvedValue(assetStub.image);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.library.get).not.toHaveBeenCalled();
expect(mocks.asset.create).not.toHaveBeenCalled();
}); });
}); });
@ -641,10 +660,6 @@ describe(LibraryService.name, () => {
expect(mocks.library.getStatistics).toHaveBeenCalledWith(libraryStub.externalLibrary1.id); expect(mocks.library.getStatistics).toHaveBeenCalledWith(libraryStub.externalLibrary1.id);
}); });
it('should throw an error if the library could not be found', async () => {
await expect(sut.getStatistics('foo')).rejects.toBeInstanceOf(BadRequestException);
});
}); });
describe('create', () => { describe('create', () => {
@ -898,19 +913,13 @@ describe(LibraryService.name, () => {
await sut.watchAll(); await sut.watchAll();
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queue).toHaveBeenCalledWith({
{ name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LIBRARY_SYNC_FILE, data: {
data: { libraryId: libraryStub.externalLibraryWithImportPaths1.id,
id: libraryStub.externalLibraryWithImportPaths1.id, paths: ['/foo/photo.jpg'],
assetPath: '/foo/photo.jpg',
ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
},
}, },
]); });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
]);
}); });
it('should handle a file change event', async () => { it('should handle a file change event', async () => {
@ -923,19 +932,13 @@ describe(LibraryService.name, () => {
await sut.watchAll(); await sut.watchAll();
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queue).toHaveBeenCalledWith({
{ name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LIBRARY_SYNC_FILE, data: {
data: { libraryId: libraryStub.externalLibraryWithImportPaths1.id,
id: libraryStub.externalLibraryWithImportPaths1.id, paths: ['/foo/photo.jpg'],
assetPath: '/foo/photo.jpg',
ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
},
}, },
]); });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
]);
}); });
it('should handle a file unlink event', async () => { it('should handle a file unlink event', async () => {
@ -943,14 +946,18 @@ describe(LibraryService.name, () => {
mocks.library.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]); mocks.library.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image); mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
mocks.storage.watch.mockImplementation( mocks.storage.watch.mockImplementation(
makeMockWatcher({ items: [{ event: 'unlink', value: '/foo/photo.jpg' }] }), makeMockWatcher({ items: [{ event: 'unlink', value: assetStub.image.originalPath }] }),
); );
await sut.watchAll(); await sut.watchAll();
expect(mocks.job.queueAll).toHaveBeenCalledWith([ expect(mocks.job.queue).toHaveBeenCalledWith({
{ name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, name: JobName.LIBRARY_ASSET_REMOVAL,
]); data: {
libraryId: libraryStub.externalLibraryWithImportPaths1.id,
paths: [assetStub.image.originalPath],
},
});
}); });
it('should handle an error event', async () => { it('should handle an error event', async () => {
@ -966,10 +973,10 @@ describe(LibraryService.name, () => {
await expect(sut.watchAll()).resolves.toBeUndefined(); await expect(sut.watchAll()).resolves.toBeUndefined();
}); });
it('should ignore unknown extensions', async () => { it('should not import a file with unknown extension', async () => {
mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
mocks.library.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]); mocks.library.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
mocks.storage.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.jpg' }] })); mocks.storage.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.xyz' }] }));
await sut.watchAll(); await sut.watchAll();
@ -1100,27 +1107,6 @@ describe(LibraryService.name, () => {
}); });
}); });
describe('handleQueueAssetOfflineCheck', () => {
it('should queue removal jobs', async () => {
mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false });
mocks.asset.getById.mockResolvedValue(assetStub.image1);
await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SYNC_ASSET,
data: {
id: assetStub.image1.id,
importPaths: libraryStub.externalLibrary1.importPaths,
exclusionPatterns: libraryStub.externalLibrary1.exclusionPatterns,
},
},
]);
});
});
describe('validate', () => { describe('validate', () => {
it('should not require import paths', async () => { it('should not require import paths', async () => {
await expect(sut.validate('library-id', {})).resolves.toEqual({ importPaths: [] }); await expect(sut.validate('library-id', {})).resolves.toEqual({ importPaths: [] });

View File

@ -1,5 +1,6 @@
import { BadRequestException, Injectable } from '@nestjs/common'; import { BadRequestException, Injectable } from '@nestjs/common';
import { R_OK } from 'node:constants'; import { R_OK } from 'node:constants';
import { Stats } from 'node:fs';
import path, { basename, isAbsolute, parse } from 'node:path'; import path, { basename, isAbsolute, parse } from 'node:path';
import picomatch from 'picomatch'; import picomatch from 'picomatch';
import { JOBS_LIBRARY_PAGINATION_SIZE } from 'src/constants'; import { JOBS_LIBRARY_PAGINATION_SIZE } from 'src/constants';
@ -16,9 +17,9 @@ import {
ValidateLibraryResponseDto, ValidateLibraryResponseDto,
} from 'src/dtos/library.dto'; } from 'src/dtos/library.dto';
import { AssetEntity } from 'src/entities/asset.entity'; import { AssetEntity } from 'src/entities/asset.entity';
import { LibraryEntity } from 'src/entities/library.entity'; import { AssetStatus, AssetType, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum';
import { AssetType, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository'; import { ArgOf } from 'src/repositories/event.repository';
import { AssetSyncResult } from 'src/repositories/library.repository';
import { BaseService } from 'src/services/base.service'; import { BaseService } from 'src/services/base.service';
import { JobOf } from 'src/types'; import { JobOf } from 'src/types';
import { mimeTypes } from 'src/utils/mime-types'; import { mimeTypes } from 'src/utils/mime-types';
@ -98,6 +99,26 @@ export class LibraryService extends BaseService {
let _resolve: () => void; let _resolve: () => void;
const ready$ = new Promise<void>((resolve) => (_resolve = resolve)); const ready$ = new Promise<void>((resolve) => (_resolve = resolve));
const handler = async (event: string, path: string) => {
if (matcher(path)) {
this.logger.debug(`File ${event} event received for ${path} in library ${library.id}}`);
await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_FILES,
data: { libraryId: library.id, paths: [path] },
});
} else {
this.logger.verbose(`Ignoring file ${event} event for ${path} in library ${library.id}`);
}
};
const deletionHandler = async (path: string) => {
this.logger.debug(`File unlink event received for ${path} in library ${library.id}}`);
await this.jobRepository.queue({
name: JobName.LIBRARY_ASSET_REMOVAL,
data: { libraryId: library.id, paths: [path] },
});
};
this.watchers[id] = this.storageRepository.watch( this.watchers[id] = this.storageRepository.watch(
library.importPaths, library.importPaths,
{ {
@ -107,43 +128,13 @@ export class LibraryService extends BaseService {
{ {
onReady: () => _resolve(), onReady: () => _resolve(),
onAdd: (path) => { onAdd: (path) => {
const handler = async () => { return handlePromiseError(handler('add', path), this.logger);
this.logger.debug(`File add event received for ${path} in library ${library.id}}`);
if (matcher(path)) {
const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
if (asset) {
await this.syncAssets(library, [asset.id]);
}
if (matcher(path)) {
await this.syncFiles(library, [path]);
}
}
};
return handlePromiseError(handler(), this.logger);
}, },
onChange: (path) => { onChange: (path) => {
const handler = async () => { return handlePromiseError(handler('change', path), this.logger);
this.logger.debug(`Detected file change for ${path} in library ${library.id}`);
const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
if (asset) {
await this.syncAssets(library, [asset.id]);
}
if (matcher(path)) {
// Note: if the changed file was not previously imported, it will be imported now.
await this.syncFiles(library, [path]);
}
};
return handlePromiseError(handler(), this.logger);
}, },
onUnlink: (path) => { onUnlink: (path) => {
const handler = async () => { return handlePromiseError(deletionHandler(path), this.logger);
this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`);
const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
if (asset) {
await this.syncAssets(library, [asset.id]);
}
};
return handlePromiseError(handler(), this.logger);
}, },
onError: (error) => { onError: (error) => {
this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`); this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`);
@ -234,26 +225,38 @@ export class LibraryService extends BaseService {
return mapLibrary(library); return mapLibrary(library);
} }
private async syncFiles({ id, ownerId }: LibraryEntity, assetPaths: string[]) { @OnJob({ name: JobName.LIBRARY_SYNC_FILES, queue: QueueName.LIBRARY })
await this.jobRepository.queueAll( async handleSyncFiles(job: JobOf<JobName.LIBRARY_SYNC_FILES>): Promise<JobStatus> {
assetPaths.map((assetPath) => ({ const library = await this.libraryRepository.get(job.libraryId);
name: JobName.LIBRARY_SYNC_FILE, // We need to check if the library still exists as it could have been deleted after the scan was queued
data: { if (!library) {
id, this.logger.debug(`Library ${job.libraryId} not found, skipping file import`);
assetPath, return JobStatus.FAILED;
ownerId, } else if (library.deletedAt) {
}, this.logger.debug(`Library ${job.libraryId} is deleted, won't import assets into it`);
})), return JobStatus.FAILED;
); }
}
private async syncAssets({ importPaths, exclusionPatterns }: LibraryEntity, assetIds: string[]) { const assetImports = job.paths.map((assetPath) => this.processEntity(assetPath, library.ownerId, job.libraryId));
await this.jobRepository.queueAll(
assetIds.map((assetId) => ({ const assetIds: string[] = [];
name: JobName.LIBRARY_SYNC_ASSET,
data: { id: assetId, importPaths, exclusionPatterns }, for (let i = 0; i < assetImports.length; i += 5000) {
})), // Chunk the imports to avoid the postgres limit of max parameters at once
); const chunk = assetImports.slice(i, i + 5000);
await this.assetRepository.createAll(chunk).then((assets) => assetIds.push(...assets.map((asset) => asset.id)));
}
const progressMessage =
job.progressCounter && job.totalAssets
? `(${job.progressCounter} of ${job.totalAssets})`
: `(${job.progressCounter} done so far)`;
this.logger.log(`Imported ${assetIds.length} ${progressMessage} file(s) into library ${job.libraryId}`);
await this.queuePostSyncJobs(assetIds);
return JobStatus.SUCCESS;
} }
private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> { private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> {
@ -336,6 +339,8 @@ export class LibraryService extends BaseService {
async handleDeleteLibrary(job: JobOf<JobName.LIBRARY_DELETE>): Promise<JobStatus> { async handleDeleteLibrary(job: JobOf<JobName.LIBRARY_DELETE>): Promise<JobStatus> {
const libraryId = job.id; const libraryId = job.id;
await this.assetRepository.updateByLibraryId(libraryId, { deletedAt: new Date() });
const assetPagination = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) => const assetPagination = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAll(pagination, { libraryId, withDeleted: true }), this.assetRepository.getAll(pagination, { libraryId, withDeleted: true }),
); );
@ -367,84 +372,52 @@ export class LibraryService extends BaseService {
return JobStatus.SUCCESS; return JobStatus.SUCCESS;
} }
@OnJob({ name: JobName.LIBRARY_SYNC_FILE, queue: QueueName.LIBRARY }) private processEntity(filePath: string, ownerId: string, libraryId: string) {
async handleSyncFile(job: JobOf<JobName.LIBRARY_SYNC_FILE>): Promise<JobStatus> { const assetPath = path.normalize(filePath);
// Only needs to handle new assets
const assetPath = path.normalize(job.assetPath);
let asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath); return {
if (asset) { ownerId,
return JobStatus.SKIPPED; libraryId,
} checksum: this.cryptoRepository.hashSha1(`path:${assetPath}`),
let stat;
try {
stat = await this.storageRepository.stat(assetPath);
} catch (error: any) {
if (error.code === 'ENOENT') {
this.logger.error(`File not found: ${assetPath}`);
return JobStatus.SKIPPED;
}
this.logger.error(`Error reading file: ${assetPath}. Error: ${error}`);
return JobStatus.FAILED;
}
this.logger.log(`Importing new library asset: ${assetPath}`);
const library = await this.libraryRepository.get(job.id, true);
if (!library || library.deletedAt) {
this.logger.error('Cannot import asset into deleted library');
return JobStatus.FAILED;
}
// TODO: device asset id is deprecated, remove it
const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, '');
const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);
const assetType = mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE;
const mtime = stat.mtime;
asset = await this.assetRepository.create({
ownerId: job.ownerId,
libraryId: job.id,
checksum: pathHash,
originalPath: assetPath, originalPath: assetPath,
deviceAssetId,
fileCreatedAt: null,
fileModifiedAt: null,
localDateTime: null,
// TODO: device asset id is deprecated, remove it
deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''),
deviceId: 'Library Import', deviceId: 'Library Import',
fileCreatedAt: mtime, type: mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE,
fileModifiedAt: mtime,
localDateTime: mtime,
type: assetType,
originalFileName: parse(assetPath).base, originalFileName: parse(assetPath).base,
isExternal: true, isExternal: true,
}); livePhotoVideoId: null,
};
await this.queuePostSyncJobs(asset);
return JobStatus.SUCCESS;
} }
async queuePostSyncJobs(asset: AssetEntity) { async queuePostSyncJobs(assetIds: string[]) {
this.logger.debug(`Queueing metadata extraction for: ${asset.originalPath}`); this.logger.debug(`Queuing sidecar discovery for ${assetIds.length} asset(s)`);
// We queue a sidecar discovery which, in turn, queues metadata extraction // We queue a sidecar discovery which, in turn, queues metadata extraction
await this.jobRepository.queue({ await this.jobRepository.queueAll(
name: JobName.SIDECAR_DISCOVERY, assetIds.map((assetId) => ({
data: { id: asset.id, source: 'upload' }, name: JobName.SIDECAR_DISCOVERY,
}); data: { id: assetId, source: 'upload' },
})),
);
} }
async queueScan(id: string) { async queueScan(id: string) {
await this.findOrFail(id); await this.findOrFail(id);
this.logger.log(`Starting to scan library ${id}`);
await this.jobRepository.queue({ await this.jobRepository.queue({
name: JobName.LIBRARY_QUEUE_SYNC_FILES, name: JobName.LIBRARY_QUEUE_SYNC_FILES,
data: { data: {
id, id,
}, },
}); });
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } }); await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } });
} }
@ -454,11 +427,12 @@ export class LibraryService extends BaseService {
@OnJob({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, queue: QueueName.LIBRARY }) @OnJob({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, queue: QueueName.LIBRARY })
async handleQueueScanAll(): Promise<JobStatus> { async handleQueueScanAll(): Promise<JobStatus> {
this.logger.log(`Refreshing all external libraries`); this.logger.log(`Initiating scan of all external libraries...`);
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} }); await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} });
const libraries = await this.libraryRepository.getAll(true); const libraries = await this.libraryRepository.getAll(true);
await this.jobRepository.queueAll( await this.jobRepository.queueAll(
libraries.map((library) => ({ libraries.map((library) => ({
name: JobName.LIBRARY_QUEUE_SYNC_FILES, name: JobName.LIBRARY_QUEUE_SYNC_FILES,
@ -475,64 +449,141 @@ export class LibraryService extends BaseService {
}, },
})), })),
); );
return JobStatus.SUCCESS; return JobStatus.SUCCESS;
} }
@OnJob({ name: JobName.LIBRARY_SYNC_ASSET, queue: QueueName.LIBRARY }) @OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY })
async handleSyncAsset(job: JobOf<JobName.LIBRARY_SYNC_ASSET>): Promise<JobStatus> { async handleSyncAssets(job: JobOf<JobName.LIBRARY_SYNC_ASSETS>): Promise<JobStatus> {
const asset = await this.assetRepository.getById(job.id); const assets = await this.assetRepository.getByIds(job.assetIds);
if (!asset) {
return JobStatus.SKIPPED;
}
const markOffline = async (explanation: string) => { const assetIdsToOffline: string[] = [];
if (!asset.isOffline) { const trashedAssetIdsToOffline: string[] = [];
this.logger.debug(`${explanation}, removing: ${asset.originalPath}`); const assetIdsToOnline: string[] = [];
await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() }); const trashedAssetIdsToOnline: string[] = [];
const assetIdsToUpdate: string[] = [];
this.logger.debug(`Checking batch of ${assets.length} existing asset(s) in library ${job.libraryId}`);
const stats = await Promise.all(
assets.map((asset) => this.storageRepository.stat(asset.originalPath).catch(() => null)),
);
for (let i = 0; i < assets.length; i++) {
const asset = assets[i];
const stat = stats[i];
const action = this.checkExistingAsset(asset, stat);
switch (action) {
case AssetSyncResult.OFFLINE: {
if (asset.status === AssetStatus.TRASHED) {
trashedAssetIdsToOffline.push(asset.id);
} else {
assetIdsToOffline.push(asset.id);
}
break;
}
case AssetSyncResult.UPDATE: {
assetIdsToUpdate.push(asset.id);
break;
}
case AssetSyncResult.CHECK_OFFLINE: {
const isInImportPath = job.importPaths.find((path) => asset.originalPath.startsWith(path));
if (!isInImportPath) {
this.logger.verbose(
`Offline asset ${asset.originalPath} is still not in any import path, keeping offline in library ${job.libraryId}`,
);
break;
}
const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern));
if (!isExcluded) {
this.logger.debug(`Offline asset ${asset.originalPath} is now online in library ${job.libraryId}`);
if (asset.status === AssetStatus.TRASHED) {
trashedAssetIdsToOnline.push(asset.id);
} else {
assetIdsToOnline.push(asset.id);
}
break;
}
this.logger.verbose(
`Offline asset ${asset.originalPath} is in an import path but still covered by exclusion pattern, keeping offline in library ${job.libraryId}`,
);
break;
}
} }
};
const isInPath = job.importPaths.find((path) => asset.originalPath.startsWith(path));
if (!isInPath) {
await markOffline('Asset is no longer in an import path');
return JobStatus.SUCCESS;
} }
const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern)); const promises = [];
if (isExcluded) { if (assetIdsToOffline.length > 0) {
await markOffline('Asset is covered by an exclusion pattern'); promises.push(this.assetRepository.updateAll(assetIdsToOffline, { isOffline: true, deletedAt: new Date() }));
return JobStatus.SUCCESS;
} }
let stat; if (trashedAssetIdsToOffline.length > 0) {
try { promises.push(this.assetRepository.updateAll(trashedAssetIdsToOffline, { isOffline: true }));
stat = await this.storageRepository.stat(asset.originalPath);
} catch {
await markOffline('Asset is no longer on disk or is inaccessible because of permissions');
return JobStatus.SUCCESS;
} }
const mtime = stat.mtime; if (assetIdsToOnline.length > 0) {
const isAssetModified = !asset.fileModifiedAt || mtime.toISOString() !== asset.fileModifiedAt.toISOString(); promises.push(this.assetRepository.updateAll(assetIdsToOnline, { isOffline: false, deletedAt: null }));
if (asset.isOffline || isAssetModified) {
this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`);
//TODO: When we have asset status, we need to leave deletedAt as is when status is trashed
await this.assetRepository.updateAll([asset.id], {
isOffline: false,
deletedAt: null,
fileModifiedAt: mtime,
originalFileName: parse(asset.originalPath).base,
});
} }
if (isAssetModified) { if (trashedAssetIdsToOnline.length > 0) {
this.logger.debug(`Asset was modified, queuing metadata extraction for: ${asset.originalPath}`); promises.push(this.assetRepository.updateAll(trashedAssetIdsToOnline, { isOffline: false }));
await this.queuePostSyncJobs(asset);
} }
if (assetIdsToUpdate.length > 0) {
promises.push(this.queuePostSyncJobs(assetIdsToUpdate));
}
await Promise.all(promises);
const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length - assetIdsToOnline.length;
const cumulativePercentage = ((100 * job.progressCounter) / job.totalAssets).toFixed(1);
this.logger.log(
`Checked existing asset(s): ${assetIdsToOffline.length + trashedAssetIdsToOffline.length} offlined, ${assetIdsToOnline.length + trashedAssetIdsToOnline.length} onlined, ${assetIdsToUpdate.length} updated, ${remainingCount} unchanged of current batch of ${assets.length} (Total progress: ${job.progressCounter} of ${job.totalAssets}, ${cumulativePercentage} %) in library ${job.libraryId}.`,
);
return JobStatus.SUCCESS; return JobStatus.SUCCESS;
} }
private checkExistingAsset(asset: AssetEntity, stat: Stats | null): AssetSyncResult {
if (!stat) {
// File not found on disk or permission error
if (asset.isOffline) {
this.logger.verbose(
`Asset ${asset.originalPath} is still not accessible, keeping offline in library ${asset.libraryId}`,
);
return AssetSyncResult.DO_NOTHING;
}
this.logger.debug(
`Asset ${asset.originalPath} is no longer on disk or is inaccessible because of permissions, marking offline in library ${asset.libraryId}`,
);
return AssetSyncResult.OFFLINE;
}
if (asset.isOffline && asset.status !== AssetStatus.DELETED) {
// Only perform the expensive check if the asset is offline
return AssetSyncResult.CHECK_OFFLINE;
}
if (
!asset.fileCreatedAt ||
!asset.localDateTime ||
!asset.fileModifiedAt ||
stat.mtime.valueOf() !== asset.fileModifiedAt.valueOf()
) {
this.logger.verbose(`Asset ${asset.originalPath} needs metadata extraction in library ${asset.libraryId}`);
return AssetSyncResult.UPDATE;
}
return AssetSyncResult.DO_NOTHING;
}
@OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, queue: QueueName.LIBRARY }) @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, queue: QueueName.LIBRARY })
async handleQueueSyncFiles(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_FILES>): Promise<JobStatus> { async handleQueueSyncFiles(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_FILES>): Promise<JobStatus> {
const library = await this.libraryRepository.get(job.id); const library = await this.libraryRepository.get(job.id);
@ -541,7 +592,7 @@ export class LibraryService extends BaseService {
return JobStatus.SKIPPED; return JobStatus.SKIPPED;
} }
this.logger.log(`Refreshing library ${library.id} for new assets`); this.logger.debug(`Validating import paths for library ${library.id}...`);
const validImportPaths: string[] = []; const validImportPaths: string[] = [];
@ -556,35 +607,67 @@ export class LibraryService extends BaseService {
if (validImportPaths.length === 0) { if (validImportPaths.length === 0) {
this.logger.warn(`No valid import paths found for library ${library.id}`); this.logger.warn(`No valid import paths found for library ${library.id}`);
return JobStatus.SKIPPED;
} }
const assetsOnDisk = this.storageRepository.walk({ const pathsOnDisk = this.storageRepository.walk({
pathsToCrawl: validImportPaths, pathsToCrawl: validImportPaths,
includeHidden: false, includeHidden: false,
exclusionPatterns: library.exclusionPatterns, exclusionPatterns: library.exclusionPatterns,
take: JOBS_LIBRARY_PAGINATION_SIZE, take: JOBS_LIBRARY_PAGINATION_SIZE,
}); });
let count = 0; let importCount = 0;
let crawlCount = 0;
for await (const assetBatch of assetsOnDisk) { this.logger.log(`Starting disk crawl of ${validImportPaths.length} import path(s) for library ${library.id}...`);
count += assetBatch.length;
this.logger.debug(`Discovered ${count} asset(s) on disk for library ${library.id}...`); for await (const pathBatch of pathsOnDisk) {
await this.syncFiles(library, assetBatch); crawlCount += pathBatch.length;
this.logger.verbose(`Queued scan of ${assetBatch.length} crawled asset(s) in library ${library.id}...`); const paths = await this.assetRepository.filterNewExternalAssetPaths(library.id, pathBatch);
if (paths.length > 0) {
importCount += paths.length;
await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_FILES,
data: {
libraryId: library.id,
paths,
progressCounter: crawlCount,
},
});
}
this.logger.log(
`Crawled ${crawlCount} file(s) so far: ${paths.length} of current batch of ${pathBatch.length} will be imported to library ${library.id}...`,
);
} }
if (count > 0) { this.logger.log(
this.logger.debug(`Finished queueing scan of ${count} assets on disk for library ${library.id}`); `Finished disk crawl, ${crawlCount} file(s) found on disk and queued ${importCount} file(s) for import into ${library.id}`,
} else if (validImportPaths.length > 0) { );
this.logger.debug(`No non-excluded assets found in any import path for library ${library.id}`);
}
await this.libraryRepository.update(job.id, { refreshedAt: new Date() }); await this.libraryRepository.update(job.id, { refreshedAt: new Date() });
return JobStatus.SUCCESS; return JobStatus.SUCCESS;
} }
@OnJob({ name: JobName.LIBRARY_ASSET_REMOVAL, queue: QueueName.LIBRARY })
async handleAssetRemoval(job: JobOf<JobName.LIBRARY_ASSET_REMOVAL>): Promise<JobStatus> {
  // Invoked for file unlink events reported by the file watcher: each path that
  // still maps to an asset in this library is removed from the repository.
  this.logger.verbose(`Deleting asset(s) ${job.paths} from library ${job.libraryId}`);
  for (const path of job.paths) {
    const existing = await this.assetRepository.getByLibraryIdAndOriginalPath(job.libraryId, path);
    if (!existing) {
      // No matching asset for this path — nothing to delete.
      continue;
    }
    await this.assetRepository.remove(existing);
  }
  return JobStatus.SUCCESS;
}
@OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, queue: QueueName.LIBRARY }) @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, queue: QueueName.LIBRARY })
async handleQueueSyncAssets(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_ASSETS>): Promise<JobStatus> { async handleQueueSyncAssets(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_ASSETS>): Promise<JobStatus> {
const library = await this.libraryRepository.get(job.id); const library = await this.libraryRepository.get(job.id);
@ -592,27 +675,68 @@ export class LibraryService extends BaseService {
return JobStatus.SKIPPED; return JobStatus.SKIPPED;
} }
this.logger.log(`Scanning library ${library.id} for removed assets`); const assetCount = await this.assetRepository.getLibraryAssetCount(job.id);
const onlineAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) => if (!assetCount) {
this.assetRepository.getAll(pagination, { libraryId: job.id, withDeleted: true }), this.logger.log(`Library ${library.id} is empty, no need to check assets`);
); return JobStatus.SUCCESS;
let assetCount = 0;
for await (const assets of onlineAssets) {
assetCount += assets.length;
this.logger.debug(`Discovered ${assetCount} asset(s) in library ${library.id}...`);
await this.jobRepository.queueAll(
assets.map((asset) => ({
name: JobName.LIBRARY_SYNC_ASSET,
data: { id: asset.id, importPaths: library.importPaths, exclusionPatterns: library.exclusionPatterns },
})),
);
this.logger.debug(`Queued check of ${assets.length} asset(s) in library ${library.id}...`);
} }
if (assetCount) { this.logger.log(
this.logger.log(`Finished queueing check of ${assetCount} assets for library ${library.id}`); `Checking ${assetCount} asset(s) against import paths and exclusion patterns in library ${library.id}...`,
);
const offlineResult = await this.assetRepository.detectOfflineExternalAssets(
library.id,
library.importPaths,
library.exclusionPatterns,
);
const affectedAssetCount = Number(offlineResult.numUpdatedRows);
this.logger.log(
`${affectedAssetCount} asset(s) out of ${assetCount} were offlined due to import paths and/or exclusion pattern(s) in library ${library.id}`,
);
if (affectedAssetCount === assetCount) {
return JobStatus.SUCCESS;
}
this.logger.log(`Scanning library ${library.id} for assets missing from disk...`);
const existingAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAllInLibrary(pagination, job.id),
);
let currentAssetCount = 0;
for await (const assets of existingAssets) {
if (assets.length === 0) {
throw new BadRequestException(`Failed to get assets for library ${job.id}`);
}
currentAssetCount += assets.length;
await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_ASSETS,
data: {
libraryId: library.id,
importPaths: library.importPaths,
exclusionPatterns: library.exclusionPatterns,
assetIds: assets.map(({ id }) => id),
progressCounter: currentAssetCount,
totalAssets: assetCount,
},
});
const completePercentage = ((100 * currentAssetCount) / assetCount).toFixed(1);
this.logger.log(
`Queued check of ${currentAssetCount} of ${assetCount} (${completePercentage} %) existing asset(s) so far in library ${library.id}`,
);
}
if (currentAssetCount) {
this.logger.log(`Finished queuing ${currentAssetCount} asset check(s) for library ${library.id}`);
} }
return JobStatus.SUCCESS; return JobStatus.SUCCESS;

View File

@ -662,10 +662,10 @@ export class MetadataService extends BaseService {
let dateTimeOriginal = dateTime?.toDate(); let dateTimeOriginal = dateTime?.toDate();
let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate(); let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate();
if (!localDateTime || !dateTimeOriginal) { if (!localDateTime || !dateTimeOriginal) {
this.logger.debug(
`No exif date time found, falling back on earliest of file creation and modification for assset ${asset.id}: ${asset.originalPath}`,
);
const earliestDate = this.earliestDate(asset.fileModifiedAt, asset.fileCreatedAt); const earliestDate = this.earliestDate(asset.fileModifiedAt, asset.fileCreatedAt);
this.logger.debug(
`No exif date time found, falling back on ${earliestDate.toISOString()}, earliest of file creation and modification for assset ${asset.id}: ${asset.originalPath}`,
);
dateTimeOriginal = earliestDate; dateTimeOriginal = earliestDate;
localDateTime = earliestDate; localDateTime = earliestDate;
} }

View File

@ -208,17 +208,23 @@ export interface IAssetDeleteJob extends IEntityJob {
deleteOnDisk: boolean; deleteOnDisk: boolean;
} }
export interface ILibraryFileJob extends IEntityJob { export interface ILibraryFileJob {
ownerId: string; libraryId: string;
assetPath: string; paths: string[];
progressCounter?: number;
totalAssets?: number;
} }
export interface ILibraryAssetJob extends IEntityJob { export interface ILibraryBulkIdsJob {
libraryId: string;
importPaths: string[]; importPaths: string[];
exclusionPatterns: string[]; exclusionPatterns: string[];
assetIds: string[];
progressCounter: number;
totalAssets: number;
} }
export interface IBulkEntityJob extends IBaseJob { export interface IBulkEntityJob {
ids: string[]; ids: string[];
} }
@ -354,10 +360,11 @@ export type JobItem =
| { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob } | { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob }
// Library Management // Library Management
| { name: JobName.LIBRARY_SYNC_FILE; data: ILibraryFileJob } | { name: JobName.LIBRARY_SYNC_FILES; data: ILibraryFileJob }
| { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob }
| { name: JobName.LIBRARY_SYNC_ASSET; data: ILibraryAssetJob } | { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryBulkIdsJob }
| { name: JobName.LIBRARY_ASSET_REMOVAL; data: ILibraryFileJob }
| { name: JobName.LIBRARY_DELETE; data: IEntityJob } | { name: JobName.LIBRARY_DELETE; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data?: IBaseJob } | { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data?: IBaseJob }
| { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob } | { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob }

View File

@ -44,6 +44,8 @@ export const anyUuid = (ids: string[]) => sql<string>`any(${`{${ids}}`}::uuid[])
export const asVector = (embedding: number[]) => sql<string>`${`[${embedding}]`}::vector`; export const asVector = (embedding: number[]) => sql<string>`${`[${embedding}]`}::vector`;
// Builds a Postgres `unnest(array[...]::text[])` expression that expands the given
// strings into one row per element (each row exposed as a text column).
export const unnest = (array: string[]) => sql<Record<string, string>>`unnest(array[${sql.join(array)}]::text[])`;
/** /**
* Mainly for type debugging to make VS Code display a more useful tooltip. * Mainly for type debugging to make VS Code display a more useful tooltip.
* Source: https://stackoverflow.com/a/69288824 * Source: https://stackoverflow.com/a/69288824

View File

@ -1,4 +1,4 @@
import { getKeysDeep, unsetDeep } from 'src/utils/misc'; import { getKeysDeep, globToSqlPattern, unsetDeep } from 'src/utils/misc';
import { describe, expect, it } from 'vitest'; import { describe, expect, it } from 'vitest';
describe('getKeysDeep', () => { describe('getKeysDeep', () => {
@ -51,3 +51,19 @@ describe('unsetDeep', () => {
expect(unsetDeep({ foo: 'bar', nested: { enabled: true } }, 'nested.enabled')).toEqual({ foo: 'bar' }); expect(unsetDeep({ foo: 'bar', nested: { enabled: true } }, 'nested.enabled')).toEqual({ foo: 'bar' });
}); });
}); });
describe('globToSqlPattern', () => {
  // Each entry pairs a glob exclusion pattern with its expected Postgres LIKE translation.
  const cases: [glob: string, expected: string][] = [
    ['**/Raw/**', '%/Raw/%'],
    ['**/abc/*.tif', '%/abc/%.tif'],
    ['**/*.tif', '%/%.tif'],
    ['**/*.jp?', '%/%.jp_'],
    ['**/@eaDir/**', '%/@eaDir/%'],
    ['**/._*', '%/._%'],
    ['/absolute/path/**', '/absolute/path/%'],
  ];

  for (const [glob, expected] of cases) {
    it(`should convert ${glob} to ${expected}`, () => {
      expect(globToSqlPattern(glob)).toEqual(expected);
    });
  }
});

View File

@ -10,6 +10,8 @@ import { ReferenceObject, SchemaObject } from '@nestjs/swagger/dist/interfaces/o
import _ from 'lodash'; import _ from 'lodash';
import { writeFileSync } from 'node:fs'; import { writeFileSync } from 'node:fs';
import path from 'node:path'; import path from 'node:path';
import picomatch from 'picomatch';
import parse from 'picomatch/lib/parse';
import { SystemConfig } from 'src/config'; import { SystemConfig } from 'src/config';
import { CLIP_MODEL_INFO, serverVersion } from 'src/constants'; import { CLIP_MODEL_INFO, serverVersion } from 'src/constants';
import { extraSyncModels } from 'src/dtos/sync.dto'; import { extraSyncModels } from 'src/dtos/sync.dto';
@ -268,3 +270,35 @@ export const useSwagger = (app: INestApplication, { write }: { write: boolean })
writeFileSync(outputPath, JSON.stringify(patchOpenAPI(specification), null, 2), { encoding: 'utf8' }); writeFileSync(outputPath, JSON.stringify(patchOpenAPI(specification), null, 2), { encoding: 'utf8' });
} }
}; };
/**
 * Translates a single picomatch token into its Postgres LIKE-pattern equivalent.
 * Unknown token types (extglobs, brackets, etc.) are dropped, so only basic
 * globs translate faithfully — see the external-library docs for the caveat.
 */
const convertTokenToSqlPattern = (token: parse.Token): string => {
  switch (token.type) {
    case 'slash': {
      return '/';
    }
    case 'text': {
      // Literal text can itself contain the LIKE wildcard '%' (e.g. a folder
      // named "100%"); escape it so it matches literally instead of anything.
      return token.value.replaceAll('%', String.raw`\%`);
    }
    case 'globstar':
    case 'star': {
      // '*' and '**' both collapse to the LIKE any-sequence wildcard.
      return '%';
    }
    case 'underscore': {
      // A literal '_' must be escaped: unescaped '_' is LIKE's single-char wildcard.
      return String.raw`\_`;
    }
    case 'qmark': {
      // Glob '?' (any single character) maps to LIKE '_'.
      return '_';
    }
    case 'dot': {
      return '.';
    }
    default: {
      return '';
    }
  }
};
/**
 * Converts a glob exclusion pattern into a Postgres LIKE pattern by tokenizing
 * it with picomatch and translating the tokens one by one.
 */
export const globToSqlPattern = (glob: string) => {
  const { tokens } = picomatch.parse(glob);
  let pattern = '';
  for (const token of tokens) {
    pattern += convertTokenToSqlPattern(token);
  }
  return pattern;
};

View File

@ -296,6 +296,7 @@ export const assetStub = {
isFavorite: false, isFavorite: false,
isArchived: false, isArchived: false,
duration: null, duration: null,
libraryId: 'library-id',
isVisible: true, isVisible: true,
isExternal: false, isExternal: false,
livePhotoVideo: null, livePhotoVideo: null,

View File

@ -5,6 +5,7 @@ import { Mocked, vitest } from 'vitest';
export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetRepository>> => { export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetRepository>> => {
return { return {
create: vitest.fn(), create: vitest.fn(),
createAll: vitest.fn(),
upsertExif: vitest.fn(), upsertExif: vitest.fn(),
upsertJobStatus: vitest.fn(), upsertJobStatus: vitest.fn(),
getByDayOfYear: vitest.fn(), getByDayOfYear: vitest.fn(),
@ -23,6 +24,8 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getAll: vitest.fn().mockResolvedValue({ items: [], hasNextPage: false }), getAll: vitest.fn().mockResolvedValue({ items: [], hasNextPage: false }),
getAllByDeviceId: vitest.fn(), getAllByDeviceId: vitest.fn(),
getLivePhotoCount: vitest.fn(), getLivePhotoCount: vitest.fn(),
getAllInLibrary: vitest.fn(),
getLibraryAssetCount: vitest.fn(),
updateAll: vitest.fn(), updateAll: vitest.fn(),
updateDuplicates: vitest.fn(), updateDuplicates: vitest.fn(),
getByLibraryIdAndOriginalPath: vitest.fn(), getByLibraryIdAndOriginalPath: vitest.fn(),
@ -39,5 +42,8 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getDuplicates: vitest.fn(), getDuplicates: vitest.fn(),
upsertFile: vitest.fn(), upsertFile: vitest.fn(),
upsertFiles: vitest.fn(), upsertFiles: vitest.fn(),
detectOfflineExternalAssets: vitest.fn(),
filterNewExternalAssetPaths: vitest.fn(),
updateByLibraryId: vitest.fn(),
}; };
}; };