feat(server): library refresh go brrr (#14456)

* feat: brr

Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>

This commit is contained in: parent bc61497461 · commit 3af26ee94a
@@ -37,7 +37,7 @@ To validate that Immich can reach your external library, start a shell inside the container

### Exclusion Patterns

By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns that are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library. Under the hood, Immich uses the [glob](https://www.npmjs.com/package/glob) package to match patterns, so please refer to [their documentation](https://github.com/isaacs/node-glob#glob-primer) to see what patterns are supported.
By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns that are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library.

Some basic examples:

@@ -48,7 +48,11 @@ Some basic examples:

Special characters such as @ should be escaped, for instance:

- `**/\@eadir/**` will exclude all files in any directory named `@eadir`
- `**/\@eaDir/**` will exclude all files in any directory named `@eaDir`

:::info
Internally, Immich uses the [glob](https://www.npmjs.com/package/glob) package to process exclusion patterns, and sometimes those patterns are translated into [Postgres LIKE patterns](https://www.postgresql.org/docs/current/functions-matching.html). The intention is to support basic folder exclusions, but we recommend against advanced usage, since such patterns can't reliably be translated to the Postgres syntax. Please refer to the [glob documentation](https://github.com/isaacs/node-glob#glob-primer) for a basic overview of glob patterns.
:::
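
To make that translation concrete, here is a minimal sketch of how a simple folder-exclusion glob could be mapped to a LIKE pattern. This is an illustrative simplification, not the actual `globToSqlPattern` helper from the Immich source (which handles more cases):

```ts
// Illustrative only: a naive glob-to-LIKE mapping. The real helper is more
// careful (for example, this sketch does not escape literal % or _ characters).
const globToLike = (pattern: string): string =>
  pattern.replaceAll('**', '%').replaceAll('*', '%').replaceAll('?', '_');

globToLike('**/Raw/**'); // => '%/Raw/%'
globToLike('**/*.tif'); // => '%/%.tif'
```

Glob features with no LIKE equivalent, such as brace expansion or negation, are exactly the "advanced usage" the note above warns about.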

### Automatic watching (EXPERIMENTAL)
@@ -490,7 +490,7 @@ describe('/libraries', () => {
      utils.removeImageFile(`${testAssetDir}/temp/reimport/asset.jpg`);
    });

    it('should not reimport unmodified files', async () => {
    it('should not reimport a file with unchanged timestamp', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
        ownerId: admin.userId,
        importPaths: [`${testAssetDirInternal}/temp/reimport`],
@@ -933,6 +933,8 @@ describe('/libraries', () => {

      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });

      expect(assets.count).toBe(1);

      utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`);

      await utils.scan(admin.accessToken, library.id);
@@ -963,6 +965,58 @@ describe('/libraries', () => {
      }
    });

    it('should set a trashed offline asset to online but keep it in trash', async () => {
      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

      const library = await utils.createLibrary(admin.accessToken, {
        ownerId: admin.userId,
        importPaths: [`${testAssetDirInternal}/temp/offline`],
      });

      await utils.scan(admin.accessToken, library.id);

      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });

      expect(assets.count).toBe(1);

      await utils.deleteAssets(admin.accessToken, [assets.items[0].id]);

      {
        const trashedAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);

        expect(trashedAsset.isTrashed).toBe(true);
      }

      utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`);

      await utils.scan(admin.accessToken, library.id);

      const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
      expect(offlineAsset.isTrashed).toBe(true);
      expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`);
      expect(offlineAsset.isOffline).toBe(true);

      {
        const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true });
        expect(assets.count).toBe(1);
      }

      utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`);

      await utils.scan(admin.accessToken, library.id);

      const backOnlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);

      expect(backOnlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`);
      expect(backOnlineAsset.isOffline).toBe(false);
      expect(backOnlineAsset.isTrashed).toBe(true);

      {
        const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true });
        expect(assets.count).toBe(1);
      }
    });

    it('should not set an offline asset to online if its file exists, is not covered by an exclusion pattern, but is outside of all import paths', async () => {
      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

@@ -1024,16 +1078,17 @@ describe('/libraries', () => {

      await utils.scan(admin.accessToken, library.id);

      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
      {
        const { assets: assetsBefore } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
        expect(assetsBefore.count).toBe(1);
      }

      utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`);

      await utils.scan(admin.accessToken, library.id);

      {
        const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true });
        expect(assets.count).toBe(1);
      }
      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true });
      expect(assets.count).toBe(1);

      const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
@@ -57,6 +57,7 @@ export default [
      'unicorn/no-thenable': 'off',
      'unicorn/import-style': 'off',
      'unicorn/prefer-structured-clone': 'off',
      'unicorn/no-for-loop': 'off',
      '@typescript-eslint/await-thenable': 'error',
      '@typescript-eslint/no-misused-promises': 'error',
      'require-await': 'off',
@@ -469,8 +469,9 @@ export enum JobName {
  // library management
  LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files',
  LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets',
  LIBRARY_SYNC_FILE = 'library-sync-file',
  LIBRARY_SYNC_ASSET = 'library-sync-asset',
  LIBRARY_SYNC_FILES = 'library-sync-files',
  LIBRARY_SYNC_ASSETS = 'library-sync-assets',
  LIBRARY_ASSET_REMOVAL = 'handle-library-file-deletion',
  LIBRARY_DELETE = 'library-delete',
  LIBRARY_QUEUE_SCAN_ALL = 'library-queue-scan-all',
  LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup',
@@ -1,5 +1,5 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, Updateable, sql } from 'kysely';
import { Insertable, Kysely, UpdateResult, Updateable, sql } from 'kysely';
import { isEmpty, isUndefined, omitBy } from 'lodash';
import { InjectKysely } from 'nestjs-kysely';
import { ASSET_FILE_CONFLICT_KEYS, EXIF_CONFLICT_KEYS, JOB_STATUS_CONFLICT_KEYS } from 'src/constants';
@@ -24,7 +24,8 @@ import {
} from 'src/entities/asset.entity';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import { anyUuid, asUuid, mapUpsertColumns } from 'src/utils/database';
import { anyUuid, asUuid, mapUpsertColumns, unnest } from 'src/utils/database';
import { globToSqlPattern } from 'src/utils/misc';
import { Paginated, PaginationOptions, paginationHelper } from 'src/utils/pagination';

export type AssetStats = Record<AssetType, number>;
@@ -191,6 +192,10 @@ export class AssetRepository {
      .executeTakeFirst() as any as Promise<AssetEntityPlaceholder>;
  }

  createAll(assets: Insertable<Assets>[]): Promise<AssetEntity[]> {
    return this.db.insertInto('assets').values(assets).returningAll().execute() as any as Promise<AssetEntity[]>;
  }

  @GenerateSql({ params: [DummyValue.UUID, { day: 1, month: 1 }] })
  getByDayOfYear(ownerIds: string[], { day, month }: MonthDay) {
    return this.db
@@ -384,6 +389,17 @@ export class AssetRepository {
    return paginationHelper(items as any as AssetEntity[], pagination.take);
  }

  async getAllInLibrary(pagination: PaginationOptions, libraryId: string): Paginated<AssetEntity> {
    const builder = this.db
      .selectFrom('assets')
      .select('id')
      .where('libraryId', '=', asUuid(libraryId))
      .limit(pagination.take + 1)
      .offset(pagination.skip ?? 0);
    const items = await builder.execute();
    return paginationHelper(items as any as AssetEntity[], pagination.take);
  }

  /**
   * Get assets by device's Id on the database
   * @param ownerId
@@ -470,6 +486,10 @@ export class AssetRepository {
    await this.db.updateTable('assets').set(options).where('id', '=', anyUuid(ids)).execute();
  }

  async updateByLibraryId(libraryId: string, options: Updateable<Assets>): Promise<void> {
    await this.db.updateTable('assets').set(options).where('libraryId', '=', asUuid(libraryId)).execute();
  }

  @GenerateSql({
    params: [{ targetDuplicateId: DummyValue.UUID, duplicateIds: [DummyValue.UUID], assetIds: [DummyValue.UUID] }],
  })
@@ -939,4 +959,64 @@ export class AssetRepository {
      )
      .execute();
  }

  @GenerateSql({
    params: [{ libraryId: DummyValue.UUID, importPaths: [DummyValue.STRING], exclusionPatterns: [DummyValue.STRING] }],
  })
  async detectOfflineExternalAssets(
    libraryId: string,
    importPaths: string[],
    exclusionPatterns: string[],
  ): Promise<UpdateResult> {
    const paths = importPaths.map((importPath) => `${importPath}%`);
    const exclusions = exclusionPatterns.map((pattern) => globToSqlPattern(pattern));

    return this.db
      .updateTable('assets')
      .set({
        isOffline: true,
        deletedAt: new Date(),
      })
      .where('isOffline', '=', false)
      .where('isExternal', '=', true)
      .where('libraryId', '=', asUuid(libraryId))
      .where((eb) =>
        eb.or([eb('originalPath', 'not like', paths.join('|')), eb('originalPath', 'like', exclusions.join('|'))]),
      )
      .executeTakeFirstOrThrow();
  }

  @GenerateSql({
    params: [{ libraryId: DummyValue.UUID, paths: [DummyValue.STRING] }],
  })
  async filterNewExternalAssetPaths(libraryId: string, paths: string[]): Promise<string[]> {
    const result = await this.db
      .selectFrom(unnest(paths).as('path'))
      .select('path')
      .where((eb) =>
        eb.not(
          eb.exists(
            this.db
              .selectFrom('assets')
              .select('originalPath')
              .whereRef('assets.originalPath', '=', eb.ref('path'))
              .where('libraryId', '=', asUuid(libraryId))
              .where('isExternal', '=', true),
          ),
        ),
      )
      .execute();

    return result.map((row) => row.path as string);
  }

  async getLibraryAssetCount(libraryId: string): Promise<number> {
    const { count } = await this.db
      .selectFrom('assets')
      .select((eb) => eb.fn.countAll().as('count'))
      .where('libraryId', '=', asUuid(libraryId))
      .executeTakeFirstOrThrow();

    return Number(count);
  }
}
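
Taken together, the new repository methods support a bulk scan flow: count the library, mark offline assets in a single UPDATE, and filter crawled paths down to the ones that still need rows. A hypothetical caller might wire them up like this (names taken from the diff above; the real wiring lives in `library.service.ts` further down):

```ts
// Hypothetical usage sketch, for illustration only.
const total = await assetRepository.getLibraryAssetCount(library.id);

if (total > 0) {
  // One UPDATE marks every asset that left the import paths (or now matches
  // an exclusion pattern) as offline, instead of checking assets one by one.
  const { numUpdatedRows } = await assetRepository.detectOfflineExternalAssets(
    library.id,
    library.importPaths,
    library.exclusionPatterns,
  );
  console.log(`marked ${numUpdatedRows} asset(s) offline`);
}

// Only paths that are not yet imported into this library need new asset rows.
const newPaths = await assetRepository.filterNewExternalAssetPaths(library.id, crawledPaths);
```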
@@ -26,6 +26,13 @@ const userColumns = [
  'users.profileChangedAt',
] as const;

export enum AssetSyncResult {
  DO_NOTHING,
  UPDATE,
  OFFLINE,
  CHECK_OFFLINE,
}
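// A sketch of how each result is acted on (assumed from handleSyncAssets in
// library.service.ts further down, not a verbatim excerpt):
//
//   DO_NOTHING    -> file exists and its mtime is unchanged; skip the asset
//   UPDATE        -> mtime changed; re-queue sidecar discovery and metadata
//   OFFLINE       -> file is gone or excluded; trash it and mark it offline
//   CHECK_OFFLINE -> asset was already offline; re-check import paths and
//                    exclusion patterns to decide whether it is back online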

const withOwner = (eb: ExpressionBuilder<DB, 'libraries'>) => {
  return jsonObjectFrom(eb.selectFrom('users').whereRef('users.id', '=', 'libraries.ownerId').select(userColumns)).as(
    'owner',
@@ -6,7 +6,7 @@ import { mapLibrary } from 'src/dtos/library.dto';
import { UserEntity } from 'src/entities/user.entity';
import { AssetType, ImmichWorker, JobName, JobStatus } from 'src/enum';
import { LibraryService } from 'src/services/library.service';
import { ILibraryAssetJob, ILibraryFileJob } from 'src/types';
import { ILibraryBulkIdsJob, ILibraryFileJob } from 'src/types';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { libraryStub } from 'test/fixtures/library.stub';
@@ -22,10 +22,11 @@ async function* mockWalk() {

describe(LibraryService.name, () => {
  let sut: LibraryService;

  let mocks: ServiceMocks;

  beforeEach(() => {
    ({ sut, mocks } = newTestService(LibraryService));
    ({ sut, mocks } = newTestService(LibraryService, {}));

    mocks.database.tryLock.mockResolvedValue(true);
    mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
@@ -152,21 +153,74 @@ describe(LibraryService.name, () => {

  describe('handleQueueSyncFiles', () => {
    it('should queue refresh of a new asset', async () => {
      mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
      mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
      mocks.storage.walk.mockImplementation(mockWalk);
      mocks.storage.stat.mockResolvedValue({ isDirectory: () => true } as Stats);
      mocks.storage.checkFileExists.mockResolvedValue(true);
      mocks.asset.filterNewExternalAssetPaths.mockResolvedValue(['/data/user1/photo.jpg']);

      await sut.handleQueueSyncFiles({ id: libraryStub.externalLibrary1.id });
      await sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id });

      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.LIBRARY_SYNC_FILE,
          data: {
            id: libraryStub.externalLibrary1.id,
            ownerId: libraryStub.externalLibrary1.owner.id,
            assetPath: '/data/user1/photo.jpg',
          },
      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.LIBRARY_SYNC_FILES,
        data: {
          libraryId: libraryStub.externalLibraryWithImportPaths1.id,
          paths: ['/data/user1/photo.jpg'],
          progressCounter: 1,
        },
      ]);
    });
  });

    it("should fail when library can't be found", async () => {
      await expect(sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id })).resolves.toBe(
        JobStatus.SKIPPED,
      );
    });

    it('should ignore import paths that do not exist', async () => {
      mocks.storage.stat.mockImplementation((path): Promise<Stats> => {
        if (path === libraryStub.externalLibraryWithImportPaths1.importPaths[0]) {
          const error = { code: 'ENOENT' } as any;
          throw error;
        }
        return Promise.resolve({
          isDirectory: () => true,
        } as Stats);
      });

      mocks.storage.checkFileExists.mockResolvedValue(true);

      mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);

      await sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id });

      expect(mocks.storage.walk).toHaveBeenCalledWith({
        pathsToCrawl: [libraryStub.externalLibraryWithImportPaths1.importPaths[1]],
        exclusionPatterns: [],
        includeHidden: false,
        take: JOBS_LIBRARY_PAGINATION_SIZE,
      });
    });
  });
@@ -199,142 +253,226 @@ describe(LibraryService.name, () => {
    });
  });

  describe('handleQueueRemoveDeleted', () => {
    it('should queue online check of existing assets', async () => {
  describe('handleQueueSyncAssets', () => {
    it('should call the offline check', async () => {
      mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
      mocks.storage.walk.mockImplementation(async function* generator() {});
      mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
      mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });

      await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id });
      const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id });

      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.LIBRARY_SYNC_ASSET,
          data: {
            id: assetStub.external.id,
            importPaths: libraryStub.externalLibrary1.importPaths,
            exclusionPatterns: [],
          },
        },
      ]);
      expect(response).toBe(JobStatus.SUCCESS);
      expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith(
        libraryStub.externalLibrary1.id,
        libraryStub.externalLibrary1.importPaths,
        libraryStub.externalLibrary1.exclusionPatterns,
      );
    });

    it("should fail when library can't be found", async () => {
    it('should skip an empty library', async () => {
      mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
      mocks.storage.walk.mockImplementation(async function* generator() {});
      mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
      mocks.asset.getLibraryAssetCount.mockResolvedValue(0);
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });

      const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id });

      expect(response).toBe(JobStatus.SUCCESS);
      expect(mocks.asset.detectOfflineExternalAssets).not.toHaveBeenCalled();
    });

    it('should queue asset sync', async () => {
      mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
      mocks.storage.walk.mockImplementation(async function* generator() {});
      mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
      mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
      mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(0) });
      mocks.asset.getAllInLibrary.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });

      const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibraryWithImportPaths1.id });

      expect(mocks.job.queue).toBeCalledWith({
        name: JobName.LIBRARY_SYNC_ASSETS,
        data: {
          libraryId: libraryStub.externalLibraryWithImportPaths1.id,
          importPaths: libraryStub.externalLibraryWithImportPaths1.importPaths,
          exclusionPatterns: libraryStub.externalLibraryWithImportPaths1.exclusionPatterns,
          assetIds: [assetStub.external.id],
          progressCounter: 1,
          totalAssets: 1,
        },
      });

      expect(response).toBe(JobStatus.SUCCESS);
      expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith(
        libraryStub.externalLibraryWithImportPaths1.id,
        libraryStub.externalLibraryWithImportPaths1.importPaths,
        libraryStub.externalLibraryWithImportPaths1.exclusionPatterns,
      );
    });

    it("should fail if library can't be found", async () => {
      await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SKIPPED);
    });
  });

  describe('handleSyncAsset', () => {
    it('should skip missing assets', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
        importPaths: ['/'],
        exclusionPatterns: [],
      };

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED);

      expect(mocks.asset.remove).not.toHaveBeenCalled();
    });

  describe('handleSyncAssets', () => {
    it('should offline assets no longer on disk', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.external.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getById.mockResolvedValue(assetStub.external);
      mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
      mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory'));

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
        isOffline: true,
        deletedAt: expect.any(Date),
        deletedAt: expect.anything(),
      });
    });

    it('should offline assets matching an exclusion pattern', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
        importPaths: ['/'],
        exclusionPatterns: ['**/user1/**'],
      };

      mocks.asset.getById.mockResolvedValue(assetStub.external);

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
      expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
        isOffline: true,
        deletedAt: expect.any(Date),
      });
    });

    it('should set assets outside of import paths as offline', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
    it('should set assets deleted from disk as offline', async () => {
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.external.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/data/user2'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getById.mockResolvedValue(assetStub.external);
      mocks.storage.checkFileExists.mockResolvedValue(true);
      mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
      mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
        isOffline: true,
        deletedAt: expect.any(Date),
        deletedAt: expect.anything(),
      });
    });

    it('should do nothing with online assets', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
        importPaths: ['/'],
    it('should do nothing with offline assets deleted from disk', async () => {
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.trashedOffline.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/data/user2'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getById.mockResolvedValue(assetStub.external);
      mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
      mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
      mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).not.toHaveBeenCalled();
    });

    it('should un-trash an asset previously marked as offline', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
        importPaths: ['/'],
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.trashedOffline.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/original/'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getById.mockResolvedValue(assetStub.trashedOffline);
      mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);
      mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
      mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.trashedOffline.id], {
        deletedAt: null,
        fileModifiedAt: assetStub.trashedOffline.fileModifiedAt,
      expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
        isOffline: false,
        originalFileName: 'path.jpg',
        deletedAt: null,
      });
    });

    it('should not touch fileCreatedAt when un-trashing an asset previously marked as offline', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
        importPaths: ['/'],
        exclusionPatterns: [],
    it('should do nothing with offline asset if covered by exclusion pattern', async () => {
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.trashedOffline.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/original/'],
        exclusionPatterns: ['**/path.jpg'],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getById.mockResolvedValue(assetStub.trashedOffline);
      mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
      mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);

      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).not.toHaveBeenCalled();

      expect(mocks.job.queueAll).not.toHaveBeenCalled();
    });

    it('should do nothing with offline asset if not in import path', async () => {
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.trashedOffline.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/import/'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
      mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);

      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).not.toHaveBeenCalled();

      expect(mocks.job.queueAll).not.toHaveBeenCalled();
    });

    it('should do nothing with unchanged online assets', async () => {
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.external.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
      mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);

      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).not.toHaveBeenCalled();
    });

    it('should not touch fileCreatedAt when un-trashing an asset previously marked as offline', async () => {
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.trashedOffline.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      mocks.asset.getByIds.mockResolvedValue([assetStub.trashedOffline]);
      mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.updateAll).toHaveBeenCalledWith(
        [assetStub.trashedOffline.id],
@@ -343,30 +481,41 @@ describe(LibraryService.name, () => {
        }),
      );
    });
  });

    it('should update file when mtime has changed', async () => {
      const mockAssetJob: ILibraryAssetJob = {
        id: assetStub.external.id,
        importPaths: ['/'],
        exclusionPatterns: [],
      };
    it('should update with online assets that have changed', async () => {
      const mockAssetJob: ILibraryBulkIdsJob = {
        assetIds: [assetStub.external.id],
        libraryId: libraryStub.externalLibrary1.id,
        importPaths: ['/'],
        exclusionPatterns: [],
        totalAssets: 1,
        progressCounter: 0,
      };

      const newMTime = new Date();
      mocks.asset.getById.mockResolvedValue(assetStub.external);
      mocks.storage.stat.mockResolvedValue({ mtime: newMTime } as Stats);
      if (assetStub.external.fileModifiedAt == null) {
        throw new Error('fileModifiedAt is null');
      }

      await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
      const mtime = new Date(assetStub.external.fileModifiedAt.getDate() + 1);

      expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
        fileModifiedAt: newMTime,
        isOffline: false,
        originalFileName: 'photo.jpg',
        deletedAt: null,
      mocks.asset.getByIds.mockResolvedValue([assetStub.external]);
      mocks.storage.stat.mockResolvedValue({ mtime } as Stats);

      await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.SIDECAR_DISCOVERY,
          data: {
            id: assetStub.external.id,
            source: 'upload',
          },
        },
      ]);
    });
  });

  describe('handleSyncFile', () => {
  describe('handleSyncFiles', () => {
    let mockUser: UserEntity;

    beforeEach(() => {
@@ -381,187 +530,57 @@ describe(LibraryService.name, () => {

    it('should import a new asset', async () => {
      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: mockUser.id,
        assetPath: '/data/user1/photo.jpg',
        libraryId: libraryStub.externalLibrary1.id,
        paths: ['/data/user1/photo.jpg'],
      };

      mocks.asset.create.mockResolvedValue(assetStub.image);
      mocks.asset.createAll.mockResolvedValue([assetStub.image]);
      mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
      await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.create.mock.calls).toEqual([
      expect(mocks.asset.createAll.mock.calls).toEqual([
        [
          {
            ownerId: mockUser.id,
            libraryId: libraryStub.externalLibrary1.id,
            checksum: expect.any(Buffer),
            originalPath: '/data/user1/photo.jpg',
            deviceAssetId: expect.any(String),
            deviceId: 'Library Import',
            fileCreatedAt: expect.any(Date),
            fileModifiedAt: expect.any(Date),
            localDateTime: expect.any(Date),
            type: AssetType.IMAGE,
            originalFileName: 'photo.jpg',
            isExternal: true,
          },
        [
          expect.objectContaining({
            ownerId: mockUser.id,
            libraryId: libraryStub.externalLibrary1.id,
            originalPath: '/data/user1/photo.jpg',
            deviceId: 'Library Import',
            type: AssetType.IMAGE,
            originalFileName: 'photo.jpg',
            isExternal: true,
          }),
        ],
        ],
      ]);

      expect(mocks.job.queue.mock.calls).toEqual([
      expect(mocks.job.queueAll.mock.calls).toEqual([
        [
          {
            name: JobName.SIDECAR_DISCOVERY,
            data: {
              id: assetStub.image.id,
              source: 'upload',
          [
            {
              name: JobName.SIDECAR_DISCOVERY,
              data: {
                id: assetStub.image.id,
                source: 'upload',
              },
            },
          },
        ],
      ]);
    });

    it('should import a new video', async () => {
      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: mockUser.id,
        assetPath: '/data/user1/video.mp4',
      };

      mocks.asset.create.mockResolvedValue(assetStub.video);
      mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.asset.create.mock.calls).toEqual([
        [
          {
            ownerId: mockUser.id,
            libraryId: libraryStub.externalLibrary1.id,
            checksum: expect.any(Buffer),
            originalPath: '/data/user1/video.mp4',
            deviceAssetId: expect.any(String),
            deviceId: 'Library Import',
            fileCreatedAt: expect.any(Date),
            fileModifiedAt: expect.any(Date),
            localDateTime: expect.any(Date),
            type: AssetType.VIDEO,
            originalFileName: 'video.mp4',
            isExternal: true,
          },
        ],
      ]);

      expect(mocks.job.queue.mock.calls).toEqual([
        [
          {
            name: JobName.SIDECAR_DISCOVERY,
            data: {
              id: assetStub.image.id,
              source: 'upload',
            },
          },
        ],
      ]);
    });

    it('should not import an asset to a soft deleted library', async () => {
      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: mockUser.id,
        assetPath: '/data/user1/photo.jpg',
        libraryId: libraryStub.externalLibrary1.id,
        paths: ['/data/user1/photo.jpg'],
      };

      mocks.asset.create.mockResolvedValue(assetStub.image);
      mocks.library.get.mockResolvedValue({ ...libraryStub.externalLibrary1, deletedAt: new Date() });

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
      await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);

      expect(mocks.asset.create.mock.calls).toEqual([]);
    });

    it('should not refresh a file whose mtime matches existing asset', async () => {
      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: mockUser.id,
        assetPath: assetStub.hasFileExtension.originalPath,
      };

      mocks.storage.stat.mockResolvedValue({
        size: 100,
        mtime: assetStub.hasFileExtension.fileModifiedAt,
        ctime: new Date('2023-01-01'),
      } as Stats);

      mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.hasFileExtension);

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);

      expect(mocks.job.queue).not.toHaveBeenCalled();
      expect(mocks.job.queueAll).not.toHaveBeenCalled();
    });

    it('should skip existing asset', async () => {
      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: mockUser.id,
        assetPath: '/data/user1/photo.jpg',
      };

      mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
    });

    it('should not refresh an asset trashed by user', async () => {
      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: mockUser.id,
        assetPath: assetStub.hasFileExtension.originalPath,
      };

      mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.trashed);

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);

      expect(mocks.job.queue).not.toHaveBeenCalled();
      expect(mocks.job.queueAll).not.toHaveBeenCalled();
    });

    it('should fail when the file could not be read', async () => {
      mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));

      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: userStub.admin.id,
        assetPath: '/data/user1/photo.jpg',
      };

      mocks.asset.create.mockResolvedValue(assetStub.image);

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
      expect(mocks.library.get).not.toHaveBeenCalled();
      expect(mocks.asset.create).not.toHaveBeenCalled();
    });

    it('should skip if the file could not be found', async () => {
      const error = new Error('File not found') as any;
      error.code = 'ENOENT';
      mocks.storage.stat.mockRejectedValue(error);

      const mockLibraryJob: ILibraryFileJob = {
        id: libraryStub.externalLibrary1.id,
        ownerId: userStub.admin.id,
        assetPath: '/data/user1/photo.jpg',
      };

      mocks.asset.create.mockResolvedValue(assetStub.image);

      await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
      expect(mocks.library.get).not.toHaveBeenCalled();
      expect(mocks.asset.create).not.toHaveBeenCalled();
      expect(mocks.asset.createAll.mock.calls).toEqual([]);
    });
  });
@@ -641,10 +660,6 @@ describe(LibraryService.name, () => {

      expect(mocks.library.getStatistics).toHaveBeenCalledWith(libraryStub.externalLibrary1.id);
    });

    it('should throw an error if the library could not be found', async () => {
      await expect(sut.getStatistics('foo')).rejects.toBeInstanceOf(BadRequestException);
    });
  });

  describe('create', () => {
@@ -898,19 +913,13 @@ describe(LibraryService.name, () => {

      await sut.watchAll();

      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.LIBRARY_SYNC_FILE,
          data: {
            id: libraryStub.externalLibraryWithImportPaths1.id,
            assetPath: '/foo/photo.jpg',
            ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
          },
      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.LIBRARY_SYNC_FILES,
        data: {
          libraryId: libraryStub.externalLibraryWithImportPaths1.id,
          paths: ['/foo/photo.jpg'],
        },
      ]);
      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
      ]);
    });
  });

  it('should handle a file change event', async () => {
@@ -923,19 +932,13 @@ describe(LibraryService.name, () => {

      await sut.watchAll();

      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.LIBRARY_SYNC_FILE,
          data: {
            id: libraryStub.externalLibraryWithImportPaths1.id,
            assetPath: '/foo/photo.jpg',
            ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
          },
      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.LIBRARY_SYNC_FILES,
        data: {
          libraryId: libraryStub.externalLibraryWithImportPaths1.id,
          paths: ['/foo/photo.jpg'],
        },
      ]);
      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
      ]);
    });
  });

  it('should handle a file unlink event', async () => {
@@ -943,14 +946,18 @@ describe(LibraryService.name, () => {
      mocks.library.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
      mocks.asset.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
      mocks.storage.watch.mockImplementation(
        makeMockWatcher({ items: [{ event: 'unlink', value: '/foo/photo.jpg' }] }),
        makeMockWatcher({ items: [{ event: 'unlink', value: assetStub.image.originalPath }] }),
      );

      await sut.watchAll();

      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
      ]);
      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.LIBRARY_ASSET_REMOVAL,
        data: {
          libraryId: libraryStub.externalLibraryWithImportPaths1.id,
          paths: [assetStub.image.originalPath],
        },
      });
    });

    it('should handle an error event', async () => {
@@ -966,10 +973,10 @@ describe(LibraryService.name, () => {
      await expect(sut.watchAll()).resolves.toBeUndefined();
    });

    it('should ignore unknown extensions', async () => {
    it('should not import a file with unknown extension', async () => {
      mocks.library.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
      mocks.library.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
      mocks.storage.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.jpg' }] }));
      mocks.storage.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.xyz' }] }));

      await sut.watchAll();

@@ -1100,27 +1107,6 @@ describe(LibraryService.name, () => {
    });
  });

  describe('handleQueueAssetOfflineCheck', () => {
    it('should queue removal jobs', async () => {
      mocks.library.get.mockResolvedValue(libraryStub.externalLibrary1);
      mocks.asset.getAll.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false });
      mocks.asset.getById.mockResolvedValue(assetStub.image1);

      await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.job.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.LIBRARY_SYNC_ASSET,
          data: {
            id: assetStub.image1.id,
            importPaths: libraryStub.externalLibrary1.importPaths,
            exclusionPatterns: libraryStub.externalLibrary1.exclusionPatterns,
          },
        },
      ]);
    });
  });

  describe('validate', () => {
    it('should not require import paths', async () => {
      await expect(sut.validate('library-id', {})).resolves.toEqual({ importPaths: [] });
@@ -1,5 +1,6 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { R_OK } from 'node:constants';
import { Stats } from 'node:fs';
import path, { basename, isAbsolute, parse } from 'node:path';
import picomatch from 'picomatch';
import { JOBS_LIBRARY_PAGINATION_SIZE } from 'src/constants';
@@ -16,9 +17,9 @@ import {
  ValidateLibraryResponseDto,
} from 'src/dtos/library.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { LibraryEntity } from 'src/entities/library.entity';
import { AssetType, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum';
import { AssetStatus, AssetType, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { AssetSyncResult } from 'src/repositories/library.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf } from 'src/types';
import { mimeTypes } from 'src/utils/mime-types';
@@ -98,6 +99,26 @@ export class LibraryService extends BaseService {
    let _resolve: () => void;
    const ready$ = new Promise<void>((resolve) => (_resolve = resolve));

    const handler = async (event: string, path: string) => {
      if (matcher(path)) {
        this.logger.debug(`File ${event} event received for ${path} in library ${library.id}`);
        await this.jobRepository.queue({
          name: JobName.LIBRARY_SYNC_FILES,
          data: { libraryId: library.id, paths: [path] },
        });
      } else {
        this.logger.verbose(`Ignoring file ${event} event for ${path} in library ${library.id}`);
      }
    };

    const deletionHandler = async (path: string) => {
      this.logger.debug(`File unlink event received for ${path} in library ${library.id}`);
      await this.jobRepository.queue({
        name: JobName.LIBRARY_ASSET_REMOVAL,
        data: { libraryId: library.id, paths: [path] },
      });
    };

    this.watchers[id] = this.storageRepository.watch(
      library.importPaths,
      {
@@ -107,43 +128,13 @@ export class LibraryService extends BaseService {
      {
        onReady: () => _resolve(),
        onAdd: (path) => {
          const handler = async () => {
            this.logger.debug(`File add event received for ${path} in library ${library.id}`);
            if (matcher(path)) {
              const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
              if (asset) {
                await this.syncAssets(library, [asset.id]);
              }
              if (matcher(path)) {
                await this.syncFiles(library, [path]);
              }
            }
          };
          return handlePromiseError(handler(), this.logger);
          return handlePromiseError(handler('add', path), this.logger);
        },
        onChange: (path) => {
          const handler = async () => {
            this.logger.debug(`Detected file change for ${path} in library ${library.id}`);
            const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
            if (asset) {
              await this.syncAssets(library, [asset.id]);
            }
            if (matcher(path)) {
              // Note: if the changed file was not previously imported, it will be imported now.
              await this.syncFiles(library, [path]);
            }
          };
          return handlePromiseError(handler(), this.logger);
          return handlePromiseError(handler('change', path), this.logger);
        },
        onUnlink: (path) => {
          const handler = async () => {
            this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`);
            const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
            if (asset) {
              await this.syncAssets(library, [asset.id]);
            }
          };
          return handlePromiseError(handler(), this.logger);
          return handlePromiseError(deletionHandler(path), this.logger);
        },
        onError: (error) => {
          this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`);
@@ -234,26 +225,38 @@ export class LibraryService extends BaseService {
    return mapLibrary(library);
  }

  private async syncFiles({ id, ownerId }: LibraryEntity, assetPaths: string[]) {
    await this.jobRepository.queueAll(
      assetPaths.map((assetPath) => ({
        name: JobName.LIBRARY_SYNC_FILE,
        data: {
          id,
          assetPath,
          ownerId,
        },
      })),
    );
  }
  @OnJob({ name: JobName.LIBRARY_SYNC_FILES, queue: QueueName.LIBRARY })
  async handleSyncFiles(job: JobOf<JobName.LIBRARY_SYNC_FILES>): Promise<JobStatus> {
    const library = await this.libraryRepository.get(job.libraryId);
    // We need to check if the library still exists as it could have been deleted after the scan was queued
    if (!library) {
      this.logger.debug(`Library ${job.libraryId} not found, skipping file import`);
      return JobStatus.FAILED;
    } else if (library.deletedAt) {
      this.logger.debug(`Library ${job.libraryId} is deleted, won't import assets into it`);
      return JobStatus.FAILED;
    }

  private async syncAssets({ importPaths, exclusionPatterns }: LibraryEntity, assetIds: string[]) {
    await this.jobRepository.queueAll(
      assetIds.map((assetId) => ({
        name: JobName.LIBRARY_SYNC_ASSET,
        data: { id: assetId, importPaths, exclusionPatterns },
      })),
    );
    const assetImports = job.paths.map((assetPath) => this.processEntity(assetPath, library.ownerId, job.libraryId));

    const assetIds: string[] = [];

    for (let i = 0; i < assetImports.length; i += 5000) {
      // Chunk the imports to avoid the postgres limit of max parameters at once
      const chunk = assetImports.slice(i, i + 5000);
      await this.assetRepository.createAll(chunk).then((assets) => assetIds.push(...assets.map((asset) => asset.id)));
    }

    const progressMessage =
      job.progressCounter && job.totalAssets
        ? `(${job.progressCounter} of ${job.totalAssets})`
        : `(${job.progressCounter} done so far)`;

    this.logger.log(`Imported ${assetIds.length} ${progressMessage} file(s) into library ${job.libraryId}`);

    await this.queuePostSyncJobs(assetIds);

    return JobStatus.SUCCESS;
  }
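// Why chunk by 5000? Postgres caps a single statement at 65535 bind
// parameters, and a bulk INSERT binds one parameter per column per row.
// Illustrative arithmetic, assuming the 13 columns produced by processEntity
// below: 5000 rows * 13 columns = 65000 parameters, safely under the limit.
// A generic helper expressing the same idea (hypothetical, not part of this
// diff):
//
//   function* chunks<T>(items: T[], size = 5000): Generator<T[]> {
//     for (let i = 0; i < items.length; i += size) {
//       yield items.slice(i, i + size);
//     }
//   }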
|
||||
|
||||
private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> {
|
||||
@ -336,6 +339,8 @@ export class LibraryService extends BaseService {
|
||||
async handleDeleteLibrary(job: JobOf<JobName.LIBRARY_DELETE>): Promise<JobStatus> {
|
||||
const libraryId = job.id;
|
||||
|
||||
await this.assetRepository.updateByLibraryId(libraryId, { deletedAt: new Date() });
|
||||
|
||||
const assetPagination = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
|
||||
this.assetRepository.getAll(pagination, { libraryId, withDeleted: true }),
|
||||
);
|
||||
@ -367,84 +372,52 @@ export class LibraryService extends BaseService {
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
@OnJob({ name: JobName.LIBRARY_SYNC_FILE, queue: QueueName.LIBRARY })
|
||||
async handleSyncFile(job: JobOf<JobName.LIBRARY_SYNC_FILE>): Promise<JobStatus> {
|
||||
// Only needs to handle new assets
|
||||
const assetPath = path.normalize(job.assetPath);
|
||||
private processEntity(filePath: string, ownerId: string, libraryId: string) {
|
||||
const assetPath = path.normalize(filePath);
|
||||
|
||||
let asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath);
|
||||
if (asset) {
|
||||
return JobStatus.SKIPPED;
|
||||
}
|
||||
|
||||
let stat;
|
||||
try {
|
||||
stat = await this.storageRepository.stat(assetPath);
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
this.logger.error(`File not found: ${assetPath}`);
|
||||
return JobStatus.SKIPPED;
|
||||
}
|
||||
this.logger.error(`Error reading file: ${assetPath}. Error: ${error}`);
|
||||
return JobStatus.FAILED;
|
||||
}
|
||||
|
||||
this.logger.log(`Importing new library asset: ${assetPath}`);
|
||||
|
||||
const library = await this.libraryRepository.get(job.id, true);
|
||||
if (!library || library.deletedAt) {
|
||||
this.logger.error('Cannot import asset into deleted library');
|
||||
return JobStatus.FAILED;
|
||||
}
|
||||
|
||||
// TODO: device asset id is deprecated, remove it
|
||||
const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, '');
|
||||
|
||||
const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);
|
||||
|
||||
const assetType = mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE;
|
||||
|
||||
const mtime = stat.mtime;
|
||||
|
||||
asset = await this.assetRepository.create({
|
||||
ownerId: job.ownerId,
|
||||
libraryId: job.id,
|
||||
checksum: pathHash,
|
||||
return {
|
||||
ownerId,
|
||||
libraryId,
|
||||
checksum: this.cryptoRepository.hashSha1(`path:${assetPath}`),
|
||||
originalPath: assetPath,
|
||||
deviceAssetId,
|
||||
|
||||
fileCreatedAt: null,
|
||||
fileModifiedAt: null,
|
||||
localDateTime: null,
|
||||
// TODO: device asset id is deprecated, remove it
|
||||
deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''),
|
||||
deviceId: 'Library Import',
|
||||
fileCreatedAt: mtime,
|
||||
fileModifiedAt: mtime,
|
||||
localDateTime: mtime,
|
||||
type: assetType,
|
||||
type: mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE,
|
||||
originalFileName: parse(assetPath).base,
|
||||
isExternal: true,
|
||||
});
|
||||
|
||||
await this.queuePostSyncJobs(asset);
|
||||
|
||||
return JobStatus.SUCCESS;
|
||||
livePhotoVideoId: null,
|
||||
};
|
||||
}
|
||||
|
||||
async queuePostSyncJobs(asset: AssetEntity) {
|
||||
this.logger.debug(`Queueing metadata extraction for: ${asset.originalPath}`);
|
||||
async queuePostSyncJobs(assetIds: string[]) {
|
||||
this.logger.debug(`Queuing sidecar discovery for ${assetIds.length} asset(s)`);
|
||||
|
||||
// We queue a sidecar discovery which, in turn, queues metadata extraction
|
||||
await this.jobRepository.queue({
|
||||
name: JobName.SIDECAR_DISCOVERY,
|
||||
data: { id: asset.id, source: 'upload' },
|
||||
});
|
||||
await this.jobRepository.queueAll(
|
||||
assetIds.map((assetId) => ({
|
||||
name: JobName.SIDECAR_DISCOVERY,
|
||||
data: { id: assetId, source: 'upload' },
|
||||
})),
|
||||
);
|
||||
}
|
||||
|
||||
async queueScan(id: string) {
|
||||
await this.findOrFail(id);
|
||||
|
||||
this.logger.log(`Starting to scan library ${id}`);
|
||||
|
||||
await this.jobRepository.queue({
|
||||
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
|
||||
data: {
|
||||
id,
|
||||
},
|
||||
});
|
||||
|
||||
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } });
|
||||
}

@ -454,11 +427,12 @@ export class LibraryService extends BaseService {

@OnJob({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, queue: QueueName.LIBRARY })
async handleQueueScanAll(): Promise<JobStatus> {
this.logger.log(`Refreshing all external libraries`);
this.logger.log(`Initiating scan of all external libraries...`);

await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} });

const libraries = await this.libraryRepository.getAll(true);

await this.jobRepository.queueAll(
libraries.map((library) => ({
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
@ -475,64 +449,141 @@ export class LibraryService extends BaseService {
},
})),
);

return JobStatus.SUCCESS;
}

@OnJob({ name: JobName.LIBRARY_SYNC_ASSET, queue: QueueName.LIBRARY })
async handleSyncAsset(job: JobOf<JobName.LIBRARY_SYNC_ASSET>): Promise<JobStatus> {
const asset = await this.assetRepository.getById(job.id);
if (!asset) {
return JobStatus.SKIPPED;
}
@OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY })
async handleSyncAssets(job: JobOf<JobName.LIBRARY_SYNC_ASSETS>): Promise<JobStatus> {
const assets = await this.assetRepository.getByIds(job.assetIds);

const markOffline = async (explanation: string) => {
if (!asset.isOffline) {
this.logger.debug(`${explanation}, removing: ${asset.originalPath}`);
await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() });
const assetIdsToOffline: string[] = [];
const trashedAssetIdsToOffline: string[] = [];
const assetIdsToOnline: string[] = [];
const trashedAssetIdsToOnline: string[] = [];
const assetIdsToUpdate: string[] = [];

this.logger.debug(`Checking batch of ${assets.length} existing asset(s) in library ${job.libraryId}`);

const stats = await Promise.all(
assets.map((asset) => this.storageRepository.stat(asset.originalPath).catch(() => null)),
);

for (let i = 0; i < assets.length; i++) {
const asset = assets[i];
const stat = stats[i];
const action = this.checkExistingAsset(asset, stat);
switch (action) {
case AssetSyncResult.OFFLINE: {
if (asset.status === AssetStatus.TRASHED) {
trashedAssetIdsToOffline.push(asset.id);
} else {
assetIdsToOffline.push(asset.id);
}
break;
}
case AssetSyncResult.UPDATE: {
assetIdsToUpdate.push(asset.id);
break;
}
case AssetSyncResult.CHECK_OFFLINE: {
const isInImportPath = job.importPaths.find((path) => asset.originalPath.startsWith(path));

if (!isInImportPath) {
this.logger.verbose(
`Offline asset ${asset.originalPath} is still not in any import path, keeping offline in library ${job.libraryId}`,
);
break;
}

const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern));

if (!isExcluded) {
this.logger.debug(`Offline asset ${asset.originalPath} is now online in library ${job.libraryId}`);
if (asset.status === AssetStatus.TRASHED) {
trashedAssetIdsToOnline.push(asset.id);
} else {
assetIdsToOnline.push(asset.id);
}
break;
}

this.logger.verbose(
`Offline asset ${asset.originalPath} is in an import path but still covered by exclusion pattern, keeping offline in library ${job.libraryId}`,
);

break;
}
}
};

const isInPath = job.importPaths.find((path) => asset.originalPath.startsWith(path));
if (!isInPath) {
await markOffline('Asset is no longer in an import path');
return JobStatus.SUCCESS;
}

const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern));
if (isExcluded) {
await markOffline('Asset is covered by an exclusion pattern');
return JobStatus.SUCCESS;
const promises = [];
if (assetIdsToOffline.length > 0) {
promises.push(this.assetRepository.updateAll(assetIdsToOffline, { isOffline: true, deletedAt: new Date() }));
}

let stat;
try {
stat = await this.storageRepository.stat(asset.originalPath);
} catch {
await markOffline('Asset is no longer on disk or is inaccessible because of permissions');
return JobStatus.SUCCESS;
if (trashedAssetIdsToOffline.length > 0) {
promises.push(this.assetRepository.updateAll(trashedAssetIdsToOffline, { isOffline: true }));
}

const mtime = stat.mtime;
const isAssetModified = !asset.fileModifiedAt || mtime.toISOString() !== asset.fileModifiedAt.toISOString();

if (asset.isOffline || isAssetModified) {
this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`);
//TODO: When we have asset status, we need to leave deletedAt as is when status is trashed
await this.assetRepository.updateAll([asset.id], {
isOffline: false,
deletedAt: null,
fileModifiedAt: mtime,
originalFileName: parse(asset.originalPath).base,
});
if (assetIdsToOnline.length > 0) {
promises.push(this.assetRepository.updateAll(assetIdsToOnline, { isOffline: false, deletedAt: null }));
}

if (isAssetModified) {
this.logger.debug(`Asset was modified, queuing metadata extraction for: ${asset.originalPath}`);
await this.queuePostSyncJobs(asset);
if (trashedAssetIdsToOnline.length > 0) {
promises.push(this.assetRepository.updateAll(trashedAssetIdsToOnline, { isOffline: false }));
}

if (assetIdsToUpdate.length > 0) {
promises.push(this.queuePostSyncJobs(assetIdsToUpdate));
}

await Promise.all(promises);

const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length - assetIdsToOnline.length;
const cumulativePercentage = ((100 * job.progressCounter) / job.totalAssets).toFixed(1);
this.logger.log(
`Checked existing asset(s): ${assetIdsToOffline.length + trashedAssetIdsToOffline.length} offlined, ${assetIdsToOnline.length + trashedAssetIdsToOnline.length} onlined, ${assetIdsToUpdate.length} updated, ${remainingCount} unchanged of current batch of ${assets.length} (Total progress: ${job.progressCounter} of ${job.totalAssets}, ${cumulativePercentage} %) in library ${job.libraryId}.`,
);

return JobStatus.SUCCESS;
}

private checkExistingAsset(asset: AssetEntity, stat: Stats | null): AssetSyncResult {
if (!stat) {
// File not found on disk or permission error
if (asset.isOffline) {
this.logger.verbose(
`Asset ${asset.originalPath} is still not accessible, keeping offline in library ${asset.libraryId}`,
);
return AssetSyncResult.DO_NOTHING;
}

this.logger.debug(
`Asset ${asset.originalPath} is no longer on disk or is inaccessible because of permissions, marking offline in library ${asset.libraryId}`,
);
return AssetSyncResult.OFFLINE;
}

if (asset.isOffline && asset.status !== AssetStatus.DELETED) {
// Only perform the expensive check if the asset is offline
return AssetSyncResult.CHECK_OFFLINE;
}

if (
!asset.fileCreatedAt ||
!asset.localDateTime ||
!asset.fileModifiedAt ||
stat.mtime.valueOf() !== asset.fileModifiedAt.valueOf()
) {
this.logger.verbose(`Asset ${asset.originalPath} needs metadata extraction in library ${asset.libraryId}`);

return AssetSyncResult.UPDATE;
}

return AssetSyncResult.DO_NOTHING;
}
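
// The AssetSyncResult enum used above is defined outside this hunk; a minimal
// sketch of its assumed shape, inferred from the cases handled here (member
// names match the code, the ordering is a plausible guess):
enum AssetSyncResult {
  DO_NOTHING,
  UPDATE,
  OFFLINE,
  CHECK_OFFLINE,
}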

@OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, queue: QueueName.LIBRARY })
async handleQueueSyncFiles(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_FILES>): Promise<JobStatus> {
const library = await this.libraryRepository.get(job.id);
@ -541,7 +592,7 @@ export class LibraryService extends BaseService {
return JobStatus.SKIPPED;
}

this.logger.log(`Refreshing library ${library.id} for new assets`);
this.logger.debug(`Validating import paths for library ${library.id}...`);

const validImportPaths: string[] = [];

@ -556,35 +607,67 @@ export class LibraryService extends BaseService {

if (validImportPaths.length === 0) {
this.logger.warn(`No valid import paths found for library ${library.id}`);

return JobStatus.SKIPPED;
}

const assetsOnDisk = this.storageRepository.walk({
const pathsOnDisk = this.storageRepository.walk({
pathsToCrawl: validImportPaths,
includeHidden: false,
exclusionPatterns: library.exclusionPatterns,
take: JOBS_LIBRARY_PAGINATION_SIZE,
});

let count = 0;
let importCount = 0;
let crawlCount = 0;

for await (const assetBatch of assetsOnDisk) {
count += assetBatch.length;
this.logger.debug(`Discovered ${count} asset(s) on disk for library ${library.id}...`);
await this.syncFiles(library, assetBatch);
this.logger.verbose(`Queued scan of ${assetBatch.length} crawled asset(s) in library ${library.id}...`);
this.logger.log(`Starting disk crawl of ${validImportPaths.length} import path(s) for library ${library.id}...`);

for await (const pathBatch of pathsOnDisk) {
crawlCount += pathBatch.length;
const paths = await this.assetRepository.filterNewExternalAssetPaths(library.id, pathBatch);

if (paths.length > 0) {
importCount += paths.length;

await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_FILES,
data: {
libraryId: library.id,
paths,
progressCounter: crawlCount,
},
});
}

this.logger.log(
`Crawled ${crawlCount} file(s) so far: ${paths.length} of current batch of ${pathBatch.length} will be imported to library ${library.id}...`,
);
}

if (count > 0) {
this.logger.debug(`Finished queueing scan of ${count} assets on disk for library ${library.id}`);
} else if (validImportPaths.length > 0) {
this.logger.debug(`No non-excluded assets found in any import path for library ${library.id}`);
}
this.logger.log(
`Finished disk crawl, ${crawlCount} file(s) found on disk and queued ${importCount} file(s) for import into ${library.id}`,
);

await this.libraryRepository.update(job.id, { refreshedAt: new Date() });

return JobStatus.SUCCESS;
}

@OnJob({ name: JobName.LIBRARY_ASSET_REMOVAL, queue: QueueName.LIBRARY })
async handleAssetRemoval(job: JobOf<JobName.LIBRARY_ASSET_REMOVAL>): Promise<JobStatus> {
// This is only for handling file unlink events via the file watcher
this.logger.verbose(`Deleting asset(s) ${job.paths} from library ${job.libraryId}`);
for (const assetPath of job.paths) {
const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.libraryId, assetPath);
if (asset) {
await this.assetRepository.remove(asset);
}
}

return JobStatus.SUCCESS;
}

@OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, queue: QueueName.LIBRARY })
async handleQueueSyncAssets(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_ASSETS>): Promise<JobStatus> {
const library = await this.libraryRepository.get(job.id);
@ -592,27 +675,68 @@ export class LibraryService extends BaseService {
return JobStatus.SKIPPED;
}

this.logger.log(`Scanning library ${library.id} for removed assets`);
const assetCount = await this.assetRepository.getLibraryAssetCount(job.id);

const onlineAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAll(pagination, { libraryId: job.id, withDeleted: true }),
);

let assetCount = 0;
for await (const assets of onlineAssets) {
assetCount += assets.length;
this.logger.debug(`Discovered ${assetCount} asset(s) in library ${library.id}...`);
await this.jobRepository.queueAll(
assets.map((asset) => ({
name: JobName.LIBRARY_SYNC_ASSET,
data: { id: asset.id, importPaths: library.importPaths, exclusionPatterns: library.exclusionPatterns },
})),
);
this.logger.debug(`Queued check of ${assets.length} asset(s) in library ${library.id}...`);
if (!assetCount) {
this.logger.log(`Library ${library.id} is empty, no need to check assets`);
return JobStatus.SUCCESS;
}

if (assetCount) {
this.logger.log(`Finished queueing check of ${assetCount} assets for library ${library.id}`);
this.logger.log(
`Checking ${assetCount} asset(s) against import paths and exclusion patterns in library ${library.id}...`,
);

const offlineResult = await this.assetRepository.detectOfflineExternalAssets(
library.id,
library.importPaths,
library.exclusionPatterns,
);

const affectedAssetCount = Number(offlineResult.numUpdatedRows);

this.logger.log(
`${affectedAssetCount} asset(s) out of ${assetCount} were offlined due to import paths and/or exclusion pattern(s) in library ${library.id}`,
);

if (affectedAssetCount === assetCount) {
return JobStatus.SUCCESS;
}

this.logger.log(`Scanning library ${library.id} for assets missing from disk...`);

const existingAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAllInLibrary(pagination, job.id),
);

let currentAssetCount = 0;
for await (const assets of existingAssets) {
if (assets.length === 0) {
throw new BadRequestException(`Failed to get assets for library ${job.id}`);
}

currentAssetCount += assets.length;

await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_ASSETS,
data: {
libraryId: library.id,
importPaths: library.importPaths,
exclusionPatterns: library.exclusionPatterns,
assetIds: assets.map(({ id }) => id),
progressCounter: currentAssetCount,
totalAssets: assetCount,
},
});

const completePercentage = ((100 * currentAssetCount) / assetCount).toFixed(1);

this.logger.log(
`Queued check of ${currentAssetCount} of ${assetCount} (${completePercentage} %) existing asset(s) so far in library ${library.id}`,
);
}

if (currentAssetCount) {
this.logger.log(`Finished queuing ${currentAssetCount} asset check(s) for library ${library.id}`);
}

return JobStatus.SUCCESS;
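
// Descriptive note (not part of the diff): offline detection is two-staged.
// detectOfflineExternalAssets performs one SQL update that offlines every
// asset falling outside the import paths or matching an exclusion pattern;
// only the remaining assets are fanned out into batched LIBRARY_SYNC_ASSETS
// jobs for the per-file stat() checks above.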

@ -662,10 +662,10 @@ export class MetadataService extends BaseService {
let dateTimeOriginal = dateTime?.toDate();
let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate();
if (!localDateTime || !dateTimeOriginal) {
this.logger.debug(
`No exif date time found, falling back on earliest of file creation and modification for asset ${asset.id}: ${asset.originalPath}`,
);
const earliestDate = this.earliestDate(asset.fileModifiedAt, asset.fileCreatedAt);
this.logger.debug(
`No exif date time found, falling back on ${earliestDate.toISOString()}, earliest of file creation and modification for asset ${asset.id}: ${asset.originalPath}`,
);
dateTimeOriginal = earliestDate;
localDateTime = earliestDate;
}
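
// earliestDate is defined outside this hunk; a minimal sketch of its assumed
// shape, returning the older of the two timestamps so files whose creation
// time postdates their modification time still get a sensible fallback date:
private earliestDate(a: Date, b: Date): Date {
  return a.valueOf() < b.valueOf() ? a : b;
}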

@ -208,17 +208,23 @@ export interface IAssetDeleteJob extends IEntityJob {
deleteOnDisk: boolean;
}

export interface ILibraryFileJob extends IEntityJob {
ownerId: string;
assetPath: string;
export interface ILibraryFileJob {
libraryId: string;
paths: string[];
progressCounter?: number;
totalAssets?: number;
}

export interface ILibraryAssetJob extends IEntityJob {
export interface ILibraryBulkIdsJob {
libraryId: string;
importPaths: string[];
exclusionPatterns: string[];
assetIds: string[];
progressCounter: number;
totalAssets: number;
}

export interface IBulkEntityJob extends IBaseJob {
export interface IBulkEntityJob {
ids: string[];
}

@ -354,10 +360,11 @@ export type JobItem =
| { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob }

// Library Management
| { name: JobName.LIBRARY_SYNC_FILE; data: ILibraryFileJob }
| { name: JobName.LIBRARY_SYNC_FILES; data: ILibraryFileJob }
| { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob }
| { name: JobName.LIBRARY_SYNC_ASSET; data: ILibraryAssetJob }
| { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryBulkIdsJob }
| { name: JobName.LIBRARY_ASSET_REMOVAL; data: ILibraryFileJob }
| { name: JobName.LIBRARY_DELETE; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data?: IBaseJob }
| { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob }

@ -44,6 +44,8 @@ export const anyUuid = (ids: string[]) => sql<string>`any(${`{${ids}}`}::uuid[])

export const asVector = (embedding: number[]) => sql<string>`${`[${embedding}]`}::vector`;

export const unnest = (array: string[]) => sql<Record<string, string>>`unnest(array[${sql.join(array)}]::text[])`;
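
// Descriptive note (not part of the diff): `unnest` renders a JS string array
// as a Postgres set-returning expression. With two paths it produces roughly
// `unnest(array['/a.jpg', '/b.jpg']::text[])`, which expands to one row per
// path, so a whole batch can be matched in a single query.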

/**
* Mainly for type debugging to make VS Code display a more useful tooltip.
* Source: https://stackoverflow.com/a/69288824
@ -1,4 +1,4 @@
import { getKeysDeep, unsetDeep } from 'src/utils/misc';
import { getKeysDeep, globToSqlPattern, unsetDeep } from 'src/utils/misc';
import { describe, expect, it } from 'vitest';

describe('getKeysDeep', () => {
@ -51,3 +51,19 @@ describe('unsetDeep', () => {
expect(unsetDeep({ foo: 'bar', nested: { enabled: true } }, 'nested.enabled')).toEqual({ foo: 'bar' });
});
});

describe('globToSqlPattern', () => {
const testCases = [
['**/Raw/**', '%/Raw/%'],
['**/abc/*.tif', '%/abc/%.tif'],
['**/*.tif', '%/%.tif'],
['**/*.jp?', '%/%.jp_'],
['**/@eaDir/**', '%/@eaDir/%'],
['**/._*', `%/._%`],
['/absolute/path/**', `/absolute/path/%`],
];

it.each(testCases)('should convert %s to %s', (input, expected) => {
expect(globToSqlPattern(input)).toEqual(expected);
});
});

@ -10,6 +10,8 @@ import { ReferenceObject, SchemaObject } from '@nestjs/swagger/dist/interfaces/o
import _ from 'lodash';
import { writeFileSync } from 'node:fs';
import path from 'node:path';
import picomatch from 'picomatch';
import parse from 'picomatch/lib/parse';
import { SystemConfig } from 'src/config';
import { CLIP_MODEL_INFO, serverVersion } from 'src/constants';
import { extraSyncModels } from 'src/dtos/sync.dto';
@ -268,3 +270,35 @@ export const useSwagger = (app: INestApplication, { write }: { write: boolean })
writeFileSync(outputPath, JSON.stringify(patchOpenAPI(specification), null, 2), { encoding: 'utf8' });
}
};

const convertTokenToSqlPattern = (token: parse.Token): string => {
switch (token.type) {
case 'slash': {
return '/';
}
case 'text': {
return token.value;
}
case 'globstar':
case 'star': {
return '%';
}
case 'underscore': {
return String.raw`\_`;
}
case 'qmark': {
return '_';
}
case 'dot': {
return '.';
}
default: {
return '';
}
}
};

export const globToSqlPattern = (glob: string) => {
const tokens = picomatch.parse(glob).tokens;
return tokens.map((token) => convertTokenToSqlPattern(token)).join('');
};
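
// Illustrative usage (not part of the diff), grounded in the test table above:
// converting an exclusion glob into a Postgres LIKE pattern. The query that
// actually applies the pattern lives in the asset repository.
globToSqlPattern('**/@eaDir/**'); // => '%/@eaDir/%'
// e.g. WHERE "originalPath" NOT LIKE '%/@eaDir/%'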

server/test/fixtures/asset.stub.ts
@ -296,6 +296,7 @@ export const assetStub = {
isFavorite: false,
isArchived: false,
duration: null,
libraryId: 'library-id',
isVisible: true,
isExternal: false,
livePhotoVideo: null,

@ -5,6 +5,7 @@ import { Mocked, vitest } from 'vitest';
export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetRepository>> => {
return {
create: vitest.fn(),
createAll: vitest.fn(),
upsertExif: vitest.fn(),
upsertJobStatus: vitest.fn(),
getByDayOfYear: vitest.fn(),
@ -23,6 +24,8 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getAll: vitest.fn().mockResolvedValue({ items: [], hasNextPage: false }),
getAllByDeviceId: vitest.fn(),
getLivePhotoCount: vitest.fn(),
getAllInLibrary: vitest.fn(),
getLibraryAssetCount: vitest.fn(),
updateAll: vitest.fn(),
updateDuplicates: vitest.fn(),
getByLibraryIdAndOriginalPath: vitest.fn(),
@ -39,5 +42,8 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getDuplicates: vitest.fn(),
upsertFile: vitest.fn(),
upsertFiles: vitest.fn(),
detectOfflineExternalAssets: vitest.fn(),
filterNewExternalAssetPaths: vitest.fn(),
updateByLibraryId: vitest.fn(),
};
};