forked from Cutlery/immich

Compare commits: main...feat/offli (80 commits)

Commit SHA1s:
b218e0d903, a352f7e1d7, 0348372692, 17f2adb2db, ee0fedf061, 5515f57c09, 586bf19e1a, 37ad05ff86,
cfbad58d2c, 2d40c85a54, 52a19b6f1f, 57450108be, f2c723510f, 2c4b174f73, 2398edf231, 17216e1984,
fbee95b821, 6fd511e59b, 7043f2f04b, 38e2cde109, f48992b0cb, 94b9b4d68a, cba8019243, 3fc6e826d5,
5cdf31c739, ea47cb84a4, 7858cf4009, 11eb7f7c3f, f5073a1a7a, 9a707874e4, b288743707, bedb494bf1,
76241e0364, 482645e22d, 894107126e, f28f8992ab, b969fc760d, 7db36ea70d, e6c761894c, 6f3401343f,
3c5eb9259c, 104ffdd7d5, 69ce4e883a, 0070b83d8a, 17c9f0bd1d, 32b86309a6, a5e6e90e24, f2017730a1,
890d488d12, 65d3990dce, 5905fce428, 649358cbc3, 9b5a0a90ce, 0bf31bdb44, d8830e2a52, 3f56cbeddf,
5bc22d5854, a3ce28bfc9, f8039a7d57, 5ae4fb8b81, e26f8b45e0, f7f30a5939, 380ae35ca4, ff47d5576a,
8bcee7ff64, fa3a70a2ad, d7a78e5f25, 311d7d5fcd, d8dd1fbff0, 68a49258cb, 95b57f082e, 4ba95bb0a6,
3b0d993f12, 5b581cee6a, d09d4d3f29, f68bcf0f07, 0803458d40, 247429c3e4, 8bb73d6f3d, 5e497e5166
@@ -474,10 +474,10 @@ describe('/library', () => {
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(400);
- expect(body).toEqual(errorDto.badRequest('Can only refresh external libraries'));
+ expect(body).toEqual(errorDto.badRequest('Can only scan external libraries'));
});

- it('should scan external library', async () => {
+ it('should scan an external library', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
type: LibraryType.External,
@@ -31,77 +31,6 @@ describe(`${LibraryController.name} (e2e)`, () => {
});

describe('POST /library/:id/scan', () => {
it('should offline missing files', async () => {
await fs.promises.cp(`${IMMICH_TEST_ASSET_PATH}/albums/nature`, `${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature`, {
recursive: true,
});

const library = await api.libraryApi.create(server, admin.accessToken, {
ownerId: admin.userId,
type: LibraryType.EXTERNAL,
importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
});

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const onlineAssets = await api.assetApi.getAllAssets(server, admin.accessToken);
expect(onlineAssets.length).toBeGreaterThan(1);

await restoreTempFolder();

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const assets = await api.assetApi.getAllAssets(server, admin.accessToken);

expect(assets).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: true,
originalFileName: 'el_torcal_rocks.jpg',
}),
expect.objectContaining({
isOffline: true,
originalFileName: 'tanners_ridge.jpg',
}),
]),
);
});

it('should scan new files', async () => {
const library = await api.libraryApi.create(server, admin.accessToken, {
ownerId: admin.userId,
type: LibraryType.EXTERNAL,
importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
});

await fs.promises.cp(
`${IMMICH_TEST_ASSET_PATH}/albums/nature/silver_fir.jpg`,
`${IMMICH_TEST_ASSET_TEMP_PATH}/silver_fir.jpg`,
);

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

await fs.promises.cp(
`${IMMICH_TEST_ASSET_PATH}/albums/nature/el_torcal_rocks.jpg`,
`${IMMICH_TEST_ASSET_TEMP_PATH}/el_torcal_rocks.jpg`,
);

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const assets = await api.assetApi.getAllAssets(server, admin.accessToken);

expect(assets).toEqual(
expect.arrayContaining([
expect.objectContaining({
originalFileName: 'el_torcal_rocks.jpg',
}),
expect.objectContaining({
originalFileName: 'silver_fir.jpg',
}),
]),
);
});

describe('with refreshModifiedFiles=true', () => {
it('should reimport modified files', async () => {
const library = await api.libraryApi.create(server, admin.accessToken, {
@@ -236,6 +165,185 @@ describe(`${LibraryController.name} (e2e)`, () => {
);
});
});

it('should offline a missing file', async () => {
await fs.promises.cp(`${IMMICH_TEST_ASSET_PATH}/albums/nature`, `${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature`, {
recursive: true,
});

const library = await api.libraryApi.create(server, admin.accessToken, {
type: LibraryType.EXTERNAL,
ownerId: admin.userId,
importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
});

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const onlineAssets = await api.assetApi.getAllAssets(server, admin.accessToken);
expect(onlineAssets.length).toBeGreaterThan(1);

await fs.promises.rm(`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature/silver_fir.jpg`);

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const assets = await api.assetApi.getAllAssets(server, admin.accessToken);

expect(assets).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: true,
originalFileName: 'silver_fir.jpg',
}),
expect.objectContaining({
isOffline: false,
originalFileName: 'tanners_ridge.jpg',
}),
]),
);
});

it('should offline a file on disk not in import paths', async () => {
await fs.promises.cp(`${IMMICH_TEST_ASSET_PATH}/albums/nature`, `${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature`, {
recursive: true,
});

await fs.promises.mkdir(`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/other`);

const library = await api.libraryApi.create(server, admin.accessToken, {
type: LibraryType.EXTERNAL,
ownerId: admin.userId,
importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}/albums`],
});

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const onlineAssets = await api.assetApi.getAllAssets(server, admin.accessToken);
expect(onlineAssets.length).toBeGreaterThan(1);

await api.libraryApi.setImportPaths(server, admin.accessToken, library.id, [
`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/other`,
]);

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const assets = await api.assetApi.getAllAssets(server, admin.accessToken);

expect(assets).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: true,
originalFileName: 'silver_fir.jpg',
}),
expect.objectContaining({
isOffline: true,
originalFileName: 'tanners_ridge.jpg',
}),
]),
);
});

it('should mark a rediscovered file as back online', async () => {
await fs.promises.cp(`${IMMICH_TEST_ASSET_PATH}/albums/nature`, `${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature`, {
recursive: true,
});

const library = await api.libraryApi.create(server, admin.accessToken, {
ownerId: admin.userId,
type: LibraryType.EXTERNAL,
importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
});

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const onlineAssets = await api.assetApi.getAllAssets(server, admin.accessToken);
expect(onlineAssets.length).toBeGreaterThan(1);

await fs.promises.rm(`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature/silver_fir.jpg`);

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

await fs.promises.cp(`${IMMICH_TEST_ASSET_PATH}/albums/nature`, `${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature`, {
recursive: true,
});

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const assets = await api.assetApi.getAllAssets(server, admin.accessToken);

expect(assets).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: false,
originalFileName: 'silver_fir.jpg',
}),
expect.objectContaining({
isOffline: false,
originalFileName: 'tanners_ridge.jpg',
}),
]),
);
});

it('should mark a rediscovered file in import paths as back online', async () => {
await fs.promises.cp(`${IMMICH_TEST_ASSET_PATH}/albums/nature`, `${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature`, {
recursive: true,
});

await fs.promises.mkdir(`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/other`);

const library = await api.libraryApi.create(server, admin.accessToken, {
ownerId: admin.userId,
type: LibraryType.EXTERNAL,
importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/`],
});

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const initialAssets = await api.assetApi.getAllAssets(server, admin.accessToken);
expect(initialAssets.length).toBeGreaterThan(1);

await api.libraryApi.setImportPaths(server, admin.accessToken, library.id, [
`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/other`,
]);

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const offlineAssets = await api.assetApi.getAllAssets(server, admin.accessToken);

expect(offlineAssets).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: true,
originalFileName: 'silver_fir.jpg',
}),
expect.objectContaining({
isOffline: true,
originalFileName: 'tanners_ridge.jpg',
}),
]),
);

await api.libraryApi.setImportPaths(server, admin.accessToken, library.id, [
`${IMMICH_TEST_ASSET_TEMP_PATH}/albums/nature`,
]);

await api.libraryApi.scanLibrary(server, admin.accessToken, library.id);

const onlineAssets = await api.assetApi.getAllAssets(server, admin.accessToken);

expect(onlineAssets).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: false,
originalFileName: 'silver_fir.jpg',
}),
expect.objectContaining({
isOffline: false,
originalFileName: 'tanners_ridge.jpg',
}),
]),
);
});
});

describe('POST /library/:id/removeOffline', () => {
27 server/package-lock.json (generated)
@@ -47,7 +47,6 @@
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",
"luxon": "^3.4.2",
- "mnemonist": "^0.39.8",
"nest-commander": "^3.11.1",
"nestjs-otel": "^5.1.5",
"openid-client": "^5.4.3",
@@ -10460,14 +10459,6 @@
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==",
"dev": true
},
- "node_modules/mnemonist": {
- "version": "0.39.8",
- "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.39.8.tgz",
- "integrity": "sha512-vyWo2K3fjrUw8YeeZ1zF0fy6Mu59RHokURlld8ymdUPjMlD9EC9ov1/YPqTgqRvUN9nTr3Gqfz29LYAmu0PHPQ==",
- "dependencies": {
- "obliterator": "^2.0.1"
- }
- },
"node_modules/mock-fs": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.2.0.tgz",
@@ -10853,11 +10844,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/obliterator": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-2.0.4.tgz",
- "integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
- },
"node_modules/obuf": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz",
@@ -22093,14 +22079,6 @@
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==",
"dev": true
},
- "mnemonist": {
- "version": "0.39.8",
- "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.39.8.tgz",
- "integrity": "sha512-vyWo2K3fjrUw8YeeZ1zF0fy6Mu59RHokURlld8ymdUPjMlD9EC9ov1/YPqTgqRvUN9nTr3Gqfz29LYAmu0PHPQ==",
- "requires": {
- "obliterator": "^2.0.1"
- }
- },
"mock-fs": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.2.0.tgz",
@@ -22408,11 +22386,6 @@
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
"integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g=="
},
- "obliterator": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-2.0.4.tgz",
- "integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
- },
"obuf": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz",
@@ -71,7 +71,6 @@
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",
"luxon": "^3.4.2",
- "mnemonist": "^0.39.8",
"nest-commander": "^3.11.1",
"nestjs-otel": "^5.1.5",
"openid-client": "^5.4.3",
@@ -48,6 +48,7 @@ export enum WithoutProperty {

export enum WithProperty {
SIDECAR = 'sidecar',
+ IS_ONLINE = 'isOnline',
IS_OFFLINE = 'isOffline',
}

@@ -72,6 +72,7 @@ export enum JobName {
LIBRARY_SCAN = 'library-refresh',
LIBRARY_SCAN_ASSET = 'library-refresh-asset',
LIBRARY_REMOVE_OFFLINE = 'library-remove-offline',
+ LIBRARY_CHECK_OFFLINE = 'library-check-offline',
LIBRARY_DELETE = 'library-delete',
LIBRARY_QUEUE_SCAN_ALL = 'library-queue-all-refresh',
LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup',
@@ -111,6 +112,10 @@ export interface ILibraryFileJob extends IEntityJob {
assetPath: string;
}

+ export interface ILibraryOfflineJob extends IEntityJob {
+ importPaths: string[];
+ }
+
export interface ILibraryRefreshJob extends IEntityJob {
refreshModifiedFiles: boolean;
refreshAllFiles: boolean;
@@ -216,6 +221,7 @@ export type JobItem =
| { name: JobName.LIBRARY_REMOVE_OFFLINE; data: IEntityJob }
| { name: JobName.LIBRARY_DELETE; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data: IBaseJob }
+ | { name: JobName.LIBRARY_CHECK_OFFLINE; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob };

export enum JobStatus {
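For context on how these pieces fit together: a LIBRARY_CHECK_OFFLINE job carries an ILibraryOfflineJob payload, so queueing a check for a single asset looks roughly like the sketch below. This is illustrative only and not part of the diff; `jobRepository` and `asset` stand in for the service's injected job repository and a loaded asset entity, and '/mnt/media' is a made-up import path.

// Sketch only (assumed context, not from the diff): queue an offline check for one asset.
const data: ILibraryOfflineJob = { id: asset.id, importPaths: ['/mnt/media'] };
await jobRepository.queue({ name: JobName.LIBRARY_CHECK_OFFLINE, data });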
@@ -281,20 +281,6 @@ WHERE
GROUP BY
"libraries"."id"

- -- LibraryRepository.getOnlineAssetPaths
- SELECT
- "assets"."originalPath" AS "assets_originalPath"
- FROM
- "libraries" "library"
- INNER JOIN "assets" "assets" ON "assets"."libraryId" = "library"."id"
- AND ("assets"."deletedAt" IS NULL)
- WHERE
- (
- "library"."id" = $1
- AND "assets"."isOffline" = false
- )
- AND ("library"."deletedAt" IS NULL)
-
-- LibraryRepository.getAssetIds
SELECT
"assets"."id" AS "assets_id"
@@ -289,7 +289,7 @@ export class AssetRepository implements IAssetRepository {

@GenerateSql(
...Object.values(WithProperty)
- .filter((property) => property !== WithProperty.IS_OFFLINE)
+ .filter((property) => property !== WithProperty.IS_OFFLINE && property !== WithProperty.IS_ONLINE)
.map((property) => ({
name: property,
params: [DummyValue.PAGINATION, property],
@@ -430,7 +430,14 @@ export class AssetRepository implements IAssetRepository {
if (!libraryId) {
throw new Error('Library id is required when finding offline assets');
}
- where = [{ isOffline: true, libraryId: libraryId }];
+ where = [{ isOffline: true, libraryId }];
break;
}
+ case WithProperty.IS_ONLINE: {
+ if (!libraryId) {
+ throw new Error('Library id is required when finding online assets');
+ }
+ where = [{ isOffline: false, libraryId }];
+ break;
+ }

@@ -74,6 +74,7 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.LIBRARY_SCAN_ASSET]: QueueName.LIBRARY,
[JobName.LIBRARY_SCAN]: QueueName.LIBRARY,
[JobName.LIBRARY_DELETE]: QueueName.LIBRARY,
+ [JobName.LIBRARY_CHECK_OFFLINE]: QueueName.LIBRARY,
[JobName.LIBRARY_REMOVE_OFFLINE]: QueueName.LIBRARY,
[JobName.LIBRARY_QUEUE_SCAN_ALL]: QueueName.LIBRARY,
[JobName.LIBRARY_QUEUE_CLEANUP]: QueueName.LIBRARY,
@@ -151,26 +151,6 @@ export class LibraryRepository implements ILibraryRepository {
};
}

- @GenerateSql({ params: [DummyValue.UUID] })
- async getOnlineAssetPaths(libraryId: string): Promise<string[]> {
- // Return all non-offline asset paths for a given library
- const rawResults = await this.repository
- .createQueryBuilder('library')
- .innerJoinAndSelect('library.assets', 'assets')
- .where('library.id = :id', { id: libraryId })
- .andWhere('assets.isOffline = false')
- .select('assets.originalPath')
- .getRawMany();
-
- const results: string[] = [];
-
- for (const rawPath of rawResults) {
- results.push(rawPath.assets_originalPath);
- }
-
- return results;
- }
-
@GenerateSql({ params: [DummyValue.UUID] })
async getAssetIds(libraryId: string, withDeleted = false): Promise<string[]> {
let query = this.repository
@@ -11,7 +11,14 @@ import { UserEntity } from 'src/entities/user.entity';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
- import { IJobRepository, ILibraryFileJob, ILibraryRefreshJob, JobName, JobStatus } from 'src/interfaces/job.interface';
+ import {
+ IJobRepository,
+ ILibraryFileJob,
+ ILibraryOfflineJob,
+ ILibraryRefreshJob,
+ JobName,
+ JobStatus,
+ } from 'src/interfaces/job.interface';
import { ILibraryRepository } from 'src/interfaces/library.interface';
import { IStorageRepository, StorageEventType } from 'src/interfaces/storage.interface';
import { ISystemConfigRepository } from 'src/interfaces/system-config.interface';
@@ -148,13 +155,15 @@ describe(LibraryService.name, () => {
});

describe('handleQueueAssetRefresh', () => {
- it('should queue new assets', async () => {
+ it('should queue refresh of a new asset', async () => {
const mockLibraryJob: ILibraryRefreshJob = {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: false,
};

+ assetMock.getWith.mockResolvedValue({ items: [], hasNextPage: false });
+
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
// eslint-disable-next-line @typescript-eslint/require-await
storageMock.walk.mockImplementation(async function* generator() {
@@ -184,6 +193,7 @@ describe(LibraryService.name, () => {
refreshAllFiles: true,
};

+ assetMock.getWith.mockResolvedValue({ items: [], hasNextPage: false });
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
// eslint-disable-next-line @typescript-eslint/require-await
storageMock.walk.mockImplementation(async function* generator() {
@@ -231,6 +241,8 @@ describe(LibraryService.name, () => {

storageMock.checkFileExists.mockResolvedValue(true);

+ assetMock.getWith.mockResolvedValue({ items: [], hasNextPage: false });
+
const mockLibraryJob: ILibraryRefreshJob = {
id: libraryStub.externalLibraryWithImportPaths1.id,
refreshModifiedFiles: false,
@@ -247,49 +259,85 @@ describe(LibraryService.name, () => {
exclusionPatterns: [],
});
});
});

describe('handleOfflineCheck', () => {
it('should set missing assets offline', async () => {
- const mockLibraryJob: ILibraryRefreshJob = {
- id: libraryStub.externalLibrary1.id,
- refreshModifiedFiles: false,
- refreshAllFiles: false,
+ const mockAssetJob: ILibraryOfflineJob = {
+ id: assetStub.external.id,
+ importPaths: ['/'],
};

- libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
- assetMock.getLibraryAssetPaths.mockResolvedValue({
- items: [assetStub.image],
- hasNextPage: false,
- });
+ assetMock.getById.mockResolvedValue(assetStub.external);

- await sut.handleQueueAssetRefresh(mockLibraryJob);
+ storageMock.checkFileExists.mockResolvedValue(false);

- expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.image.id], { isOffline: true });
- expect(assetMock.updateAll).not.toHaveBeenCalledWith(expect.anything(), { isOffline: false });
- expect(jobMock.queueAll).not.toHaveBeenCalled();
+ await sut.handleOfflineCheck(mockAssetJob);
+
+ expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.external.id, isOffline: true });
});

- it('should set crawled assets that were previously offline back online', async () => {
- const mockLibraryJob: ILibraryRefreshJob = {
- id: libraryStub.externalLibrary1.id,
- refreshModifiedFiles: false,
- refreshAllFiles: false,
+ it('should set an asset outside of import paths as offline', async () => {
+ const mockAssetJob: ILibraryOfflineJob = {
+ id: assetStub.external.id,
+ importPaths: ['/data/user2'],
};

- libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
- // eslint-disable-next-line @typescript-eslint/require-await
- storageMock.walk.mockImplementation(async function* generator() {
- yield assetStub.offline.originalPath;
- });
- assetMock.getLibraryAssetPaths.mockResolvedValue({
- items: [assetStub.offline],
- hasNextPage: false,
- });
+ assetMock.getById.mockResolvedValue(assetStub.external);

- await sut.handleQueueAssetRefresh(mockLibraryJob);
+ storageMock.checkFileExists.mockResolvedValue(true);

- expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.offline.id], { isOffline: false });
- expect(assetMock.updateAll).not.toHaveBeenCalledWith(expect.anything(), { isOffline: true });
- expect(jobMock.queueAll).not.toHaveBeenCalled();
+ await sut.handleOfflineCheck(mockAssetJob);
+
+ expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.external.id, isOffline: true });
});

+ it('should skip an already-offline asset', async () => {
+ const mockAssetJob: ILibraryOfflineJob = {
+ id: assetStub.external.id,
+ importPaths: ['/'],
+ };
+
+ assetMock.getById.mockResolvedValue(assetStub.offline);
+
+ storageMock.checkFileExists.mockResolvedValue(true);
+
+ const response = await sut.handleOfflineCheck(mockAssetJob);
+ expect(response).toBe(JobStatus.SKIPPED);
+
+ expect(assetMock.update).not.toHaveBeenCalled();
+ });
+
+ it('should do nothing if asset is still online', async () => {
+ const mockAssetJob: ILibraryOfflineJob = {
+ id: assetStub.external.id,
+ importPaths: ['/'],
+ };
+
+ assetMock.getById.mockResolvedValue(assetStub.external);
+
+ storageMock.checkFileExists.mockResolvedValue(true);
+
+ const response = await sut.handleOfflineCheck(mockAssetJob);
+ expect(response).toBe(JobStatus.SUCCESS);
+
+ expect(assetMock.update).not.toHaveBeenCalled();
+ });
+
+ it('should skip a nonexistent asset id', async () => {
+ const mockAssetJob: ILibraryOfflineJob = {
+ id: assetStub.external.id,
+ importPaths: ['/'],
+ };
+
+ assetMock.getById.mockImplementation(() => Promise.resolve(null));
+
+ storageMock.checkFileExists.mockResolvedValue(true);
+
+ const response = await sut.handleOfflineCheck(mockAssetJob);
+ expect(response).toBe(JobStatus.SKIPPED);
+
+ expect(assetMock.update).not.toHaveBeenCalled();
+ });
});

@@ -1096,6 +1144,18 @@ describe(LibraryService.name, () => {
expect(libraryMock.update).toHaveBeenCalledWith(expect.objectContaining({ id: 'library-id' }));
});

+ it('should reject an invalid import path', async () => {
+ libraryMock.update.mockResolvedValue(libraryStub.uploadLibrary1);
+ libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
+ storageMock.stat.mockResolvedValue({
+ isDirectory: () => false,
+ } as Stats);
+
+ await expect(sut.update('library-id', { importPaths: ['/nonexistent'] })).rejects.toThrow(
+ 'Invalid import path: Not a directory',
+ );
+ });
+
it('should re-watch library when updating import paths', async () => {
libraryMock.update.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
@@ -1,5 +1,4 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
- import { Trie } from 'mnemonist';
import { R_OK } from 'node:constants';
import { EventEmitter } from 'node:events';
import { Stats } from 'node:fs';
@@ -20,8 +19,8 @@ import {
ValidateLibraryResponseDto,
mapLibrary,
} from 'src/dtos/library.dto';
- import { AssetType } from 'src/entities/asset.entity';
- import { LibraryEntity, LibraryType } from 'src/entities/library.entity';
+ import { AssetEntity, AssetType } from 'src/entities/asset.entity';
+ import { LibraryType } from 'src/entities/library.entity';
import { IAssetRepository, WithProperty } from 'src/interfaces/asset.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { DatabaseLock, IDatabaseRepository } from 'src/interfaces/database.interface';
@@ -31,6 +30,7 @@ import {
IEntityJob,
IJobRepository,
ILibraryFileJob,
+ ILibraryOfflineJob,
ILibraryRefreshJob,
JOBS_ASSET_PAGINATION_SIZE,
JobName,
@@ -45,7 +45,7 @@ import { handlePromiseError } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
import { validateCronExpression } from 'src/validation';

- const LIBRARY_SCAN_BATCH_SIZE = 5000;
+ const LIBRARY_SCAN_BATCH_SIZE = 1000;

@Injectable()
export class LibraryService extends EventEmitter {
@@ -294,24 +294,17 @@ export class LibraryService extends EventEmitter {
private async scanAssets(libraryId: string, assetPaths: string[], ownerId: string, force = false) {
this.logger.verbose(`Queuing refresh of ${assetPaths.length} asset(s)`);

- // We perform this in batches to save on memory when performing large refreshes (greater than 1M assets)
- const batchSize = 5000;
- for (let i = 0; i < assetPaths.length; i += batchSize) {
- const batch = assetPaths.slice(i, i + batchSize);
- await this.jobRepository.queueAll(
- batch.map((assetPath) => ({
- name: JobName.LIBRARY_SCAN_ASSET,
- data: {
- id: libraryId,
- assetPath: assetPath,
- ownerId,
- force,
- },
- })),
- );
- }
-
- this.logger.debug('Asset refresh queue completed');
+ await this.jobRepository.queueAll(
+ assetPaths.map((assetPath) => ({
+ name: JobName.LIBRARY_SCAN_ASSET,
+ data: {
+ id: libraryId,
+ assetPath: assetPath,
+ ownerId,
+ force,
+ },
+ })),
+ );
}

private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> {
@@ -494,6 +487,7 @@ export class LibraryService extends EventEmitter {
sidecarPath = `${assetPath}.xmp`;
}

+ // TODO: device asset id is deprecated, remove it
const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, '');

let assetId;
@@ -550,7 +544,7 @@
async queueScan(id: string, dto: ScanLibraryDto) {
const library = await this.findOrFail(id);
if (library.type !== LibraryType.EXTERNAL) {
- throw new BadRequestException('Can only refresh external libraries');
+ throw new BadRequestException('Can only scan external libraries');
}

await this.jobRepository.queue({
@@ -589,6 +583,29 @@ export class LibraryService extends EventEmitter {
return JobStatus.SUCCESS;
}

+ // Check if an asset has no file or is outside of import paths, marking it as offline
+ async handleOfflineCheck(job: ILibraryOfflineJob): Promise<JobStatus> {
+ const asset = await this.assetRepository.getById(job.id);
+
+ if (!asset || asset.isOffline) {
+ // We only care about online assets, we exit here if offline
+ return JobStatus.SKIPPED;
+ }
+
+ const exists = await this.storageRepository.checkFileExists(asset.originalPath, R_OK);
+
+ const isInPath = job.importPaths.find((path) => asset.originalPath.startsWith(path));
+
+ if (exists && isInPath) {
+ this.logger.verbose(`Asset is still online: ${asset.originalPath}`);
+ } else {
+ this.logger.debug(`Marking asset as offline: ${asset.originalPath}`);
+ await this.assetRepository.update({ id: asset.id, isOffline: true });
+ }
+
+ return JobStatus.SUCCESS;
+ }
+
async handleOfflineRemoval(job: IEntityJob): Promise<JobStatus> {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getWith(pagination, WithProperty.IS_OFFLINE, job.id),
|
||||
async handleQueueAssetRefresh(job: ILibraryRefreshJob): Promise<JobStatus> {
|
||||
const library = await this.repository.get(job.id);
|
||||
if (!library || library.type !== LibraryType.EXTERNAL) {
|
||||
this.logger.warn('Can only refresh external libraries');
|
||||
this.logger.warn('Can only scan external libraries');
|
||||
return JobStatus.FAILED;
|
||||
}
|
||||
|
||||
this.logger.log(`Refreshing library: ${job.id}`);
|
||||
|
||||
const crawledAssetPaths = await this.getPathTrie(library);
|
||||
this.logger.debug(`Found ${crawledAssetPaths.size} asset(s) when crawling import paths ${library.importPaths}`);
|
||||
const validImportPaths: string[] = [];
|
||||
|
||||
const assetIdsToMarkOffline = [];
|
||||
const assetIdsToMarkOnline = [];
|
||||
const pagination = usePagination(LIBRARY_SCAN_BATCH_SIZE, (pagination) =>
|
||||
this.assetRepository.getLibraryAssetPaths(pagination, library.id),
|
||||
);
|
||||
|
||||
this.logger.verbose(`Crawled asset paths paginated`);
|
||||
|
||||
const shouldScanAll = job.refreshAllFiles || job.refreshModifiedFiles;
|
||||
for await (const page of pagination) {
|
||||
for (const asset of page) {
|
||||
const isOffline = !crawledAssetPaths.has(asset.originalPath);
|
||||
if (isOffline && !asset.isOffline) {
|
||||
assetIdsToMarkOffline.push(asset.id);
|
||||
this.logger.verbose(`Added to mark-offline list: ${asset.originalPath}`);
|
||||
}
|
||||
|
||||
if (!isOffline && asset.isOffline) {
|
||||
assetIdsToMarkOnline.push(asset.id);
|
||||
this.logger.verbose(`Added to mark-online list: ${asset.originalPath}`);
|
||||
}
|
||||
|
||||
if (!shouldScanAll) {
|
||||
crawledAssetPaths.delete(asset.originalPath);
|
||||
}
|
||||
for (const importPath of library.importPaths) {
|
||||
const validation = await this.validateImportPath(importPath);
|
||||
if (validation.isValid) {
|
||||
validImportPaths.push(path.normalize(importPath));
|
||||
} else {
|
||||
this.logger.error(`Skipping invalid import path: ${importPath}. Reason: ${validation.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.verbose(`Crawled assets have been checked for online/offline status`);
|
||||
|
||||
if (assetIdsToMarkOffline.length > 0) {
|
||||
this.logger.debug(`Found ${assetIdsToMarkOffline.length} offline asset(s) previously marked as online`);
|
||||
await this.assetRepository.updateAll(assetIdsToMarkOffline, { isOffline: true });
|
||||
}
|
||||
|
||||
if (assetIdsToMarkOnline.length > 0) {
|
||||
this.logger.debug(`Found ${assetIdsToMarkOnline.length} online asset(s) previously marked as offline`);
|
||||
await this.assetRepository.updateAll(assetIdsToMarkOnline, { isOffline: false });
|
||||
}
|
||||
|
||||
if (crawledAssetPaths.size > 0) {
|
||||
if (!shouldScanAll) {
|
||||
this.logger.debug(`Will import ${crawledAssetPaths.size} new asset(s)`);
|
||||
}
|
||||
|
||||
let batch = [];
|
||||
for (const assetPath of crawledAssetPaths) {
|
||||
batch.push(assetPath);
|
||||
|
||||
if (batch.length >= LIBRARY_SCAN_BATCH_SIZE) {
|
||||
await this.scanAssets(job.id, batch, library.ownerId, job.refreshAllFiles ?? false);
|
||||
batch = [];
|
||||
}
|
||||
}
|
||||
|
||||
if (batch.length > 0) {
|
||||
await this.scanAssets(job.id, batch, library.ownerId, job.refreshAllFiles ?? false);
|
||||
}
|
||||
}
|
||||
|
||||
await this.repository.update({ id: job.id, refreshedAt: new Date() });
|
||||
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
private async getPathTrie(library: LibraryEntity): Promise<Trie<string>> {
|
||||
const pathValidation = await Promise.all(
|
||||
library.importPaths.map(async (importPath) => await this.validateImportPath(importPath)),
|
||||
);
|
||||
|
||||
const validImportPaths = pathValidation
|
||||
.map((validation) => {
|
||||
if (!validation.isValid) {
|
||||
this.logger.error(`Skipping invalid import path: ${validation.importPath}. Reason: ${validation.message}`);
|
||||
}
|
||||
return validation;
|
||||
})
|
||||
.filter((validation) => validation.isValid)
|
||||
.map((validation) => validation.importPath);
|
||||
|
||||
const generator = this.storageRepository.walk({
|
||||
const crawledAssets = this.storageRepository.walk({
|
||||
pathsToCrawl: validImportPaths,
|
||||
exclusionPatterns: library.exclusionPatterns,
|
||||
});
|
||||
|
||||
const trie = new Trie<string>();
|
||||
for await (const filePath of generator) {
|
||||
trie.add(filePath);
|
||||
const onlineAssets = usePagination(LIBRARY_SCAN_BATCH_SIZE, (pagination) =>
|
||||
this.assetRepository.getWith(pagination, WithProperty.IS_ONLINE, job.id),
|
||||
);
|
||||
|
||||
let crawlDone = false;
|
||||
let existingAssetsDone = false;
|
||||
let crawlCounter = 0;
|
||||
let existingAssetCounter = 0;
|
||||
|
||||
const checkIfOnlineAssetsAreOffline = async () => {
|
||||
const existingAssetPage = await onlineAssets.next();
|
||||
existingAssetsDone = existingAssetPage.done ?? true;
|
||||
|
||||
if (existingAssetPage.value) {
|
||||
existingAssetCounter += existingAssetPage.value.length;
|
||||
this.logger.log(
|
||||
`Queuing online check of ${existingAssetPage.value.length} asset(s) in library ${library.id}...`,
|
||||
);
|
||||
await this.jobRepository.queueAll(
|
||||
existingAssetPage.value.map((asset: AssetEntity) => ({
|
||||
name: JobName.LIBRARY_CHECK_OFFLINE,
|
||||
data: { id: asset.id, importPaths: validImportPaths },
|
||||
})),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
let crawledAssetPaths: string[] = [];
|
||||
|
||||
while (!crawlDone) {
|
||||
const crawlResult = await crawledAssets.next();
|
||||
|
||||
crawlDone = crawlResult.done ?? true;
|
||||
|
||||
if (!crawlDone) {
|
||||
crawledAssetPaths.push(crawlResult.value);
|
||||
crawlCounter++;
|
||||
}
|
||||
|
||||
if (crawledAssetPaths.length % LIBRARY_SCAN_BATCH_SIZE === 0 || crawlDone) {
|
||||
this.logger.log(`Queueing scan of ${crawledAssetPaths.length} asset path(s) in library ${library.id}...`);
|
||||
// We have reached the batch size or the end of the generator, scan the assets
|
||||
await this.scanAssets(job.id, crawledAssetPaths, library.ownerId, job.refreshAllFiles ?? false);
|
||||
crawledAssetPaths = [];
|
||||
|
||||
if (!existingAssetsDone) {
|
||||
// Interweave the queuing of offline checks with the asset scanning (if any)
|
||||
await checkIfOnlineAssetsAreOffline();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return trie;
|
||||
// If there are any remaining assets to check for offline status, do so
|
||||
while (!existingAssetsDone) {
|
||||
await checkIfOnlineAssetsAreOffline();
|
||||
}
|
||||
|
||||
this.logger.log(
|
||||
`Finished queuing scan of ${crawlCounter} crawled and ${existingAssetCounter} existing asset(s) in library ${library.id}`,
|
||||
);
|
||||
|
||||
await this.repository.update({ id: job.id, refreshedAt: new Date() });
|
||||
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
private async findOrFail(id: string) {
|
||||
|
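A reading aid for the rewritten handleQueueAssetRefresh above (not part of the diff): it consumes two async sources at once, the filesystem crawl and the paginated list of currently-online assets, flushing a scan batch every LIBRARY_SCAN_BATCH_SIZE crawled paths and queuing one page of offline checks in between. A stripped-down sketch of that interleaving pattern, with hypothetical stand-ins for the real repositories:

// Illustrative sketch only; `crawl`, `onlinePages`, `scanBatch`, and `queueOfflineChecks`
// are hypothetical stand-ins, not APIs from the diff.
async function interleaveScanAndOfflineChecks(
  crawl: AsyncGenerator<string>,
  onlinePages: AsyncGenerator<string[]>,
  scanBatch: (paths: string[]) => Promise<void>,
  queueOfflineChecks: (assetIds: string[]) => Promise<void>,
  batchSize = 1000,
): Promise<void> {
  let pagesDone = false;
  const drainOnePage = async () => {
    const page = await onlinePages.next();
    pagesDone = page.done ?? true;
    if (page.value) {
      await queueOfflineChecks(page.value);
    }
  };

  let batch: string[] = [];
  for await (const path of crawl) {
    batch.push(path);
    if (batch.length >= batchSize) {
      await scanBatch(batch); // flush one batch of scan jobs
      batch = [];
      if (!pagesDone) {
        await drainOnePage(); // interleave one page of offline checks
      }
    }
  }
  await scanBatch(batch); // flush the remainder of the crawl
  while (!pagesDone) {
    await drainOnePage(); // queue any remaining offline-check pages
  }
}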
@@ -74,6 +74,7 @@ export class MicroservicesService {
[JobName.LIBRARY_SCAN_ASSET]: (data) => this.libraryService.handleAssetRefresh(data),
[JobName.LIBRARY_SCAN]: (data) => this.libraryService.handleQueueAssetRefresh(data),
[JobName.LIBRARY_DELETE]: (data) => this.libraryService.handleDeleteLibrary(data),
+ [JobName.LIBRARY_CHECK_OFFLINE]: (data) => this.libraryService.handleOfflineCheck(data),
[JobName.LIBRARY_REMOVE_OFFLINE]: (data) => this.libraryService.handleOfflineRemoval(data),
[JobName.LIBRARY_QUEUE_SCAN_ALL]: (data) => this.libraryService.handleQueueAllScan(data),
[JobName.LIBRARY_QUEUE_CLEANUP]: () => this.libraryService.handleQueueCleanup(),
@@ -371,16 +371,20 @@
{:else}{owner[index].name}{/if}
</td>

- {#if totalCount[index] == undefined}
- <td colspan="2" class="flex w-1/3 items-center justify-center text-ellipsis px-4 text-sm">
+ <td class=" text-ellipsis px-4 text-sm">
+ {#if totalCount[index] == undefined}
<LoadingSpinner size="40" />
- </td>
- {:else}
- <td class=" text-ellipsis px-4 text-sm">
+ {:else}
{totalCount[index]}
- </td>
- <td class=" text-ellipsis px-4 text-sm">{diskUsage[index]} {diskUsageUnit[index]}</td>
+ {/if}
- {/if}
+ </td>
+ <td class=" text-ellipsis px-4 text-sm">
+ {#if totalCount[index] == undefined}
+ <LoadingSpinner size="40" />
+ {:else}
+ {diskUsage[index]} {diskUsageUnit[index]}
+ {/if}
+ </td>

<td class=" text-ellipsis px-4 text-sm">
<button