refactor: more job queries (#17745)

Daniel Dietzler 2025-04-29 00:03:20 +02:00 committed by GitHub
parent 7f69abbf0d
commit f621f8ef2c
8 changed files with 256 additions and 150 deletions

View File

@ -259,6 +259,130 @@ from
where
"assets"."id" = $2
-- AssetJobRepository.getForSyncAssets
select
"assets"."id",
"assets"."isOffline",
"assets"."libraryId",
"assets"."originalPath",
"assets"."status",
"assets"."fileModifiedAt"
from
"assets"
where
"assets"."id" = any ($1::uuid[])
-- AssetJobRepository.getForAssetDeletion
select
"assets"."id",
"assets"."isVisible",
"assets"."libraryId",
"assets"."ownerId",
"assets"."livePhotoVideoId",
"assets"."sidecarPath",
"assets"."encodedVideoPath",
"assets"."originalPath",
to_json("exif") as "exifInfo",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_faces".*,
"person" as "person"
from
"asset_faces"
left join lateral (
select
"person".*
from
"person"
where
"asset_faces"."personId" = "person"."id"
) as "person" on true
where
"asset_faces"."assetId" = "assets"."id"
and "asset_faces"."deletedAt" is null
) as agg
) as "faces",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_files"."id",
"asset_files"."path",
"asset_files"."type"
from
"asset_files"
where
"asset_files"."assetId" = "assets"."id"
) as agg
) as "files",
to_json("stacked_assets") as "stack"
from
"assets"
left join "exif" on "assets"."id" = "exif"."assetId"
left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
left join lateral (
select
"asset_stack"."id",
"asset_stack"."primaryAssetId",
array_agg("stacked") as "assets"
from
"assets" as "stacked"
where
"stacked"."deletedAt" is not null
and "stacked"."isArchived" = $1
and "stacked"."stackId" = "asset_stack"."id"
group by
"asset_stack"."id"
) as "stacked_assets" on "asset_stack"."id" is not null
where
"assets"."id" = $2
-- AssetJobRepository.streamForVideoConversion
select
"assets"."id"
from
"assets"
where
"assets"."type" = $1
and (
"assets"."encodedVideoPath" is null
or "assets"."encodedVideoPath" = $2
)
and "assets"."isVisible" = $3
and "assets"."deletedAt" is null
-- AssetJobRepository.getForVideoConversion
select
"assets"."id",
"assets"."ownerId",
"assets"."originalPath",
"assets"."encodedVideoPath"
from
"assets"
where
"assets"."id" = $1
and "assets"."type" = $2
-- AssetJobRepository.streamForMetadataExtraction
select
"assets"."id"
from
"assets"
left join "asset_job_status" on "asset_job_status"."assetId" = "assets"."id"
where
(
"asset_job_status"."metadataExtractedAt" is null
or "asset_job_status"."assetId" is null
)
and "assets"."isVisible" = $1
and "assets"."deletedAt" is null
-- AssetJobRepository.getForStorageTemplateJob
select
"assets"."id",

View File

@ -2,12 +2,21 @@ import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { Asset, columns } from 'src/database';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType } from 'src/enum';
import { AssetFileType, AssetType } from 'src/enum';
import { StorageAsset } from 'src/types';
import { anyUuid, asUuid, withExifInner, withFaces, withFiles } from 'src/utils/database';
import {
anyUuid,
asUuid,
toJson,
withExif,
withExifInner,
withFaces,
withFacesAndPeople,
withFiles,
} from 'src/utils/database';
@Injectable()
export class AssetJobRepository {
@ -148,6 +157,7 @@ export class AssetJobRepository {
.executeTakeFirst();
}
@GenerateSql({ params: [[DummyValue.UUID]] })
getForSyncAssets(ids: string[]) {
return this.db
.selectFrom('assets')
@ -163,6 +173,84 @@ export class AssetJobRepository {
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForAssetDeletion(id: string) {
return this.db
.selectFrom('assets')
.select([
'assets.id',
'assets.isVisible',
'assets.libraryId',
'assets.ownerId',
'assets.livePhotoVideoId',
'assets.sidecarPath',
'assets.encodedVideoPath',
'assets.originalPath',
])
.$call(withExif)
.select(withFacesAndPeople)
.select(withFiles)
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.select(['asset_stack.id', 'asset_stack.primaryAssetId'])
.select((eb) => eb.fn<Asset[]>('array_agg', [eb.table('stacked')]).as('assets'))
.where('stacked.deletedAt', 'is not', null)
.where('stacked.isArchived', '=', false)
.whereRef('stacked.stackId', '=', 'asset_stack.id')
.groupBy('asset_stack.id')
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => toJson(eb, 'stacked_assets').as('stack'))
.where('assets.id', '=', id)
.executeTakeFirst();
}
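
For reference, the query helpers imported from src/utils/database map directly onto the generated SQL snapshot at the top of this commit: withFiles produces the coalesce(json_agg(...), '[]') subquery over asset_files, and toJson wraps a joined relation in Postgres to_json(). A minimal sketch of what such helpers could look like with Kysely's jsonArrayFrom (hypothetical; the real implementations in src/utils/database may differ in naming and typing):

```typescript
import { ExpressionBuilder } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { DB } from 'src/db';

// Hypothetical sketch of withFiles: aggregate an asset's files into a JSON array column.
export function withFiles(eb: ExpressionBuilder<DB, 'assets'>) {
  return jsonArrayFrom(
    eb
      .selectFrom('asset_files')
      .select(['asset_files.id', 'asset_files.path', 'asset_files.type'])
      .whereRef('asset_files.assetId', '=', 'assets.id'),
  ).as('files');
}

// Hypothetical sketch of toJson: serialize a joined table reference with to_json(...).
export function toJson<D, T extends keyof D & string>(eb: ExpressionBuilder<D, T>, table: T) {
  return eb.fn('to_json', [eb.table(table)]);
}
```
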
@GenerateSql({ params: [], stream: true })
streamForVideoConversion(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.where('assets.type', '=', AssetType.VIDEO)
.$if(!force, (qb) =>
qb
.where((eb) => eb.or([eb('assets.encodedVideoPath', 'is', null), eb('assets.encodedVideoPath', '=', '')]))
.where('assets.isVisible', '=', true),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForVideoConversion(id: string) {
return this.db
.selectFrom('assets')
.select(['assets.id', 'assets.ownerId', 'assets.originalPath', 'assets.encodedVideoPath'])
.where('assets.id', '=', id)
.where('assets.type', '=', AssetType.VIDEO)
.executeTakeFirst();
}
@GenerateSql({ params: [], stream: true })
streamForMetadataExtraction(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.$if(!force, (qb) =>
qb
.leftJoin('asset_job_status', 'asset_job_status.assetId', 'assets.id')
.where((eb) =>
eb.or([eb('asset_job_status.metadataExtractedAt', 'is', null), eb('asset_job_status.assetId', 'is', null)]),
)
.where('assets.isVisible', '=', true),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
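
Both stream methods return Kysely's .stream() result, an async iterable that yields rows as they are read from the database cursor instead of materializing the whole result set; when force is true, the $if branches are not applied, so the extra filters are dropped and every non-deleted asset qualifies. A minimal consumption sketch (hypothetical caller; the real consumers are the service changes further down):

```typescript
// Hypothetical caller: iterate a streamed query without loading all rows into memory.
async function collectIds(repo: AssetJobRepository, force = false): Promise<string[]> {
  const ids: string[] = [];
  for await (const row of repo.streamForMetadataExtraction(force)) {
    ids.push(row.id); // each row is { id: string }, delivered one at a time
  }
  return ids;
}
```
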
private storageTemplateAssetQuery() {
return this.db
.selectFrom('assets')

View File

@ -565,7 +565,7 @@ describe(AssetService.name, () => {
it('should remove faces', async () => {
const assetWithFace = { ...assetStub.image, faces: [faceStub.face1, faceStub.mergeFace1] };
mocks.asset.getById.mockResolvedValue(assetWithFace);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetWithFace);
await sut.handleAssetDeletion({ id: assetWithFace.id, deleteOnDisk: true });
@ -592,7 +592,7 @@ describe(AssetService.name, () => {
it('should update stack primary asset if deleted asset was primary asset in a stack', async () => {
mocks.stack.update.mockResolvedValue(factory.stack() as any);
mocks.asset.getById.mockResolvedValue(assetStub.primaryImage);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.primaryImage);
await sut.handleAssetDeletion({ id: assetStub.primaryImage.id, deleteOnDisk: true });
@ -604,7 +604,7 @@ describe(AssetService.name, () => {
it('should delete the entire stack if deleted asset was the primary asset and the stack would only contain one asset afterwards', async () => {
mocks.stack.delete.mockResolvedValue();
mocks.asset.getById.mockResolvedValue({
mocks.assetJob.getForAssetDeletion.mockResolvedValue({
...assetStub.primaryImage,
stack: { ...assetStub.primaryImage.stack, assets: assetStub.primaryImage.stack!.assets.slice(0, 2) },
});
@ -615,7 +615,7 @@ describe(AssetService.name, () => {
});
it('should delete a live photo', async () => {
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.livePhotoStillAsset as any);
mocks.asset.getLivePhotoCount.mockResolvedValue(0);
await sut.handleAssetDeletion({
@ -653,7 +653,7 @@ describe(AssetService.name, () => {
it('should not delete a live motion part if it is being used by another asset', async () => {
mocks.asset.getLivePhotoCount.mockResolvedValue(2);
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.livePhotoStillAsset as any);
await sut.handleAssetDeletion({
id: assetStub.livePhotoStillAsset.id,
@ -680,12 +680,13 @@ describe(AssetService.name, () => {
});
it('should update usage', async () => {
mocks.asset.getById.mockResolvedValue(assetStub.image);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.image);
await sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true });
expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.image.ownerId, -5000);
});
it('should fail if asset could not be found', async () => {
mocks.assetJob.getForAssetDeletion.mockResolvedValue(void 0);
await expect(sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true })).resolves.toBe(
JobStatus.FAILED,
);

View File

@ -189,13 +189,7 @@ export class AssetService extends BaseService {
async handleAssetDeletion(job: JobOf<JobName.ASSET_DELETION>): Promise<JobStatus> {
const { id, deleteOnDisk } = job;
const asset = await this.assetRepository.getById(id, {
faces: { person: true },
library: true,
stack: { assets: true },
exifInfo: true,
files: true,
});
const asset = await this.assetJobRepository.getForAssetDeletion(id);
if (!asset) {
return JobStatus.FAILED;
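
handleAssetDeletion now receives everything it needs from the single purpose-built query instead of getById with a relations object. Inferred from the selected columns and JSON aggregations in the repository change above, the row it works with looks roughly like this (illustrative type sketch only; this is not a type declared by the commit and field types are assumptions):

```typescript
// Illustrative shape of the AssetJobRepository.getForAssetDeletion result,
// inferred from the generated SQL; names and nullability are assumptions.
interface AssetForDeletion {
  id: string;
  isVisible: boolean;
  libraryId: string | null;
  ownerId: string;
  livePhotoVideoId: string | null;
  sidecarPath: string | null;
  encodedVideoPath: string | null;
  originalPath: string;
  exifInfo: Record<string, unknown> | null;
  faces: Array<{ person: Record<string, unknown> | null }>;
  files: Array<{ id: string; path: string; type: string }>;
  stack: { id: string; primaryAssetId: string; assets: unknown[] } | null;
}
```
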

View File

@ -15,7 +15,6 @@ import {
TranscodePolicy,
VideoCodec,
} from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { MediaService } from 'src/services/media.service';
import { JobCounts, RawImageInfo } from 'src/types';
import { assetStub } from 'test/fixtures/asset.stub';
@ -841,16 +840,12 @@ describe(MediaService.name, () => {
describe('handleQueueVideoConversion', () => {
it('should queue all video assets', async () => {
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.video],
hasNextPage: false,
});
mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueVideoConversion({ force: true });
expect(mocks.asset.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { type: AssetType.VIDEO });
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.VIDEO_CONVERSION,
@ -860,15 +855,11 @@ describe(MediaService.name, () => {
});
it('should queue all video assets without encoded videos', async () => {
mocks.asset.getWithout.mockResolvedValue({
items: [assetStub.video],
hasNextPage: false,
});
mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
await sut.handleQueueVideoConversion({});
expect(mocks.asset.getAll).not.toHaveBeenCalled();
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.ENCODED_VIDEO);
expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(void 0);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.VIDEO_CONVERSION,
@ -880,26 +871,18 @@ describe(MediaService.name, () => {
describe('handleVideoConversion', () => {
beforeEach(() => {
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.assetJob.getForVideoConversion.mockResolvedValue(assetStub.video);
sut.videoInterfaces = { dri: ['renderD128'], mali: true };
});
it('should skip transcoding if asset not found', async () => {
mocks.asset.getByIds.mockResolvedValue([]);
mocks.assetJob.getForVideoConversion.mockResolvedValue(void 0);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should skip transcoding if non-video asset', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleVideoConversion({ id: assetStub.image.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should transcode the longest stream', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
@ -921,14 +904,12 @@ describe(MediaService.name, () => {
it('should skip a video without any streams', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noVideoStreams);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should skip a video without any height', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noHeight);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -936,7 +917,6 @@ describe(MediaService.name, () => {
it('should throw an error if an unknown transcode policy is configured', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: 'foo' } } as never as SystemConfig);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
@ -947,7 +927,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, accel: TranscodeHWAccel.DISABLED },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValue(new Error('Error transcoding video'));
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
@ -957,7 +936,6 @@ describe(MediaService.name, () => {
it('should transcode when set to all', async () => {
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.ALL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1035,7 +1013,6 @@ describe(MediaService.name, () => {
it('should scale horizontally when video is horizontal', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1051,7 +1028,6 @@ describe(MediaService.name, () => {
it('should scale vertically when video is vertical', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1069,7 +1045,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1087,7 +1062,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1105,7 +1079,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.HEVC, acceptedAudioCodecs: [AudioCodec.AAC] },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1127,7 +1100,6 @@ describe(MediaService.name, () => {
acceptedAudioCodecs: [AudioCodec.AAC],
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1149,7 +1121,6 @@ describe(MediaService.name, () => {
acceptedAudioCodecs: [AudioCodec.AAC],
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1165,7 +1136,6 @@ describe(MediaService.name, () => {
it('should copy audio stream when audio matches target', async () => {
mocks.media.probe.mockResolvedValue(probeStub.audioStreamAac);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1180,7 +1150,6 @@ describe(MediaService.name, () => {
it('should remux when input is not an accepted container', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamAvi);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1204,7 +1173,6 @@ describe(MediaService.name, () => {
it('should not transcode if transcoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -1212,7 +1180,6 @@ describe(MediaService.name, () => {
it('should not remux when input is not an accepted container and transcoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -1220,7 +1187,6 @@ describe(MediaService.name, () => {
it('should not transcode if target codec is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: 'invalid' as any } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -1229,7 +1195,7 @@ describe(MediaService.name, () => {
const asset = assetStub.hasEncodedVideo;
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([asset]);
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
await sut.handleVideoConversion({ id: asset.id });
@ -1243,7 +1209,6 @@ describe(MediaService.name, () => {
it('should set max bitrate if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1259,7 +1224,6 @@ describe(MediaService.name, () => {
it('should default max bitrate to kbps if no unit is provided', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1275,7 +1239,6 @@ describe(MediaService.name, () => {
it('should transcode in two passes for h264/h265 when enabled and max bitrate is above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true, maxBitrate: '4500k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1291,7 +1254,6 @@ describe(MediaService.name, () => {
it('should fallback to one pass for h264/h265 if two-pass is enabled but no max bitrate is set', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1313,7 +1275,6 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.VP9,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1335,7 +1296,6 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.VP9,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1351,7 +1311,6 @@ describe(MediaService.name, () => {
it('should configure preset for vp9', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, preset: 'slow' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1367,7 +1326,6 @@ describe(MediaService.name, () => {
it('should not configure preset for vp9 if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { preset: 'invalid', targetVideoCodec: VideoCodec.VP9 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1383,7 +1341,6 @@ describe(MediaService.name, () => {
it('should configure threads if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, threads: 2 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1399,7 +1356,6 @@ describe(MediaService.name, () => {
it('should disable thread pooling for h264 if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1415,7 +1371,6 @@ describe(MediaService.name, () => {
it('should omit thread flags for h264 if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1431,7 +1386,6 @@ describe(MediaService.name, () => {
it('should disable thread pooling for hevc if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1, targetVideoCodec: VideoCodec.HEVC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1447,7 +1401,6 @@ describe(MediaService.name, () => {
it('should omit thread flags for hevc if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0, targetVideoCodec: VideoCodec.HEVC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1463,7 +1416,6 @@ describe(MediaService.name, () => {
it('should use av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1489,7 +1441,6 @@ describe(MediaService.name, () => {
it('should map `veryslow` preset to 4 for av1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, preset: 'veryslow' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1505,7 +1456,6 @@ describe(MediaService.name, () => {
it('should set max bitrate for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, maxBitrate: '2M' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1521,7 +1471,6 @@ describe(MediaService.name, () => {
it('should set threads for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1539,7 +1488,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4, maxBitrate: '2M' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1561,7 +1509,6 @@ describe(MediaService.name, () => {
targetResolution: '1080p',
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -1571,7 +1518,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, targetVideoCodec: VideoCodec.VP9 },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -1579,7 +1525,6 @@ describe(MediaService.name, () => {
it('should fail if hwaccel option is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: 'invalid' as any } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -1587,7 +1532,6 @@ describe(MediaService.name, () => {
it('should set options for nvenc', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1625,7 +1569,6 @@ describe(MediaService.name, () => {
twoPass: true,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1641,7 +1584,6 @@ describe(MediaService.name, () => {
it('should set vbr options for nvenc when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1657,7 +1599,6 @@ describe(MediaService.name, () => {
it('should set cq options for nvenc when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1673,7 +1614,6 @@ describe(MediaService.name, () => {
it('should omit preset for nvenc if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1689,7 +1629,6 @@ describe(MediaService.name, () => {
it('should ignore two pass for nvenc if max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1707,7 +1646,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1730,7 +1668,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1752,7 +1689,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1768,7 +1704,6 @@ describe(MediaService.name, () => {
it('should set options for qsv', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1809,7 +1744,6 @@ describe(MediaService.name, () => {
preferredHwDevice: '/dev/dri/renderD128',
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1828,7 +1762,6 @@ describe(MediaService.name, () => {
it('should omit preset for qsv if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1849,7 +1782,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, targetVideoCodec: VideoCodec.VP9 },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1869,7 +1801,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: [], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
@ -1880,7 +1811,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -1901,7 +1831,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -1928,7 +1857,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -1958,7 +1886,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true, preferredHwDevice: 'renderD129' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -1977,7 +1904,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -2000,7 +1926,6 @@ describe(MediaService.name, () => {
it('should set options for vaapi', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2031,7 +1956,6 @@ describe(MediaService.name, () => {
it('should set vbr options for vaapi when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2056,7 +1980,6 @@ describe(MediaService.name, () => {
it('should set cq options for vaapi when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2081,7 +2004,6 @@ describe(MediaService.name, () => {
it('should omit preset for vaapi if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2101,7 +2023,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2123,7 +2044,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, preferredHwDevice: '/dev/dri/renderD128' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2144,7 +2064,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -2170,7 +2089,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -2194,7 +2112,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -2215,7 +2132,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true, preferredHwDevice: 'renderD129' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2232,7 +2148,6 @@ describe(MediaService.name, () => {
it('should fallback to hw encoding and sw decoding if hw transcoding fails and hw decoding is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledTimes(2);
@ -2253,7 +2168,6 @@ describe(MediaService.name, () => {
it('should fallback to sw decoding if fallback to sw decoding + hw encoding fails', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -2272,7 +2186,6 @@ describe(MediaService.name, () => {
it('should fallback to sw transcoding if hw transcoding fails and hw decoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledTimes(2);
@ -2291,7 +2204,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: [], mali: true };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@ -2299,7 +2211,6 @@ describe(MediaService.name, () => {
it('should set options for rkmpp', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2340,7 +2251,6 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.HEVC,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2358,7 +2268,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2376,7 +2285,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2399,7 +2307,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2419,7 +2326,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: false, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2442,7 +2348,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2462,7 +2367,6 @@ describe(MediaService.name, () => {
it('should tonemap when policy is required and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2482,7 +2386,6 @@ describe(MediaService.name, () => {
it('should tonemap when policy is optimal and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2502,7 +2405,6 @@ describe(MediaService.name, () => {
it('should transcode when policy is required and video is not yuv420p', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2518,7 +2420,6 @@ describe(MediaService.name, () => {
it('should convert to yuv420p when scaling without tone-mapping', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream4K10Bit);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2534,7 +2435,6 @@ describe(MediaService.name, () => {
it('should count frames for progress when log level is debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.logger.isLevelEnabled.mockReturnValue(true);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@ -2557,7 +2457,6 @@ describe(MediaService.name, () => {
it('should not count frames for progress when log level is not debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.probe).toHaveBeenCalledWith(assetStub.video.originalPath, { countFrames: false });

View File

@ -22,7 +22,7 @@ import {
VideoCodec,
VideoContainer,
} from 'src/enum';
import { UpsertFileOptions, WithoutProperty } from 'src/repositories/asset.repository';
import { UpsertFileOptions } from 'src/repositories/asset.repository';
import { BaseService } from 'src/services/base.service';
import {
AudioStreamInfo,
@ -330,25 +330,25 @@ export class MediaService extends BaseService {
async handleQueueVideoConversion(job: JobOf<JobName.QUEUE_VIDEO_CONVERSION>): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { type: AssetType.VIDEO })
: this.assetRepository.getWithout(pagination, WithoutProperty.ENCODED_VIDEO);
});
let queue: { name: JobName.VIDEO_CONVERSION; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
queue.push({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } });
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } })),
);
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
queue = [];
}
}
await this.jobRepository.queueAll(queue);
return JobStatus.SUCCESS;
}
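
The rewritten handleQueueVideoConversion drops usePagination in favour of streaming ids from the repository and flushing them to the job queue in chunks of JOBS_ASSET_PAGINATION_SIZE. The same buffering pattern, pulled out as a standalone generic helper purely for illustration (hypothetical, not part of this commit):

```typescript
// Hypothetical helper showing the stream-and-batch pattern in isolation:
// buffer items from an async iterable and yield them in fixed-size chunks.
async function* inBatches<T>(source: AsyncIterable<T>, size: number): AsyncGenerator<T[]> {
  let buffer: T[] = [];
  for await (const item of source) {
    buffer.push(item);
    if (buffer.length >= size) {
      yield buffer;
      buffer = [];
    }
  }
  if (buffer.length > 0) {
    yield buffer; // flush the final partial batch
  }
}
```
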
@OnJob({ name: JobName.VIDEO_CONVERSION, queue: QueueName.VIDEO_CONVERSION })
async handleVideoConversion({ id }: JobOf<JobName.VIDEO_CONVERSION>): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset || asset.type !== AssetType.VIDEO) {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.FAILED;
}

View File

@ -14,7 +14,7 @@ import { probeStub } from 'test/fixtures/media.stub';
import { personStub } from 'test/fixtures/person.stub';
import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const makeFaceTags = (face: Partial<{ Name: string }> = {}) => ({
RegionInfo: {
@ -104,10 +104,10 @@ describe(MetadataService.name, () => {
describe('handleQueueMetadataExtraction', () => {
it('should queue metadata extraction for all assets without exif values', async () => {
mocks.asset.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image]));
await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.getWithout).toHaveBeenCalled();
expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.METADATA_EXTRACTION,
@ -117,10 +117,10 @@ describe(MetadataService.name, () => {
});
it('should queue metadata extraction for all assets', async () => {
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image]));
await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.METADATA_EXTRACTION,
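
The updated specs stub the new streaming repository methods with makeStream from test/utils, so the services under test can iterate the mock just like a real Kysely stream. A helper along these lines would be sufficient (assumed shape; the actual utility may differ):

```typescript
// Assumed shape of the makeStream test helper: expose a fixed array of fixtures
// as an async iterable so mocked stream methods can be consumed with for-await.
export function makeStream<T>(items: T[] = []): AsyncIterableIterator<T> {
  return (async function* () {
    for (const item of items) {
      yield item;
    }
  })();
}
```
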

View File

@ -168,18 +168,18 @@ export class MetadataService extends BaseService {
@OnJob({ name: JobName.QUEUE_METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
async handleQueueMetadataExtraction(job: JobOf<JobName.QUEUE_METADATA_EXTRACTION>): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
: this.assetRepository.getWithout(pagination, WithoutProperty.EXIF);
});
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } })),
);
let queue: { name: JobName.METADATA_EXTRACTION; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForMetadataExtraction(force)) {
queue.push({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } });
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
queue = [];
}
}
await this.jobRepository.queueAll(queue);
return JobStatus.SUCCESS;
}