immich/server/apps/microservices/src/processors/video-transcode.processor.ts
Jason Rasmussen 6c7679714b
refactor(server): jobs and processors (#1787)
* refactor: jobs and processors
* refactor: storage migration processor
* fix: tests
* fix: code warning
* chore: ignore coverage from infra
* fix: sync move asset logic between job core and asset core
* refactor: move error handling inside of catch
* refactor(server): job core into dedicated service calls
* refactor: smart info
* fix: tests
* chore: smart info tests
* refactor: use asset repository
* refactor: thumbnail processor
* chore: coverage reqs
2023-02-25 08:12:03 -06:00

import { APP_UPLOAD_LOCATION } from '@app/common/constants';
import { AssetEntity } from '@app/infra';
import { IAssetJob, IAssetRepository, JobName, QueueName, SystemConfigService } from '@app/domain';
import { Process, Processor } from '@nestjs/bull';
import { Inject, Logger } from '@nestjs/common';
import { Job } from 'bull';
import ffmpeg, { FfprobeData } from 'fluent-ffmpeg';
import { existsSync, mkdirSync } from 'fs';
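
/**
 * Processor for the VIDEO_CONVERSION queue. Transcodes uploaded videos with
 * ffmpeg according to the system ffmpeg configuration and stores the encoded
 * video path back on the asset.
 */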
@Processor(QueueName.VIDEO_CONVERSION)
export class VideoTranscodeProcessor {
  readonly logger = new Logger(VideoTranscodeProcessor.name);

  constructor(
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    private systemConfigService: SystemConfigService,
  ) {}
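
  /**
   * Queue handler: ensures the per-owner encoded-video directory exists and
   * starts encoding for the asset carried by the job.
   */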
  @Process({ name: JobName.VIDEO_CONVERSION, concurrency: 2 })
  async videoConversion(job: Job<IAssetJob>) {
    const { asset } = job.data;
    const basePath = APP_UPLOAD_LOCATION;
    const encodedVideoPath = `${basePath}/${asset.ownerId}/encoded-video`;

    if (!existsSync(encodedVideoPath)) {
      mkdirSync(encodedVideoPath, { recursive: true });
    }

    const savedEncodedPath = `${encodedVideoPath}/${asset.id}.mp4`;

    await this.runVideoEncode(asset, savedEncodedPath);
  }
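
  /**
   * Wraps ffprobe in a promise and resolves with the probe data for the
   * asset's original file.
   */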
  async runFFProbePipeline(asset: AssetEntity): Promise<FfprobeData> {
    return new Promise((resolve, reject) => {
      ffmpeg.ffprobe(asset.originalPath, (err, data) => {
        if (err || !data) {
          this.logger.error(`Cannot probe video ${err}`, 'runFFProbePipeline');
          return reject(err);
        }
        resolve(data);
      });
    });
  }
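
  /**
   * Decides whether the asset needs transcoding: always transcode when
   * transcodeAll is enabled, otherwise only when the video stream with the
   * most frames is not already in the target codec.
   */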
  async runVideoEncode(asset: AssetEntity, savedEncodedPath: string): Promise<void> {
    const config = await this.systemConfigService.getConfig();

    if (config.ffmpeg.transcodeAll) {
      return this.runFFMPEGPipeLine(asset, savedEncodedPath);
    }

    const videoInfo = await this.runFFProbePipeline(asset);

    const videoStreams = videoInfo.streams.filter((stream) => stream.codec_type === 'video');

    const longestVideoStream = videoStreams.sort((stream1, stream2) => {
      const stream1Frames = Number.parseInt(stream1.nb_frames ?? '0');
      const stream2Frames = Number.parseInt(stream2.nb_frames ?? '0');
      return stream2Frames - stream1Frames;
    })[0];

    // TODO: If video or audio are already the correct format, don't re-encode, copy the stream
    if (longestVideoStream.codec_name !== config.ffmpeg.targetVideoCodec) {
      return this.runFFMPEGPipeLine(asset, savedEncodedPath);
    }
  }
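
  /**
   * Runs the ffmpeg transcode with the configured CRF, preset, codecs, and
   * scaling, then saves the encoded video path on the asset when it finishes.
   */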
  async runFFMPEGPipeLine(asset: AssetEntity, savedEncodedPath: string): Promise<void> {
    const config = await this.systemConfigService.getConfig();

    return new Promise((resolve, reject) => {
      ffmpeg(asset.originalPath)
        .outputOptions([
          `-crf ${config.ffmpeg.crf}`,
          `-preset ${config.ffmpeg.preset}`,
          `-vcodec ${config.ffmpeg.targetVideoCodec}`,
          `-acodec ${config.ffmpeg.targetAudioCodec}`,
          `-vf scale=${config.ffmpeg.targetScaling}`,
        ])
        .output(savedEncodedPath)
        .on('start', () => {
          this.logger.log('Start Converting Video');
        })
        .on('error', (error) => {
          this.logger.error(`Cannot Convert Video ${error}`);
          reject(error);
        })
        .on('end', async () => {
          this.logger.log(`Converting Success ${asset.id}`);
          await this.assetRepository.save({ id: asset.id, encodedVideoPath: savedEncodedPath });
          resolve();
        })
        .run();
    });
  }
}