dummy jobs

mertalev 2025-04-30 16:38:35 -04:00
parent 8c0c8a8d0e
commit 262ef2a746
No known key found for this signature in database
GPG Key ID: DF6ABC77AAD98C95
8 changed files with 28 additions and 459 deletions

View File

@@ -255,10 +255,10 @@ export class DatabaseRepository {
}
}
if (error) {
this.logger.error(`Kysely migrations failed: ${error}`);
throw error;
}
// if (error) {
// this.logger.error(`Kysely migrations failed: ${error}`);
// throw error;
// }
this.logger.debug('Finished running kysely migrations');
}

View File

@@ -4,11 +4,10 @@ import { OnJob } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
import { AssetFileType, JobName, JobStatus, QueueName } from 'src/enum';
import { JobName, JobStatus, QueueName } from 'src/enum';
import { AssetDuplicateResult } from 'src/repositories/search.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { isDuplicateDetectionEnabled } from 'src/utils/misc';
@Injectable()
@@ -60,49 +59,6 @@ export class DuplicateService extends BaseService {
return JobStatus.FAILED;
}
if (asset.stackId) {
this.logger.debug(`Asset ${id} is part of a stack, skipping`);
return JobStatus.SKIPPED;
}
if (!asset.isVisible) {
this.logger.debug(`Asset ${id} is not visible, skipping`);
return JobStatus.SKIPPED;
}
const previewFile = getAssetFile(asset.files || [], AssetFileType.PREVIEW);
if (!previewFile) {
this.logger.warn(`Asset ${id} is missing preview image`);
return JobStatus.FAILED;
}
if (!asset.embedding) {
this.logger.debug(`Asset ${id} is missing embedding`);
return JobStatus.FAILED;
}
const duplicateAssets = await this.searchRepository.searchDuplicates({
assetId: asset.id,
embedding: asset.embedding,
maxDistance: machineLearning.duplicateDetection.maxDistance,
type: asset.type,
userIds: [asset.ownerId],
});
let assetIds = [asset.id];
if (duplicateAssets.length > 0) {
this.logger.debug(
`Found ${duplicateAssets.length} duplicate${duplicateAssets.length === 1 ? '' : 's'} for asset ${asset.id}`,
);
assetIds = await this.updateDuplicates(asset, duplicateAssets);
} else if (asset.duplicateId) {
this.logger.debug(`No duplicates found for asset ${asset.id}, removing duplicateId`);
await this.assetRepository.update({ id: asset.id, duplicateId: null });
}
const duplicatesDetectedAt = new Date();
await this.assetRepository.upsertJobStatus(...assetIds.map((assetId) => ({ assetId, duplicatesDetectedAt })));
return JobStatus.SUCCESS;
}

View File

@@ -5,7 +5,6 @@ import { OnEvent } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AllJobStatusResponseDto, JobCommandDto, JobCreateDto, JobStatusDto } from 'src/dtos/job.dto';
import {
AssetType,
BootstrapEventPriority,
ImmichWorker,
JobCommand,
@@ -306,12 +305,9 @@ export class JobService extends BaseService {
const jobs: JobItem[] = [
{ name: JobName.SMART_SEARCH, data: item.data },
{ name: JobName.FACE_DETECTION, data: item.data },
{ name: JobName.VIDEO_CONVERSION, data: item.data },
];
if (asset.type === AssetType.VIDEO) {
jobs.push({ name: JobName.VIDEO_CONVERSION, data: item.data });
}
await this.jobRepository.queueAll(jobs);
if (asset.isVisible) {
this.eventRepository.clientSend('on_upload_success', asset.ownerId, mapAsset(asset));

View File

@@ -5,24 +5,19 @@ import { Exif } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
AssetFileType,
AssetPathType,
AssetType,
AudioCodec,
Colorspace,
JobName,
JobStatus,
LogLevel,
QueueName,
RawExtractedFormat,
StorageFolder,
TranscodeHWAccel,
TranscodePolicy,
TranscodeTarget,
VideoCodec,
VideoContainer,
VideoContainer
} from 'src/enum';
import { UpsertFileOptions } from 'src/repositories/asset.repository';
import { BaseService } from 'src/services/base.service';
import {
AudioStreamInfo,
@@ -34,7 +29,7 @@ import {
VideoStreamInfo,
} from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
@Injectable()
@@ -49,26 +44,25 @@ export class MediaService extends BaseService {
@OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
let jobs: JobItem[] = [];
for (let i = 0; i < 10; i++) {
let thumbJobs: JobItem[] = [];
for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
jobs = [];
};
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
thumbJobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
continue;
}
for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
if (thumbJobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(thumbJobs);
thumbJobs = [];
}
}
await this.jobRepository.queueAll(thumbJobs);
}
await queueAll();
const jobs: JobItem[] = [];
const people = this.personRepository.getAll(force ? undefined : { thumbnailPath: '' });
@@ -83,12 +77,9 @@ }
}
jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
}
await queueAll();
await this.jobRepository.queueAll(jobs);
return JobStatus.SUCCESS;
}
@@ -151,75 +142,6 @@ export class MediaService extends BaseService {
this.logger.warn(`Thumbnail generation failed for asset ${id}: not found`);
return JobStatus.FAILED;
}
if (!asset.isVisible) {
this.logger.verbose(`Thumbnail generation skipped for asset ${id}: not visible`);
return JobStatus.SKIPPED;
}
let generated: {
previewPath: string;
thumbnailPath: string;
fullsizePath?: string;
thumbhash: Buffer;
};
if (asset.type === AssetType.VIDEO || asset.originalFileName.toLowerCase().endsWith('.gif')) {
generated = await this.generateVideoThumbnails(asset);
} else if (asset.type === AssetType.IMAGE) {
generated = await this.generateImageThumbnails(asset);
} else {
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
return JobStatus.SKIPPED;
}
const { previewFile, thumbnailFile, fullsizeFile } = getAssetFiles(asset.files);
const toUpsert: UpsertFileOptions[] = [];
if (previewFile?.path !== generated.previewPath) {
toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.PREVIEW });
}
if (thumbnailFile?.path !== generated.thumbnailPath) {
toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.THUMBNAIL });
}
if (generated.fullsizePath && fullsizeFile?.path !== generated.fullsizePath) {
toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FULLSIZE });
}
if (toUpsert.length > 0) {
await this.assetRepository.upsertFiles(toUpsert);
}
const pathsToDelete: string[] = [];
if (previewFile && previewFile.path !== generated.previewPath) {
this.logger.debug(`Deleting old preview for asset ${asset.id}`);
pathsToDelete.push(previewFile.path);
}
if (thumbnailFile && thumbnailFile.path !== generated.thumbnailPath) {
this.logger.debug(`Deleting old thumbnail for asset ${asset.id}`);
pathsToDelete.push(thumbnailFile.path);
}
if (fullsizeFile && fullsizeFile.path !== generated.fullsizePath) {
this.logger.debug(`Deleting old fullsize preview image for asset ${asset.id}`);
pathsToDelete.push(fullsizeFile.path);
if (!generated.fullsizePath) {
// did not generate a new fullsize image, delete the existing record
await this.assetRepository.deleteFiles([fullsizeFile]);
}
}
if (pathsToDelete.length > 0) {
await Promise.all(pathsToDelete.map((path) => this.storageRepository.unlink(path)));
}
if (!asset.thumbhash || Buffer.compare(asset.thumbhash, generated.thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash: generated.thumbhash });
}
await this.assetRepository.upsertJobStatus({ assetId: asset.id, previewAt: new Date(), thumbnailAt: new Date() });
return JobStatus.SUCCESS;
}
@@ -364,81 +286,6 @@ export class MediaService extends BaseService {
if (!asset) {
return JobStatus.FAILED;
}
const input = asset.originalPath;
const output = StorageCore.getEncodedVideoPath(asset);
this.storageCore.ensureFolders(output);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
countFrames: this.logger.isLevelEnabled(LogLevel.DEBUG), // makes frame count more reliable for progress logs
});
const videoStream = this.getMainStream(videoStreams);
const audioStream = this.getMainStream(audioStreams);
if (!videoStream || !format.formatName) {
return JobStatus.FAILED;
}
if (!videoStream.height || !videoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
return JobStatus.FAILED;
}
let { ffmpeg } = await this.getConfig({ withCache: true });
const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
if (target === TranscodeTarget.NONE && !this.isRemuxRequired(ffmpeg, format)) {
if (asset.encodedVideoPath) {
this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [asset.encodedVideoPath] } });
await this.assetRepository.update({ id: asset.id, encodedVideoPath: null });
} else {
this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
}
return JobStatus.SKIPPED;
}
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
} else {
this.logger.log(
`Transcoding video ${asset.id} with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and${ffmpeg.accelDecode ? '' : ' software'} decoding`,
);
}
try {
await this.mediaRepository.transcode(input, output, command);
} catch (error: any) {
this.logger.error(`Error occurred during transcoding: ${error.message}`);
if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
return JobStatus.FAILED;
}
let partialFallbackSuccess = false;
if (ffmpeg.accelDecode) {
try {
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and software decoding`);
ffmpeg = { ...ffmpeg, accelDecode: false };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
await this.mediaRepository.transcode(input, output, command);
partialFallbackSuccess = true;
} catch (error: any) {
this.logger.error(`Error occurred during transcoding: ${error.message}`);
}
}
if (!partialFallbackSuccess) {
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
ffmpeg = { ...ffmpeg, accel: TranscodeHWAccel.DISABLED };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
await this.mediaRepository.transcode(input, output, command);
}
}
this.logger.log(`Successfully encoded ${asset.id}`);
await this.assetRepository.update({ id: asset.id, encodedVideoPath: output });
return JobStatus.SUCCESS;
}

View File

@@ -23,11 +23,9 @@ import {
SourceType,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { ReverseGeocodeResult } from 'src/repositories/map.repository';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { isFaceImportEnabled } from 'src/utils/misc';
import { upsertTags } from 'src/utils/tag';
/** look for a date from these tags (in order) */
@@ -189,98 +187,6 @@ export class MetadataService extends BaseService {
if (!asset) {
return JobStatus.FAILED;
}
const [exifTags, stats] = await Promise.all([
this.getExifTags(asset),
this.storageRepository.stat(asset.originalPath),
]);
this.logger.verbose('Exif Tags', exifTags);
const dates = this.getDates(asset, exifTags, stats);
const { width, height } = this.getImageDimensions(exifTags);
let geo: ReverseGeocodeResult = { country: null, state: null, city: null },
latitude: number | null = null,
longitude: number | null = null;
if (this.hasGeo(exifTags)) {
latitude = exifTags.GPSLatitude;
longitude = exifTags.GPSLongitude;
if (reverseGeocoding.enabled) {
geo = await this.mapRepository.reverseGeocode({ latitude, longitude });
}
}
const exifData: Insertable<Exif> = {
assetId: asset.id,
// dates
dateTimeOriginal: dates.dateTimeOriginal,
modifyDate: stats.mtime,
timeZone: dates.timeZone,
// gps
latitude,
longitude,
country: geo.country,
state: geo.state,
city: geo.city,
// image/file
fileSizeInByte: stats.size,
exifImageHeight: validate(height),
exifImageWidth: validate(width),
orientation: validate(exifTags.Orientation)?.toString() ?? null,
projectionType: exifTags.ProjectionType ? String(exifTags.ProjectionType).toUpperCase() : null,
bitsPerSample: this.getBitsPerSample(exifTags),
colorspace: exifTags.ColorSpace ?? null,
// camera
make: exifTags.Make ?? exifTags?.Device?.Manufacturer ?? exifTags.AndroidMake ?? null,
model: exifTags.Model ?? exifTags?.Device?.ModelName ?? exifTags.AndroidModel ?? null,
fps: validate(Number.parseFloat(exifTags.VideoFrameRate!)),
iso: validate(exifTags.ISO) as number,
exposureTime: exifTags.ExposureTime ?? null,
lensModel: getLensModel(exifTags),
fNumber: validate(exifTags.FNumber),
focalLength: validate(exifTags.FocalLength),
// comments
description: String(exifTags.ImageDescription || exifTags.Description || '').trim(),
profileDescription: exifTags.ProfileDescription || null,
rating: validateRange(exifTags.Rating, -1, 5),
// grouping
livePhotoCID: (exifTags.ContentIdentifier || exifTags.MediaGroupUUID) ?? null,
autoStackId: this.getAutoStackId(exifTags),
};
const promises: Promise<unknown>[] = [
this.assetRepository.upsertExif(exifData),
this.assetRepository.update({
id: asset.id,
duration: exifTags.Duration?.toString() ?? null,
localDateTime: dates.localDateTime,
fileCreatedAt: dates.dateTimeOriginal ?? undefined,
fileModifiedAt: stats.mtime,
}),
this.applyTagList(asset, exifTags),
];
if (this.isMotionPhoto(asset, exifTags)) {
promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats));
}
if (isFaceImportEnabled(metadata) && this.hasTaggedFaces(exifTags)) {
promises.push(this.applyTaggedFaces(asset, exifTags));
}
await Promise.all(promises);
if (exifData.livePhotoCID) {
await this.linkLivePhotos(asset, exifData);
}
await this.assetRepository.upsertJobStatus({ assetId: asset.id, metadataExtractedAt: new Date() });
return JobStatus.SUCCESS;
}
@@ -425,9 +331,9 @@ export class MetadataService extends BaseService {
typeof tag === 'number'
? String(tag)
: tag
.split('|')
.map((tag) => tag.replaceAll('/', '|'))
.join('/'),
.split('|')
.map((tag) => tag.replaceAll('/', '|'))
.join('/'),
);
} else if (exifTags.Keywords) {
let keywords = exifTags.Keywords;

View File

@@ -1,9 +1,7 @@
import { BadRequestException, Injectable, NotFoundException } from '@nestjs/common';
import { Insertable, Updateable } from 'kysely';
import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { Updateable } from 'kysely';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { Person } from 'src/database';
import { AssetFaces, FaceSearch } from 'src/db';
import { Chunked, OnJob } from 'src/decorators';
import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
@@ -27,7 +25,6 @@ import {
import {
AssetType,
CacheControl,
ImageFormat,
JobName,
JobStatus,
Permission,
@@ -291,79 +288,6 @@ export class PersonService extends BaseService {
}
const asset = await this.assetJobRepository.getForDetectFacesJob(id);
const previewFile = asset?.files[0];
if (!asset || asset.files.length !== 1 || !previewFile) {
return JobStatus.FAILED;
}
if (!asset.isVisible) {
return JobStatus.SKIPPED;
}
const { imageHeight, imageWidth, faces } = await this.machineLearningRepository.detectFaces(
machineLearning.urls,
previewFile.path,
machineLearning.facialRecognition,
);
this.logger.debug(`${faces.length} faces detected in ${previewFile.path}`);
const facesToAdd: (Insertable<AssetFaces> & { id: string })[] = [];
const embeddings: FaceSearch[] = [];
const mlFaceIds = new Set<string>();
for (const face of asset.faces) {
if (face.sourceType === SourceType.MACHINE_LEARNING) {
mlFaceIds.add(face.id);
}
}
const heightScale = imageHeight / (asset.faces[0]?.imageHeight || 1);
const widthScale = imageWidth / (asset.faces[0]?.imageWidth || 1);
for (const { boundingBox, embedding } of faces) {
const scaledBox = {
x1: boundingBox.x1 * widthScale,
y1: boundingBox.y1 * heightScale,
x2: boundingBox.x2 * widthScale,
y2: boundingBox.y2 * heightScale,
};
const match = asset.faces.find((face) => this.iou(face, scaledBox) > 0.5);
if (match && !mlFaceIds.delete(match.id)) {
embeddings.push({ faceId: match.id, embedding });
} else if (!match) {
const faceId = this.cryptoRepository.randomUUID();
facesToAdd.push({
id: faceId,
assetId: asset.id,
imageHeight,
imageWidth,
boundingBoxX1: boundingBox.x1,
boundingBoxY1: boundingBox.y1,
boundingBoxX2: boundingBox.x2,
boundingBoxY2: boundingBox.y2,
});
embeddings.push({ faceId, embedding });
}
}
const faceIdsToRemove = [...mlFaceIds];
if (facesToAdd.length > 0 || faceIdsToRemove.length > 0 || embeddings.length > 0) {
await this.personRepository.refreshFaces(facesToAdd, faceIdsToRemove, embeddings);
}
if (faceIdsToRemove.length > 0) {
this.logger.log(`Removed ${faceIdsToRemove.length} faces below detection threshold in asset ${id}`);
}
if (facesToAdd.length > 0) {
this.logger.log(`Detected ${facesToAdd.length} new faces in asset ${id}`);
const jobs = facesToAdd.map((face) => ({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id } }) as const);
await this.jobRepository.queueAll([{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, ...jobs]);
} else if (embeddings.length > 0) {
this.logger.log(`Added ${embeddings.length} face embeddings for asset ${id}`);
}
await this.assetRepository.upsertJobStatus({ assetId: asset.id, facesRecognizedAt: new Date() });
return JobStatus.SUCCESS;
}
@@ -547,26 +471,6 @@ export class PersonService extends BaseService {
this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
return JobStatus.FAILED;
}
const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight } = data;
const { width, height, inputPath } = await this.getInputDimensions(data);
const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
this.storageCore.ensureFolders(thumbnailPath);
const thumbnailOptions = {
colorspace: image.colorspace,
format: ImageFormat.JPEG,
size: FACE_THUMBNAIL_SIZE,
quality: image.thumbnail.quality,
crop: this.getCrop({ old: { width: oldWidth, height: oldHeight }, new: { width, height } }, { x1, y1, x2, y2 }),
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
};
await this.mediaRepository.generateThumbnail(inputPath, thumbnailOptions, thumbnailPath);
await this.personRepository.update({ id, thumbnailPath });
return JobStatus.SUCCESS;
}

View File

@@ -103,30 +103,6 @@ export class SmartInfoService extends BaseService {
if (!asset || asset.files.length !== 1) {
return JobStatus.FAILED;
}
if (!asset.isVisible) {
return JobStatus.SKIPPED;
}
const embedding = await this.machineLearningRepository.encodeImage(
machineLearning.urls,
asset.files[0].path,
machineLearning.clip,
);
if (this.databaseRepository.isBusy(DatabaseLock.CLIPDimSize)) {
this.logger.verbose(`Waiting for CLIP dimension size to be updated`);
await this.databaseRepository.wait(DatabaseLock.CLIPDimSize);
}
const newConfig = await this.getConfig({ withCache: true });
if (machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) {
// Skip the job if the model has changed since the embedding was generated.
return JobStatus.SKIPPED;
}
await this.searchRepository.upsert(asset.id, embedding);
return JobStatus.SUCCESS;
}
}

View File

@@ -10,7 +10,6 @@ import { AssetPathType, AssetType, DatabaseLock, JobName, JobStatus, QueueName,
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf, StorageAsset } from 'src/types';
import { getLivePhotoMotionFilename } from 'src/utils/file';
const storageTokens = {
secondOptions: ['s', 'ss', 'SSS'],
@@ -128,21 +127,6 @@ export class StorageTemplateService extends BaseService {
if (!asset) {
return JobStatus.FAILED;
}
const user = await this.userRepository.get(asset.ownerId, {});
const storageLabel = user?.storageLabel || null;
const filename = asset.originalFileName || asset.id;
await this.moveAsset(asset, { storageLabel, filename });
// move motion part of live photo
if (asset.livePhotoVideoId) {
const livePhotoVideo = await this.assetJobRepository.getForStorageTemplateJob(asset.livePhotoVideoId);
if (!livePhotoVideo) {
return JobStatus.FAILED;
}
const motionFilename = getLivePhotoMotionFilename(filename, livePhotoVideo.originalPath);
await this.moveAsset(livePhotoVideo, { storageLabel, filename: motionFilename });
}
return JobStatus.SUCCESS;
}