Compare commits

...

8 Commits

Author SHA1 Message Date
bwees a69374aa17 chore: more cleanup 2026-03-29 20:49:46 -05:00
bwees 15c15bd543 chore: update openapi 2026-03-29 20:42:59 -05:00
bwees 10e754e1aa chore: cleanup 2026-03-29 20:40:51 -05:00
bwees b9282b27e5 fix: await both live photo and regular asset when applying edits 2026-03-29 20:26:21 -05:00
bwees 146a076324 chore: new behavior tests 2026-03-24 23:49:33 -05:00
bwees 4c70afc8f4 chore: resolve tests 2026-03-24 23:39:11 -05:00
bwees 80db413d69 chore: more wip 2026-03-24 16:35:44 -05:00
bwees d8d532e7ca feat: wip 2026-03-24 16:35:43 -05:00
27 changed files with 939 additions and 187 deletions
+12 -3
View File
@@ -1004,10 +1004,13 @@ class AssetsApi {
///
/// * [String] id (required):
///
/// * [bool] edited:
/// Return edited asset if available
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> playAssetVideoWithHttpInfo(String id, { String? key, String? slug, }) async {
Future<Response> playAssetVideoWithHttpInfo(String id, { bool? edited, String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/{id}/video/playback'
.replaceAll('{id}', id);
@@ -1019,6 +1022,9 @@ class AssetsApi {
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (edited != null) {
queryParams.addAll(_queryParams('', 'edited', edited));
}
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
@@ -1048,11 +1054,14 @@ class AssetsApi {
///
/// * [String] id (required):
///
/// * [bool] edited:
/// Return edited asset if available
///
/// * [String] key:
///
/// * [String] slug:
Future<MultipartFile?> playAssetVideo(String id, { String? key, String? slug, }) async {
final response = await playAssetVideoWithHttpInfo(id, key: key, slug: slug, );
Future<MultipartFile?> playAssetVideo(String id, { bool? edited, String? key, String? slug, }) async {
final response = await playAssetVideoWithHttpInfo(id, edited: edited, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
+3 -3
View File
@@ -29,7 +29,6 @@ class JobName {
static const assetDetectFaces = JobName._(r'AssetDetectFaces');
static const assetDetectDuplicatesQueueAll = JobName._(r'AssetDetectDuplicatesQueueAll');
static const assetDetectDuplicates = JobName._(r'AssetDetectDuplicates');
static const assetEditThumbnailGeneration = JobName._(r'AssetEditThumbnailGeneration');
static const assetEncodeVideoQueueAll = JobName._(r'AssetEncodeVideoQueueAll');
static const assetEncodeVideo = JobName._(r'AssetEncodeVideo');
static const assetEmptyTrash = JobName._(r'AssetEmptyTrash');
@@ -38,6 +37,7 @@ class JobName {
static const assetFileMigration = JobName._(r'AssetFileMigration');
static const assetGenerateThumbnailsQueueAll = JobName._(r'AssetGenerateThumbnailsQueueAll');
static const assetGenerateThumbnails = JobName._(r'AssetGenerateThumbnails');
static const assetProcessEdit = JobName._(r'AssetProcessEdit');
static const auditLogCleanup = JobName._(r'AuditLogCleanup');
static const auditTableCleanup = JobName._(r'AuditTableCleanup');
static const databaseBackup = JobName._(r'DatabaseBackup');
@@ -88,7 +88,6 @@ class JobName {
assetDetectFaces,
assetDetectDuplicatesQueueAll,
assetDetectDuplicates,
assetEditThumbnailGeneration,
assetEncodeVideoQueueAll,
assetEncodeVideo,
assetEmptyTrash,
@@ -97,6 +96,7 @@ class JobName {
assetFileMigration,
assetGenerateThumbnailsQueueAll,
assetGenerateThumbnails,
assetProcessEdit,
auditLogCleanup,
auditTableCleanup,
databaseBackup,
@@ -182,7 +182,6 @@ class JobNameTypeTransformer {
case r'AssetDetectFaces': return JobName.assetDetectFaces;
case r'AssetDetectDuplicatesQueueAll': return JobName.assetDetectDuplicatesQueueAll;
case r'AssetDetectDuplicates': return JobName.assetDetectDuplicates;
case r'AssetEditThumbnailGeneration': return JobName.assetEditThumbnailGeneration;
case r'AssetEncodeVideoQueueAll': return JobName.assetEncodeVideoQueueAll;
case r'AssetEncodeVideo': return JobName.assetEncodeVideo;
case r'AssetEmptyTrash': return JobName.assetEmptyTrash;
@@ -191,6 +190,7 @@ class JobNameTypeTransformer {
case r'AssetFileMigration': return JobName.assetFileMigration;
case r'AssetGenerateThumbnailsQueueAll': return JobName.assetGenerateThumbnailsQueueAll;
case r'AssetGenerateThumbnails': return JobName.assetGenerateThumbnails;
case r'AssetProcessEdit': return JobName.assetProcessEdit;
case r'AuditLogCleanup': return JobName.auditLogCleanup;
case r'AuditTableCleanup': return JobName.auditTableCleanup;
case r'DatabaseBackup': return JobName.databaseBackup;
+11 -1
View File
@@ -4402,6 +4402,16 @@
"description": "Streams the video file for the specified asset. This endpoint also supports byte range requests.",
"operationId": "playAssetVideo",
"parameters": [
{
"name": "edited",
"required": false,
"in": "query",
"description": "Return edited asset if available",
"schema": {
"default": false,
"type": "boolean"
}
},
{
"name": "id",
"required": true,
@@ -18144,7 +18154,6 @@
"AssetDetectFaces",
"AssetDetectDuplicatesQueueAll",
"AssetDetectDuplicates",
"AssetEditThumbnailGeneration",
"AssetEncodeVideoQueueAll",
"AssetEncodeVideo",
"AssetEmptyTrash",
@@ -18153,6 +18162,7 @@
"AssetFileMigration",
"AssetGenerateThumbnailsQueueAll",
"AssetGenerateThumbnails",
"AssetProcessEdit",
"AuditLogCleanup",
"AuditTableCleanup",
"DatabaseBackup",
+4 -2
View File
@@ -4316,7 +4316,8 @@ export function viewAsset({ edited, id, key, size, slug }: {
/**
* Play asset video
*/
export function playAssetVideo({ id, key, slug }: {
export function playAssetVideo({ edited, id, key, slug }: {
edited?: boolean;
id: string;
key?: string;
slug?: string;
@@ -4325,6 +4326,7 @@ export function playAssetVideo({ id, key, slug }: {
status: 200;
data: Blob;
}>(`/assets/${encodeURIComponent(id)}/video/playback${QS.query(QS.explode({
edited,
key,
slug
}))}`, {
@@ -7164,7 +7166,6 @@ export enum JobName {
AssetDetectFaces = "AssetDetectFaces",
AssetDetectDuplicatesQueueAll = "AssetDetectDuplicatesQueueAll",
AssetDetectDuplicates = "AssetDetectDuplicates",
AssetEditThumbnailGeneration = "AssetEditThumbnailGeneration",
AssetEncodeVideoQueueAll = "AssetEncodeVideoQueueAll",
AssetEncodeVideo = "AssetEncodeVideo",
AssetEmptyTrash = "AssetEmptyTrash",
@@ -7173,6 +7174,7 @@ export enum JobName {
AssetFileMigration = "AssetFileMigration",
AssetGenerateThumbnailsQueueAll = "AssetGenerateThumbnailsQueueAll",
AssetGenerateThumbnails = "AssetGenerateThumbnails",
AssetProcessEdit = "AssetProcessEdit",
AuditLogCleanup = "AuditLogCleanup",
AuditTableCleanup = "AuditTableCleanup",
DatabaseBackup = "DatabaseBackup",
@@ -30,6 +30,7 @@ import {
AssetMediaOptionsDto,
AssetMediaReplaceDto,
AssetMediaSize,
AssetThumbnailOptionsDto,
CheckExistingAssetsDto,
UploadFieldName,
} from 'src/dtos/asset-media.dto';
@@ -154,7 +155,7 @@ export class AssetMediaController {
async viewAsset(
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Query() dto: AssetMediaOptionsDto,
@Query() dto: AssetThumbnailOptionsDto,
@Req() req: Request,
@Res() res: Response,
@Next() next: NextFunction,
@@ -197,9 +198,10 @@ export class AssetMediaController {
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Res() res: Response,
@Query() dto: AssetMediaOptionsDto,
@Next() next: NextFunction,
) {
await sendFile(res, next, () => this.service.playbackVideo(auth, id), this.logger);
await sendFile(res, next, () => this.service.playbackVideo(auth, id, dto), this.logger);
}
@Post('exist')
+2 -2
View File
@@ -120,8 +120,8 @@ export class StorageCore {
);
}
static getEncodedVideoPath(asset: ThumbnailPathEntity) {
return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${asset.id}.mp4`);
static getEncodedVideoPath(asset: ThumbnailPathEntity, isEdited: boolean = false) {
return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${asset.id}${isEdited ? '_edited' : ''}.mp4`);
}
static getAndroidMotionPath(asset: ThumbnailPathEntity, uuid: string) {
+1 -2
View File
@@ -346,8 +346,7 @@ export const columns = {
'asset.width',
'asset.height',
],
assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type', 'asset_file.isEdited'],
assetFilesForThumbnail: [
assetFiles: [
'asset_file.id',
'asset_file.path',
'asset_file.type',
+5 -3
View File
@@ -18,13 +18,15 @@ export enum AssetMediaSize {
}
export class AssetMediaOptionsDto {
@ValidateEnum({ enum: AssetMediaSize, name: 'AssetMediaSize', description: 'Asset media size', optional: true })
size?: AssetMediaSize;
@ValidateBoolean({ optional: true, description: 'Return edited asset if available', default: false })
edited?: boolean;
}
export class AssetThumbnailOptionsDto extends AssetMediaOptionsDto {
@ValidateEnum({ enum: AssetMediaSize, name: 'AssetMediaSize', description: 'Asset media size', optional: true })
size?: AssetMediaSize;
}
export enum UploadFieldName {
ASSET_DATA = 'assetData',
SIDECAR_DATA = 'sidecarData',
+1 -1
View File
@@ -588,7 +588,6 @@ export enum JobName {
AssetDetectFaces = 'AssetDetectFaces',
AssetDetectDuplicatesQueueAll = 'AssetDetectDuplicatesQueueAll',
AssetDetectDuplicates = 'AssetDetectDuplicates',
AssetEditThumbnailGeneration = 'AssetEditThumbnailGeneration',
AssetEncodeVideoQueueAll = 'AssetEncodeVideoQueueAll',
AssetEncodeVideo = 'AssetEncodeVideo',
AssetEmptyTrash = 'AssetEmptyTrash',
@@ -597,6 +596,7 @@ export enum JobName {
AssetFileMigration = 'AssetFileMigration',
AssetGenerateThumbnailsQueueAll = 'AssetGenerateThumbnailsQueueAll',
AssetGenerateThumbnails = 'AssetGenerateThumbnails',
AssetProcessEdit = 'AssetProcessEdit',
AuditLogCleanup = 'AuditLogCleanup',
AuditTableCleanup = 'AuditTableCleanup',
+103 -15
View File
@@ -30,7 +30,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -60,7 +62,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -184,7 +188,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -245,6 +251,55 @@ from
where
"asset"."id" = $4
-- AssetJobRepository.getForAssetEditProcessing
select
"asset"."id",
"asset"."visibility",
"asset"."originalFileName",
"asset"."originalPath",
"asset"."ownerId",
"asset"."thumbhash",
"asset"."type",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" in ($1, $2, $3, $4)
) as agg
) as "files",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits",
to_json("asset_exif") as "exifInfo"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."id" = $5
-- AssetJobRepository.getForMetadataExtraction
select
"asset"."id",
@@ -288,7 +343,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -314,7 +371,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -371,7 +430,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -411,7 +472,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -436,11 +499,12 @@ select
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
and "asset_file"."isEdited" = $2
) as "previewFile"
from
"asset"
where
"asset"."id" = $2
"asset"."id" = $3
-- AssetJobRepository.getForSyncAssets
select
@@ -474,7 +538,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -515,7 +581,8 @@ where
-- AssetJobRepository.streamForVideoConversion
select
"asset"."id"
"asset"."id",
"asset"."isEdited"
from
"asset"
where
@@ -546,17 +613,34 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files"
) as "files",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits"
from
"asset"
where
"asset"."id" = $1
"asset"."id" = $2
and "asset"."type" = 'VIDEO'
-- AssetJobRepository.streamForMetadataExtraction
@@ -598,7 +682,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -640,7 +726,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
+6 -3
View File
@@ -285,7 +285,9 @@ select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type",
"asset_file"."isEdited"
"asset_file"."isEdited",
"asset_file"."isProgressive",
"asset_file"."isTransparent"
from
"asset_file"
where
@@ -638,12 +640,13 @@ select
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
and "asset_file"."isEdited" = $2
) as "encodedVideoPath"
from
"asset"
where
"asset"."id" = $2
and "asset"."type" = $3
"asset"."id" = $3
and "asset"."type" = $4
-- AssetRepository.getForOcr
select
@@ -112,6 +112,26 @@ export class AssetJobRepository {
@GenerateSql({ params: [DummyValue.UUID] })
getForGenerateThumbnailJob(id: string) {
return this.db
.selectFrom('asset')
.select([
'asset.id',
'asset.visibility',
'asset.originalFileName',
'asset.originalPath',
'asset.ownerId',
'asset.thumbhash',
'asset.type',
])
.select((eb) => withFiles(eb, [AssetFileType.Thumbnail, AssetFileType.Preview, AssetFileType.FullSize]))
.select(withEdits)
.$call(withExifInner)
.where('asset.id', '=', id)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForAssetEditProcessing(id: string) {
return this.db
.selectFrom('asset')
.select([
@@ -124,13 +144,12 @@ export class AssetJobRepository {
'asset.type',
])
.select((eb) =>
jsonArrayFrom(
eb
.selectFrom('asset_file')
.select(columns.assetFilesForThumbnail)
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', 'in', [AssetFileType.Thumbnail, AssetFileType.Preview, AssetFileType.FullSize]),
).as('files'),
withFiles(eb, [
AssetFileType.Thumbnail,
AssetFileType.Preview,
AssetFileType.FullSize,
AssetFileType.EncodedVideo,
]),
)
.select(withEdits)
.$call(withExifInner)
@@ -308,7 +327,7 @@ export class AssetJobRepository {
streamForVideoConversion(force?: boolean) {
return this.db
.selectFrom('asset')
.select(['asset.id'])
.select(['asset.id', 'asset.isEdited'])
.where('asset.type', '=', sql.lit(AssetType.Video))
.$if(!force, (qb) =>
qb
@@ -334,7 +353,8 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.select(['asset.id', 'asset.ownerId', 'asset.originalPath'])
.select(withFiles)
.select((eb) => withFiles(eb, AssetFileType.EncodedVideo))
.select(withEdits)
.where('asset.id', '=', id)
.where('asset.type', '=', sql.lit(AssetType.Video))
.executeTakeFirst();
+3 -3
View File
@@ -1149,12 +1149,12 @@ export class AssetRepository {
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.UUID] })
async getForVideo(id: string) {
@GenerateSql({ params: [DummyValue.UUID, true] })
async getForVideo(id: string, isEdited: boolean) {
return this.db
.selectFrom('asset')
.select(['asset.originalPath'])
.select((eb) => withFilePath(eb, AssetFileType.EncodedVideo).as('encodedVideoPath'))
.select((eb) => withFilePath(eb, AssetFileType.EncodedVideo, isEdited).as('encodedVideoPath'))
.where('asset.id', '=', id)
.where('asset.type', '=', AssetType.Video)
.executeTakeFirst();
@@ -695,7 +695,9 @@ describe(AssetMediaService.name, () => {
describe('playbackVideo', () => {
it('should require asset.view permissions', async () => {
await expect(sut.playbackVideo(authStub.admin, 'id')).rejects.toBeInstanceOf(BadRequestException);
await expect(sut.playbackVideo(authStub.admin, 'id', { edited: true })).rejects.toBeInstanceOf(
BadRequestException,
);
expect(mocks.access.asset.checkOwnerAccess).toHaveBeenCalledWith(userStub.admin.id, new Set(['id']), undefined);
expect(mocks.access.asset.checkAlbumAccess).toHaveBeenCalledWith(userStub.admin.id, new Set(['id']));
@@ -706,7 +708,9 @@ describe(AssetMediaService.name, () => {
const asset = AssetFactory.create();
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset.id]));
await expect(sut.playbackVideo(authStub.admin, asset.id)).rejects.toBeInstanceOf(NotFoundException);
await expect(sut.playbackVideo(authStub.admin, asset.id, { edited: true })).rejects.toBeInstanceOf(
NotFoundException,
);
});
it('should return the encoded video path if available', async () => {
@@ -719,7 +723,7 @@ describe(AssetMediaService.name, () => {
encodedVideoPath: asset.files[0].path,
});
await expect(sut.playbackVideo(authStub.admin, asset.id)).resolves.toEqual(
await expect(sut.playbackVideo(authStub.admin, asset.id, { edited: true })).resolves.toEqual(
new ImmichFileResponse({
path: '/path/to/encoded/video.mp4',
cacheControl: CacheControl.PrivateWithCache,
@@ -736,7 +740,7 @@ describe(AssetMediaService.name, () => {
encodedVideoPath: null,
});
await expect(sut.playbackVideo(authStub.admin, asset.id)).resolves.toEqual(
await expect(sut.playbackVideo(authStub.admin, asset.id, { edited: true })).resolves.toEqual(
new ImmichFileResponse({
path: asset.originalPath,
cacheControl: CacheControl.PrivateWithCache,
+4 -3
View File
@@ -17,6 +17,7 @@ import {
AssetMediaOptionsDto,
AssetMediaReplaceDto,
AssetMediaSize,
AssetThumbnailOptionsDto,
CheckExistingAssetsDto,
UploadFieldName,
} from 'src/dtos/asset-media.dto';
@@ -222,7 +223,7 @@ export class AssetMediaService extends BaseService {
async viewThumbnail(
auth: AuthDto,
id: string,
dto: AssetMediaOptionsDto,
dto: AssetThumbnailOptionsDto,
): Promise<ImmichFileResponse | AssetMediaRedirectResponse> {
await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] });
@@ -266,10 +267,10 @@ export class AssetMediaService extends BaseService {
});
}
async playbackVideo(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
async playbackVideo(auth: AuthDto, id: string, dto: AssetMediaOptionsDto): Promise<ImmichFileResponse> {
await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] });
const asset = await this.assetRepository.getForVideo(id);
const asset = await this.assetRepository.getForVideo(id, dto.edited ?? false);
if (!asset) {
throw new NotFoundException('Asset not found or asset is not a video');
+32 -6
View File
@@ -47,6 +47,7 @@ import {
} from 'src/utils/asset.util';
import { updateLockedColumns } from 'src/utils/database';
import { extractTimeZone } from 'src/utils/date';
import { scaleEdits } from 'src/utils/editor';
import { transformOcrBoundingBox } from 'src/utils/transform';
@Injectable()
@@ -565,10 +566,6 @@ export class AssetService extends BaseService {
throw new BadRequestException('Only images can be edited');
}
if (asset.livePhotoVideoId) {
throw new BadRequestException('Editing live photos is not supported');
}
if (isPanorama(asset)) {
throw new BadRequestException('Editing panorama images is not supported');
}
@@ -609,7 +606,28 @@ export class AssetService extends BaseService {
}
const newEdits = await this.assetEditRepository.replaceAll(id, edits);
await this.jobRepository.queue({ name: JobName.AssetEditThumbnailGeneration, data: { id } });
await this.jobRepository.queue({ name: JobName.AssetProcessEdit, data: { id } });
if (asset.livePhotoVideoId) {
const liveAsset = await this.assetRepository.getForEdit(asset.livePhotoVideoId);
if (!liveAsset) {
throw new BadRequestException('Live photo video not found');
}
const { width: liveWidth, height: liveHeight } = getDimensions(liveAsset);
const scaledEdits = scaleEdits(
edits,
{ width: liveWidth, height: liveHeight },
{ width: assetWidth, height: assetHeight },
);
await this.assetEditRepository.replaceAll(asset.livePhotoVideoId, scaledEdits);
await this.jobRepository.queue({
name: JobName.AssetProcessEdit,
data: { id: asset.livePhotoVideoId },
});
}
// Return the asset and its applied edits
return {
@@ -627,6 +645,14 @@ export class AssetService extends BaseService {
}
await this.assetEditRepository.replaceAll(id, []);
await this.jobRepository.queue({ name: JobName.AssetEditThumbnailGeneration, data: { id } });
await this.jobRepository.queue({ name: JobName.AssetProcessEdit, data: { id } });
if (asset.livePhotoVideoId) {
await this.assetEditRepository.replaceAll(asset.livePhotoVideoId, []);
await this.jobRepository.queue({
name: JobName.AssetProcessEdit,
data: { id: asset.livePhotoVideoId },
});
}
}
}
+1 -2
View File
@@ -95,8 +95,7 @@ export class JobService extends BaseService {
}
break;
}
case JobName.AssetEditThumbnailGeneration: {
case JobName.AssetProcessEdit: {
const asset = await this.assetRepository.getById(item.data.id);
const edits = await this.assetEditRepository.getWithSyncInfo(item.data.id);
+323 -29
View File
@@ -221,7 +221,7 @@ describe(MediaService.name, () => {
expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith({ force: false, fullsizeEnabled: false });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.AssetEditThumbnailGeneration,
name: JobName.AssetProcessEdit,
data: { id: asset.id },
},
]);
@@ -273,7 +273,7 @@ describe(MediaService.name, () => {
data: { id: asset.id },
},
{
name: JobName.AssetEditThumbnailGeneration,
name: JobName.AssetProcessEdit,
data: { id: asset.id },
},
]);
@@ -1321,9 +1321,101 @@ describe(MediaService.name, () => {
expect.stringContaining('fullsize.jpeg'),
);
});
it('should generate edited video thumbnails when asset has edits', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/video.mp4' })
.exif()
.edit({ action: AssetEditAction.Crop, parameters: { height: 500, width: 500, x: 0, y: 0 } })
.build();
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
await sut.handleGenerateThumbnails({ id: asset.id });
// should generate both original and edited thumbnails (2 original + 2 edited transcodes)
expect(mocks.media.transcode).toHaveBeenCalledTimes(4);
// should upsert files for both original and edited
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ type: AssetFileType.Preview, isEdited: false }),
expect.objectContaining({ type: AssetFileType.Thumbnail, isEdited: false }),
expect.objectContaining({ type: AssetFileType.Preview, isEdited: true }),
expect.objectContaining({ type: AssetFileType.Thumbnail, isEdited: true }),
]),
);
});
it('should not generate edited video thumbnails when asset has no edits', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/video.mp4' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.media.generateThumbhash.mockResolvedValue(Buffer.from('thumbhash'));
await sut.handleGenerateThumbnails({ id: asset.id });
// should only generate original thumbnails (2 transcodes for preview + thumbnail)
expect(mocks.media.transcode).toHaveBeenCalledTimes(2);
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.not.arrayContaining([expect.objectContaining({ isEdited: true })]),
);
});
it('should use edited thumbhash when asset has edits', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/video.mp4' })
.exif()
.edit({ action: AssetEditAction.Crop })
.build();
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
const originalThumbhash = Buffer.from('original thumbhash');
const editedThumbhash = Buffer.from('edited thumbhash');
mocks.media.generateThumbhash.mockResolvedValueOnce(originalThumbhash).mockResolvedValueOnce(editedThumbhash);
await sut.handleGenerateThumbnails({ id: asset.id });
// should use the edited thumbhash (second call) for the asset update
expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ thumbhash: editedThumbhash }));
});
it('should generate edited image thumbnails with edits applied', async () => {
const asset = AssetFactory.from()
.exif()
.edit({ action: AssetEditAction.Crop, parameters: { height: 500, width: 500, x: 100, y: 100 } })
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
await sut.handleGenerateThumbnails({ id: asset.id });
// should generate original (2) + edited (3 with fullsize) thumbnails
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({
edits: [
expect.objectContaining({
action: 'crop',
parameters: { height: 500, width: 500, x: 100, y: 100 },
}),
],
}),
expect.stringContaining('edited'),
);
// should upsert both original and edited files
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ isEdited: false }),
expect.objectContaining({ isEdited: true }),
]),
);
});
});
describe('handleAssetEditThumbnailGeneration', () => {
describe('handleAssetEditProcessing', () => {
let rawInfo: RawImageInfo;
beforeEach(() => {
@@ -1340,14 +1432,6 @@ describe(MediaService.name, () => {
mocks.media.getImageMetadata.mockResolvedValue({ width: 100, height: 100, isTransparent: false });
});
it('should skip videos', async () => {
const asset = AssetFactory.from({ type: AssetType.Video }).exif().build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
await expect(sut.handleAssetEditThumbnailGeneration({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
});
it('should upsert 3 edited files for edit jobs', async () => {
const asset = AssetFactory.from()
.exif()
@@ -1359,13 +1443,13 @@ describe(MediaService.name, () => {
])
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
@@ -1381,11 +1465,11 @@ describe(MediaService.name, () => {
.exif()
.edit({ action: AssetEditAction.Crop, parameters: { height: 1152, width: 1512, x: 216, y: 1512 } })
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({
@@ -1409,9 +1493,9 @@ describe(MediaService.name, () => {
{ type: AssetFileType.FullSize, path: 'edited3.jpg', isEdited: true },
])
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
const status = await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
const status = await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
@@ -1427,11 +1511,11 @@ describe(MediaService.name, () => {
it('should generate all 3 edited files if an asset has edits', async () => {
const asset = AssetFactory.from().exif().edit().build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
@@ -1453,26 +1537,147 @@ describe(MediaService.name, () => {
it('should generate the original thumbhash if no edits exist', async () => {
const asset = AssetFactory.from().exif().build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.media.generateThumbhash.mockResolvedValue(factory.buffer());
await sut.handleAssetEditThumbnailGeneration({ id: asset.id, source: 'upload' });
await sut.handleAssetEditProcessing({ id: asset.id, source: 'upload' });
expect(mocks.media.generateThumbhash).toHaveBeenCalled();
});
it('should apply thumbhash if job source is edit and edits exist', async () => {
const asset = AssetFactory.from().exif().edit().build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
const thumbhashBuffer = factory.buffer();
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ thumbhash: thumbhashBuffer }));
});
it('should return failed if asset not found', async () => {
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(undefined as never);
const status = await sut.handleAssetEditProcessing({ id: 'non-existent' });
expect(status).toBe(JobStatus.Failed);
});
it('should transcode edited video and generate thumbnails', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/video.mp4' })
.exif()
.edit({ action: AssetEditAction.Crop, parameters: { height: 500, width: 500, x: 0, y: 0 } })
.files([
{ type: AssetFileType.Preview, isEdited: false },
{ type: AssetFileType.EncodedVideo, isEdited: true },
{ type: AssetFileType.Preview, isEdited: true },
{ type: AssetFileType.Thumbnail, isEdited: true },
])
.build();
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
await sut.handleAssetEditProcessing({ id: asset.id });
// should transcode the video with hw accel disabled
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/video.mp4',
expect.stringContaining('edited'),
expect.objectContaining({
inputOptions: expect.any(Array),
outputOptions: expect.any(Array),
}),
);
// should generate edited thumbnails (preview + thumbnail via transcode)
expect(mocks.media.transcode).toHaveBeenCalledTimes(3); // 1 video + 2 thumbnails
// should update thumbhash
expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ thumbhash: thumbhashBuffer }));
});
it('should clean up edited video files when asset has no edits', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/video.mp4' })
.exif()
.files([
{ type: AssetFileType.EncodedVideo, path: 'edited_video.mp4', isEdited: true },
{ type: AssetFileType.Preview, path: 'edited_preview.jpg', isEdited: true },
{ type: AssetFileType.Thumbnail, path: 'edited_thumbnail.webp', isEdited: true },
])
.build();
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.media.generateThumbhash.mockResolvedValue(factory.buffer());
await sut.handleAssetEditProcessing({ id: asset.id });
// should not transcode since there are no edits
expect(mocks.media.transcode).not.toHaveBeenCalled();
// should delete old edited files
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: {
files: expect.arrayContaining(['edited_video.mp4']),
},
});
});
it('should skip thumbnail generation for hidden video assets (live photo video portions)', async () => {
const asset = AssetFactory.from({
type: AssetType.Video,
originalPath: '/original/video.mp4',
visibility: AssetVisibility.Hidden,
})
.exif()
.edit({ action: AssetEditAction.Crop })
.files([{ type: AssetFileType.Preview, isEdited: false }])
.build();
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.media.transcode).toHaveBeenCalledTimes(1);
expect(mocks.media.generateThumbhash).not.toHaveBeenCalled();
});
it('should use original thumbhash when video has no edits but is visible', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/video.mp4' })
.exif()
.files([{ type: AssetFileType.Preview, path: '/thumbs/preview.jpg', isEdited: false }])
.build();
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
const thumbhashBuffer = factory.buffer();
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.media.generateThumbhash).toHaveBeenCalledWith('/thumbs/preview.jpg', expect.any(Object));
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({ id: asset.id, thumbhash: thumbhashBuffer }),
);
});
it('should update dimensions from transcoded video edit', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/video.mp4' })
.exif()
.edit({ action: AssetEditAction.Crop, parameters: { height: 500, width: 800, x: 100, y: 100 } })
.files([{ type: AssetFileType.Preview, isEdited: false }])
.build();
mocks.assetJob.getForAssetEditProcessing.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.media.generateThumbhash.mockResolvedValue(factory.buffer());
await sut.handleAssetEditProcessing({ id: asset.id });
// should update asset dimensions
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({ id: asset.id, width: 800, height: 500 }),
);
});
});
describe('handleGeneratePersonThumbnail', () => {
@@ -1974,7 +2179,7 @@ describe(MediaService.name, () => {
mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: 'foo' } } as never as SystemConfig);
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
await expect(sut.handleVideoConversion({ id: 'video-id' })).resolves.toBe(JobStatus.Failed);
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -2228,7 +2433,7 @@ describe(MediaService.name, () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: 'invalid' as any } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
await expect(sut.handleVideoConversion({ id: 'video-id' })).resolves.toBe(JobStatus.Failed);
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -2626,14 +2831,14 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, targetVideoCodec: VideoCodec.Vp9 },
});
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
await expect(sut.handleVideoConversion({ id: 'video-id' })).resolves.toBe(JobStatus.Failed);
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should fail if hwaccel option is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: 'invalid' as any } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
await expect(sut.handleVideoConversion({ id: 'video-id' })).resolves.toBe(JobStatus.Failed);
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -2920,7 +3125,7 @@ describe(MediaService.name, () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
await expect(sut.handleVideoConversion({ id: 'video-id' })).resolves.toBe(JobStatus.Failed);
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -3330,7 +3535,7 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: [], mali: true };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
await expect(sut.handleVideoConversion({ id: 'video-id' })).resolves.toBe(JobStatus.Failed);
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -3605,6 +3810,95 @@ describe(MediaService.name, () => {
}),
);
});
it('should also transcode edited version when asset has edits', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' })
.edit({ action: AssetEditAction.Crop, parameters: { height: 500, width: 500, x: 0, y: 0 } })
.build();
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
await sut.handleVideoConversion({ id: asset.id });
// should be called for both original and edited
expect(mocks.media.probe).toHaveBeenCalledTimes(2);
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
expect.stringContaining('edited'),
expect.objectContaining({
inputOptions: expect.any(Array),
outputOptions: expect.any(Array),
}),
);
});
it('should not transcode edited version when asset has no edits', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).build();
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
await sut.handleVideoConversion({ id: asset.id });
// probe is called for both original and edit attempt, but only original is transcoded
expect(mocks.media.transcode).toHaveBeenCalledTimes(1);
expect(mocks.asset.upsertFiles).not.toHaveBeenCalledWith(
expect.arrayContaining([expect.objectContaining({ isEdited: true })]),
);
});
it('should disable hardware acceleration for edited video transcoding', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' })
.edit({ action: AssetEditAction.Crop, parameters: { height: 500, width: 500, x: 0, y: 0 } })
.build();
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, transcode: TranscodePolicy.All },
});
await sut.handleVideoConversion({ id: asset.id });
// the edited transcode call should NOT have hw accel options
const transcodeCalls = mocks.media.transcode.mock.calls;
const editedCall = transcodeCalls.find((call) => (call[1] as string).includes('edited'));
expect(editedCall).toBeDefined();
// hw accel typically adds device-specific input options; for edited, should be software only
expect(editedCall![2].inputOptions).not.toEqual(expect.arrayContaining([expect.stringContaining('qsv')]));
});
it('should upsert both original and edited encoded video files', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' })
.edit({ action: AssetEditAction.Crop, parameters: { height: 500, width: 500, x: 0, y: 0 } })
.build();
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
await sut.handleVideoConversion({ id: asset.id });
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ type: AssetFileType.EncodedVideo, isEdited: false }),
expect.objectContaining({ type: AssetFileType.EncodedVideo, isEdited: true }),
]),
);
});
it('should clean up edited encoded video when edits are removed', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' })
.file({ type: AssetFileType.EncodedVideo, path: '/encoded/edited_video.mp4', isEdited: true })
.build();
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
await sut.handleVideoConversion({ id: asset.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: {
files: expect.arrayContaining(['/encoded/edited_video.mp4']),
},
});
});
});
describe('isSRGB', () => {
+241 -72
View File
@@ -4,7 +4,7 @@ import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { ImagePathOptions, StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core';
import { AssetFile } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import { AssetEditAction, CropParameters } from 'src/dtos/editing.dto';
import { AssetEditAction, AssetEditActionItem, CropParameters } from 'src/dtos/editing.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
AssetFileType,
@@ -39,7 +39,7 @@ import {
VideoInterfaces,
VideoStreamInfo,
} from 'src/types';
import { getAssetFile, getDimensions } from 'src/utils/asset.util';
import { getDimensions } from 'src/utils/asset.util';
import { checkFaceVisibility, checkOcrVisibility } from 'src/utils/editor';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
@@ -56,6 +56,13 @@ interface UpsertFileOptions {
}
type ThumbnailAsset = NonNullable<Awaited<ReturnType<AssetJobRepository['getForGenerateThumbnailJob']>>>;
type VideoConversionAsset = NonNullable<Awaited<ReturnType<AssetJobRepository['getForVideoConversion']>>>;
type ThumbnailGenerationResult = {
files: UpsertFileOptions[];
thumbhash: Buffer;
fullsizeDimensions: ImageDimensions;
};
@Injectable()
export class MediaService extends BaseService {
@@ -84,7 +91,7 @@ export class MediaService extends BaseService {
}
if (asset.isEdited) {
jobs.push({ name: JobName.AssetEditThumbnailGeneration, data: { id: asset.id } });
jobs.push({ name: JobName.AssetProcessEdit, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
@@ -168,9 +175,9 @@ export class MediaService extends BaseService {
return JobStatus.Success;
}
@OnJob({ name: JobName.AssetEditThumbnailGeneration, queue: QueueName.Editor })
async handleAssetEditThumbnailGeneration({ id }: JobOf<JobName.AssetEditThumbnailGeneration>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id);
@OnJob({ name: JobName.AssetProcessEdit, queue: QueueName.Editor })
async handleAssetEditProcessing({ id }: JobOf<JobName.AssetProcessEdit>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForAssetEditProcessing(id);
const config = await this.getConfig({ withCache: true });
if (!asset) {
@@ -178,7 +185,25 @@ export class MediaService extends BaseService {
return JobStatus.Failed;
}
const generated = await this.generateEditedThumbnails(asset, config);
switch (asset.type) {
case AssetType.Image: {
await this.handleImageEdit(asset, config);
break;
}
case AssetType.Video: {
await this.handleVideoEdit(asset, config);
break;
}
default: {
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
}
}
return JobStatus.Success;
}
private async handleImageEdit(asset: ThumbnailAsset, config: SystemConfig) {
const generated = await this.generateEditedImageThumbnails(asset, config);
await this.syncFiles(
asset.files.filter((file) => file.isEdited),
generated?.files ?? [],
@@ -203,8 +228,51 @@ export class MediaService extends BaseService {
const fullsizeDimensions = generated?.fullsizeDimensions ?? getDimensions(asset.exifInfo!);
await this.assetRepository.update({ id: asset.id, ...fullsizeDimensions });
}
return JobStatus.Success;
private async handleVideoEdit(asset: ThumbnailAsset, config: SystemConfig) {
// transcode edited video
const generatedVideo = asset.edits.length > 0 ? await this.transcodeVideo(asset, config.ffmpeg, true) : undefined;
await this.syncFiles(
asset.files.filter((file) => file.isEdited && file.type === AssetFileType.EncodedVideo),
generatedVideo ? [generatedVideo.file] : [],
);
// update asset dimensions
const newDimensions = generatedVideo?.dimensions ?? getDimensions(asset.exifInfo!);
await this.assetRepository.update({ id: asset.id, ...newDimensions });
    // if the asset is hidden, we don't need to update the thumbhash or thumbnails
if (asset.visibility === AssetVisibility.Hidden) {
return;
}
const editedThumbnails = await this.generateEditedVideoThumbnails(asset, config);
await this.syncFiles(
asset.files.filter((file) => file.isEdited && file.type !== AssetFileType.EncodedVideo),
editedThumbnails?.files ?? [],
);
let thumbhash: Buffer | undefined = editedThumbnails?.thumbhash;
if (!thumbhash) {
const previewFile = asset.files.find((file) => file.type === AssetFileType.Preview && !file.isEdited);
if (!previewFile) {
this.logger.warn(`Failed to generate thumbhash for asset ${asset.id}: missing preview file`);
return;
}
thumbhash = await this.mediaRepository.generateThumbhash(previewFile.path, {
colorspace: config.image.colorspace,
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
});
}
// update asset table info
if (!asset.thumbhash || Buffer.compare(asset.thumbhash, thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash });
}
}
@OnJob({ name: JobName.AssetGenerateThumbnails, queue: QueueName.ThumbnailGeneration })
@@ -217,31 +285,34 @@ export class MediaService extends BaseService {
return JobStatus.Failed;
}
let generated: ThumbnailGenerationResult;
let generatedEdited: ThumbnailGenerationResult | undefined;
if (asset.visibility === AssetVisibility.Hidden) {
this.logger.verbose(`Thumbnail generation skipped for asset ${id}: not visible`);
return JobStatus.Skipped;
}
let generated: Awaited<ReturnType<MediaService['generateImageThumbnails']>>;
if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) {
this.logger.verbose(`Thumbnail generation for video ${id} ${asset.originalPath}`);
generated = await this.generateVideoThumbnails(asset, config);
generatedEdited = await this.generateEditedVideoThumbnails(asset, config);
} else if (asset.type === AssetType.Image) {
this.logger.verbose(`Thumbnail generation for image ${id} ${asset.originalPath}`);
generated = await this.generateImageThumbnails(asset, config);
generatedEdited = await this.generateEditedImageThumbnails(asset, config);
} else {
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
return JobStatus.Skipped;
}
const editedGenerated = await this.generateEditedThumbnails(asset, config);
if (editedGenerated) {
generated.files.push(...editedGenerated.files);
if (generatedEdited) {
generated.files.push(...generatedEdited.files);
}
await this.syncFiles(asset.files, generated.files);
const thumbhash = editedGenerated?.thumbhash || generated.thumbhash;
const thumbhash = generatedEdited?.thumbhash || generated.thumbhash;
if (!asset.thumbhash || Buffer.compare(asset.thumbhash, thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash });
}
@@ -507,20 +578,21 @@ export class MediaService extends BaseService {
}
private async generateVideoThumbnails(
asset: ThumbnailPathEntity & { originalPath: string },
asset: ThumbnailPathEntity & { originalPath: string; edits: AssetEditActionItem[] },
{ ffmpeg, image }: SystemConfig,
useEdits: boolean = false,
) {
const previewFile = this.getImageFile(asset, {
fileType: AssetFileType.Preview,
format: image.preview.format,
isEdited: false,
isEdited: useEdits,
isProgressive: false,
isTransparent: false,
});
const thumbnailFile = this.getImageFile(asset, {
fileType: AssetFileType.Thumbnail,
format: image.thumbnail.format,
isEdited: false,
isEdited: useEdits,
isProgressive: false,
isTransparent: false,
});
@@ -533,14 +605,27 @@ export class MediaService extends BaseService {
}
const mainAudioStream = this.getMainStream(audioStreams);
let edits: AssetEditActionItem[] | undefined;
if (useEdits) {
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
edits = asset.edits;
}
const previewConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.preview.size.toString() });
const thumbnailConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, mainVideoStream, mainAudioStream, format);
const previewOptions = previewConfig.getCommand(
TranscodeTarget.Video,
mainVideoStream,
mainAudioStream,
format,
edits,
);
const thumbnailOptions = thumbnailConfig.getCommand(
TranscodeTarget.Video,
mainVideoStream,
mainAudioStream,
format,
edits,
);
await this.mediaRepository.transcode(asset.originalPath, previewFile.path, previewOptions);
@@ -551,73 +636,69 @@ export class MediaService extends BaseService {
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
});
let fullsizeDimensions = { width: mainVideoStream.width, height: mainVideoStream.height };
if (useEdits) {
fullsizeDimensions = getOutputDimensions(asset.edits, fullsizeDimensions);
}
return {
files: [previewFile, thumbnailFile],
thumbhash,
fullsizeDimensions: { width: mainVideoStream.width, height: mainVideoStream.height },
fullsizeDimensions,
};
}
@OnJob({ name: JobName.AssetEncodeVideoQueueAll, queue: QueueName.VideoConversion })
async handleQueueVideoConversion(job: JobOf<JobName.AssetEncodeVideoQueueAll>): Promise<JobStatus> {
const { force } = job;
let queue: { name: JobName.AssetEncodeVideo; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
queue.push({ name: JobName.AssetEncodeVideo, data: { id: asset.id } });
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
queue = [];
}
}
await this.jobRepository.queueAll(queue);
return JobStatus.Success;
}
@OnJob({ name: JobName.AssetEncodeVideo, queue: QueueName.VideoConversion })
async handleVideoConversion({ id }: JobOf<JobName.AssetEncodeVideo>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.Failed;
}
private async transcodeVideo(
asset: VideoConversionAsset,
ffmpeg: SystemConfigFFmpegDto,
useEdits: boolean = false,
): Promise<{ file: UpsertFileOptions; dimensions: { width: number; height: number } } | undefined> {
const input = asset.originalPath;
const output = StorageCore.getEncodedVideoPath(asset);
const output = StorageCore.getEncodedVideoPath(asset, useEdits);
this.storageCore.ensureFolders(output);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs
countFrames: this.logger.isLevelEnabled(LogLevel.Debug),
});
const videoStream = this.getMainStream(videoStreams);
const audioStream = this.getMainStream(audioStreams);
if (!videoStream || !format.formatName) {
return JobStatus.Failed;
return undefined;
}
if (!videoStream.height || !videoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
return JobStatus.Failed;
return undefined;
}
let { ffmpeg } = await this.getConfig({ withCache: true });
const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
if (target === TranscodeTarget.None && !this.isRemuxRequired(ffmpeg, format)) {
const encodedVideo = getAssetFile(asset.files, AssetFileType.EncodedVideo, { isEdited: false });
if (encodedVideo) {
this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: [encodedVideo.path] } });
await this.assetRepository.deleteFiles([encodedVideo]);
} else {
this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
let target: TranscodeTarget;
let edits: AssetEditActionItem[] | undefined;
if (useEdits) {
if (asset.edits.length === 0) {
this.logger.verbose(`Asset ${asset.id} has no edits, skipping edited version transcoding`);
return undefined;
}
return JobStatus.Skipped;
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
target = TranscodeTarget.All;
edits = asset.edits;
} else {
target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
if (target === TranscodeTarget.None && !this.isRemuxRequired(ffmpeg, format)) {
this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
return undefined;
}
}
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(
target,
videoStream,
audioStream,
useEdits ? undefined : format,
edits,
);
if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
} else {
@@ -631,7 +712,7 @@ export class MediaService extends BaseService {
} catch (error: any) {
this.logger.error(`Error occurred during transcoding: ${error.message}`);
if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
return JobStatus.Failed;
throw error;
}
let partialFallbackSuccess = false;
@@ -639,7 +720,13 @@ export class MediaService extends BaseService {
try {
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and software decoding`);
ffmpeg = { ...ffmpeg, accelDecode: false };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(
target,
videoStream,
audioStream,
format,
edits,
);
await this.mediaRepository.transcode(input, output, command);
partialFallbackSuccess = true;
} catch (error: any) {
@@ -650,19 +737,87 @@ export class MediaService extends BaseService {
if (!partialFallbackSuccess) {
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(
target,
videoStream,
audioStream,
format,
edits,
);
await this.mediaRepository.transcode(input, output, command);
}
}
this.logger.log(`Successfully encoded ${asset.id}`);
await this.assetRepository.upsertFile({
assetId: asset.id,
type: AssetFileType.EncodedVideo,
path: output,
isEdited: false,
});
let finalDimensions = { width: videoStream.width, height: videoStream.height };
if (useEdits) {
finalDimensions = getOutputDimensions(asset.edits, finalDimensions);
}
return {
dimensions: finalDimensions,
file: {
assetId: asset.id,
type: AssetFileType.EncodedVideo,
path: output,
isEdited: useEdits,
isProgressive: false,
isTransparent: false,
},
};
}
@OnJob({ name: JobName.AssetEncodeVideoQueueAll, queue: QueueName.VideoConversion })
async handleQueueVideoConversion(job: JobOf<JobName.AssetEncodeVideoQueueAll>): Promise<JobStatus> {
const { force } = job;
let jobs: JobItem[] = [];
for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
if (force || !asset.isEdited) {
jobs.push({ name: JobName.AssetEncodeVideo, data: { id: asset.id } });
}
if (asset.isEdited) {
jobs.push({ name: JobName.AssetProcessEdit, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
}
await this.jobRepository.queueAll(jobs);
return JobStatus.Success;
}
@OnJob({ name: JobName.AssetEncodeVideo, queue: QueueName.VideoConversion })
async handleVideoConversion({ id }: JobOf<JobName.AssetEncodeVideo>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.Failed;
}
const { ffmpeg } = await this.getConfig({ withCache: true });
const files: UpsertFileOptions[] = [];
try {
const generated = await this.transcodeVideo(asset, ffmpeg);
if (generated?.file) {
files.push(generated.file);
}
const editedGenerated = await this.transcodeVideo(asset, ffmpeg, true);
if (editedGenerated) {
files.push(editedGenerated.file);
}
} catch {
return JobStatus.Failed;
}
await this.syncFiles(asset.files, files);
return JobStatus.Success;
}
@@ -874,13 +1029,29 @@ export class MediaService extends BaseService {
}
}
private async generateEditedThumbnails(asset: ThumbnailAsset, config: SystemConfig) {
private async generateEditedImageThumbnails(asset: ThumbnailAsset, config: SystemConfig) {
if (asset.type !== AssetType.Image || (asset.files.length === 0 && asset.edits.length === 0)) {
return;
}
const generated = asset.edits.length > 0 ? await this.generateImageThumbnails(asset, config, true) : undefined;
await this.updateMLVisibilities(asset);
return generated;
}
private async generateEditedVideoThumbnails(asset: ThumbnailAsset, config: SystemConfig) {
if (asset.type !== AssetType.Video || (asset.files.length === 0 && asset.edits.length === 0)) {
return;
}
const generated = asset.edits.length > 0 ? await this.generateVideoThumbnails(asset, config, true) : undefined;
await this.updateMLVisibilities(asset);
return generated;
}
private async updateMLVisibilities(asset: ThumbnailAsset) {
const crop = asset.edits.find((e) => e.action === AssetEditAction.Crop);
const cropBox = crop
? {
@@ -900,8 +1071,6 @@ export class MediaService extends BaseService {
const ocrStatuses = checkOcrVisibility(ocrData, originalDimensions, cropBox);
await this.ocrRepository.updateOcrVisibilities(asset.id, ocrStatuses.visible, ocrStatuses.hidden);
return generated;
}
private warnOnTransparencyLoss(isTransparent: boolean, format: ImageFormat, assetId: string) {
+3 -1
View File
@@ -130,6 +130,7 @@ export interface TranscodeCommand {
progress: {
frameCount: number;
percentInterval: number;
callback: (percent: number, frame: number) => void;
};
}
@@ -151,6 +152,7 @@ export interface VideoCodecSWConfig {
videoStream: VideoStreamInfo,
audioStream: AudioStreamInfo,
format?: VideoFormat,
edits?: AssetEditActionItem[],
): TranscodeCommand;
}
@@ -389,7 +391,7 @@ export type JobItem =
| { name: JobName.WorkflowRun; data: IWorkflowJob }
// Editor
| { name: JobName.AssetEditThumbnailGeneration; data: IEntityJob };
| { name: JobName.AssetProcessEdit; data: IEntityJob };
export type VectorExtension = (typeof VECTOR_EXTENSIONS)[number];
+6 -4
View File
@@ -116,22 +116,24 @@ export function withFaces(eb: ExpressionBuilder<DB, 'asset'>, withHidden?: boole
).as('faces');
}
export function withFiles(eb: ExpressionBuilder<DB, 'asset'>, type?: AssetFileType) {
export function withFiles(eb: ExpressionBuilder<DB, 'asset'>, type?: AssetFileType | AssetFileType[]) {
return jsonArrayFrom(
eb
.selectFrom('asset_file')
.select(columns.assetFiles)
.whereRef('asset_file.assetId', '=', 'asset.id')
.$if(!!type, (qb) => qb.where('asset_file.type', '=', type!)),
.$if(!!type && typeof type === 'string', (qb) => qb.where('asset_file.type', '=', type!))
.$if(!!type && Array.isArray(type), (qb) => qb.where('asset_file.type', 'in', type as AssetFileType[])),
).as('files');
}
export function withFilePath(eb: ExpressionBuilder<DB, 'asset'>, type: AssetFileType) {
export function withFilePath(eb: ExpressionBuilder<DB, 'asset'>, type: AssetFileType, isEdited = false) {
return eb
.selectFrom('asset_file')
.select('asset_file.path')
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', '=', type);
.where('asset_file.type', '=', type)
.where('asset_file.isEdited', '=', isEdited);
}
export function withFacesAndPeople(
+27
View File
@@ -1,4 +1,5 @@
import { AssetFace } from 'src/database';
import { AssetEditActionItem, CropParameters } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { ImageDimensions } from 'src/types';
@@ -31,6 +32,15 @@ const scale = (box: BoundingBox, target: ImageDimensions, source?: ImageDimensio
};
};
const scaleCrop = (crop: CropParameters, target: ImageDimensions, source: ImageDimensions) => {
return {
width: Math.round((crop.width / source.width) * target.width),
height: Math.round((crop.height / source.height) * target.height),
x: Math.round((crop.x / source.width) * target.width),
y: Math.round((crop.y / source.height) * target.height),
};
};
export const checkFaceVisibility = (
faces: AssetFace[],
originalAssetDimensions: ImageDimensions,
@@ -105,3 +115,20 @@ export const checkOcrVisibility = (
hidden: status.filter((s) => !s.isVisible).map((s) => s.ocr),
};
};
export const scaleEdits = (
edits: AssetEditActionItem[],
target: ImageDimensions,
source: ImageDimensions,
): AssetEditActionItem[] => {
return edits.map((edit) => {
if (edit.action === 'crop') {
return {
...edit,
parameters: scaleCrop(edit.parameters as CropParameters, target, source),
} as AssetEditActionItem;
}
return edit;
});
};
+95 -12
View File
@@ -1,4 +1,12 @@
import { AUDIO_ENCODER } from 'src/constants';
import {
AssetEditAction,
AssetEditActionItem,
CropParameters,
MirrorAxis,
MirrorParameters,
RotateParameters,
} from 'src/dtos/editing.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import { CQMode, ToneMapping, TranscodeHardwareAcceleration, TranscodeTarget, VideoCodec } from 'src/enum';
import {
@@ -88,15 +96,26 @@ export class BaseConfig implements VideoCodecSWConfig {
videoStream: VideoStreamInfo,
audioStream?: AudioStreamInfo,
format?: VideoFormat,
edits: AssetEditActionItem[] = [],
) {
const inputOptions = this.getBaseInputOptions(videoStream, format);
if (edits.length > 0) {
// turns out MOV files can have cropping metadata that ffmpeg automatically applies when decoding
// this means that the video stream's dimensions can just be wrong once it hits the filter pipeline
// https://github.com/FFmpeg/FFmpeg/blob/f40fcf802472227851e0b8eeba40b9e6b3b8a3a1/libavutil/frame.h#L1021
inputOptions.push('-apply_cropping 0');
}
const options = {
inputOptions: this.getBaseInputOptions(videoStream, format),
inputOptions,
outputOptions: [...this.getBaseOutputOptions(target, videoStream, audioStream), '-v verbose'],
twoPass: this.eligibleForTwoPass(),
progress: { frameCount: videoStream.frameCount, percentInterval: 5 },
} as TranscodeCommand;
if ([TranscodeTarget.All, TranscodeTarget.Video].includes(target)) {
const filters = this.getFilterOptions(videoStream);
const filters = this.getFilterOptions(videoStream, edits);
if (filters.length > 0) {
options.outputOptions.push(`-vf ${filters.join(',')}`);
}
@@ -156,10 +175,46 @@ export class BaseConfig implements VideoCodecSWConfig {
return options;
}
getFilterOptions(videoStream: VideoStreamInfo) {
// Translate user edit actions (crop / rotate / mirror) into software ffmpeg
// filter strings, tracking the frame dimensions as each filter is applied so
// that downstream scaling decisions can use the post-edit size.
getEditOptions(videoStream: VideoStreamInfo, edits: AssetEditActionItem[]) {
  const filters = [];
  let dimensions = { width: videoStream.width, height: videoStream.height };

  for (const edit of edits) {
    if (edit.action === AssetEditAction.Crop) {
      filters.push(this.getCropOperation(edit.parameters));
      // After a crop, the frame takes on the crop rectangle's size.
      dimensions = { width: edit.parameters.width, height: edit.parameters.height };
    } else if (edit.action === AssetEditAction.Rotate) {
      const rotateFilter = this.getRotateOperation(edit.parameters);
      // Unsupported angles yield an empty filter and are skipped entirely.
      if (rotateFilter) {
        filters.push(rotateFilter);
        // A quarter turn swaps width and height.
        if (Math.abs(edit.parameters.angle) === 90 || Math.abs(edit.parameters.angle) === 270) {
          dimensions = { width: dimensions.height, height: dimensions.width };
        }
      }
    } else if (edit.action === AssetEditAction.Mirror) {
      // Mirroring never changes the frame dimensions.
      filters.push(this.getMirrorOperation(edit.parameters));
    }
  }

  return { options: filters, currentDimensions: dimensions };
}
getFilterOptions(videoStream: VideoStreamInfo, edits: AssetEditActionItem[] = []) {
const options = [];
const { options: editOptions, currentDimensions } = this.getEditOptions(videoStream, edits);
options.push(...editOptions);
// Apply scaling based on current dimensions after edits
if (this.shouldScale(videoStream, currentDimensions)) {
options.push(`scale=${this.getScaling(videoStream, 2, currentDimensions)}`);
}
const tonemapOptions = this.getToneMapping(videoStream);
@@ -238,9 +293,10 @@ export class BaseConfig implements VideoCodecSWConfig {
return target;
}
shouldScale(videoStream: VideoStreamInfo) {
const oddDimensions = videoStream.height % 2 !== 0 || videoStream.width % 2 !== 0;
const largerThanTarget = Math.min(videoStream.height, videoStream.width) > this.getTargetResolution(videoStream);
shouldScale(videoStream: VideoStreamInfo, currentDimensions?: { width: number; height: number }) {
const dims = currentDimensions || { width: videoStream.width, height: videoStream.height };
const oddDimensions = dims.height % 2 !== 0 || dims.width % 2 !== 0;
const largerThanTarget = Math.min(dims.height, dims.width) > this.getTargetResolution(videoStream);
return oddDimensions || largerThanTarget;
}
@@ -248,9 +304,11 @@ export class BaseConfig implements VideoCodecSWConfig {
return videoStream.isHDR && this.config.tonemap !== ToneMapping.Disabled;
}
getScaling(videoStream: VideoStreamInfo, mult = 2) {
getScaling(videoStream: VideoStreamInfo, mult = 2, currentDimensions?: { width: number; height: number }) {
const dims = currentDimensions || { width: videoStream.width, height: videoStream.height };
const targetResolution = this.getTargetResolution(videoStream);
return this.isVideoVertical(videoStream) ? `${targetResolution}:-${mult}` : `-${mult}:${targetResolution}`;
const isVertical = dims.height > dims.width || this.isVideoRotated(videoStream);
return isVertical ? `${targetResolution}:-${mult}` : `-${mult}:${targetResolution}`;
}
getSize(videoStream: VideoStreamInfo) {
@@ -329,6 +387,31 @@ export class BaseConfig implements VideoCodecSWConfig {
useCQP() {
return this.config.cqMode === CQMode.Cqp;
}
// Edit operations (software filters)
// Build an ffmpeg crop filter of the form crop=<w>:<h>:<x>:<y>.
getCropOperation({ x, y, width, height }: CropParameters): string {
  const args = [width, height, x, y].join(':');
  return `crop=${args}`;
}
// Map a rotation edit to the equivalent ffmpeg filter chain. Only quarter
// turns are supported; any other angle yields an empty string (no filter).
getRotateOperation({ angle }: RotateParameters): string {
  const filtersByAngle: Record<number, string> = {
    90: 'transpose=1', // 90° clockwise
    180: 'hflip,vflip', // 180°
    270: 'transpose=2', // 90° counter-clockwise (270° clockwise)
  };
  return filtersByAngle[angle] ?? '';
}
// hflip mirrors left/right (across the vertical axis); vflip mirrors
// top/bottom (across the horizontal axis).
getMirrorOperation({ axis }: MirrorParameters): string {
  if (axis === MirrorAxis.Horizontal) {
    return 'hflip';
  }
  return 'vflip';
}
}
export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
@@ -423,14 +506,14 @@ export class ThumbnailConfig extends BaseConfig {
return ['-fps_mode vfr', '-frames:v 1', '-update 1'];
}
getFilterOptions(videoStream: VideoStreamInfo): string[] {
getFilterOptions(videoStream: VideoStreamInfo, edits: AssetEditActionItem[] = []): string[] {
return [
'fps=12:start_time=0:eof_action=pass:round=down',
'thumbnail=12',
String.raw`select=gt(scene\,0.1)-eq(prev_selected_n\,n)+isnan(prev_selected_n)+gt(n\,20)`,
'trim=end_frame=2',
'reverse',
...super.getFilterOptions(videoStream),
...super.getFilterOptions(videoStream, edits),
];
}
@@ -139,6 +139,7 @@ export class AssetViewerManager extends BaseEventManager<Events> {
openEditor() {
this.closeActivityPanel();
this.isPlayingMotionPhoto = false;
this.isShowEditor = true;
}
@@ -127,7 +127,17 @@ export class EditManager {
try {
// Setup the websocket listener before sending the edit request
const editCompleted = waitForWebsocketEvent('AssetEditReadyV1', (event) => event.asset.id === assetId, 10_000);
const editEvents = [waitForWebsocketEvent('AssetEditReadyV1', (event) => event.asset.id === assetId, 10_000)];
if (this.currentAsset.livePhotoVideoId) {
editEvents.push(
waitForWebsocketEvent(
'AssetEditReadyV1',
(event) => event.asset.id === this.currentAsset!.livePhotoVideoId,
10_000,
),
);
}
await (edits.length === 0
? removeAssetEdits({ id: assetId })
@@ -138,7 +148,7 @@ export class EditManager {
},
}));
await editCompleted;
await Promise.all(editEvents);
eventManager.emit('AssetEditsApplied', assetId);
-1
View File
@@ -242,7 +242,6 @@ export const getAssetActions = ($t: MessageFormatter, asset: AssetResponseDto) =
!sharedLink &&
isOwner &&
asset.type === AssetTypeEnum.Image &&
!asset.livePhotoVideoId &&
asset.exifInfo?.projectionType !== ProjectionType.EQUIRECTANGULAR &&
!asset.originalPath.toLowerCase().endsWith('.insp') &&
!asset.originalPath.toLowerCase().endsWith('.gif') &&
+2 -2
View File
@@ -238,8 +238,8 @@ export const getAssetMediaUrl = (options: AssetUrlOptions) => {
};
export const getAssetPlaybackUrl = (options: AssetUrlOptions) => {
const { id, cacheKey: c } = options;
return createUrl(getAssetPlaybackPath(id), { ...authManager.params, c });
const { id, cacheKey: c, edited = true } = options;
return createUrl(getAssetPlaybackPath(id), { ...authManager.params, c, edited });
};
export const getProfileImageUrl = (user: UserResponseDto) =>