track video metadata

This commit is contained in:
mertalev 2026-04-22 00:46:27 -04:00
parent f0835d06f8
commit f6db3ade4b
No known key found for this signature in database
GPG Key ID: 0603AE056AA39037
26 changed files with 1619 additions and 317 deletions

View File

@ -597,6 +597,127 @@ export enum ExifOrientation {
Rotate270CW = 8,
}
/**
 * ITU-T H.273 colour primaries codes (`colour_primaries` syntax element),
 * stored as-is in `asset_video.colorPrimaries`. Gaps in the numbering
 * (3, 13–21) are reserved by the spec and intentionally absent here.
 */
export enum ColorPrimaries {
  Reserved = 0,
  Bt709 = 1,
  /** Bitstream did not specify primaries; characteristics must be guessed. */
  Unknown = 2,
  Bt470M = 4,
  Bt470Bg = 5,
  Smpte170M = 6,
  Smpte240M = 7,
  Film = 8,
  /** Wide-gamut primaries used by UHD/HDR content. */
  Bt2020 = 9,
  Smpte428 = 10,
  Smpte431 = 11,
  /** Per H.273 this is the P3 D65 (Display P3) gamut. */
  Smpte432 = 12,
  Ebu3213 = 22,
}
/**
 * ITU-T H.273 transfer characteristics codes (`transfer_characteristics`
 * syntax element), stored as-is in `asset_video.colorTransfer`.
 * Smpte2084 (PQ) and AribStdB67 (HLG) are the two HDR transfers this
 * codebase treats as HDR (see the `isHDR` computation in probe()).
 */
export enum ColorTransfer {
  Reserved = 0,
  Bt709 = 1,
  /** Bitstream did not specify a transfer function. */
  Unknown = 2,
  Bt470M = 4,
  Bt470Bg = 5,
  Smpte170M = 6,
  Smpte240M = 7,
  Linear = 8,
  Log100 = 9,
  Log316 = 10,
  Iec6196624 = 11,
  Bt1361E = 12,
  /** IEC 61966-2-1, i.e. the sRGB transfer function. */
  Iec6196621 = 13,
  Bt202010 = 14,
  Bt202012 = 15,
  /** SMPTE ST 2084 perceptual quantizer (PQ), used by HDR10. */
  Smpte2084 = 16,
  Smpte428 = 17,
  /** ARIB STD-B67 hybrid log-gamma (HLG). */
  AribStdB67 = 18,
}
/**
 * ITU-T H.273 matrix coefficients codes (`matrix_coefficients` syntax
 * element), stored as-is in `asset_video.colorMatrix`.
 */
export enum ColorMatrix {
  /** Identity matrix — the stream carries G'B'R' (or Y'Z'X') directly. */
  Gbr = 0,
  Bt709 = 1,
  /** Bitstream did not specify matrix coefficients. */
  Unknown = 2,
  Reserved = 3,
  Fcc = 4,
  Bt470Bg = 5,
  Smpte170M = 6,
  Smpte240M = 7,
  Ycgco = 8,
  /** BT.2020 non-constant luminance. */
  Bt2020Nc = 9,
  /** BT.2020 constant luminance. */
  Bt2020C = 10,
  Smpte2085 = 11,
  ChromaDerivedNc = 12,
  ChromaDerivedC = 13,
  Ictcp = 14,
}
/**
 * H.264 `profile_idc` values.
 *
 * NOTE(review): Baseline and ConstrainedBaseline deliberately share code 66 —
 * the H.264 spec distinguishes them via constraint_set1_flag, not profile_idc.
 * Forward lookups (the only use in this codebase, via parseEnum) are
 * unaffected, but TypeScript's reverse mapping `H264Profile[66]` resolves to
 * 'ConstrainedBaseline' because the later member wins.
 */
export enum H264Profile {
  Baseline = 66,
  ConstrainedBaseline = 66,
  Main = 77,
  Extended = 88,
  High = 100,
  /** High 10-bit. */
  High10 = 110,
  /** High 4:2:2. */
  High422 = 122,
  /** High 4:4:4 Predictive. */
  High444Predictive = 244,
}
/**
 * HEVC `general_profile_idc` values from the profile_tier_level structure.
 */
export enum HevcProfile {
  Main = 1,
  /** Main 10-bit — matches ffprobe's "Main 10" via pascalCase normalization. */
  Main10 = 2,
  MainStillPicture = 3,
  /** Format-range extensions (RExt) profile group. */
  Rext = 4,
}
/**
 * AV1 `seq_profile` values from the sequence header.
 */
export enum Av1Profile {
  Main = 0,
  High = 1,
  Professional = 2,
}
/**
 * MPEG-4 Audio Object Type (AOT) values for AAC, as reported in the stream's
 * profile field. Only used for streams whose codec is 'aac' (see probe()).
 */
export enum AacProfile {
  Main = 1,
  /** Low Complexity — the common default AAC profile. */
  Lc = 2,
  Ssr = 3,
  Ltp = 4,
  /** HE-AAC (AAC-LC + SBR). */
  HeAac = 5,
  /** Low Delay. */
  Ld = 23,
  /** HE-AAC v2 (adds parametric stereo). */
  HeAacv2 = 29,
  /** Enhanced Low Delay. */
  Eld = 39,
  /** Extended HE-AAC (USAC). */
  XheAac = 42,
}
/**
 * Dolby Vision bitstream profile numbers from the DOVI configuration record.
 * Member names follow the codec-string convention: `dvhe.*` profiles are
 * HEVC-based, `dvav.09` is AVC-based, and `dav1.10` is AV1-based.
 */
export enum DvProfile {
  Dvhe03 = 3,
  Dvhe04 = 4,
  Dvhe05 = 5,
  Dvhe07 = 7,
  Dvhe08 = 8,
  Dvav09 = 9,
  Dav110 = 10,
}
/**
 * Dolby Vision base-layer signal-compatibility ID from the DOVI configuration record.
 * Identifies what the base HEVC/AVC layer renders as on a non-DV decoder.
 * Intermediate code points (3, 5) are not modelled here.
 */
export enum DvSignalCompatibility {
  /** Base layer is not watchable without DV processing. */
  None = 0,
  Hdr10 = 1,
  /** SDR with BT.709 colorimetry. */
  Sdr709 = 2,
  Hlg = 4,
  /** SDR with BT.2020 colorimetry. */
  Sdr2020 = 6,
}
export enum DatabaseExtension {
Cube = 'cube',
EarthDistance = 'earthdistance',

View File

@ -239,10 +239,71 @@ select
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits",
to_json("asset_exif") as "exifInfo"
to_json("asset_exif") as "exifInfo",
(
select
to_json(obj)
from
(
select
"asset_video"."index",
"asset_video"."codecName",
"asset_video"."profile",
"asset_video"."level",
"asset_video"."bitrate",
"asset_exif"."exifImageWidth" as "width",
"asset_exif"."exifImageHeight" as "height",
"asset_video"."pixelFormat",
"asset_video"."frameCount",
"asset_exif"."fps" as "frameRate",
"asset_video"."timeBase",
case
when "asset_exif"."orientation" = '6' then -90
when "asset_exif"."orientation" = '8' then 90
when "asset_exif"."orientation" = '3' then 180
else 0
end as "rotation",
"asset_video"."colorPrimaries",
"asset_video"."colorMatrix",
"asset_video"."colorTransfer",
"asset_video"."dvProfile",
"asset_video"."dvLevel",
"asset_video"."dvBlSignalCompatibilityId"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "videoStream",
(
select
to_json(obj)
from
(
select
"asset_video"."formatName",
"asset_video"."formatLongName",
case
when "asset"."duration" ~ '^\d{2}:\d{2}:\d{2}\.\d{3}$' then substr(asset.duration, 1, 2)::int * 3600000 + substr(asset.duration, 4, 2)::int * 60000 + substr(asset.duration, 7, 2)::int * 1000 + substr(asset.duration, 10, 3)::int
else 0
end as "duration",
"asset_video"."bitrate"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "format"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
left join "asset_video" on "asset_video"."assetId" = "asset"."id"
where
"asset"."id" = $4
@ -554,9 +615,87 @@ select
where
"asset_file"."assetId" = "asset"."id"
) as agg
) as "files"
) as "files",
(
select
to_json(obj)
from
(
select
"asset_audio"."index",
"asset_audio"."codecName",
"asset_audio"."profile",
"asset_audio"."bitrate"
from
"asset_audio"
where
"asset_audio"."assetId" = "asset"."id"
) as obj
) as "audioStream",
(
select
to_json(obj)
from
(
select
"asset_video"."index",
"asset_video"."codecName",
"asset_video"."profile",
"asset_video"."level",
"asset_video"."bitrate",
"asset_exif"."exifImageWidth" as "width",
"asset_exif"."exifImageHeight" as "height",
"asset_video"."pixelFormat",
"asset_video"."frameCount",
"asset_exif"."fps" as "frameRate",
"asset_video"."timeBase",
case
when "asset_exif"."orientation" = '6' then -90
when "asset_exif"."orientation" = '8' then 90
when "asset_exif"."orientation" = '3' then 180
else 0
end as "rotation",
"asset_video"."colorPrimaries",
"asset_video"."colorMatrix",
"asset_video"."colorTransfer",
"asset_video"."dvProfile",
"asset_video"."dvLevel",
"asset_video"."dvBlSignalCompatibilityId"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "videoStream",
(
select
to_json(obj)
from
(
select
"asset_video"."formatName",
"asset_video"."formatLongName",
case
when "asset"."duration" ~ '^\d{2}:\d{2}:\d{2}\.\d{3}$' then substr(asset.duration, 1, 2)::int * 3600000 + substr(asset.duration, 4, 2)::int * 60000 + substr(asset.duration, 7, 2)::int * 1000 + substr(asset.duration, 10, 3)::int
else 0
end as "duration",
"asset_video"."bitrate"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "format"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
left join "asset_video" on "asset_video"."assetId" = "asset"."id"
where
"asset"."id" = $1
and "asset"."type" = 'VIDEO'

View File

@ -9,6 +9,7 @@ import { DB } from 'src/schema';
import {
anyUuid,
asUuid,
withAudioVideo,
withDefaultVisibility,
withEdits,
withExif,
@ -134,6 +135,7 @@ export class AssetJobRepository {
)
.select(withEdits)
.$call(withExifInner)
.$call(withAudioVideo)
.where('asset.id', '=', id)
.executeTakeFirst();
}
@ -333,8 +335,10 @@ export class AssetJobRepository {
getForVideoConversion(id: string) {
return this.db
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select(['asset.id', 'asset.ownerId', 'asset.originalPath'])
.select(withFiles)
.$call((qb) => withAudioVideo(qb, true))
.where('asset.id', '=', id)
.where('asset.type', '=', sql.lit(AssetType.Video))
.executeTakeFirst();

View File

@ -19,6 +19,7 @@ import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { AssetAudioTable, AssetKeyframeTable, AssetVideoTable } from 'src/schema/tables/asset-av.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
@ -124,6 +125,14 @@ interface GetByIdsRelations {
edits?: boolean;
}
/**
 * Payload for AssetRepository.upsertExif. The optional audio/video/keyframe
 * rows are upserted together with the exif row in the same query (chained as
 * CTEs in front of the asset_exif insert).
 */
type UpsertExifOptions = {
  exif: Insertable<AssetExifTable>;
  // Primary audio stream row for asset_audio (one row per asset).
  audio?: Insertable<AssetAudioTable>;
  // Primary video stream + container row for asset_video (one row per asset).
  video?: Insertable<AssetVideoTable>;
  // Keyframe index arrays for asset_keyframe (one row per asset).
  keyframes?: Insertable<AssetKeyframeTable>;
  // How to combine incoming lockedProperties with the stored ones.
  // NOTE(review): semantics inferred from the variant names — confirm against
  // the conflict-handling branch of upsertExif.
  lockedPropertiesBehavior: 'override' | 'append' | 'skip';
};
/**
 * SQL expression yielding the distinct union of the stored
 * `asset_exif.lockedProperties` array and `columns` (Postgres `||` array
 * concat + `distinct unnest`), with an empty result collapsed to NULL via
 * `nullif(..., '{}')` so the column stays NULL rather than `{}`.
 */
const distinctLocked = <T extends LockableProperty[] | null>(eb: ExpressionBuilder<DB, 'asset_exif'>, columns: T) =>
  sql<T>`nullif(array(select distinct unnest(${eb.ref('asset_exif.lockedProperties')} || ${columns})), '{}')`;
@ -161,15 +170,76 @@ export class AssetRepository {
@GenerateSql({
params: [
{ dateTimeOriginal: DummyValue.DATE, lockedProperties: ['dateTimeOriginal'] },
{ lockedPropertiesBehavior: 'append' },
{
exif: { dateTimeOriginal: DummyValue.DATE, lockedProperties: ['dateTimeOriginal'] },
lockedPropertiesBehavior: 'append',
},
],
})
async upsertExif(
exif: Insertable<AssetExifTable>,
{ lockedPropertiesBehavior }: { lockedPropertiesBehavior: 'override' | 'append' | 'skip' },
): Promise<void> {
await this.db
async upsertExif({ exif, audio, video, keyframes, lockedPropertiesBehavior }: UpsertExifOptions): Promise<void> {
let query = this.db;
if (audio) {
(query as any) = this.db.with('audio', (qb) =>
qb
.insertInto('asset_audio')
.values(audio)
.onConflict((oc) =>
oc.column('assetId').doUpdateSet(({ ref }) => ({
bitrate: ref('asset_audio.bitrate'),
index: ref('asset_audio.index'),
profile: ref('asset_audio.profile'),
codecName: ref('asset_audio.codecName'),
})),
),
);
}
if (video) {
(query as any) = query.with('video', (qb) =>
qb
.insertInto('asset_video')
.values(video)
.onConflict((oc) =>
oc.column('assetId').doUpdateSet(({ ref }) => ({
bitrate: ref('asset_video.bitrate'),
timeBase: ref('asset_video.timeBase'),
index: ref('asset_video.index'),
profile: ref('asset_video.profile'),
level: ref('asset_video.level'),
colorPrimaries: ref('asset_video.colorPrimaries'),
colorTransfer: ref('asset_video.colorTransfer'),
colorMatrix: ref('asset_video.colorMatrix'),
dvProfile: ref('asset_video.dvProfile'),
dvLevel: ref('asset_video.dvLevel'),
dvBlSignalCompatibilityId: ref('asset_video.dvBlSignalCompatibilityId'),
codecName: ref('asset_video.codecName'),
formatName: ref('asset_video.formatName'),
formatLongName: ref('asset_video.formatLongName'),
pixelFormat: ref('asset_video.pixelFormat'),
})),
),
);
}
if (keyframes) {
(query as any) = query.with('keyframe', (qb) =>
qb
.insertInto('asset_keyframe')
.values(keyframes)
.onConflict((oc) =>
oc.column('assetId').doUpdateSet(({ ref }) => ({
pts: ref('asset_keyframe.pts'),
accDuration: ref('asset_keyframe.accDuration'),
ownDuration: ref('asset_keyframe.ownDuration'),
totalDuration: ref('asset_keyframe.totalDuration'),
packetCount: ref('asset_keyframe.packetCount'),
outputFrames: ref('asset_keyframe.outputFrames'),
})),
),
);
}
await query
.insertInto('asset_exif')
.values(exif)
.onConflict((oc) =>

View File

@ -1,14 +1,30 @@
import { Injectable } from '@nestjs/common';
import { ExifDateTime, exiftool, WriteTags } from 'exiftool-vendored';
import ffmpeg, { FfprobeData } from 'fluent-ffmpeg';
import ffmpeg, { FfprobeData, FfprobeStream } from 'fluent-ffmpeg';
import _ from 'lodash';
import { Duration } from 'luxon';
import { execFile as execFileCb } from 'node:child_process';
import fs from 'node:fs/promises';
import { Writable } from 'node:stream';
import { promisify } from 'node:util';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Exif } from 'src/database';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
import {
AacProfile,
Av1Profile,
ColorMatrix,
ColorPrimaries,
Colorspace,
ColorTransfer,
DvProfile,
DvSignalCompatibility,
H264Profile,
HevcProfile,
LogLevel,
RawExtractedFormat,
} from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import {
DecodeToBufferOptions,
@ -18,6 +34,7 @@ import {
ProbeOptions,
TranscodeCommand,
VideoInfo,
VideoPacketInfo,
} from 'src/types';
import { handlePromiseError } from 'src/utils/misc';
import { createAffineMatrix } from 'src/utils/transform';
@ -26,9 +43,14 @@ const probe = (input: string, options: string[]): Promise<FfprobeData> =>
new Promise((resolve, reject) =>
ffmpeg.ffprobe(input, options, (error, data) => (error ? reject(error) : resolve(data))),
);
// child_process.execFile as a promise API; used to invoke ffprobe directly
// (see probePackets below).
const execFile = promisify(execFileCb);

// NOTE(review): per sharp's docs, concurrency(0) resets the libvips thread
// pool to its default, and files: 0 disables the open-file cache — confirm
// these are the intended settings.
sharp.concurrency(0);
sharp.cache({ files: 0 });
/**
 * Normalize an ffprobe-reported name to the PascalCase member naming used by
 * the metadata enums, e.g. 'arib-std-b67' -> 'AribStdB67', 'Main 10' -> 'Main10'.
 * Lower-casing first keeps all-caps inputs from being split per letter by
 * lodash's camelCase.
 */
const pascalCase = (str: string): string => {
  const camel = _.camelCase(str.toLowerCase());
  return _.upperFirst(camel);
};
type ProgressEvent = {
frames: number;
currentFps: number;
@ -244,6 +266,7 @@ export class MediaRepository {
},
videoStreams: results.streams
.filter((stream) => stream.codec_type === 'video' && !stream.disposition?.attached_pic)
.sort((a, b) => this.compareStreams(a, b))
.map((stream) => {
const height = this.parseInt(stream.height);
const dar = this.getDar(stream.display_aspect_ratio);
@ -252,28 +275,111 @@ export class MediaRepository {
height,
width: dar ? Math.round(height * dar) : this.parseInt(stream.width),
codecName: stream.codec_name === 'h265' ? 'hevc' : stream.codec_name,
codecType: stream.codec_type,
profile: this.parseVideoProfile(stream.codec_name, stream.profile as string | undefined),
level: this.parseOptionalInt(stream.level),
frameCount: this.parseInt(options?.countFrames ? stream.nb_read_packets : stream.nb_frames),
frameRate: this.parseFrameRate(stream.r_frame_rate ?? stream.avg_frame_rate),
timeBase: this.parseRational(stream.time_base)?.den,
rotation: this.parseInt(stream.rotation),
isHDR: stream.color_transfer === 'smpte2084' || stream.color_transfer === 'arib-std-b67',
bitrate: this.parseInt(stream.bit_rate),
pixelFormat: stream.pix_fmt || 'yuv420p',
colorPrimaries: stream.color_primaries,
colorSpace: stream.color_space,
colorTransfer: stream.color_transfer,
colorPrimaries: this.parseEnum(ColorPrimaries, stream.color_primaries) ?? ColorPrimaries.Unknown,
colorMatrix: this.parseEnum(ColorMatrix, stream.color_space) ?? ColorMatrix.Unknown,
colorTransfer: this.parseEnum(ColorTransfer, stream.color_transfer) ?? ColorTransfer.Unknown,
dvProfile: this.parseOptionalInt(stream.dv_profile) as DvProfile | undefined,
dvLevel: this.parseOptionalInt(stream.dv_level),
dvBlSignalCompatibilityId: this.parseOptionalInt(stream.dv_bl_signal_compatibility_id) as
| DvSignalCompatibility
| undefined,
};
}),
audioStreams: results.streams
.filter((stream) => stream.codec_type === 'audio')
.sort((a, b) => this.compareStreams(a, b))
.map((stream) => ({
index: stream.index,
codecType: stream.codec_type,
codecName: stream.codec_name,
profile:
stream.codec_name === 'aac' ? this.parseEnum(AacProfile, stream.profile as string | undefined) : undefined,
bitrate: this.parseInt(stream.bit_rate),
})),
};
}
/**
 * Needed for accurate segments, especially when remuxing, seeking and/or VFR is involved.
 * Scanning packets for keyframes in JS is much faster than -skip_frame nokey since it avoids decoding the video.
 *
 * @param input path to the media file
 * @param streamIndex ffprobe index of the video stream to scan
 * @param timeBase stream time-base denominator (ticks per second)
 * @param formatDuration container duration in seconds, used to estimate output frames
 * @returns keyframe arrays (parallel: pts / accumulated duration / own duration)
 *          plus total duration, surviving packet count, and estimated output frames
 */
async probePackets(
  input: string,
  streamIndex: number,
  timeBase: number,
  formatDuration: number,
): Promise<VideoPacketInfo> {
  // One "pts,duration,flags" CSV line per packet of the selected stream.
  const { stdout } = await execFile('ffprobe', [
    '-v',
    'error',
    '-select_streams',
    String(streamIndex),
    '-show_entries',
    'packet=pts,duration,flags',
    '-of',
    'csv=p=0',
    input,
  ]);

  let totalDuration = 0; // sum of ALL packet durations (discarded ones included), in time-base ticks
  let packetCount = 0; // packets that survive the discard flag
  const keyframePts: number[] = [];
  const keyframeAccDuration: number[] = [];
  const keyframeOwnDuration: number[] = [];
  const postDiscard: { pts: number; duration: number }[] = [];
  for (const line of stdout.split('\n')) {
    if (!line) {
      continue;
    }
    const [ptsStr, durationStr, flags] = line.split(',');
    const pts = Number.parseInt(ptsStr);
    const duration = Number.parseInt(durationStr);
    if (Number.isNaN(pts) || Number.isNaN(duration)) {
      // e.g. 'N/A' fields — skip packets without usable timing info
      continue;
    }
    totalDuration += duration;
    // ffprobe flags string: 'K' marks a keyframe (first char), 'D' a packet
    // flagged for discard (second char).
    if (flags[1] !== 'D') {
      packetCount++;
      postDiscard.push({ pts, duration });
    }
    if (flags[0] === 'K') {
      keyframePts.push(pts);
      keyframeAccDuration.push(totalDuration);
      keyframeOwnDuration.push(duration);
    }
  }
  // Estimate the frame count of a constant-rate output derived from this
  // stream. NOTE(review): the ±1.1 thresholds appear to mirror ffmpeg's
  // drop/duplicate rounding (delta < -1.1 drops the frame, > 1.1 duplicates,
  // otherwise emit exactly one) — confirm against the consumer of outputFrames.
  let outputFrames = 0;
  if (packetCount > 0 && formatDuration > 0) {
    // Packets arrive in decode order; sort by pts for presentation order (B-frames).
    postDiscard.sort((a, b) => a.pts - b.pts);
    const firstPts = postDiscard[0].pts;
    // Average output slots per time-base tick, from the overall packet rate.
    const slotsPerTick = packetCount / formatDuration / timeBase;
    let nextPts = 0; // next expected slot index in output-frame units
    for (const pkt of postDiscard) {
      const delta = (pkt.pts - firstPts) * slotsPerTick - nextPts + pkt.duration * slotsPerTick;
      const nb = delta < -1.1 ? 0 : delta > 1.1 ? Math.round(delta) : 1;
      outputFrames += nb;
      nextPts += nb;
    }
  }
  return {
    totalDuration,
    packetCount,
    outputFrames,
    keyframePts,
    keyframeAccDuration,
    keyframeOwnDuration,
  };
}
transcode(input: string, output: string | Writable, options: TranscodeCommand): Promise<void> {
if (!options.twoPass) {
return new Promise((resolve, reject) => {
@ -356,6 +462,31 @@ export class MediaRepository {
return Number.parseFloat(value as string) || 0;
}
/**
 * Like Number.parseInt, but yields undefined instead of NaN for missing or
 * unparseable input.
 */
private parseOptionalInt(value: string | number | undefined): number | undefined {
  const result = Number.parseInt(value as string);
  if (Number.isNaN(result)) {
    return undefined;
  }
  return result;
}
/**
 * Resolve an ffprobe-reported name against a numeric enum by converting it to
 * the enum's PascalCase member naming (e.g. 'smpte170m' -> Smpte170M).
 * Returns undefined when the value is absent or has no matching member.
 */
private parseEnum<E extends Record<string, number | string>>(enumObj: E, value?: string) {
  if (!value) {
    return undefined;
  }
  return enumObj[pascalCase(value)] as Extract<E[keyof E], number> | undefined;
}
/**
 * Parse a rational like "60000/1001" or "1/600" into `{ num, den }`.
 * Returns undefined for missing input or zero/NaN parts (e.g. "0/0",
 * which ffprobe uses for unknown rates).
 */
private parseRational(value: string | undefined): { num: number; den: number } | undefined {
  if (!value) {
    return undefined;
  }
  const parts = value.split('/').map(Number);
  const num = parts[0];
  // A bare number like "25" is treated as "25/1".
  const den = parts.length > 1 ? parts[1] : 1;
  return num && den ? { num, den } : undefined;
}
/** Convert an ffprobe frame-rate rational string (e.g. "30000/1001") to fps. */
private parseFrameRate(value: string | undefined): number | undefined {
  const rational = this.parseRational(value);
  if (!rational) {
    return undefined;
  }
  return rational.num / rational.den;
}
private getDar(dar: string | undefined): number {
if (dar) {
const [darW, darH] = dar.split(':').map(Number);
@ -366,4 +497,27 @@ export class MediaRepository {
return 0;
}
/**
 * Map an ffprobe profile name to the numeric profile code for the given
 * codec. Returns undefined for codecs without a modelled profile enum.
 */
private parseVideoProfile(codec?: string, profile?: string) {
  if (codec === 'h264') {
    return this.parseEnum(H264Profile, profile);
  }
  // ffprobe may report HEVC under either name; probe() normalizes to 'hevc'.
  if (codec === 'hevc' || codec === 'h265') {
    return this.parseEnum(HevcProfile, profile);
  }
  if (codec === 'av1') {
    return this.parseEnum(Av1Profile, profile);
  }
  // Unhandled codec: no profile mapping.
}
/**
 * Sort comparator for probe results: streams with the default disposition
 * first, ties broken by descending bitrate.
 */
private compareStreams(a: FfprobeStream, b: FfprobeStream): number {
  const defaultDelta = (b.disposition?.default ?? 0) - (a.disposition?.default ?? 0);
  return defaultDelta === 0 ? this.parseInt(b.bit_rate) - this.parseInt(a.bit_rate) : defaultDelta;
}
}

View File

@ -33,6 +33,7 @@ import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumTable } from 'src/schema/tables/album.table';
import { ApiKeyTable } from 'src/schema/tables/api-key.table';
import { AssetAuditTable } from 'src/schema/tables/asset-audit.table';
import { AssetAudioTable, AssetKeyframeTable, AssetVideoTable } from 'src/schema/tables/asset-av.table';
import { AssetEditAuditTable } from 'src/schema/tables/asset-edit-audit.table';
import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
@ -196,6 +197,9 @@ export interface DB {
asset_metadata_audit: AssetMetadataAuditTable;
asset_job_status: AssetJobStatusTable;
asset_ocr: AssetOcrTable;
asset_audio: AssetAudioTable;
asset_video: AssetVideoTable;
asset_keyframe: AssetKeyframeTable;
ocr_search: OcrSearchTable;
face_search: FaceSearchTable;

View File

@ -0,0 +1,51 @@
import { Kysely, sql } from 'kysely';
/**
 * Creates the per-asset A/V metadata tables populated during metadata
 * extraction: one audio-stream row, one video-stream/container row, and one
 * keyframe-index row per asset. Each table cascades on asset deletion and is
 * keyed solely by assetId (single row per asset). Schemas mirror the
 * decorator classes in asset-av.table.ts.
 */
export async function up(db: Kysely<any>): Promise<void> {
  // Primary audio stream metadata; `profile` is an AacProfile code when applicable.
  await sql`CREATE TABLE "asset_audio" (
"assetId" uuid NOT NULL,
"bitrate" integer NOT NULL,
"index" smallint NOT NULL,
"profile" smallint,
"codecName" text NOT NULL,
CONSTRAINT "asset_audio_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE NO ACTION ON DELETE CASCADE,
CONSTRAINT "asset_audio_pkey" PRIMARY KEY ("assetId")
);`.execute(db);
  // Primary video stream + container metadata; the color*/dv* columns hold
  // the H.273 and Dolby Vision enum codes defined in src/enum.
  await sql`CREATE TABLE "asset_video" (
"assetId" uuid NOT NULL,
"bitrate" integer NOT NULL,
"frameCount" integer NOT NULL,
"timeBase" integer,
"index" smallint NOT NULL,
"profile" smallint,
"level" smallint,
"colorPrimaries" smallint NOT NULL,
"colorTransfer" smallint NOT NULL,
"colorMatrix" smallint NOT NULL,
"dvProfile" smallint,
"dvLevel" smallint,
"dvBlSignalCompatibilityId" smallint,
"codecName" text NOT NULL,
"formatName" text NOT NULL,
"formatLongName" text NOT NULL,
"pixelFormat" text NOT NULL,
CONSTRAINT "asset_video_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE NO ACTION ON DELETE CASCADE,
CONSTRAINT "asset_video_pkey" PRIMARY KEY ("assetId")
);`.execute(db);
  // Keyframe index from probePackets: pts/accDuration/ownDuration are
  // parallel arrays, one entry per keyframe.
  // NOTE(review): pts is stored as int4 — values in time-base ticks could
  // exceed 2^31 for very long/high-time-base streams; confirm the range.
  await sql`CREATE TABLE "asset_keyframe" (
"assetId" uuid NOT NULL,
"pts" integer[] NOT NULL,
"accDuration" integer[] NOT NULL,
"ownDuration" integer[] NOT NULL,
"totalDuration" integer NOT NULL,
"packetCount" integer NOT NULL,
"outputFrames" integer NOT NULL,
CONSTRAINT "asset_keyframe_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE NO ACTION ON DELETE CASCADE,
CONSTRAINT "asset_keyframe_pkey" PRIMARY KEY ("assetId")
);`.execute(db);
}
/**
 * Reverts up() by dropping the A/V metadata tables. The tables only reference
 * "asset" (never each other), so the drops have no interdependencies.
 */
export async function down(db: Kysely<any>): Promise<void> {
  for (const statement of [
    sql`DROP TABLE "asset_audio";`,
    sql`DROP TABLE "asset_video";`,
    sql`DROP TABLE "asset_keyframe";`,
  ]) {
    await statement.execute(db);
  }
}

View File

@ -0,0 +1,104 @@
import {
Column,
ForeignKeyColumn,
Table
} from '@immich/sql-tools';
import { AssetTable } from 'src/schema/tables/asset.table';
// Emit real "smallint" DDL while satisfying the Column decorator's type union.
// NOTE(review): this is a deliberate type-assertion lie — presumably
// @immich/sql-tools' column-type union lacks 'smallint'; prefer widening that
// union upstream and removing this cast.
const smallint = 'smallint' as 'integer';
/**
 * Primary audio stream metadata for an asset, written by upsertExif from
 * probe() results. One row per asset (primary key on assetId).
 */
@Table('asset_audio')
export class AssetAudioTable {
  // Owning asset; row is removed with the asset.
  @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', primary: true })
  assetId!: string;

  // Stream bitrate in bits per second (ffprobe bit_rate).
  @Column({ type: 'integer' })
  bitrate!: number;

  // ffprobe stream index within the container.
  @Column({ type: smallint })
  index!: number;

  // AacProfile code when the codec is AAC; null otherwise.
  @Column({ type: smallint, nullable: true })
  profile!: number | null;

  @Column({ type: 'text' })
  codecName!: string;
}
/**
 * Primary video stream and container metadata for an asset, written by
 * upsertExif from probe() results. One row per asset (primary key on assetId).
 */
@Table('asset_video')
export class AssetVideoTable {
  // Owning asset; row is removed with the asset.
  @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', primary: true })
  assetId!: string;

  // Stream bitrate in bits per second (ffprobe bit_rate).
  @Column({ type: 'integer' })
  bitrate!: number;

  @Column({ type: 'integer' })
  frameCount!: number;

  // Time-base denominator (ticks per second), e.g. 90000; null when unknown.
  @Column({ type: 'integer', nullable: true })
  timeBase!: number | null;

  // ffprobe stream index within the container.
  @Column({ type: smallint })
  index!: number;

  // Codec-specific profile code (H264Profile / HevcProfile / Av1Profile).
  @Column({ type: smallint, nullable: true })
  profile!: number | null;

  @Column({ type: smallint, nullable: true })
  level!: number | null;

  // ITU-T H.273 codes (ColorPrimaries / ColorTransfer / ColorMatrix enums).
  @Column({ type: smallint })
  colorPrimaries!: number;

  @Column({ type: smallint })
  colorTransfer!: number;

  @Column({ type: smallint })
  colorMatrix!: number;

  // Dolby Vision fields from the DOVI configuration record; null for non-DV.
  @Column({ type: smallint, nullable: true })
  dvProfile!: number | null;

  @Column({ type: smallint, nullable: true })
  dvLevel!: number | null;

  // DvSignalCompatibility code: what the base layer renders as without DV.
  @Column({ type: smallint, nullable: true })
  dvBlSignalCompatibilityId!: number | null;

  @Column({ type: 'text' })
  codecName!: string;

  // Container format (ffprobe format_name / format_long_name).
  @Column({ type: 'text' })
  formatName!: string;

  @Column({ type: 'text' })
  formatLongName!: string;

  @Column({ type: 'text' })
  pixelFormat!: string;
}
/**
 * Keyframe index for an asset's video stream, produced by
 * MediaRepository.probePackets. pts/accDuration/ownDuration are parallel
 * arrays with one entry per keyframe; durations are in stream time-base
 * ticks. One row per asset (primary key on assetId).
 */
@Table('asset_keyframe')
export class AssetKeyframeTable {
  // Owning asset; row is removed with the asset.
  @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', primary: true })
  assetId!: string;

  // Presentation timestamp of each keyframe packet.
  @Column({ type: 'integer', array: true })
  pts!: number[];

  // Accumulated packet duration up to and including each keyframe.
  @Column({ type: 'integer', array: true })
  accDuration!: number[];

  // Duration of each keyframe packet itself.
  @Column({ type: 'integer', array: true })
  ownDuration!: number[];

  // Sum of all packet durations in the stream (discarded packets included).
  @Column({ type: 'integer' })
  totalDuration!: number;

  // Packets surviving the discard flag.
  @Column({ type: 'integer' })
  packetCount!: number;

  // Estimated frame count of a constant-rate output (see probePackets).
  @Column({ type: 'integer' })
  outputFrames!: number;
}

View File

@ -351,10 +351,10 @@ export class AssetMediaService extends BaseService {
await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
}
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif(
{ assetId: asset.id, fileSizeInByte: file.size },
{ lockedPropertiesBehavior: 'override' },
);
await this.assetRepository.upsertExif({
exif: { assetId: asset.id, fileSizeInByte: file.size },
lockedPropertiesBehavior: 'override',
});
await this.eventRepository.emit('AssetCreate', { asset });

View File

@ -187,8 +187,10 @@ describe(AssetService.name, () => {
await sut.update(authStub.admin, asset.id, { description: 'Test description' });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: asset.id, description: 'Test description', lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: asset.id, description: 'Test description', lockedProperties: ['description'] },
lockedPropertiesBehavior: 'append',
}),
);
});
@ -201,12 +203,14 @@ describe(AssetService.name, () => {
await sut.update(authStub.admin, asset.id, { rating: 3 });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{
expect.objectContaining({
exif: {
assetId: asset.id,
rating: 3,
lockedProperties: ['rating'],
},
{ lockedPropertiesBehavior: 'append' },
lockedPropertiesBehavior: 'append',
}),
);
});

View File

@ -517,13 +517,13 @@ export class AssetService extends BaseService {
);
if (Object.keys(writes).length > 0) {
await this.assetRepository.upsertExif(
updateLockedColumns({
await this.assetRepository.upsertExif({
exif: updateLockedColumns({
assetId: id,
...writes,
}),
{ lockedPropertiesBehavior: 'append' },
);
lockedPropertiesBehavior: 'append',
});
await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id } });
}
}

View File

@ -21,7 +21,7 @@ import {
VideoCodec,
} from 'src/enum';
import { MediaService } from 'src/services/media.service';
import { JobCounts, RawImageInfo } from 'src/types';
import { AudioStreamInfo, JobCounts, RawImageInfo, VideoFormat, VideoStreamInfo } from 'src/types';
import { AssetFaceFactory } from 'test/factories/asset-face.factory';
import { AssetFactory } from 'test/factories/asset.factory';
import { PersonFactory } from 'test/factories/person.factory';
@ -375,15 +375,16 @@ describe(MediaService.name, () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
await expect(sut.handleGenerateThumbnails({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
expect(mocks.asset.update).not.toHaveBeenCalledWith();
});
it('should skip video thumbnail generation if no video stream', async () => {
const asset = AssetFactory.from({ type: AssetType.Video }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.noVideoStreams);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.noVideoStreams,
});
await expect(sut.handleGenerateThumbnails({ id: asset.id })).rejects.toThrowError();
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
expect(mocks.asset.update).not.toHaveBeenCalledWith();
@ -495,8 +496,10 @@ describe(MediaService.name, () => {
it('should generate a thumbnail for a video', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.videoStream2160p,
});
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.storage.mkdirSync).toHaveBeenCalledWith(expect.any(String));
@ -542,8 +545,10 @@ describe(MediaService.name, () => {
it('should tonemap thumbnail for hdr video', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.videoStreamHDR,
});
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.storage.mkdirSync).toHaveBeenCalledWith(expect.any(String));
@ -589,11 +594,13 @@ describe(MediaService.name, () => {
it('should always generate video thumbnail in one pass', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { twoPass: true, maxBitrate: '5000k' },
});
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.videoStreamHDR,
});
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -620,8 +627,10 @@ describe(MediaService.name, () => {
it('should not skip intra frames for MTS file', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.videoStreamMTS);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.videoStreamMTS,
});
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -638,8 +647,10 @@ describe(MediaService.name, () => {
it('should override reserved color metadata', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.videoStreamReserved);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.videoStreamReserved,
});
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -659,9 +670,11 @@ describe(MediaService.name, () => {
it('should use scaling divisible by 2 even when using quick sync', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.videoStream2160p,
});
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -855,11 +868,13 @@ describe(MediaService.name, () => {
it('should never set isProgressive for videos', async () => {
const asset = AssetFactory.from({ type: AssetType.Video, originalPath: '/original/path.ext' }).exif().build();
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({
image: { preview: { progressive: true }, thumbnail: { progressive: true } },
});
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...getForGenerateThumbnail(asset),
...probeStub.videoStreamHDR,
});
await sut.handleGenerateThumbnails({ id: asset.id });
@ -1921,26 +1936,33 @@ describe(MediaService.name, () => {
});
describe('handleVideoConversion', () => {
let asset: ReturnType<typeof AssetFactory.create> & {
videoStream: VideoStreamInfo | null;
audioStream: AudioStreamInfo | null;
format: VideoFormat | null;
};
beforeEach(() => {
const asset = AssetFactory.create({ id: 'video-id', type: AssetType.Video, originalPath: '/original/path.ext' });
asset = {
...AssetFactory.create({ id: 'video-id', type: AssetType.Video, originalPath: '/original/path.ext' }),
videoStream: null,
audioStream: null,
format: null,
};
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
sut.videoInterfaces = { dri: ['renderD128'], mali: true };
});
it('should skip transcoding if asset not found', async () => {
mocks.assetJob.getForVideoConversion.mockResolvedValue(void 0);
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should transcode the highest bitrate video stream', async () => {
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.multipleVideoStreams });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.probe).toHaveBeenCalledWith('/original/path.ext', { countFrames: false });
expect(mocks.systemMetadata.get).toHaveBeenCalled();
expect(mocks.storage.mkdirSync).toHaveBeenCalled();
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -1956,11 +1978,10 @@ describe(MediaService.name, () => {
it('should transcode the highest bitrate audio stream', async () => {
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.media.probe.mockResolvedValue(probeStub.multipleAudioStreams);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.multipleAudioStreams });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.probe).toHaveBeenCalledWith('/original/path.ext', { countFrames: false });
expect(mocks.systemMetadata.get).toHaveBeenCalled();
expect(mocks.storage.mkdirSync).toHaveBeenCalled();
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -1975,19 +1996,19 @@ describe(MediaService.name, () => {
});
it('should skip a video without any streams', async () => {
  // Probe data with no video streams at all: nothing can be transcoded.
  const streamless = { ...asset, ...probeStub.noVideoStreams };
  mocks.media.probe.mockResolvedValue(probeStub.noVideoStreams);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(streamless);

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should skip a video without any height', async () => {
  // Without a height the scaling decision cannot be made; skip entirely.
  const heightless = { ...asset, ...probeStub.noHeight };
  mocks.media.probe.mockResolvedValue(probeStub.noHeight);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(heightless);

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should throw an error if an unknown transcode policy is configured', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.noAudioStreams });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: 'foo' } } as never as SystemConfig);
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
@ -1995,7 +2016,7 @@ describe(MediaService.name, () => {
});
it('should throw an error if transcoding fails and hw acceleration is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.multipleVideoStreams });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.All, accel: TranscodeHardwareAcceleration.Disabled },
});
@ -2006,7 +2027,7 @@ describe(MediaService.name, () => {
});
it('should transcode when set to all', async () => {
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.multipleVideoStreams });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.All } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2021,7 +2042,7 @@ describe(MediaService.name, () => {
});
it('should transcode when optimal and too big', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream2160p });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2036,14 +2057,14 @@ describe(MediaService.name, () => {
});
it('should not transcode when policy bitrate and bitrate lower than max bitrate', async () => {
  // 40 Mbps source against a 50M ceiling: under the limit, so no work queued.
  const payload = { ...asset, ...probeStub.videoStream40Mbps };
  mocks.media.probe.mockResolvedValue(probeStub.videoStream40Mbps);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(payload);
  mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Bitrate, maxBitrate: '50M' } });

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should transcode when policy bitrate and bitrate higher than max bitrate', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream40Mbps);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream40Mbps });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Bitrate, maxBitrate: '30M' } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2058,21 +2079,21 @@ describe(MediaService.name, () => {
});
it('should not transcode when max bitrate is not a number', async () => {
  // 'foo' cannot be parsed as a bitrate, so the bitrate policy never fires.
  const payload = { ...asset, ...probeStub.videoStream40Mbps };
  mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Bitrate, maxBitrate: 'foo' } });
  mocks.media.probe.mockResolvedValue(probeStub.videoStream40Mbps);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(payload);

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should not transcode when max bitrate is 0', async () => {
  // A zero ceiling disables the bitrate policy altogether.
  const payload = { ...asset, ...probeStub.videoStream40Mbps };
  mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Bitrate, maxBitrate: '0' } });
  mocks.media.probe.mockResolvedValue(probeStub.videoStream40Mbps);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(payload);

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should not scale resolution if no target resolution', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream2160p });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.All, targetResolution: 'original' },
});
@ -2089,7 +2110,7 @@ describe(MediaService.name, () => {
});
it('should scale horizontally when video is horizontal', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream2160p });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2104,7 +2125,7 @@ describe(MediaService.name, () => {
});
it('should scale vertically when video is vertical', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVertical2160p });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2119,7 +2140,7 @@ describe(MediaService.name, () => {
});
it('should always scale video if height is uneven', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamOddHeight);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamOddHeight });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.All, targetResolution: 'original' },
});
@ -2136,7 +2157,7 @@ describe(MediaService.name, () => {
});
it('should always scale video if width is uneven', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamOddWidth);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamOddWidth });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.All, targetResolution: 'original' },
});
@ -2153,7 +2174,7 @@ describe(MediaService.name, () => {
});
it('should copy video stream when video matches target', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.Hevc, acceptedAudioCodecs: [AudioCodec.Aac] },
});
@ -2170,7 +2191,7 @@ describe(MediaService.name, () => {
});
it('should not include hevc tag when target is hevc and video stream is copied from a different codec', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamH264);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamH264 });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
targetVideoCodec: VideoCodec.Hevc,
@ -2191,7 +2212,7 @@ describe(MediaService.name, () => {
});
it('should include hevc tag when target is hevc and copying hevc video stream', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
targetVideoCodec: VideoCodec.Hevc,
@ -2212,7 +2233,7 @@ describe(MediaService.name, () => {
});
it('should copy audio stream when audio matches target', async () => {
mocks.media.probe.mockResolvedValue(probeStub.audioStreamAac);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.audioStreamAac });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2227,7 +2248,7 @@ describe(MediaService.name, () => {
});
it('should remux when input is not an accepted container', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamAvi);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamAvi });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@ -2241,7 +2262,7 @@ describe(MediaService.name, () => {
});
it('should throw an exception if transcode value is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream2160p });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: 'invalid' as any } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
@ -2249,35 +2270,34 @@ describe(MediaService.name, () => {
});
it('should not transcode if transcoding is disabled', async () => {
  // TranscodePolicy.Disabled short-circuits regardless of stream contents.
  const payload = { ...asset, ...probeStub.videoStream2160p };
  mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(payload);
  mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Disabled } });

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should not remux when input is not an accepted container and transcoding is disabled', async () => {
  // Even an unaccepted container (matroska) must be left alone when disabled.
  const payload = { ...asset, ...probeStub.matroskaContainer };
  mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(payload);
  mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Disabled } });

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should not transcode if target codec is invalid', async () => {
  // An unrecognized target codec should cause a graceful skip, not a crash.
  const payload = { ...asset, ...probeStub.videoStream2160p };
  mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
  mocks.assetJob.getForVideoConversion.mockResolvedValue(payload);
  mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: 'invalid' as any } });

  await sut.handleVideoConversion({ id: 'video-id' });

  expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should delete existing transcode if current policy does not require transcoding', async () => {
const asset = AssetFactory.from({ type: AssetType.Video })
const localAsset = AssetFactory.from({ type: AssetType.Video })
.file({ type: AssetFileType.EncodedVideo, path: '/encoded/video/path.mp4' })
.build();
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Disabled } });
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...localAsset, ...probeStub.videoStream2160p });
await sut.handleVideoConversion({ id: asset.id });
await sut.handleVideoConversion({ id: localAsset.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalledWith({
@ -2287,7 +2307,7 @@ describe(MediaService.name, () => {
});
it('should set max bitrate if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500k' } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2302,7 +2322,7 @@ describe(MediaService.name, () => {
});
it('should default max bitrate to kbps if no unit is provided', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500' } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2317,7 +2337,7 @@ describe(MediaService.name, () => {
});
it('should transcode in two passes for h264/h265 when enabled and max bitrate is above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true, maxBitrate: '4500k' } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2341,7 +2361,7 @@ describe(MediaService.name, () => {
});
it('should fallback to one pass for h264/h265 if two-pass is enabled but no max bitrate is set', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2356,7 +2376,7 @@ describe(MediaService.name, () => {
});
it('should transcode by bitrate in two passes for vp9 when two pass mode and max bitrate are enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
maxBitrate: '4500k',
@ -2377,7 +2397,7 @@ describe(MediaService.name, () => {
});
it('should transcode by crf in two passes for vp9 when two pass mode is enabled and max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
maxBitrate: '0',
@ -2398,7 +2418,7 @@ describe(MediaService.name, () => {
});
it('should configure preset for vp9', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Vp9, preset: 'slow' } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2413,7 +2433,7 @@ describe(MediaService.name, () => {
});
it('should not configure preset for vp9 if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { preset: 'invalid', targetVideoCodec: VideoCodec.Vp9 } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2428,7 +2448,7 @@ describe(MediaService.name, () => {
});
it('should configure threads if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Vp9, threads: 2 } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2443,7 +2463,7 @@ describe(MediaService.name, () => {
});
it('should disable thread pooling for h264 if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1 } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2458,7 +2478,7 @@ describe(MediaService.name, () => {
});
it('should omit thread flags for h264 if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0 } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2473,7 +2493,7 @@ describe(MediaService.name, () => {
});
it('should disable thread pooling for hevc if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1, targetVideoCodec: VideoCodec.Hevc } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2495,7 +2515,7 @@ describe(MediaService.name, () => {
});
it('should omit thread flags for hevc if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0, targetVideoCodec: VideoCodec.Hevc } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2510,7 +2530,7 @@ describe(MediaService.name, () => {
});
it('should use av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1 } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2544,7 +2564,7 @@ describe(MediaService.name, () => {
});
it('should map `veryslow` preset to 4 for av1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1, preset: 'veryslow' } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2559,7 +2579,7 @@ describe(MediaService.name, () => {
});
it('should set max bitrate for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1, maxBitrate: '2M' } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2574,7 +2594,7 @@ describe(MediaService.name, () => {
});
it('should set threads for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.Av1, threads: 4 } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2589,7 +2609,7 @@ describe(MediaService.name, () => {
});
it('should set both bitrate and threads for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.Av1, threads: 4, maxBitrate: '2M' },
});
@ -2606,7 +2626,7 @@ describe(MediaService.name, () => {
});
it('should skip transcoding for audioless videos with optimal policy if video codec is correct', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.noAudioStreams });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
targetVideoCodec: VideoCodec.Hevc,
@ -2635,15 +2655,15 @@ describe(MediaService.name, () => {
});
});
it.each(acceptedCodecs)('should skip $codec', async ({ probeStub }) => {
mocks.media.probe.mockResolvedValue(probeStub);
it.each(acceptedCodecs)('should skip $codec', async ({ probeStub: stub }) => {
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...stub });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
});
it('should use libopus audio encoder when target audio is opus', async () => {
mocks.media.probe.mockResolvedValue(probeStub.audioStreamAac);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.audioStreamAac });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
targetAudioCodec: AudioCodec.Opus,
@ -2663,7 +2683,7 @@ describe(MediaService.name, () => {
});
it('should fail if hwaccel is enabled for an unsupported codec', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, targetVideoCodec: VideoCodec.Vp9 },
});
@ -2672,14 +2692,14 @@ describe(MediaService.name, () => {
});
it('should fail if hwaccel option is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: 'invalid' as any } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should set options for nvenc', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2725,7 +2745,7 @@ describe(MediaService.name, () => {
});
it('should set two pass options for nvenc when enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
accel: TranscodeHardwareAcceleration.Nvenc,
@ -2746,7 +2766,7 @@ describe(MediaService.name, () => {
});
it('should set vbr options for nvenc when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, maxBitrate: '10000k' },
});
@ -2763,7 +2783,7 @@ describe(MediaService.name, () => {
});
it('should set cq options for nvenc when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, maxBitrate: '10000k' },
});
@ -2780,7 +2800,7 @@ describe(MediaService.name, () => {
});
it('should omit preset for nvenc if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, preset: 'invalid' },
});
@ -2797,7 +2817,7 @@ describe(MediaService.name, () => {
});
it('should ignore two pass for nvenc if max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -2812,7 +2832,7 @@ describe(MediaService.name, () => {
});
it('should use hardware decoding for nvenc if enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, accelDecode: true },
});
@ -2837,7 +2857,7 @@ describe(MediaService.name, () => {
});
it('should use hardware tone-mapping for nvenc if hardware decoding is enabled and should tone map', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, accelDecode: true },
});
@ -2858,7 +2878,7 @@ describe(MediaService.name, () => {
});
it('should set format to nv12 for nvenc if input is not yuv420p', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream10Bit });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Nvenc, accelDecode: true },
});
@ -2875,7 +2895,7 @@ describe(MediaService.name, () => {
});
it('should set options for qsv', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, maxBitrate: '10000k' },
});
@ -2928,7 +2948,7 @@ describe(MediaService.name, () => {
});
it('should set options for qsv with custom dri node', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
accel: TranscodeHardwareAcceleration.Qsv,
@ -2954,7 +2974,7 @@ describe(MediaService.name, () => {
});
it('should omit preset for qsv if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, preset: 'invalid' },
});
@ -2976,7 +2996,7 @@ describe(MediaService.name, () => {
});
it('should set low power mode for qsv if target video codec is vp9', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, targetVideoCodec: VideoCodec.Vp9 },
});
@ -2999,7 +3019,7 @@ describe(MediaService.name, () => {
it('should fail for qsv if no hw devices', async () => {
sut.videoInterfaces = { dri: [], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
@ -3009,7 +3029,7 @@ describe(MediaService.name, () => {
it('should prefer higher index renderD* device for qsv', async () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3029,7 +3049,7 @@ describe(MediaService.name, () => {
});
it('should use hardware decoding for qsv if enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, accelDecode: true },
});
@ -3060,7 +3080,7 @@ describe(MediaService.name, () => {
});
it('should use hardware tone-mapping for qsv if hardware decoding is enabled and should tone map', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, accelDecode: true },
});
@ -3093,7 +3113,7 @@ describe(MediaService.name, () => {
it('should use preferred device for qsv when hardware decoding', async () => {
sut.videoInterfaces = { dri: ['renderD128', 'renderD129', 'renderD130'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, accelDecode: true, preferredHwDevice: 'renderD129' },
});
@ -3111,7 +3131,7 @@ describe(MediaService.name, () => {
});
it('should set format to nv12 for qsv if input is not yuv420p', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream10Bit });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv, accelDecode: true },
});
@ -3139,7 +3159,7 @@ describe(MediaService.name, () => {
});
it('should set options for vaapi', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3182,7 +3202,7 @@ describe(MediaService.name, () => {
});
it('should set vbr options for vaapi when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, maxBitrate: '10000k' },
});
@ -3215,7 +3235,7 @@ describe(MediaService.name, () => {
});
it('should set cq options for vaapi when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3246,7 +3266,7 @@ describe(MediaService.name, () => {
});
it('should omit preset for vaapi if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, preset: 'invalid' },
});
@ -3269,7 +3289,7 @@ describe(MediaService.name, () => {
it('should prefer higher index renderD* device for vaapi', async () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3290,7 +3310,7 @@ describe(MediaService.name, () => {
it('should select specific gpu node if selected', async () => {
sut.videoInterfaces = { dri: ['renderD129', 'card1', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, preferredHwDevice: '/dev/dri/renderD128' },
});
@ -3312,7 +3332,7 @@ describe(MediaService.name, () => {
});
it('should use hardware decoding for vaapi if enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true },
});
@ -3341,7 +3361,7 @@ describe(MediaService.name, () => {
});
it('should use hardware tone-mapping for vaapi if hardware decoding is enabled and should tone map', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true },
});
@ -3371,7 +3391,7 @@ describe(MediaService.name, () => {
});
it('should set format to nv12 for vaapi if input is not yuv420p', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream10Bit });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true },
});
@ -3398,7 +3418,7 @@ describe(MediaService.name, () => {
it('should use preferred device for vaapi when hardware decoding', async () => {
sut.videoInterfaces = { dri: ['renderD128', 'renderD129', 'renderD130'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true, preferredHwDevice: 'renderD129' },
});
@ -3416,7 +3436,7 @@ describe(MediaService.name, () => {
});
it('should fallback to hw encoding and sw decoding if hw transcoding fails and hw decoding is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true },
});
@ -3440,7 +3460,7 @@ describe(MediaService.name, () => {
});
it('should fallback to sw decoding if fallback to sw decoding + hw encoding fails', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi, accelDecode: true },
});
@ -3460,7 +3480,7 @@ describe(MediaService.name, () => {
});
it('should fallback to sw transcoding if hw transcoding fails and hw decoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } });
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: 'video-id' });
@ -3478,14 +3498,14 @@ describe(MediaService.name, () => {
it('should fail for vaapi if no hw devices', async () => {
sut.videoInterfaces = { dri: [], mali: true };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Vaapi } });
await expect(sut.handleVideoConversion({ id: 'video-id' })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should set options for rkmpp', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true },
});
@ -3535,7 +3555,7 @@ describe(MediaService.name, () => {
});
it('should set vbr options for rkmpp when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamVp9 });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: {
accel: TranscodeHardwareAcceleration.Rkmpp,
@ -3573,7 +3593,7 @@ describe(MediaService.name, () => {
});
it('should set cqp options for rkmpp when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' },
});
@ -3606,7 +3626,7 @@ describe(MediaService.name, () => {
});
it('should set OpenCL tonemapping options for rkmpp when OpenCL is available', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' },
});
@ -3635,7 +3655,7 @@ describe(MediaService.name, () => {
it('should set hardware decoding options for rkmpp when hardware decoding is enabled with no OpenCL on non-HDR file', async () => {
sut.videoInterfaces = { dri: ['renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.noAudioStreams });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' },
});
@ -3661,7 +3681,7 @@ describe(MediaService.name, () => {
});
it('should use software decoding and tone-mapping if hardware decoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: false, crf: 30, maxBitrate: '0' },
});
@ -3683,7 +3703,7 @@ describe(MediaService.name, () => {
it('should use software tone-mapping if opencl is not available', async () => {
sut.videoInterfaces = { dri: ['renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHardwareAcceleration.Rkmpp, accelDecode: true, crf: 30, maxBitrate: '0' },
});
@ -3704,7 +3724,7 @@ describe(MediaService.name, () => {
});
it('should tonemap when policy is required and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Required } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3726,7 +3746,7 @@ describe(MediaService.name, () => {
});
it('should tonemap when policy is optimal and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStreamHDR });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Optimal } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3748,7 +3768,7 @@ describe(MediaService.name, () => {
});
it('should transcode when policy is required and video is not yuv420p', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream10Bit });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Required } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3763,7 +3783,7 @@ describe(MediaService.name, () => {
});
it('should convert to yuv420p when scaling without tone-mapping', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream4K10Bit);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream4K10Bit });
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.Required } });
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@ -3778,38 +3798,31 @@ describe(MediaService.name, () => {
});
it('should count frames for progress when log level is debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.matroskaContainer });
mocks.logger.isLevelEnabled.mockReturnValue(true);
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.probe).toHaveBeenCalledWith('/original/path.ext', { countFrames: true });
expect(mocks.media.transcode).toHaveBeenCalledWith('/original/path.ext', expect.any(String), {
inputOptions: expect.any(Array),
outputOptions: expect.any(Array),
twoPass: false,
progress: {
frameCount: probeStub.videoStream2160p.videoStreams[0].frameCount,
frameCount: probeStub.videoStream2160p.videoStream!.frameCount,
percentInterval: expect.any(Number),
},
});
});
it('should not count frames for progress when log level is not debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.videoStream2160p });
mocks.logger.isLevelEnabled.mockReturnValue(false);
await sut.handleVideoConversion({ id: 'video-id' });
expect(mocks.media.probe).toHaveBeenCalledWith('/original/path.ext', { countFrames: false });
});
it('should process unknown audio stream', async () => {
const asset = AssetFactory.create({
type: AssetType.Video,
originalPath: '/original/path.ext',
});
mocks.media.probe.mockResolvedValue(probeStub.audioStreamUnknown);
mocks.asset.getByIds.mockResolvedValue([asset]);
mocks.assetJob.getForVideoConversion.mockResolvedValue({ ...asset, ...probeStub.audioStreamUnknown });
await sut.handleVideoConversion({ id: asset.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(

View File

@ -15,7 +15,6 @@ import {
ImageFormat,
JobName,
JobStatus,
LogLevel,
QueueName,
RawExtractedFormat,
StorageFolder,
@ -506,10 +505,7 @@ export class MediaService extends BaseService {
};
}
private async generateVideoThumbnails(
asset: ThumbnailPathEntity & { originalPath: string },
{ ffmpeg, image }: SystemConfig,
) {
private async generateVideoThumbnails(asset: ThumbnailAsset, { ffmpeg, image }: SystemConfig) {
const previewFile = this.getImageFile(asset, {
fileType: AssetFileType.Preview,
format: image.preview.format,
@ -526,22 +522,15 @@ export class MediaService extends BaseService {
});
this.storageCore.ensureFolders(previewFile.path);
const { format, audioStreams, videoStreams } = await this.mediaRepository.probe(asset.originalPath);
const mainVideoStream = this.getMainStream(videoStreams);
if (!mainVideoStream) {
throw new Error(`No video streams found for asset ${asset.id}`);
const { videoStream, format } = asset;
if (!videoStream || !format) {
throw new Error(`Missing video metadata for asset ${asset.id}`);
}
const mainAudioStream = this.getMainStream(audioStreams);
const previewConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.preview.size.toString() });
const thumbnailConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, mainVideoStream, mainAudioStream, format);
const thumbnailOptions = thumbnailConfig.getCommand(
TranscodeTarget.Video,
mainVideoStream,
mainAudioStream,
format,
);
const thumbConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, videoStream, undefined, format ?? undefined);
const thumbnailOptions = thumbConfig.getCommand(TranscodeTarget.Video, videoStream, undefined, format ?? undefined);
await this.mediaRepository.transcode(asset.originalPath, previewFile.path, previewOptions);
await this.mediaRepository.transcode(asset.originalPath, thumbnailFile.path, thumbnailOptions);
@ -554,7 +543,7 @@ export class MediaService extends BaseService {
return {
files: [previewFile, thumbnailFile],
thumbhash,
fullsizeDimensions: { width: mainVideoStream.width, height: mainVideoStream.height },
fullsizeDimensions: { width: videoStream.width, height: videoStream.height },
};
}
@ -588,17 +577,14 @@ export class MediaService extends BaseService {
const output = StorageCore.getEncodedVideoPath(asset);
this.storageCore.ensureFolders(output);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs
});
const videoStream = this.getMainStream(videoStreams);
const audioStream = this.getMainStream(audioStreams);
if (!videoStream || !format.formatName) {
const { videoStream, format } = asset;
const audioStream = asset.audioStream ?? undefined;
if (!videoStream || !format) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: missing metadata; re-run extraction first`);
return JobStatus.Failed;
}
if (!videoStream.height || !videoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video dimensions`);
return JobStatus.Failed;
}
@ -667,12 +653,6 @@ export class MediaService extends BaseService {
return JobStatus.Success;
}
private getMainStream<T extends VideoStreamInfo | AudioStreamInfo>(streams: T[]): T {
return streams
.filter((stream) => stream.codecName !== 'unknown')
.toSorted((stream1, stream2) => stream2.bitrate - stream1.bitrate)[0];
}
private getTranscodeTarget(
config: SystemConfigFFmpegDto,
videoStream: VideoStreamInfo,

View File

@ -18,7 +18,7 @@ import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { AssetFactory } from 'test/factories/asset.factory';
import { PersonFactory } from 'test/factories/person.factory';
import { probeStub } from 'test/fixtures/media.stub';
import { videoInfoStub } from 'test/fixtures/media.stub';
import { tagStub } from 'test/fixtures/tag.stub';
import { getForMetadataExtraction, getForSidecarWrite } from 'test/mappers';
import { factory } from 'test/small.factory';
@ -59,6 +59,15 @@ const makeFaceTags = (face: Partial<{ Name: string }> = {}, orientation?: Immich
},
});
const emptyPackets = {
totalDuration: 0,
packetCount: 0,
outputFrames: 0,
keyframePts: [],
keyframeAccDuration: [],
keyframeOwnDuration: [],
};
describe(MetadataService.name, () => {
let sut: MetadataService;
let mocks: ServiceMocks;
@ -183,9 +192,12 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ dateTimeOriginal: sidecarDate }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
id: asset.id,
@ -212,8 +224,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileModifiedAt }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ dateTimeOriginal: fileModifiedAt }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@ -242,8 +256,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileCreatedAt }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ dateTimeOriginal: fileCreatedAt }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@ -265,9 +281,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith(
@ -290,9 +308,12 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ iso: 160 }), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ iso: 160 }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
duration: null,
@ -323,8 +344,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: null, state: null, country: null }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ city: null, state: null, country: null }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@ -353,8 +376,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@ -378,8 +403,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ latitude: null, longitude: null }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ latitude: null, longitude: null }),
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -585,7 +612,7 @@ describe(MetadataService.name, () => {
it('should not apply motion photos if asset is video', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.media.probe.mockResolvedValue(videoInfoStub.matroskaContainer);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
@ -611,15 +638,142 @@ describe(MetadataService.name, () => {
it('should extract the correct video orientation', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamVertical2160p);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ orientation: ExifOrientation.Rotate270CW.toString() }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ orientation: ExifOrientation.Rotate270CW.toString() }),
lockedPropertiesBehavior: 'skip',
}),
);
});
it('should persist CICP smallints and profile/level for HDR10 video', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ fps: 59.94 }),
video: expect.objectContaining({
codecName: 'hevc',
profile: 2,
level: 153,
pixelFormat: 'yuv420p10le',
colorPrimaries: 9,
colorTransfer: 16,
colorMatrix: 9,
dvProfile: undefined,
}),
}),
);
});
it('should persist Dolby Vision fields', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamDolbyVision);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
video: expect.objectContaining({
dvProfile: 8,
dvLevel: 10,
dvBlSignalCompatibilityId: 4,
colorTransfer: 18, // ARIB_STD_B67
}),
}),
);
});
it('should persist packet-derived HLS fields', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue({
totalDuration: 12_080,
packetCount: 1148,
outputFrames: 1149,
keyframePts: [-590, 10, 611, 1211],
keyframeAccDuration: [10, 610, 6110, 12_080],
keyframeOwnDuration: [10, 10, 10, 10],
});
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
video: expect.objectContaining({ timeBase: 600 }),
keyframes: expect.objectContaining({
totalDuration: 12_080,
packetCount: 1148,
outputFrames: 1149,
pts: [-590, 10, 611, 1211],
accDuration: [10, 610, 6110, 12_080],
ownDuration: [10, 10, 10, 10],
}),
}),
);
});
it('should omit the keyframe row when the probe returns no keyframes', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.not.objectContaining({ keyframes: expect.anything() }));
});
it('should prefer ffprobe frameRate over exiftool VideoFrameRate', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({ VideoFrameRate: '30' });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ fps: 59.94 }),
lockedPropertiesBehavior: 'skip',
}),
);
});
it('should not insert audio/video/keyframe rows for image assets', async () => {
const asset = AssetFactory.create({ type: AssetType.Image });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.probePackets).not.toHaveBeenCalled();
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.not.objectContaining({
audio: expect.anything(),
video: expect.anything(),
keyframes: expect.anything(),
}),
);
});
@ -909,7 +1063,8 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{
expect.objectContaining({
exif: {
assetId: asset.id,
bitsPerSample: expect.any(Number),
autoStackId: null,
@ -941,7 +1096,8 @@ describe(MetadataService.name, () => {
city: null,
tags: ['parent/child'],
},
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
@ -975,9 +1131,11 @@ describe(MetadataService.name, () => {
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
timeZone: 'UTC+0',
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -985,9 +1143,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 6.21,
},
});
@ -1008,9 +1166,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 6.21,
},
});
@ -1030,9 +1188,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 0,
},
});
@ -1053,9 +1211,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 604_800,
},
});
@ -1111,9 +1269,9 @@ describe(MetadataService.name, () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mockReadTags({ Duration: 123 }, {});
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 456,
},
});
@ -1132,18 +1290,22 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
description: '',
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
mockReadTags({ ImageDescription: ' my\n description' });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
description: 'my\n description',
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -1155,9 +1317,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
description: '1000',
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -1388,9 +1552,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
modifyDate: expect.any(Date),
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -1402,9 +1568,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
rating: null,
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -1416,9 +1584,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
rating: 5,
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -1430,9 +1600,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
rating: null,
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -1444,9 +1616,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
rating: -1,
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});
@ -1466,7 +1640,7 @@ describe(MetadataService.name, () => {
it('should handle not finding a match', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamVertical2160p);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mockReadTags({ ContentIdentifier: 'CID' });
@ -1578,9 +1752,12 @@ describe(MetadataService.name, () => {
mockReadTags(exif);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining(expected), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining(expected),
lockedPropertiesBehavior: 'skip',
}),
);
});
it.each([
@ -1605,9 +1782,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({
lensModel: expected,
}),
{ lockedPropertiesBehavior: 'skip' },
lockedPropertiesBehavior: 'skip',
}),
);
});

View File

@ -58,6 +58,8 @@ const EXIF_DATE_TAGS: Array<keyof ImmichTags> = [
'SourceImageCreateTime' as keyof ImmichTags,
];
/** Normalizes an optional array: non-empty arrays pass through unchanged, everything else becomes null. */
const nullIfEmpty = <T>(value: T[] | undefined): T[] | null => {
  if (!value || value.length === 0) {
    return null;
  }
  return value;
};
export function firstDateTime(tags: ImmichTags) {
for (const tag of EXIF_DATE_TAGS) {
const tagValue = tags?.[tag];
@ -243,10 +245,11 @@ export class MetadataService extends BaseService {
return;
}
const [exifTags, stats] = await Promise.all([
const [exifResult, stats] = await Promise.all([
this.getExifTags(asset),
this.storageRepository.stat(asset.originalPath),
]);
const { tags: exifTags, audio, video, packets, format } = exifResult;
this.logger.verbose('Exif Tags', exifTags);
const dates = this.getDates(asset, exifTags, stats);
@ -287,14 +290,15 @@ export class MetadataService extends BaseService {
orientation: validate(exifTags.Orientation)?.toString() ?? null,
projectionType: exifTags.ProjectionType ? String(exifTags.ProjectionType).toUpperCase() : null,
bitsPerSample: this.getBitsPerSample(exifTags),
colorspace: exifTags.ColorSpace === undefined ? null : String(exifTags.ColorSpace),
colorspace:
asset.type !== AssetType.Image || exifTags.ColorSpace === undefined ? null : String(exifTags.ColorSpace),
// camera
make:
exifTags.Make ?? exifTags.Device?.Manufacturer ?? exifTags.AndroidMake ?? (exifTags.DeviceManufacturer || null),
model:
exifTags.Model ?? exifTags.Device?.ModelName ?? exifTags.AndroidModel ?? (exifTags.DeviceModelName || null),
fps: validate(Number.parseFloat(exifTags.VideoFrameRate!)),
fps: video?.frameRate ?? validate(Number.parseFloat(exifTags.VideoFrameRate!)),
iso: validate(exifTags.ISO) as number,
exposureTime: exifTags.ExposureTime ?? null,
lensModel: getLensModel(exifTags),
@ -313,6 +317,53 @@ export class MetadataService extends BaseService {
tags: tags.length > 0 ? tags : null,
};
const audioData =
format && audio?.codecName
? {
assetId: asset.id,
bitrate: audio.bitrate,
index: audio.index,
profile: audio.profile,
codecName: audio.codecName,
}
: undefined;
const videoData =
format?.formatName && format?.formatLongName && video?.codecName
? {
assetId: asset.id,
bitrate: video.bitrate,
frameCount: video.frameCount,
timeBase: video.timeBase,
index: video.index,
profile: video.profile,
level: video.level,
colorPrimaries: video.colorPrimaries,
colorTransfer: video.colorTransfer,
colorMatrix: video.colorMatrix,
dvProfile: video.dvProfile,
dvLevel: video.dvLevel,
dvBlSignalCompatibilityId: video.dvBlSignalCompatibilityId,
codecName: video.codecName,
formatName: format.formatName,
formatLongName: format.formatLongName,
pixelFormat: video.pixelFormat,
}
: undefined;
const keyframeData =
packets && packets.keyframePts.length > 0
? {
assetId: asset.id,
totalDuration: packets.totalDuration,
packetCount: packets.packetCount,
outputFrames: packets.outputFrames,
pts: packets.keyframePts,
accDuration: packets.keyframeAccDuration,
ownDuration: packets.keyframeOwnDuration,
}
: undefined;
const isSidewards = exifTags.Orientation && this.isOrientationSidewards(exifTags.Orientation);
const assetWidth = isSidewards ? validate(height) : validate(width);
const assetHeight = isSidewards ? validate(width) : validate(height);
@ -333,7 +384,13 @@ export class MetadataService extends BaseService {
height: !asset.isEdited || asset.height == null ? assetHeight : undefined,
}),
async () => {
await this.assetRepository.upsertExif(exifData, { lockedPropertiesBehavior: 'skip' });
await this.assetRepository.upsertExif({
exif: exifData,
audio: audioData,
video: videoData,
keyframes: keyframeData,
lockedPropertiesBehavior: 'skip',
});
await this.applyTagList(asset);
},
);
@ -523,13 +580,14 @@ export class MetadataService extends BaseService {
return { width, height };
}
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }) {
const { sidecarFile } = getAssetFiles(asset.files);
const shouldProbe = asset.type === AssetType.Video || asset.originalPath.toLowerCase().endsWith('.gif');
const [mediaTags, sidecarTags, videoTags] = await Promise.all([
const [mediaTags, sidecarTags, videoResult] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
shouldProbe ? this.getVideoTags(asset.originalPath) : null,
]);
// prefer dates from sidecar tags
@ -554,14 +612,20 @@ export class MetadataService extends BaseService {
// prefer duration from video tags
// don't save duration if asset is definitely not an animated image (see e.g. CR3 with Duration: 1s)
if (videoTags || !mimeTypes.isPossiblyAnimatedImage(asset.originalPath)) {
if (videoResult || !mimeTypes.isPossiblyAnimatedImage(asset.originalPath)) {
delete mediaTags.Duration;
}
// never use duration from sidecar
delete sidecarTags?.Duration;
return { ...mediaTags, ...videoTags, ...sidecarTags };
return {
tags: { ...mediaTags, ...videoResult?.tags, ...sidecarTags },
audio: videoResult?.audio,
video: videoResult?.video,
packets: videoResult?.packets,
format: videoResult?.format ?? null,
};
}
private getTagList(exifTags: ImmichTags): string[] {
@ -1016,20 +1080,25 @@ export class MetadataService extends BaseService {
}
private async getVideoTags(originalPath: string) {
const { videoStreams, format } = await this.mediaRepository.probe(originalPath);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(originalPath);
const video = videoStreams[0];
const audio = audioStreams[0];
const packets =
video && video.timeBase
? await this.mediaRepository.probePackets(originalPath, video.index, video.timeBase, format.duration)
: undefined;
const tags: Pick<ImmichTags, 'Duration' | 'Orientation' | 'ImageWidth' | 'ImageHeight'> = {};
if (videoStreams[0]) {
// Set video dimensions
if (videoStreams[0].width) {
tags.ImageWidth = videoStreams[0].width;
if (video) {
if (video.width) {
tags.ImageWidth = video.width;
}
if (videoStreams[0].height) {
tags.ImageHeight = videoStreams[0].height;
if (video.height) {
tags.ImageHeight = video.height;
}
switch (videoStreams[0].rotation) {
switch (video.rotation) {
case -90: {
tags.Orientation = ExifOrientation.Rotate90CW;
break;
@ -1053,6 +1122,6 @@ export class MetadataService extends BaseService {
tags.Duration = format.duration;
}
return tags;
return { tags, audio, video, packets, format };
}
}

View File

@ -206,16 +206,22 @@ describe(TagService.name, () => {
count: 6,
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-3', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-3', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.tag.upsertAssetIds).toHaveBeenCalledWith([
{ tagId: 'tag-1', assetId: 'asset-1' },
@ -255,12 +261,16 @@ describe(TagService.name, () => {
]);
expect(mocks.asset.upsertExif).not.toHaveBeenCalledWith(
{ assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.tag.getAssetIds).toHaveBeenCalledWith('tag-1', ['asset-1', 'asset-2']);
expect(mocks.tag.addAssetIds).toHaveBeenCalledWith('tag-1', ['asset-2']);

View File

@ -152,7 +152,8 @@ export class TagService extends BaseService {
private async updateTags(assetId: string) {
const { tags } = await this.assetRepository.getForUpdateTags(assetId);
await this.assetRepository.upsertExif(updateLockedColumns({ assetId, tags: tags.map(({ value }) => value) }), {
await this.assetRepository.upsertExif({
exif: updateLockedColumns({ assetId, tags: tags.map(({ value }) => value) }),
lockedPropertiesBehavior: 'append',
});
}

View File

@ -7,9 +7,18 @@ import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
import {
AacProfile,
AssetOrder,
AssetType,
Av1Profile,
ColorMatrix,
ColorPrimaries,
ColorTransfer,
DvProfile,
DvSignalCompatibility,
ExifOrientation,
H264Profile,
HevcProfile,
ImageFormat,
JobName,
MemoryType,
@ -81,21 +90,44 @@ export interface VideoStreamInfo {
width: number;
rotation: number;
codecName?: string;
profile?: H264Profile | HevcProfile | Av1Profile;
level?: number;
frameCount: number;
isHDR: boolean;
frameRate?: number;
timeBase?: number;
bitrate: number;
pixelFormat: string;
colorPrimaries?: string;
colorSpace?: string;
colorTransfer?: string;
colorPrimaries: ColorPrimaries;
colorMatrix: ColorMatrix;
colorTransfer: ColorTransfer;
dvProfile?: DvProfile;
dvLevel?: number;
dvBlSignalCompatibilityId?: DvSignalCompatibility;
}
/** Metadata for a single audio stream as selected from a probed media file. */
export interface AudioStreamInfo {
  /** Stream index within the container. */
  index: number;
  /** Codec short name (e.g. 'aac', 'mp3', 'opus'); absent when the codec is unidentified. */
  codecName?: string;
  /** MPEG-4 Audio Object Type — presumably only populated for AAC streams; confirm in the probe implementation. */
  profile?: AacProfile;
  /** Stream bitrate — assumed bits per second; verify against the probe implementation. */
  bitrate: number;
}

/** Packet-derived video data needed for accurate HLS playlists. */
export interface VideoPacketInfo {
  /** Sum of source packet duration across all packets (includes discard). */
  totalDuration: number;
  /** Post-discard packet count. */
  packetCount: number;
  /** Output CFR frame count at `packetCount / format.duration`. */
  outputFrames: number;
  /** All keyframe PTS in source ticks, including pre-roll discard keyframes. */
  keyframePts: number[];
  /** Cumulative packet duration through each keyframe, inclusive — index-aligned with `keyframePts`. */
  keyframeAccDuration: number[];
  /** Each keyframe's own packet duration (needed for VFR) — index-aligned with `keyframePts`. */
  keyframeOwnDuration: number[];
}
export interface VideoFormat {
formatName?: string;
formatLongName?: string;
@ -144,7 +176,7 @@ export interface VideoCodecSWConfig {
getCommand(
target: TranscodeTarget,
videoStream: VideoStreamInfo,
audioStream: AudioStreamInfo,
audioStream?: AudioStreamInfo,
format?: VideoFormat,
): TranscodeCommand;
}

View File

@ -17,11 +17,11 @@ import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
import { Notice, PostgresError } from 'postgres';
import { columns, lockableProperties, LockableProperty, Person } from 'src/database';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { AssetFileType, AssetVisibility, DatabaseExtension } from 'src/enum';
import { AssetFileType, AssetVisibility, DatabaseExtension, ExifOrientation } from 'src/enum';
import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
import { DB } from 'src/schema';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { VectorExtension } from 'src/types';
import { AudioStreamInfo, VectorExtension, VideoFormat, VideoStreamInfo } from 'src/types';
export const getKyselyConfig = (connection: DatabaseConnectionParams): KyselyConfig => {
return {
@ -99,6 +99,87 @@ export function withExifInner<O>(qb: SelectQueryBuilder<DB, 'asset', O>) {
.$narrowType<{ exifInfo: NotNull }>();
}
/** One-row FROM target (`select 1`) used to build correlated scalar subqueries. */
export const dummy = sql`(select 1)`.as('dummy');
/**
 * Adds `videoStream` and `format` JSON columns (and, when `withAudio` is set, an
 * `audioStream` column) to an asset query, hydrated from the `asset_video` /
 * `asset_audio` tables plus `asset_exif` for dimensions, fps, and orientation.
 */
export function withAudioVideo<O>(qb: SelectQueryBuilder<DB, 'asset' | 'asset_exif', O>, withAudio = false) {
  return qb
    .$if(withAudio, (qb) =>
      // Correlated subquery against asset_audio; yields null when the asset has no audio row.
      qb.select((eb) =>
        jsonObjectFrom(
          eb
            .selectFrom('asset_audio')
            .select(['asset_audio.index', 'asset_audio.codecName', 'asset_audio.profile', 'asset_audio.bitrate'])
            .whereRef('asset_audio.assetId', '=', 'asset.id'),
        )
          .$castTo<AudioStreamInfo | null>()
          .as('audioStream'),
      ),
    )
    .leftJoin('asset_video', 'asset_video.assetId', 'asset.id')
    .select((eb) =>
      jsonObjectFrom(
        eb
          .selectFrom(dummy)
          // Emit NULL (rather than an all-null object) when the left join found no asset_video row.
          .where('asset_video.assetId', 'is not', sql.lit(null))
          .select((eb) => [
            'asset_video.index',
            'asset_video.codecName',
            'asset_video.profile',
            'asset_video.level',
            'asset_video.bitrate',
            // Dimensions and fps come from EXIF rather than the video row.
            'asset_exif.exifImageWidth as width',
            'asset_exif.exifImageHeight as height',
            'asset_video.pixelFormat',
            'asset_video.frameCount',
            'asset_exif.fps as frameRate',
            'asset_video.timeBase',
            // Map stored EXIF orientation codes back to rotation degrees; inverse of the
            // rotation -> Orientation mapping done during metadata extraction (Rotate90CW <- -90).
            eb
              .case()
              .when('asset_exif.orientation', '=', sql.lit(ExifOrientation.Rotate90CW.toString()))
              .then(sql.lit(-90))
              .when('asset_exif.orientation', '=', sql.lit(ExifOrientation.Rotate270CW.toString()))
              .then(sql.lit(90))
              .when('asset_exif.orientation', '=', sql.lit(ExifOrientation.Rotate180.toString()))
              .then(sql.lit(180))
              .else(0)
              .end()
              .as('rotation'),
            'asset_video.colorPrimaries',
            'asset_video.colorMatrix',
            'asset_video.colorTransfer',
            'asset_video.dvProfile',
            'asset_video.dvLevel',
            'asset_video.dvBlSignalCompatibilityId',
          ])
          .$castTo<VideoStreamInfo | null>(),
      ).as('videoStream'),
    )
    .select((eb) =>
      jsonObjectFrom(
        eb
          .selectFrom(dummy)
          .where('asset_video.assetId', 'is not', sql.lit(null))
          .select((eb) => [
            'asset_video.formatName',
            'asset_video.formatLongName',
            // Parse the asset's HH:MM:SS.mmm duration string; non-matching strings fall back to 0.
            // NOTE(review): this yields milliseconds — confirm consumers of VideoFormat.duration
            // expect ms here, since probe-derived format.duration elsewhere appears to be seconds.
            // TODO: simplify after https://github.com/immich-app/immich/pull/28003
            eb
              .case()
              .when('asset.duration', '~', sql<string>`'^\\d{2}:\\d{2}:\\d{2}\\.\\d{3}$'`)
              .then(
                sql<number>`substr(asset.duration, 1, 2)::int * 3600000 + substr(asset.duration, 4, 2)::int * 60000 + substr(asset.duration, 7, 2)::int * 1000 + substr(asset.duration, 10, 3)::int`,
              )
              .else(sql.lit(0))
              .end()
              .as('duration'),
            'asset_video.bitrate',
          ]),
      )
        .$castTo<VideoFormat | null>()
        .as('format'),
    );
}
export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'asset', O>) {
return qb
.leftJoin('smart_search', 'asset.id', 'smart_search.assetId')

View File

@ -1,6 +1,15 @@
import { AUDIO_ENCODER } from 'src/constants';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import { CQMode, ToneMapping, TranscodeHardwareAcceleration, TranscodeTarget, VideoCodec } from 'src/enum';
import {
ColorMatrix,
ColorPrimaries,
ColorTransfer,
CQMode,
ToneMapping,
TranscodeHardwareAcceleration,
TranscodeTarget,
VideoCodec,
} from 'src/enum';
import {
AudioStreamInfo,
BitrateDistribution,
@ -255,7 +264,10 @@ export class BaseConfig implements VideoCodecSWConfig {
}
shouldToneMap(videoStream: VideoStreamInfo) {
return videoStream.isHDR && this.config.tonemap !== ToneMapping.Disabled;
return (
this.config.tonemap !== ToneMapping.Disabled &&
(videoStream.colorTransfer === ColorTransfer.Smpte2084 || videoStream.colorTransfer === ColorTransfer.AribStdB67)
);
}
getScaling(videoStream: VideoStreamInfo, mult = 2) {
@ -409,15 +421,15 @@ export class ThumbnailConfig extends BaseConfig {
: ['-skip_frame', 'nointra', '-sws_flags', 'accurate_rnd+full_chroma_int'];
const metadataOverrides = [];
if (videoStream.colorPrimaries === 'reserved') {
if (videoStream.colorPrimaries === ColorPrimaries.Reserved) {
metadataOverrides.push('colour_primaries=1');
}
if (videoStream.colorSpace === 'reserved') {
if (videoStream.colorMatrix === ColorMatrix.Reserved) {
metadataOverrides.push('matrix_coefficients=1');
}
if (videoStream.colorTransfer === 'reserved') {
if (videoStream.colorTransfer === ColorTransfer.Reserved) {
metadataOverrides.push('transfer_characteristics=1');
}

View File

@ -1,3 +1,4 @@
import { ColorMatrix, ColorPrimaries, ColorTransfer, DvProfile, DvSignalCompatibility, VideoContainer } from 'src/enum';
import { AudioStreamInfo, VideoFormat, VideoInfo, VideoStreamInfo } from 'src/types';
const probeStubDefaultFormat: VideoFormat = {
@ -15,9 +16,12 @@ const probeStubDefaultVideoStream: VideoStreamInfo[] = [
codecName: 'hevc',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
];
@ -29,7 +33,8 @@ const probeStubDefault: VideoInfo = {
audioStreams: probeStubDefaultAudioStream,
};
export const probeStub = {
/** Fixtures in the shape `mediaRepository.probe()` returns (arrays of streams, raw ffprobe format). */
export const videoInfoStub = {
noVideoStreams: Object.freeze<VideoInfo>({ ...probeStubDefault, videoStreams: [] }),
noAudioStreams: Object.freeze<VideoInfo>({ ...probeStubDefault, audioStreams: [] }),
multipleVideoStreams: Object.freeze<VideoInfo>({
@ -42,9 +47,12 @@ export const probeStub = {
codecName: 'hevc',
frameCount: 1,
rotation: 0,
isHDR: false,
bitrate: 100,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
{
index: 1,
@ -53,9 +61,12 @@ export const probeStub = {
codecName: 'hevc',
frameCount: 2,
rotation: 0,
isHDR: false,
bitrate: 101,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
{
index: 2,
@ -64,9 +75,12 @@ export const probeStub = {
codecName: 'h7000',
frameCount: 3,
rotation: 0,
isHDR: false,
bitrate: 99,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@ -88,9 +102,12 @@ export const probeStub = {
codecName: 'hevc',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@ -104,9 +121,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@ -117,8 +137,10 @@ export const probeStub = {
videoStreamMTS: Object.freeze<VideoInfo>({
...probeStubDefault,
format: {
...probeStubDefaultFormat,
formatName: 'mpegts',
formatLongName: 'MPEG-TS (MPEG-2 Transport Stream)',
duration: 0,
bitrate: 0,
},
}),
videoStreamHDR: Object.freeze<VideoInfo>({
@ -131,9 +153,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: true,
colorPrimaries: ColorPrimaries.Bt2020,
colorMatrix: ColorMatrix.Bt2020Nc,
colorTransfer: ColorTransfer.Smpte2084,
bitrate: 0,
pixelFormat: 'yuv420p10le',
timeBase: 600,
},
],
}),
@ -147,9 +172,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p10le',
timeBase: 600,
},
],
}),
@ -163,9 +191,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p10le',
timeBase: 600,
},
],
}),
@ -179,9 +210,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 90,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@ -195,9 +229,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@ -211,9 +248,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@ -274,10 +314,238 @@ export const probeStub = {
videoStreams: [
{
...probeStubDefaultVideoStream[0],
colorPrimaries: 'reserved',
colorSpace: 'reserved',
colorTransfer: 'reserved',
colorPrimaries: ColorPrimaries.Reserved,
colorMatrix: ColorMatrix.Reserved,
colorTransfer: ColorTransfer.Reserved,
},
],
}),
videoStreamHDR10: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
index: 0,
height: 2160,
width: 3840,
codecName: 'hevc',
profile: 2,
level: 153,
frameCount: 1208,
frameRate: 59.94,
rotation: 0,
bitrate: 64_000_000,
pixelFormat: 'yuv420p10le',
colorPrimaries: ColorPrimaries.Bt2020,
colorMatrix: ColorMatrix.Bt2020Nc,
colorTransfer: ColorTransfer.Smpte2084,
timeBase: 600,
},
],
}),
videoStreamDolbyVision: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
index: 0,
height: 2160,
width: 3840,
codecName: 'hevc',
profile: 2,
level: 153,
frameCount: 1299,
frameRate: 59.94,
rotation: 0,
bitrate: 53_500_000,
pixelFormat: 'yuv420p10le',
colorPrimaries: ColorPrimaries.Bt2020,
colorMatrix: ColorMatrix.Bt2020Nc,
colorTransfer: ColorTransfer.AribStdB67,
dvProfile: DvProfile.Dvhe08,
dvLevel: 10,
dvBlSignalCompatibilityId: DvSignalCompatibility.Hlg,
timeBase: 600,
},
],
}),
videoStreamWithProfileLevel: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
...probeStubDefaultVideoStream[0],
codecName: 'h264',
profile: 100,
level: 40,
},
],
}),
audioStreamAAC: Object.freeze<VideoInfo>({
...probeStubDefault,
audioStreams: [
{
index: 1,
codecName: 'aac',
profile: 2,
bitrate: 128_000,
},
],
}),
};
/**
 * Fixtures in the shape `AssetJobRepository.getForVideoConversion` /
 * `getForGenerateThumbnailJob` return: a single `videoStream`/`audioStream` (the main one,
 * already picked) and a container name already mapped to a `VideoContainer` value.
 * Consumers spread these onto the mocked asset.
 */
interface VideoConversionStreams {
  /** The pre-selected main video stream, or null when the asset has none. */
  videoStream: VideoStreamInfo | null;
  /** The pre-selected main audio stream, or null when the asset has none. */
  audioStream: AudioStreamInfo | null;
  /** Container-level format info, or null when unavailable. */
  format: VideoFormat | null;
}
// Baseline fixture values; the stubs in `probeStub` spread and override these.
const defaultMovFormat: VideoFormat = { formatName: VideoContainer.Mov, duration: 0, bitrate: 0 };
const defaultAudioStream: AudioStreamInfo = { index: 3, codecName: 'mp3', bitrate: 100 };
// 1080p HEVC SDR stream with BT.709 color.
const defaultVideoStream: VideoStreamInfo = {
  index: 0,
  height: 1080,
  width: 1920,
  codecName: 'hevc',
  frameCount: 100,
  rotation: 0,
  bitrate: 0,
  colorPrimaries: ColorPrimaries.Bt709,
  colorTransfer: ColorTransfer.Bt709,
  colorMatrix: ColorMatrix.Bt709,
  pixelFormat: 'yuv420p',
  timeBase: 600,
};
/** Per-scenario fixtures in the `VideoConversionStreams` shape (one pre-selected stream of each kind). */
export const probeStub = {
  // Stream-selection edge cases.
  multipleVideoStreams: {
    videoStream: { ...defaultVideoStream, index: 1, width: 400, frameCount: 2, bitrate: 101 },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  multipleAudioStreams: {
    videoStream: defaultVideoStream,
    audioStream: { index: 2, codecName: 'mp3', bitrate: 102 },
    format: defaultMovFormat,
  },
  noVideoStreams: { videoStream: null, audioStream: defaultAudioStream, format: defaultMovFormat },
  noAudioStreams: { videoStream: defaultVideoStream, audioStream: null, format: defaultMovFormat },
  noHeight: {
    videoStream: { ...defaultVideoStream, width: 400, height: 0 },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  // Video-stream property variations (resolution, bitrate, HDR, bit depth, rotation, odd dimensions).
  videoStream2160p: {
    videoStream: { ...defaultVideoStream, height: 2160, width: 3840, codecName: 'h264' },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStream40Mbps: {
    videoStream: { ...defaultVideoStream, bitrate: 40_000_000, codecName: 'h264' },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStreamHDR: {
    videoStream: {
      ...defaultVideoStream,
      height: 480,
      width: 480,
      codecName: 'h264',
      colorPrimaries: ColorPrimaries.Bt2020,
      colorMatrix: ColorMatrix.Bt2020Nc,
      colorTransfer: ColorTransfer.Smpte2084,
      pixelFormat: 'yuv420p10le',
    },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStream10Bit: {
    videoStream: { ...defaultVideoStream, height: 480, width: 480, codecName: 'h264', pixelFormat: 'yuv420p10le' },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStream4K10Bit: {
    videoStream: {
      ...defaultVideoStream,
      height: 2160,
      width: 3840,
      codecName: 'h264',
      pixelFormat: 'yuv420p10le',
    },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStreamVertical2160p: {
    videoStream: { ...defaultVideoStream, height: 2160, width: 3840, codecName: 'h264', rotation: 90 },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStreamOddHeight: {
    videoStream: { ...defaultVideoStream, height: 355, width: 1586, codecName: 'h264' },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStreamOddWidth: {
    videoStream: { ...defaultVideoStream, height: 1586, width: 355, codecName: 'h264' },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  videoStreamMTS: {
    videoStream: defaultVideoStream,
    audioStream: defaultAudioStream,
    format: { formatName: 'mpegts', duration: 0, bitrate: 0 },
  },
  videoStreamReserved: {
    videoStream: {
      ...defaultVideoStream,
      colorPrimaries: ColorPrimaries.Reserved,
      colorMatrix: ColorMatrix.Reserved,
      colorTransfer: ColorTransfer.Reserved,
    },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  // Container / codec variations.
  videoStreamAvi: {
    videoStream: { ...defaultVideoStream, codecName: 'h264' },
    audioStream: defaultAudioStream,
    format: { formatName: 'avi', duration: 0, bitrate: 0 },
  },
  videoStreamVp9: {
    videoStream: { ...defaultVideoStream, codecName: 'vp9' },
    audioStream: defaultAudioStream,
    format: { formatName: VideoContainer.Webm, duration: 0, bitrate: 0 },
  },
  videoStreamH264: {
    videoStream: { ...defaultVideoStream, codecName: 'h264' },
    audioStream: defaultAudioStream,
    format: defaultMovFormat,
  },
  // NOTE(review): named "matroska" but uses the Webm container value — confirm this is intentional.
  matroskaContainer: {
    videoStream: defaultVideoStream,
    audioStream: defaultAudioStream,
    format: { formatName: VideoContainer.Webm, duration: 0, bitrate: 0 },
  },
  // Audio codec variations.
  audioStreamAac: {
    videoStream: defaultVideoStream,
    audioStream: { index: 1, codecName: 'aac', bitrate: 100 },
    format: defaultMovFormat,
  },
  audioStreamMp3: {
    videoStream: defaultVideoStream,
    audioStream: { index: 1, codecName: 'mp3', bitrate: 100 },
    format: defaultMovFormat,
  },
  audioStreamOpus: {
    videoStream: defaultVideoStream,
    audioStream: { index: 1, codecName: 'opus', bitrate: 100 },
    format: defaultMovFormat,
  },
  audioStreamUnknown: {
    videoStream: defaultVideoStream,
    audioStream: { index: 0, codecName: 'aac', bitrate: 100 },
    format: defaultMovFormat,
  },
} satisfies Record<string, VideoConversionStreams>;

View File

@ -4,6 +4,7 @@ import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { ActivityTable } from 'src/schema/tables/activity.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { PartnerTable } from 'src/schema/tables/partner.table';
import { AudioStreamInfo, VideoFormat, VideoStreamInfo } from 'src/types';
import { AlbumFactory } from 'test/factories/album.factory';
import { AssetFaceFactory } from 'test/factories/asset-face.factory';
import { AssetFactory } from 'test/factories/asset.factory';
@ -155,6 +156,9 @@ export const getForGenerateThumbnail = (asset: ReturnType<AssetFactory['build']>
files: asset.files.map((file) => getDehydrated(file)),
exifInfo: getDehydrated(asset.exifInfo),
edits: asset.edits.map(({ action, parameters }) => ({ action, parameters })) as AssetEditActionItem[],
videoStream: null as VideoStreamInfo | null,
audioStream: null as AudioStreamInfo | null,
format: null as VideoFormat | null,
});
export const getForAssetFace = (face: ReturnType<AssetFaceFactory['build']>) => ({

View File

@ -218,7 +218,7 @@ export class MediumTestContext<S extends BaseService = BaseService> {
}
async newExif(dto: Insertable<AssetExifTable>) {
const result = await this.get(AssetRepository).upsertExif(dto, { lockedPropertiesBehavior: 'override' });
const result = await this.get(AssetRepository).upsertExif({ exif: dto, lockedPropertiesBehavior: 'override' });
return { result };
}

View File

@ -98,10 +98,7 @@ describe(AssetRepository.name, () => {
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal'] });
await sut.upsertExif(
{ assetId: asset.id, lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
);
await sut.upsertExif({ exif: { assetId: asset.id, lockedProperties: ['description'] }, lockedPropertiesBehavior: 'append' });
await expect(
ctx.database
@ -130,10 +127,7 @@ describe(AssetRepository.name, () => {
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
await sut.upsertExif(
{ assetId: asset.id, lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
);
await sut.upsertExif({ exif: { assetId: asset.id, lockedProperties: ['description'] }, lockedPropertiesBehavior: 'append' });
await expect(
ctx.database

View File

@ -289,13 +289,13 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
// update the asset
const assetRepository = ctx.get(AssetRepository);
await assetRepository.upsertExif(
updateLockedColumns({
await assetRepository.upsertExif({
exif: updateLockedColumns({
assetId: asset.id,
city: 'New City',
}),
{ lockedPropertiesBehavior: 'append' },
);
lockedPropertiesBehavior: 'append',
});
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
{
@ -350,13 +350,13 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
// update the asset
const assetRepository = ctx.get(AssetRepository);
await assetRepository.upsertExif(
updateLockedColumns({
await assetRepository.upsertExif({
exif: updateLockedColumns({
assetId: assetDelayedExif.id,
city: 'Delayed Exif',
}),
{ lockedPropertiesBehavior: 'append' },
);
lockedPropertiesBehavior: 'append',
});
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
{

View File

@ -11,6 +11,14 @@ export const newMediaRepositoryMock = (): Mocked<RepositoryInterface<MediaReposi
decodeImage: vitest.fn().mockResolvedValue({ data: Buffer.from(''), info: {} }),
extract: vitest.fn().mockResolvedValue(null),
probe: vitest.fn(),
probePackets: vitest.fn().mockResolvedValue({
totalDuration: 0,
packetCount: 0,
outputFrames: 0,
keyframePts: [],
keyframeAccDuration: [],
keyframeOwnDuration: [],
}),
transcode: vitest.fn(),
getImageMetadata: vitest.fn(),
};