Compare commits

...

27 Commits

Author SHA1 Message Date
mertalev 13f08a034f reject on variant switch 2026-05-09 02:24:49 -04:00
mertalev 5fcaa6ed6b fix serving incomplete segments 2026-05-09 01:44:36 -04:00
mertalev 530edde7f5 fix backward seek race 2026-05-08 22:12:11 -04:00
mertalev bd228040f8 linting 2026-05-08 14:29:17 -04:00
mertalev ac004e9d84 fix backpressure 2026-05-08 14:20:03 -04:00
mertalev 96ba978cc8 fix folder layout 2026-05-08 14:19:57 -04:00
mertalev c8bdf3678f hls_ prefix 2026-05-07 19:29:46 -04:00
mertalev 89bbe3dd93 always include lowest resolution 2026-05-07 18:58:30 -04:00
mertalev 4b7cff4b79 clarify resolution loop 2026-05-07 18:58:30 -04:00
mertalev 1b8a5f3307 review feedback 2026-05-07 18:58:30 -04:00
mertalev 4ec06aaa38 admin setting 2026-05-07 18:58:30 -04:00
mertalev ba2f1b9842 main playlist 2026-05-07 18:58:30 -04:00
mertalev e058ef868d unnecessary argument 2026-05-07 18:58:30 -04:00
mertalev 5243807395 stricter test 2026-05-07 18:58:30 -04:00
mertalev d7d25aa072 linting 2026-05-07 18:58:30 -04:00
mertalev fb5203dc4b move fixtures 2026-05-07 18:58:30 -04:00
mertalev edb8a0dcd8 unused constant 2026-05-07 18:58:30 -04:00
mertalev c5a88f0e51 add ffmpeg command tests 2026-05-07 18:58:30 -04:00
mertalev a88e69bc9d unused helper 2026-05-07 18:58:30 -04:00
mertalev 27cf03c850 fix test 2026-05-07 18:58:30 -04:00
mertalev f7d9f817ab update sql 2026-05-07 18:58:30 -04:00
mertalev 9b75b37d82 remove profiling 2026-05-07 18:58:30 -04:00
mertalev e4f6129419 permalink 2026-05-07 18:58:30 -04:00
mertalev f13627bcb2 early return 2026-05-07 18:58:29 -04:00
mertalev 1e7d9645e8 openapi 2026-05-07 18:58:29 -04:00
mertalev 60d4def0c8 transcoding service 2026-05-07 18:58:29 -04:00
mertalev 75db82da12 hls service and controller 2026-05-07 18:58:29 -04:00
44 changed files with 3192 additions and 206 deletions
+4
View File
@@ -401,6 +401,10 @@
"transcoding_preferred_hardware_device_description": "Applies only to VAAPI and QSV. Sets the dri node used for hardware transcoding.",
"transcoding_preset_preset": "Preset (-preset)",
"transcoding_preset_preset_description": "Compression speed. Slower presets produce smaller files, and increase quality when targeting a certain bitrate. VP9 ignores speeds above 'faster'.",
"transcoding_realtime": "Real-time Transcoding [EXPERIMENTAL]",
"transcoding_realtime_description": "Allows transcoding to be performed in real-time as the video is being streamed. Enables quality switching, but may cause higher playback latency and stuttering depending on server capabilities.",
"transcoding_realtime_enabled": "Enable real-time transcoding",
"transcoding_realtime_enabled_description": "If disabled, the server will refuse to start new real-time transcoding sessions.",
"transcoding_reference_frames": "Reference frames",
"transcoding_reference_frames_description": "The number of frames to reference when compressing a given frame. Higher values improve compression efficiency, but slow down encoding. 0 sets this value automatically.",
"transcoding_required_description": "Only videos not in an accepted format",
+5
View File
@@ -103,12 +103,16 @@ Class | Method | HTTP request | Description
*AssetsApi* | [**deleteBulkAssetMetadata**](doc//AssetsApi.md#deletebulkassetmetadata) | **DELETE** /assets/metadata | Delete asset metadata
*AssetsApi* | [**downloadAsset**](doc//AssetsApi.md#downloadasset) | **GET** /assets/{id}/original | Download original asset
*AssetsApi* | [**editAsset**](doc//AssetsApi.md#editasset) | **PUT** /assets/{id}/edits | Apply edits to an existing asset
*AssetsApi* | [**endSession**](doc//AssetsApi.md#endsession) | **DELETE** /assets/{id}/video/stream/{sessionId} | End HLS streaming session
*AssetsApi* | [**getAssetEdits**](doc//AssetsApi.md#getassetedits) | **GET** /assets/{id}/edits | Retrieve edits for an existing asset
*AssetsApi* | [**getAssetInfo**](doc//AssetsApi.md#getassetinfo) | **GET** /assets/{id} | Retrieve an asset
*AssetsApi* | [**getAssetMetadata**](doc//AssetsApi.md#getassetmetadata) | **GET** /assets/{id}/metadata | Get asset metadata
*AssetsApi* | [**getAssetMetadataByKey**](doc//AssetsApi.md#getassetmetadatabykey) | **GET** /assets/{id}/metadata/{key} | Retrieve asset metadata by key
*AssetsApi* | [**getAssetOcr**](doc//AssetsApi.md#getassetocr) | **GET** /assets/{id}/ocr | Retrieve asset OCR data
*AssetsApi* | [**getAssetStatistics**](doc//AssetsApi.md#getassetstatistics) | **GET** /assets/statistics | Get asset statistics
*AssetsApi* | [**getMainPlaylist**](doc//AssetsApi.md#getmainplaylist) | **GET** /assets/{id}/video/stream/main.m3u8 | Get HLS main playlist
*AssetsApi* | [**getMediaPlaylist**](doc//AssetsApi.md#getmediaplaylist) | **GET** /assets/{id}/video/stream/{sessionId}/{variantIndex}/playlist.m3u8 | Get HLS media playlist
*AssetsApi* | [**getSegment**](doc//AssetsApi.md#getsegment) | **GET** /assets/{id}/video/stream/{sessionId}/{variantIndex}/{filename} | Get HLS segment or init file
*AssetsApi* | [**playAssetVideo**](doc//AssetsApi.md#playassetvideo) | **GET** /assets/{id}/video/playback | Play asset video
*AssetsApi* | [**removeAssetEdits**](doc//AssetsApi.md#removeassetedits) | **DELETE** /assets/{id}/edits | Remove edits from an existing asset
*AssetsApi* | [**runAssetJobs**](doc//AssetsApi.md#runassetjobs) | **POST** /assets/jobs | Run an asset job
@@ -601,6 +605,7 @@ Class | Method | HTTP request | Description
- [SystemConfigBackupsDto](doc//SystemConfigBackupsDto.md)
- [SystemConfigDto](doc//SystemConfigDto.md)
- [SystemConfigFFmpegDto](doc//SystemConfigFFmpegDto.md)
- [SystemConfigFFmpegRealtimeDto](doc//SystemConfigFFmpegRealtimeDto.md)
- [SystemConfigFacesDto](doc//SystemConfigFacesDto.md)
- [SystemConfigGeneratedFullsizeImageDto](doc//SystemConfigGeneratedFullsizeImageDto.md)
- [SystemConfigGeneratedImageDto](doc//SystemConfigGeneratedImageDto.md)
+1
View File
@@ -349,6 +349,7 @@ part 'model/sync_user_v1.dart';
part 'model/system_config_backups_dto.dart';
part 'model/system_config_dto.dart';
part 'model/system_config_f_fmpeg_dto.dart';
part 'model/system_config_f_fmpeg_realtime_dto.dart';
part 'model/system_config_faces_dto.dart';
part 'model/system_config_generated_fullsize_image_dto.dart';
part 'model/system_config_generated_image_dto.dart';
+310
View File
@@ -416,6 +416,75 @@ class AssetsApi {
return null;
}
/// End HLS streaming session
///
/// Releases server resources for the streaming session.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] sessionId (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> endSessionWithHttpInfo(String id, String sessionId, { String? key, String? slug, }) async {
  // Substitute both path parameters into the endpoint template.
  final path = r'/assets/{id}/video/stream/{sessionId}'
      .replaceAll('{id}', id)
      .replaceAll('{sessionId}', sessionId);

  // Optional query parameters are appended only when supplied.
  final query = <QueryParam>[
    if (key != null) ..._queryParams('', 'key', key),
    if (slug != null) ..._queryParams('', 'slug', slug),
  ];

  return apiClient.invokeAPI(
    path,
    'DELETE',
    query,
    null, // no request body
    <String, String>{}, // no extra headers
    <String, String>{}, // no form fields
    null, // no declared content type
  );
}
/// End HLS streaming session
///
/// Releases server resources for the streaming session.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] sessionId (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<void> endSession(String id, String sessionId, { String? key, String? slug, }) async {
  final response = await endSessionWithHttpInfo(id, sessionId, key: key, slug: slug);
  final status = response.statusCode;
  // Surface HTTP-level failures as ApiException; success yields no payload.
  if (status >= HttpStatus.badRequest) {
    throw ApiException(status, await _decodeBodyBytes(response));
  }
}
/// Retrieve edits for an existing asset
///
/// Retrieve a series of edit actions (crop, rotate, mirror) associated with the specified asset.
@@ -809,6 +878,247 @@ class AssetsApi {
return null;
}
/// Get HLS main playlist
///
/// Returns an HLS main playlist with all available variants for the asset.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> getMainPlaylistWithHttpInfo(String id, { String? key, String? slug, }) async {
  // The asset id is the only path parameter for the main playlist.
  final path = r'/assets/{id}/video/stream/main.m3u8'
      .replaceAll('{id}', id);

  // Optional query parameters are appended only when supplied.
  final query = <QueryParam>[
    if (key != null) ..._queryParams('', 'key', key),
    if (slug != null) ..._queryParams('', 'slug', slug),
  ];

  return apiClient.invokeAPI(
    path,
    'GET',
    query,
    null, // no request body
    <String, String>{}, // no extra headers
    <String, String>{}, // no form fields
    null, // no declared content type
  );
}
/// Get HLS main playlist
///
/// Returns an HLS main playlist with all available variants for the asset.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<String?> getMainPlaylist(String id, { String? key, String? slug, }) async {
  final response = await getMainPlaylistWithHttpInfo(id, key: key, slug: slug);
  if (response.statusCode >= HttpStatus.badRequest) {
    throw ApiException(response.statusCode, await _decodeBodyBytes(response));
  }
  // A 204 response (or an empty body) carries no playlist; decoding an empty
  // string would raise a FormatException in dart:convert, so return null early.
  if (response.body.isEmpty || response.statusCode == HttpStatus.noContent) {
    return null;
  }
  return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'String') as String;
}
/// Get HLS media playlist
///
/// Returns an HLS media playlist for one variant of the streaming session.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] sessionId (required):
///
/// * [int] variantIndex (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> getMediaPlaylistWithHttpInfo(String id, String sessionId, int variantIndex, { String? key, String? slug, }) async {
  // Substitute all three path parameters into the endpoint template.
  final path = r'/assets/{id}/video/stream/{sessionId}/{variantIndex}/playlist.m3u8'
      .replaceAll('{id}', id)
      .replaceAll('{sessionId}', sessionId)
      .replaceAll('{variantIndex}', variantIndex.toString());

  // Optional query parameters are appended only when supplied.
  final query = <QueryParam>[
    if (key != null) ..._queryParams('', 'key', key),
    if (slug != null) ..._queryParams('', 'slug', slug),
  ];

  return apiClient.invokeAPI(
    path,
    'GET',
    query,
    null, // no request body
    <String, String>{}, // no extra headers
    <String, String>{}, // no form fields
    null, // no declared content type
  );
}
/// Get HLS media playlist
///
/// Returns an HLS media playlist for one variant of the streaming session.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [String] sessionId (required):
///
/// * [int] variantIndex (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<String?> getMediaPlaylist(String id, String sessionId, int variantIndex, { String? key, String? slug, }) async {
  final response = await getMediaPlaylistWithHttpInfo(id, sessionId, variantIndex, key: key, slug: slug);
  if (response.statusCode >= HttpStatus.badRequest) {
    throw ApiException(response.statusCode, await _decodeBodyBytes(response));
  }
  // A 204 response (or an empty body) carries no playlist; decoding an empty
  // string would raise a FormatException in dart:convert, so return null early.
  if (response.body.isEmpty || response.statusCode == HttpStatus.noContent) {
    return null;
  }
  return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'String') as String;
}
/// Get HLS segment or init file
///
/// Streams an HLS init segment (init.mp4) or media segment (seg_N.m4s).
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] filename (required):
///
/// * [String] id (required):
///
/// * [String] sessionId (required):
///
/// * [int] variantIndex (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> getSegmentWithHttpInfo(String filename, String id, String sessionId, int variantIndex, { String? key, String? slug, }) async {
  // ignore: prefer_const_declarations
  // Fixed: the template previously contained the garbled literal '(unknown)'
  // in place of the '{filename}' placeholder declared by the OpenAPI spec.
  // Substitution still worked by string replacement, but the template now
  // matches the declared path parameter.
  final apiPath = r'/assets/{id}/video/stream/{sessionId}/{variantIndex}/{filename}'
    .replaceAll('{filename}', filename)
    .replaceAll('{id}', id)
    .replaceAll('{sessionId}', sessionId)
    .replaceAll('{variantIndex}', variantIndex.toString());
  // ignore: prefer_final_locals
  Object? postBody;
  final queryParams = <QueryParam>[];
  final headerParams = <String, String>{};
  final formParams = <String, String>{};
  // Optional query parameters are appended only when supplied.
  if (key != null) {
    queryParams.addAll(_queryParams('', 'key', key));
  }
  if (slug != null) {
    queryParams.addAll(_queryParams('', 'slug', slug));
  }
  const contentTypes = <String>[];
  return apiClient.invokeAPI(
    apiPath,
    'GET',
    queryParams,
    postBody,
    headerParams,
    formParams,
    contentTypes.isEmpty ? null : contentTypes.first,
  );
}
/// Get HLS segment or init file
///
/// Streams an HLS init segment (init.mp4) or media segment (seg_N.m4s).
///
/// Parameters:
///
/// * [String] filename (required):
///
/// * [String] id (required):
///
/// * [String] sessionId (required):
///
/// * [int] variantIndex (required):
///
/// * [String] key:
///
/// * [String] slug:
Future<MultipartFile?> getSegment(String filename, String id, String sessionId, int variantIndex, { String? key, String? slug, }) async {
  final response = await getSegmentWithHttpInfo(filename, id, sessionId, variantIndex, key: key, slug: slug);
  if (response.statusCode >= HttpStatus.badRequest) {
    throw ApiException(response.statusCode, await _decodeBodyBytes(response));
  }
  // A 204 response (or an empty body) carries no segment data; decoding an
  // empty string would raise a FormatException in dart:convert, so bail out.
  if (response.body.isEmpty || response.statusCode == HttpStatus.noContent) {
    return null;
  }
  return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'MultipartFile') as MultipartFile;
}
/// Play asset video
///
/// Streams the video file for the specified asset. This endpoint also supports byte range requests.
+2
View File
@@ -744,6 +744,8 @@ class ApiClient {
return SystemConfigDto.fromJson(value);
case 'SystemConfigFFmpegDto':
return SystemConfigFFmpegDto.fromJson(value);
case 'SystemConfigFFmpegRealtimeDto':
return SystemConfigFFmpegRealtimeDto.fromJson(value);
case 'SystemConfigFacesDto':
return SystemConfigFacesDto.fromJson(value);
case 'SystemConfigGeneratedFullsizeImageDto':
+3
View File
@@ -52,6 +52,7 @@ class JobName {
static const librarySyncFilesQueueAll = JobName._(r'LibrarySyncFilesQueueAll');
static const librarySyncFiles = JobName._(r'LibrarySyncFiles');
static const libraryScanQueueAll = JobName._(r'LibraryScanQueueAll');
static const hlsSessionCleanup = JobName._(r'HlsSessionCleanup');
static const memoryCleanup = JobName._(r'MemoryCleanup');
static const memoryGenerate = JobName._(r'MemoryGenerate');
static const notificationsCleanup = JobName._(r'NotificationsCleanup');
@@ -110,6 +111,7 @@ class JobName {
librarySyncFilesQueueAll,
librarySyncFiles,
libraryScanQueueAll,
hlsSessionCleanup,
memoryCleanup,
memoryGenerate,
notificationsCleanup,
@@ -203,6 +205,7 @@ class JobNameTypeTransformer {
case r'LibrarySyncFilesQueueAll': return JobName.librarySyncFilesQueueAll;
case r'LibrarySyncFiles': return JobName.librarySyncFiles;
case r'LibraryScanQueueAll': return JobName.libraryScanQueueAll;
case r'HlsSessionCleanup': return JobName.hlsSessionCleanup;
case r'MemoryCleanup': return JobName.memoryCleanup;
case r'MemoryGenerate': return JobName.memoryGenerate;
case r'NotificationsCleanup': return JobName.notificationsCleanup;
+9 -1
View File
@@ -25,6 +25,7 @@ class SystemConfigFFmpegDto {
required this.maxBitrate,
required this.preferredHwDevice,
required this.preset,
required this.realtime,
required this.refs,
required this.targetAudioCodec,
required this.targetResolution,
@@ -79,6 +80,8 @@ class SystemConfigFFmpegDto {
/// Preset
String preset;
SystemConfigFFmpegRealtimeDto realtime;
/// References
///
/// Minimum value: 0
@@ -122,6 +125,7 @@ class SystemConfigFFmpegDto {
other.maxBitrate == maxBitrate &&
other.preferredHwDevice == preferredHwDevice &&
other.preset == preset &&
other.realtime == realtime &&
other.refs == refs &&
other.targetAudioCodec == targetAudioCodec &&
other.targetResolution == targetResolution &&
@@ -147,6 +151,7 @@ class SystemConfigFFmpegDto {
(maxBitrate.hashCode) +
(preferredHwDevice.hashCode) +
(preset.hashCode) +
(realtime.hashCode) +
(refs.hashCode) +
(targetAudioCodec.hashCode) +
(targetResolution.hashCode) +
@@ -158,7 +163,7 @@ class SystemConfigFFmpegDto {
(twoPass.hashCode);
@override
String toString() => 'SystemConfigFFmpegDto[accel=$accel, accelDecode=$accelDecode, acceptedAudioCodecs=$acceptedAudioCodecs, acceptedContainers=$acceptedContainers, acceptedVideoCodecs=$acceptedVideoCodecs, bframes=$bframes, cqMode=$cqMode, crf=$crf, gopSize=$gopSize, maxBitrate=$maxBitrate, preferredHwDevice=$preferredHwDevice, preset=$preset, refs=$refs, targetAudioCodec=$targetAudioCodec, targetResolution=$targetResolution, targetVideoCodec=$targetVideoCodec, temporalAQ=$temporalAQ, threads=$threads, tonemap=$tonemap, transcode=$transcode, twoPass=$twoPass]';
String toString() => 'SystemConfigFFmpegDto[accel=$accel, accelDecode=$accelDecode, acceptedAudioCodecs=$acceptedAudioCodecs, acceptedContainers=$acceptedContainers, acceptedVideoCodecs=$acceptedVideoCodecs, bframes=$bframes, cqMode=$cqMode, crf=$crf, gopSize=$gopSize, maxBitrate=$maxBitrate, preferredHwDevice=$preferredHwDevice, preset=$preset, realtime=$realtime, refs=$refs, targetAudioCodec=$targetAudioCodec, targetResolution=$targetResolution, targetVideoCodec=$targetVideoCodec, temporalAQ=$temporalAQ, threads=$threads, tonemap=$tonemap, transcode=$transcode, twoPass=$twoPass]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -174,6 +179,7 @@ class SystemConfigFFmpegDto {
json[r'maxBitrate'] = this.maxBitrate;
json[r'preferredHwDevice'] = this.preferredHwDevice;
json[r'preset'] = this.preset;
json[r'realtime'] = this.realtime;
json[r'refs'] = this.refs;
json[r'targetAudioCodec'] = this.targetAudioCodec;
json[r'targetResolution'] = this.targetResolution;
@@ -207,6 +213,7 @@ class SystemConfigFFmpegDto {
maxBitrate: mapValueOfType<String>(json, r'maxBitrate')!,
preferredHwDevice: mapValueOfType<String>(json, r'preferredHwDevice')!,
preset: mapValueOfType<String>(json, r'preset')!,
realtime: SystemConfigFFmpegRealtimeDto.fromJson(json[r'realtime'])!,
refs: mapValueOfType<int>(json, r'refs')!,
targetAudioCodec: AudioCodec.fromJson(json[r'targetAudioCodec'])!,
targetResolution: mapValueOfType<String>(json, r'targetResolution')!,
@@ -275,6 +282,7 @@ class SystemConfigFFmpegDto {
'maxBitrate',
'preferredHwDevice',
'preset',
'realtime',
'refs',
'targetAudioCodec',
'targetResolution',
@@ -0,0 +1,100 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SystemConfigFFmpegRealtimeDto {
  /// Returns a new [SystemConfigFFmpegRealtimeDto] instance.
  SystemConfigFFmpegRealtimeDto({
    required this.enabled,
  });

  /// Enable real-time HLS transcoding (alpha)
  bool enabled;

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      (other is SystemConfigFFmpegRealtimeDto && other.enabled == enabled);

  @override
  int get hashCode =>
      // ignore: unnecessary_parenthesis
      (enabled.hashCode);

  @override
  String toString() => 'SystemConfigFFmpegRealtimeDto[enabled=$enabled]';

  Map<String, dynamic> toJson() => <String, dynamic>{
        r'enabled': enabled,
      };

  /// Returns a new [SystemConfigFFmpegRealtimeDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static SystemConfigFFmpegRealtimeDto? fromJson(dynamic value) {
    upgradeDto(value, "SystemConfigFFmpegRealtimeDto");
    if (value is! Map) {
      return null;
    }
    final json = value.cast<String, dynamic>();
    return SystemConfigFFmpegRealtimeDto(
      enabled: mapValueOfType<bool>(json, r'enabled')!,
    );
  }

  static List<SystemConfigFFmpegRealtimeDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <SystemConfigFFmpegRealtimeDto>[];
    if (json is List) {
      for (final row in json) {
        final parsed = SystemConfigFFmpegRealtimeDto.fromJson(row);
        if (parsed != null) {
          result.add(parsed);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, SystemConfigFFmpegRealtimeDto> mapFromJson(dynamic json) {
    final map = <String, SystemConfigFFmpegRealtimeDto>{};
    if (json is Map && json.isNotEmpty) {
      final typed = json.cast<String, dynamic>();
      for (final entry in typed.entries) {
        final parsed = SystemConfigFFmpegRealtimeDto.fromJson(entry.value);
        if (parsed != null) {
          map[entry.key] = parsed;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of SystemConfigFFmpegRealtimeDto-objects as value to a dart map
  static Map<String, List<SystemConfigFFmpegRealtimeDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<SystemConfigFFmpegRealtimeDto>>{};
    if (json is Map && json.isNotEmpty) {
      final typed = json.cast<String, dynamic>();
      for (final entry in typed.entries) {
        map[entry.key] = SystemConfigFFmpegRealtimeDto.listFromJson(entry.value, growable: growable);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'enabled',
  };
}
+362
View File
@@ -4300,6 +4300,351 @@
"x-immich-state": "Stable"
}
},
"/assets/{id}/video/stream/main.m3u8": {
"get": {
"description": "Returns an HLS main playlist with all available variants for the asset.",
"operationId": "getMainPlaylist",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/vnd.apple.mpegurl": {
"schema": {
"type": "string"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Get HLS main playlist",
"tags": [
"Assets"
],
"x-immich-history": [
{
"version": "v3",
"state": "Added"
},
{
"version": "v3",
"state": "Alpha"
}
],
"x-immich-permission": "asset.view",
"x-immich-state": "Alpha"
}
},
"/assets/{id}/video/stream/{sessionId}": {
"delete": {
"description": "Releases server resources for the streaming session.",
"operationId": "endSession",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "sessionId",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$",
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
}
],
"responses": {
"204": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "End HLS streaming session",
"tags": [
"Assets"
],
"x-immich-history": [
{
"version": "v3",
"state": "Added"
},
{
"version": "v3",
"state": "Alpha"
}
],
"x-immich-permission": "asset.view",
"x-immich-state": "Alpha"
}
},
"/assets/{id}/video/stream/{sessionId}/{variantIndex}/playlist.m3u8": {
"get": {
"description": "Returns an HLS media playlist for one variant of the streaming session.",
"operationId": "getMediaPlaylist",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "sessionId",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$",
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "variantIndex",
"required": true,
"in": "path",
"schema": {
"minimum": 0,
"maximum": 9007199254740991,
"type": "integer"
}
}
],
"responses": {
"200": {
"content": {
"application/vnd.apple.mpegurl": {
"schema": {
"type": "string"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Get HLS media playlist",
"tags": [
"Assets"
],
"x-immich-history": [
{
"version": "v3",
"state": "Added"
},
{
"version": "v3",
"state": "Alpha"
}
],
"x-immich-permission": "asset.view",
"x-immich-state": "Alpha"
}
},
"/assets/{id}/video/stream/{sessionId}/{variantIndex}/{filename}": {
"get": {
"description": "Streams an HLS init segment (init.mp4) or media segment (seg_N.m4s).",
"operationId": "getSegment",
"parameters": [
{
"name": "filename",
"required": true,
"in": "path",
"schema": {
"pattern": "^(init\\.mp4|seg_\\d+\\.m4s)$",
"type": "string"
}
},
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$",
"type": "string"
}
},
{
"name": "key",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "sessionId",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$",
"type": "string"
}
},
{
"name": "slug",
"required": false,
"in": "query",
"schema": {
"type": "string"
}
},
{
"name": "variantIndex",
"required": true,
"in": "path",
"schema": {
"minimum": 0,
"maximum": 9007199254740991,
"type": "integer"
}
}
],
"responses": {
"200": {
"content": {
"application/octet-stream": {
"schema": {
"format": "binary",
"type": "string"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Get HLS segment or init file",
"tags": [
"Assets"
],
"x-immich-history": [
{
"version": "v3",
"state": "Added"
},
{
"version": "v3",
"state": "Alpha"
}
],
"x-immich-permission": "asset.view",
"x-immich-state": "Alpha"
}
},
"/auth/admin-sign-up": {
"post": {
"description": "Create the first admin user in the system.",
@@ -17897,6 +18242,7 @@
"LibrarySyncFilesQueueAll",
"LibrarySyncFiles",
"LibraryScanQueueAll",
"HlsSessionCleanup",
"MemoryCleanup",
"MemoryGenerate",
"NotificationsCleanup",
@@ -24095,6 +24441,9 @@
"description": "Preset",
"type": "string"
},
"realtime": {
"$ref": "#/components/schemas/SystemConfigFFmpegRealtimeDto"
},
"refs": {
"description": "References",
"maximum": 6,
@@ -24145,6 +24494,7 @@
"maxBitrate",
"preferredHwDevice",
"preset",
"realtime",
"refs",
"targetAudioCodec",
"targetResolution",
@@ -24157,6 +24507,18 @@
],
"type": "object"
},
"SystemConfigFFmpegRealtimeDto": {
"properties": {
"enabled": {
"description": "Enable real-time HLS transcoding (alpha)",
"type": "boolean"
}
},
"required": [
"enabled"
],
"type": "object"
},
"SystemConfigFacesDto": {
"properties": {
"import": {
@@ -2307,6 +2307,10 @@ export type DatabaseBackupConfig = {
export type SystemConfigBackupsDto = {
database: DatabaseBackupConfig;
};
/** Real-time transcoding settings; nested under `SystemConfigFFmpegDto.realtime`. */
export type SystemConfigFFmpegRealtimeDto = {
    /** Enable real-time HLS transcoding (alpha) */
    enabled: boolean;
};
export type SystemConfigFFmpegDto = {
accel: TranscodeHWAccel;
/** Accelerated decode */
@@ -2330,6 +2334,7 @@ export type SystemConfigFFmpegDto = {
preferredHwDevice: string;
/** Preset */
preset: string;
realtime: SystemConfigFFmpegRealtimeDto;
/** References */
refs: number;
targetAudioCodec: AudioCodec;
@@ -4264,6 +4269,82 @@ export function playAssetVideo({ id, key, slug }: {
...opts
}));
}
/**
 * Get HLS main playlist
 */
export function getMainPlaylist({ id, key, slug }: {
    id: string;
    key?: string;
    slug?: string;
}, opts?: Oazapfts.RequestOpts) {
    // Optional key/slug are serialized as exploded query parameters.
    const query = QS.query(QS.explode({ key, slug }));
    const url = `/assets/${encodeURIComponent(id)}/video/stream/main.m3u8${query}`;
    return oazapfts.ok(oazapfts.fetchBlob<{ status: 200; data: string }>(url, { ...opts }));
}
/**
 * End HLS streaming session
 */
export function endSession({ id, key, sessionId, slug }: {
    id: string;
    key?: string;
    sessionId: string;
    slug?: string;
}, opts?: Oazapfts.RequestOpts) {
    // Optional key/slug are serialized as exploded query parameters.
    const query = QS.query(QS.explode({ key, slug }));
    const url = `/assets/${encodeURIComponent(id)}/video/stream/${encodeURIComponent(sessionId)}${query}`;
    return oazapfts.ok(oazapfts.fetchText(url, { ...opts, method: "DELETE" }));
}
/**
 * Get HLS media playlist
 */
export function getMediaPlaylist({ id, key, sessionId, slug, variantIndex }: {
    id: string;
    key?: string;
    sessionId: string;
    slug?: string;
    variantIndex: number;
}, opts?: Oazapfts.RequestOpts) {
    // Optional key/slug are serialized as exploded query parameters.
    const query = QS.query(QS.explode({ key, slug }));
    const base = `/assets/${encodeURIComponent(id)}/video/stream/${encodeURIComponent(sessionId)}`;
    const url = `${base}/${encodeURIComponent(variantIndex)}/playlist.m3u8${query}`;
    return oazapfts.ok(oazapfts.fetchBlob<{ status: 200; data: string }>(url, { ...opts }));
}
/**
 * Get HLS segment or init file
 */
export function getSegment({ filename, id, key, sessionId, slug, variantIndex }: {
    filename: string;
    id: string;
    key?: string;
    sessionId: string;
    slug?: string;
    variantIndex: number;
}, opts?: Oazapfts.RequestOpts) {
    // Optional key/slug are serialized as exploded query parameters.
    const query = QS.query(QS.explode({ key, slug }));
    const base = `/assets/${encodeURIComponent(id)}/video/stream/${encodeURIComponent(sessionId)}/${encodeURIComponent(variantIndex)}`;
    const url = `${base}/${encodeURIComponent(filename)}${query}`;
    return oazapfts.ok(oazapfts.fetchBlob<{ status: 200; data: Blob }>(url, { ...opts }));
}
/**
* Register admin
*/
@@ -7098,6 +7179,7 @@ export enum JobName {
LibrarySyncFilesQueueAll = "LibrarySyncFilesQueueAll",
LibrarySyncFiles = "LibrarySyncFiles",
LibraryScanQueueAll = "LibraryScanQueueAll",
HlsSessionCleanup = "HlsSessionCleanup",
MemoryCleanup = "MemoryCleanup",
MemoryGenerate = "MemoryGenerate",
NotificationsCleanup = "NotificationsCleanup",
+6
View File
@@ -45,6 +45,9 @@ export type SystemConfig = {
accel: TranscodeHardwareAcceleration;
accelDecode: boolean;
tonemap: ToneMapping;
realtime: {
enabled: boolean;
};
};
job: Record<ConcurrentQueueName, { concurrency: number }>;
logging: {
@@ -224,6 +227,9 @@ export const defaults = Object.freeze<SystemConfig>({
tonemap: ToneMapping.Hable,
accel: TranscodeHardwareAcceleration.Disabled,
accelDecode: false,
realtime: {
enabled: false,
},
},
job: {
[QueueName.BackgroundTask]: { concurrency: 5 },
+41 -1
View File
@@ -1,7 +1,15 @@
import { readFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { SemVer } from 'semver';
import { ApiTag, AudioCodec, DatabaseExtension, ExifOrientation, VectorIndex } from 'src/enum';
import {
ApiTag,
AudioCodec,
DatabaseExtension,
ExifOrientation,
TranscodeHardwareAcceleration,
VectorIndex,
VideoCodec,
} from 'src/enum';
export const IMMICH_SERVER_START = 'Immich Server is listening';
@@ -203,3 +211,35 @@ export const AUDIO_ENCODER: Record<AudioCodec, string> = {
[AudioCodec.Opus]: 'libopus',
[AudioCodec.PcmS16le]: 'pcm_s16le',
};
// Video codecs each hardware-acceleration backend may encode. Disabled (CPU)
// keeps the full set; RKMPP is limited to H.264/HEVC, NVENC lacks VP9.
export const SUPPORTED_HWA_CODECS: Record<TranscodeHardwareAcceleration, VideoCodec[]> = {
  [TranscodeHardwareAcceleration.Nvenc]: [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Av1],
  [TranscodeHardwareAcceleration.Qsv]: [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Vp9, VideoCodec.Av1],
  [TranscodeHardwareAcceleration.Vaapi]: [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Vp9, VideoCodec.Av1],
  [TranscodeHardwareAcceleration.Rkmpp]: [VideoCodec.H264, VideoCodec.Hevc],
  [TranscodeHardwareAcceleration.Disabled]: [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Vp9, VideoCodec.Av1],
};

// Real-time HLS transcoding tuning.
// Segment-count thresholds — presumably how far encoding may run ahead of the
// client before pausing / resuming (backpressure); confirm against hls.service.
export const HLS_BACKPRESSURE_PAUSE_SEGMENTS = 30;
export const HLS_BACKPRESSURE_RESUME_SEGMENTS = 15;
// Cleanup cadence and session lifetimes, in milliseconds.
export const HLS_CLEANUP_INTERVAL_MS = 60 * 1000;
export const HLS_INACTIVITY_TIMEOUT_MS = 5 * 60 * 1000;
export const HLS_LEASE_DURATION_MS = 30 * 60 * 1000;
// Standard MIME type for .m3u8 playlists (RFC 8216).
export const HLS_PLAYLIST_CONTENT_TYPE = 'application/vnd.apple.mpegurl';
// Target media-segment duration, in seconds.
export const HLS_SEGMENT_DURATION = 2;
// Media segment filenames; capture group 1 is the numeric segment index.
export const HLS_SEGMENT_FILENAME_REGEX = /^seg_(\d+)\.m4s$/;
// Variant ladder: per resolution, one entry per codec with its target bitrate
// and the RFC 6381 codec string advertised in the main playlist.
export const HLS_VARIANTS = [
  { resolution: 480, codec: VideoCodec.Av1, bitrate: 1_000_000, codecString: 'av01.0.04M.08' },
  { resolution: 480, codec: VideoCodec.Hevc, bitrate: 1_200_000, codecString: 'hvc1.1.6.L90.B0' },
  { resolution: 480, codec: VideoCodec.H264, bitrate: 2_500_000, codecString: 'avc1.64001e' },
  { resolution: 720, codec: VideoCodec.Av1, bitrate: 2_000_000, codecString: 'av01.0.08M.08' },
  { resolution: 720, codec: VideoCodec.Hevc, bitrate: 2_500_000, codecString: 'hvc1.1.6.L93.B0' },
  { resolution: 720, codec: VideoCodec.H264, bitrate: 5_000_000, codecString: 'avc1.64001f' },
  { resolution: 1080, codec: VideoCodec.Av1, bitrate: 4_000_000, codecString: 'av01.0.09M.08' },
  { resolution: 1080, codec: VideoCodec.Hevc, bitrate: 4_500_000, codecString: 'hvc1.1.6.L120.B0' },
  { resolution: 1080, codec: VideoCodec.H264, bitrate: 8_000_000, codecString: 'avc1.640028' },
  { resolution: 1440, codec: VideoCodec.Av1, bitrate: 7_000_000, codecString: 'av01.0.12M.08' },
  { resolution: 1440, codec: VideoCodec.Hevc, bitrate: 8_000_000, codecString: 'hvc1.2.4.L150.B0' },
  { resolution: 1440, codec: VideoCodec.H264, bitrate: 16_000_000, codecString: 'avc1.640032' },
];
// EXT-X-VERSION emitted in generated playlists.
export const HLS_VERSION = 6;
+2
View File
@@ -35,6 +35,7 @@ import { TimelineController } from 'src/controllers/timeline.controller';
import { TrashController } from 'src/controllers/trash.controller';
import { UserAdminController } from 'src/controllers/user-admin.controller';
import { UserController } from 'src/controllers/user.controller';
import { VideoStreamController } from 'src/controllers/video-stream.controller';
import { ViewController } from 'src/controllers/view.controller';
import { WorkflowController } from 'src/controllers/workflow.controller';
@@ -76,6 +77,7 @@ export const controllers = [
TrashController,
UserAdminController,
UserController,
VideoStreamController,
ViewController,
WorkflowController,
];
@@ -0,0 +1,79 @@
import { Controller, Delete, Get, Header, HttpCode, HttpStatus, Next, Param, Res } from '@nestjs/common';
import { ApiProduces, ApiTags } from '@nestjs/swagger';
import { NextFunction, Response } from 'express';
import { HLS_PLAYLIST_CONTENT_TYPE } from 'src/constants';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { HlsSegmentParamDto, HlsSessionParamDto, HlsVariantParamDto } from 'src/dtos/streaming.dto';
import { ApiTag, Permission, RouteKey } from 'src/enum';
import { Auth, Authenticated, FileResponse } from 'src/middleware/auth.guard';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { HlsService } from 'src/services/hls.service';
import { sendFile } from 'src/utils/file';
import { UUIDParamDto } from 'src/validation';
// HTTP surface for real-time HLS streaming of an asset's video: main playlist,
// per-variant media playlist, segment delivery, and session teardown.
@ApiTags(ApiTag.Assets)
@Controller(RouteKey.Asset)
export class VideoStreamController {
  constructor(
    private logger: LoggingRepository,
    private service: HlsService,
  ) {}

  @Get(':id/video/stream/main.m3u8')
  @Authenticated({ permission: Permission.AssetView, sharedLink: true })
  @Header('Cache-Control', 'no-cache')
  @Header('Content-Type', HLS_PLAYLIST_CONTENT_TYPE)
  @ApiProduces(HLS_PLAYLIST_CONTENT_TYPE)
  @Endpoint({
    summary: 'Get HLS main playlist',
    description: 'Returns an HLS main playlist with all available variants for the asset.',
    history: new HistoryBuilder().added('v3').alpha('v3'),
  })
  getMainPlaylist(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto) {
    return this.service.getMainPlaylist(auth, id);
  }

  @Get(':id/video/stream/:sessionId/:variantIndex/playlist.m3u8')
  @Authenticated({ permission: Permission.AssetView, sharedLink: true })
  @Header('Cache-Control', 'no-cache')
  @Header('Content-Type', HLS_PLAYLIST_CONTENT_TYPE)
  @ApiProduces(HLS_PLAYLIST_CONTENT_TYPE)
  @Endpoint({
    summary: 'Get HLS media playlist',
    description: 'Returns an HLS media playlist for one variant of the streaming session.',
    history: new HistoryBuilder().added('v3').alpha('v3'),
  })
  // variantIndex is validated by the DTO but not forwarded — presumably the
  // media playlist (segment names/durations) is identical across variants;
  // TODO(review): confirm against HlsService.getMediaPlaylist.
  getMediaPlaylist(@Auth() auth: AuthDto, @Param() { id, sessionId }: HlsVariantParamDto) {
    return this.service.getMediaPlaylist(auth, id, sessionId);
  }

  @Get(':id/video/stream/:sessionId/:variantIndex/:filename')
  @FileResponse()
  @Authenticated({ permission: Permission.AssetView, sharedLink: true })
  @Endpoint({
    summary: 'Get HLS segment or init file',
    description: 'Streams an HLS init segment (init.mp4) or media segment (seg_N.m4s).',
    history: new HistoryBuilder().added('v3').alpha('v3'),
  })
  async getSegment(
    @Auth() auth: AuthDto,
    @Param() { id, sessionId, variantIndex, filename }: HlsSegmentParamDto,
    @Res() res: Response,
    @Next() next: NextFunction,
  ) {
    // sendFile handles headers/streaming and routes errors through next().
    await sendFile(res, next, () => this.service.getSegment(auth, id, sessionId, variantIndex, filename), this.logger);
  }

  @Delete(':id/video/stream/:sessionId')
  @HttpCode(HttpStatus.NO_CONTENT)
  @Authenticated({ permission: Permission.AssetView, sharedLink: true })
  @Endpoint({
    summary: 'End HLS streaming session',
    description: 'Releases server resources for the streaming session.',
    history: new HistoryBuilder().added('v3').alpha('v3'),
  })
  async endSession(@Auth() auth: AuthDto, @Param() { id, sessionId }: HlsSessionParamDto) {
    await this.service.endSession(auth, id, sessionId);
  }
}
+40
View File
@@ -18,6 +18,7 @@ import { MoveRepository } from 'src/repositories/move.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { VideoInterfaces } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { getConfig } from 'src/utils/config';
@@ -34,6 +35,10 @@ export interface MoveRequest {
// Minimal asset identity needed to build owner-scoped storage paths.
export type ThumbnailPathEntity = { id: string; ownerId: string };
// Locators for per-session and per-variant HLS working folders.
export type HlsSessionFolder = { ownerId: string; sessionId: string };
export type HlsVariantFolder = { ownerId: string; sessionId: string; variantIndex: number };
export type ImagePathOptions = { fileType: AssetFileType; format: ImageFormat | RawExtractedFormat; isEdited: boolean };
let instance: StorageCore | null;
@@ -124,6 +129,14 @@ export class StorageCore {
return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${asset.id}.mp4`);
}
  // Folder holding all HLS working files for one streaming session,
  // nested under the owner's encoded-video storage folder.
  static getHlsSessionFolder({ ownerId, sessionId }: HlsSessionFolder) {
    return StorageCore.getNestedPath(StorageFolder.EncodedVideo, ownerId, sessionId);
  }
static getHlsVariantFolder({ ownerId, sessionId, variantIndex }: HlsVariantFolder) {
return join(StorageCore.getHlsSessionFolder({ ownerId, sessionId }), variantIndex.toString());
}
  // Path for the extracted Android motion-photo video (suffix "-MP.mp4").
  static getAndroidMotionPath(asset: ThumbnailPathEntity, uuid: string) {
    return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${uuid}-MP.mp4`);
  }
@@ -299,6 +312,11 @@ export class StorageCore {
return this.storageRepository.removeEmptyDirs(StorageCore.getBaseFolder(folder));
}
  // Probe hardware transcoding interfaces: DRI device nodes plus Mali OpenCL
  // availability, queried in parallel.
  async getVideoInterfaces(): Promise<VideoInterfaces> {
    const [dri, mali] = await Promise.all([this.getDevices(), this.hasMaliOpenCL()]);
    return { dri, mali };
  }
private savePath(pathType: PathType, id: string, newPath: string) {
switch (pathType) {
case AssetPathType.Original: {
@@ -330,4 +348,26 @@ export class StorageCore {
  // Random, collision-safe temp-file path inside the given directory.
  static getTempPathInDir(dir: string): string {
    return join(dir, `${randomUUID()}.tmp`);
  }
  // List DRI render/card nodes; an unreadable or missing /dev/dri simply means
  // no devices (best-effort probe, not an error).
  private async getDevices() {
    try {
      return await this.storageRepository.readdir('/dev/dri');
    } catch {
      this.logger.debug('No devices found in /dev/dri.');
      return [];
    }
  }
  // Detect a usable Mali OpenCL stack: both the ICD registration file and the
  // /dev/mali0 character device must exist. Any stat failure means "no OpenCL",
  // in which case RKMPP falls back to CPU tonemapping.
  private async hasMaliOpenCL() {
    try {
      const [maliIcdStat, maliDeviceStat] = await Promise.all([
        this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd'),
        this.storageRepository.stat('/dev/mali0'),
      ]);
      return maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
    } catch {
      this.logger.debug('OpenCL not available for transcoding, so RKMPP acceleration will use CPU tonemapping');
      return false;
    }
  }
}
+26
View File
@@ -0,0 +1,26 @@
import { createZodDto } from 'nestjs-zod';
import z from 'zod';
// Path params for endpoints scoped to an asset + streaming session.
const HlsSessionParamSchema = z.object({
  id: z.uuidv4(),
  sessionId: z.uuidv4(),
});

export class HlsSessionParamDto extends createZodDto(HlsSessionParamSchema) {}

// Adds the variant index (position in the variant ladder), coerced from the
// path-segment string to a non-negative integer.
const HlsVariantParamSchema = z.object({
  id: z.uuidv4(),
  sessionId: z.uuidv4(),
  variantIndex: z.coerce.number().int().min(0),
});

export class HlsVariantParamDto extends createZodDto(HlsVariantParamSchema) {}

// Additionally restricts the filename to exactly "init.mp4" or "seg_N.m4s",
// which also rules out path traversal in the file lookup.
const HlsSegmentParamSchema = z.object({
  id: z.uuidv4(),
  sessionId: z.uuidv4(),
  variantIndex: z.coerce.number().int().min(0),
  filename: z.string().regex(/^(init\.mp4|seg_\d+\.m4s)$/, { error: 'Invalid HLS segment filename' }),
});

export class HlsSegmentParamDto extends createZodDto(HlsSegmentParamSchema) {}
+5
View File
@@ -83,6 +83,11 @@ const SystemConfigFFmpegSchema = z
accel: TranscodeHardwareAccelerationSchema,
accelDecode: configBool.describe('Accelerated decode'),
tonemap: ToneMappingSchema,
realtime: z
.object({
enabled: configBool.describe('Enable real-time HLS transcoding (alpha)'),
})
.meta({ id: 'SystemConfigFFmpegRealtimeDto' }),
})
.meta({ id: 'SystemConfigFFmpegDto' });
+4 -5
View File
@@ -445,11 +445,7 @@ export enum VideoCodec {
export const VideoCodecSchema = z.enum(VideoCodec).describe('Target video codec').meta({ id: 'VideoCodec' });
// The rendered diff left both the removed enum and its replacement in place;
// keep only the post-change form. Subset of VideoCodec used for stored HLS
// segment variants (note: VP9 is not included).
export type VideoSegmentCodec = VideoCodec.Av1 | VideoCodec.Hevc | VideoCodec.H264;
export enum AudioCodec {
Mp3 = 'mp3',
@@ -818,6 +814,8 @@ export enum JobName {
LibrarySyncFiles = 'LibrarySyncFiles',
LibraryScanQueueAll = 'LibraryScanQueueAll',
HlsSessionCleanup = 'HlsSessionCleanup',
MemoryCleanup = 'MemoryCleanup',
MemoryGenerate = 'MemoryGenerate',
@@ -910,6 +908,7 @@ export enum DatabaseLock {
MaintenanceOperation = 621,
MemoryCreation = 777,
VersionCheck = 800,
HlsSessionCleanup = 850,
}
export enum MaintenanceAction {
+255 -2
View File
@@ -7,6 +7,7 @@ from
"video_stream_session"
where
"id" = $1
and "expiresAt" > $2
-- VideoStreamRepository.getVariant
select
@@ -27,11 +28,13 @@ where
-- VideoStreamRepository.getExpiredSessions
select
"id"
"video_stream_session"."id",
"asset"."ownerId"
from
"video_stream_session"
inner join "asset" on "asset"."id" = "video_stream_session"."assetId"
where
"expiresAt" <= $1
"video_stream_session"."expiresAt" <= $1
-- VideoStreamRepository.extendSession
update "video_stream_session"
@@ -44,3 +47,253 @@ where
delete from "video_stream_session"
where
"id" = $1
-- VideoStreamRepository.getForMainPlaylist
select
(
select
to_json(obj)
from
(
select
"asset_video"."index",
"asset_video"."codecName",
"asset_video"."profile",
"asset_video"."level",
"asset_video"."bitrate",
"asset_exif"."exifImageWidth" as "width",
"asset_exif"."exifImageHeight" as "height",
"asset_video"."pixelFormat",
"asset_video"."frameCount",
"asset_exif"."fps" as "frameRate",
"asset_video"."timeBase",
case
when "asset_exif"."orientation" = '6' then -90
when "asset_exif"."orientation" = '8' then 90
when "asset_exif"."orientation" = '3' then 180
else 0
end as "rotation",
"asset_video"."colorPrimaries",
"asset_video"."colorMatrix",
"asset_video"."colorTransfer",
"asset_video"."dvProfile",
"asset_video"."dvLevel",
"asset_video"."dvBlSignalCompatibilityId"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "videoStream",
(
select
to_json(obj)
from
(
select
"asset_keyframe"."pts" as "keyframePts",
"asset_keyframe"."accDuration" as "keyframeAccDuration",
"asset_keyframe"."ownDuration" as "keyframeOwnDuration",
"asset_keyframe"."totalDuration",
"asset_keyframe"."packetCount",
"asset_keyframe"."outputFrames"
from
(
select
1
) as "dummy"
where
"asset_keyframe"."assetId" is not null
) as obj
) as "packets"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
inner join "asset_video" on "asset"."id" = "asset_video"."assetId"
inner join "asset_keyframe" on "asset"."id" = "asset_keyframe"."assetId"
where
"asset"."id" = $1
-- VideoStreamRepository.getForMediaPlaylist
select
(
select
to_json(obj)
from
(
select
"asset_video"."index",
"asset_video"."codecName",
"asset_video"."profile",
"asset_video"."level",
"asset_video"."bitrate",
"asset_exif"."exifImageWidth" as "width",
"asset_exif"."exifImageHeight" as "height",
"asset_video"."pixelFormat",
"asset_video"."frameCount",
"asset_exif"."fps" as "frameRate",
"asset_video"."timeBase",
case
when "asset_exif"."orientation" = '6' then -90
when "asset_exif"."orientation" = '8' then 90
when "asset_exif"."orientation" = '3' then 180
else 0
end as "rotation",
"asset_video"."colorPrimaries",
"asset_video"."colorMatrix",
"asset_video"."colorTransfer",
"asset_video"."dvProfile",
"asset_video"."dvLevel",
"asset_video"."dvBlSignalCompatibilityId"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "videoStream",
(
select
to_json(obj)
from
(
select
"asset_keyframe"."pts" as "keyframePts",
"asset_keyframe"."accDuration" as "keyframeAccDuration",
"asset_keyframe"."ownDuration" as "keyframeOwnDuration",
"asset_keyframe"."totalDuration",
"asset_keyframe"."packetCount",
"asset_keyframe"."outputFrames"
from
(
select
1
) as "dummy"
where
"asset_keyframe"."assetId" is not null
) as obj
) as "packets"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
inner join "video_stream_session" on "asset"."id" = "video_stream_session"."assetId"
inner join "asset_video" on "asset"."id" = "asset_video"."assetId"
inner join "asset_keyframe" on "asset"."id" = "asset_keyframe"."assetId"
where
"asset"."id" = $1
and "video_stream_session"."id" = $2
and "video_stream_session"."expiresAt" > $3
-- VideoStreamRepository.getForTranscoding
select
"asset"."originalPath",
(
select
to_json(obj)
from
(
select
"asset_audio"."index",
"asset_audio"."codecName",
"asset_audio"."profile",
"asset_audio"."bitrate"
from
(
select
1
) as "dummy"
where
"asset_audio"."assetId" is not null
) as obj
) as "audioStream",
(
select
to_json(obj)
from
(
select
"asset_video"."index",
"asset_video"."codecName",
"asset_video"."profile",
"asset_video"."level",
"asset_video"."bitrate",
"asset_exif"."exifImageWidth" as "width",
"asset_exif"."exifImageHeight" as "height",
"asset_video"."pixelFormat",
"asset_video"."frameCount",
"asset_exif"."fps" as "frameRate",
"asset_video"."timeBase",
case
when "asset_exif"."orientation" = '6' then -90
when "asset_exif"."orientation" = '8' then 90
when "asset_exif"."orientation" = '3' then 180
else 0
end as "rotation",
"asset_video"."colorPrimaries",
"asset_video"."colorMatrix",
"asset_video"."colorTransfer",
"asset_video"."dvProfile",
"asset_video"."dvLevel",
"asset_video"."dvBlSignalCompatibilityId"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "videoStream",
(
select
to_json(obj)
from
(
select
"asset_video"."formatName",
"asset_video"."formatLongName",
"asset"."duration",
"asset_video"."bitrate"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "format",
(
select
to_json(obj)
from
(
select
"asset_keyframe"."pts" as "keyframePts",
"asset_keyframe"."accDuration" as "keyframeAccDuration",
"asset_keyframe"."ownDuration" as "keyframeOwnDuration",
"asset_keyframe"."totalDuration",
"asset_keyframe"."packetCount",
"asset_keyframe"."outputFrames"
from
(
select
1
) as "dummy"
where
"asset_keyframe"."assetId" is not null
) as obj
) as "packets"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
left join "asset_audio" on "asset"."id" = "asset_audio"."assetId"
inner join "asset_video" on "asset"."id" = "asset_video"."assetId"
inner join "asset_keyframe" on "asset"."id" = "asset_keyframe"."assetId"
where
"asset"."id" = $1
@@ -92,6 +92,14 @@ type EventMap = {
AuthChangePassword: [{ userId: string; currentSessionId?: string; invalidateSessions?: boolean }];
// hls streaming events
HlsSegmentRequest: [{ sessionId: string; assetId: string; variantIndex: number; segmentIndex: number }];
HlsSegmentResult: [{ sessionId: string; variantIndex: number; segmentIndex: number; error?: string }];
HlsHeartbeat: [{ sessionId: string; variantIndex?: number; segmentIndex?: number }];
HlsSessionRequest: [{ sessionId: string; assetId: string; ownerId: string }];
HlsSessionResult: [{ sessionId: string; error?: string }];
HlsSessionEnd: [{ sessionId: string }];
// websocket events
WebsocketConnect: [{ userId: string }];
};
+29 -4
View File
@@ -509,18 +509,43 @@ export class MediaRepository {
return this.parseInt(b.bit_rate) - this.parseInt(a.bit_rate);
}
/* Ported from https://code.ffmpeg.org/FFmpeg/FFmpeg/src/commit/5c44245878e235ae64fe87fb9877644856d33d1d/fftools/ffmpeg_filter.c
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright (c) FFmpeg authors and contributors — https://ffmpeg.org/
* Modifications: TS port operating on probe-derived packet metadata rather than decoded AVFrames. */
private cfrOutputFrames(packets: { pts: number; duration: number }[], slotsPerTick: number) {
// Packets may be out of PTS order due to B-frames
packets.sort((a, b) => a.pts - b.pts);
const firstPts = packets[0].pts;
let outputFrames = 0;
let nextPts = 0;
const history = [0, 0, 0];
for (const pkt of packets) {
const delta = (pkt.pts - firstPts) * slotsPerTick - nextPts + pkt.duration * slotsPerTick;
const nb = delta < -1.1 ? 0 : delta > 1.1 ? Math.round(delta) : 1;
const syncIpts = (pkt.pts - firstPts) * slotsPerTick;
const duration = pkt.duration * slotsPerTick;
let delta0 = syncIpts - nextPts;
const delta = delta0 + duration;
if (delta0 < 0 && delta > 0) {
delta0 = 0;
}
let nb = 1;
let nbPrev = 0;
if (delta < -1.1) {
nb = 0;
} else if (delta > 1.1) {
nb = Math.round(delta);
if (delta0 > 1.1) {
nbPrev = Math.round(delta0 - 0.6);
}
}
outputFrames += nb;
nextPts += nb;
history[2] = history[1];
history[1] = history[0];
history[0] = nbPrev;
}
return outputFrames;
const median = history.sort((a, b) => a - b)[1];
return outputFrames + median;
}
}
@@ -1,12 +1,10 @@
import { Injectable } from '@nestjs/common';
// Keep only the post-change import — ChildProcessWithoutNullStreams is no
// longer referenced once spawn became a passthrough field.
import { fork, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { Duplex } from 'node:stream';
@Injectable()
export class ProcessRepository {
spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
return spawn(command, args, options);
}
spawn = spawn;
spawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
let stdinClosed = false;
+11 -1
View File
@@ -2,7 +2,15 @@ import { Injectable } from '@nestjs/common';
import archiver from 'archiver';
import chokidar, { ChokidarOptions } from 'chokidar';
import { escapePath, glob, globStream } from 'fast-glob';
// Keep only the post-change import — the old single-line form was left
// interleaved by the diff rendering; `watch` is newly required for watchDir.
import {
  constants,
  createReadStream,
  createWriteStream,
  existsSync,
  mkdirSync,
  ReadOptionsWithBuffer,
  watch,
} from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
@@ -258,6 +266,8 @@ export class StorageRepository {
return () => watcher.close();
}
watchDir = watch; // Native fs.watch without chokidar overhead
private asGlob(pathToCrawl: string): string {
const escapedPath = escapePath(pathToCrawl).replaceAll('"', '["]').replaceAll("'", "[']").replaceAll('`', '[`]');
const extensions = `*{${mimeTypes.getSupportedFileExtensions().join(',')}}`;
@@ -8,6 +8,7 @@ import {
VideoStreamSessionTable,
VideoStreamVariantTable,
} from 'src/schema/tables/video-stream.table';
import { withAudioStream, withVideoFormat, withVideoPackets, withVideoStream } from 'src/utils/database';
@Injectable()
export class VideoStreamRepository {
@@ -27,7 +28,12 @@ export class VideoStreamRepository {
@GenerateSql({ params: [DummyValue.UUID] })
getSession(id: string) {
return this.db.selectFrom('video_stream_session').selectAll().where('id', '=', id).executeTakeFirst();
return this.db
.selectFrom('video_stream_session')
.selectAll()
.where('id', '=', id)
.where('expiresAt', '>', new Date())
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
@@ -47,7 +53,12 @@ export class VideoStreamRepository {
@GenerateSql()
getExpiredSessions() {
return this.db.selectFrom('video_stream_session').select(['id']).where('expiresAt', '<=', new Date()).execute();
return this.db
.selectFrom('video_stream_session')
.innerJoin('asset', 'asset.id', 'video_stream_session.assetId')
.select(['video_stream_session.id', 'asset.ownerId'])
.where('video_stream_session.expiresAt', '<=', new Date())
.execute();
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.DATE] })
@@ -59,4 +70,50 @@ export class VideoStreamRepository {
  // Remove a streaming session row; idempotent (no error if already deleted).
  async deleteSession(id: string) {
    await this.db.deleteFrom('video_stream_session').where('id', '=', id).execute();
  }
  // Video-stream metadata and keyframe/packet stats needed to build the main
  // playlist. Inner joins mean assets without probe data return undefined.
  @GenerateSql({ params: [DummyValue.UUID] })
  async getForMainPlaylist(id: string) {
    return this.db
      .selectFrom('asset')
      .innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
      .where('asset.id', '=', id)
      .innerJoin('asset_video', 'asset.id', 'asset_video.assetId')
      .innerJoin('asset_keyframe', 'asset.id', 'asset_keyframe.assetId')
      .select((eb) => withVideoStream(eb).$notNull().as('videoStream'))
      .select((eb) => withVideoPackets(eb).$notNull().as('packets'))
      .executeTakeFirst();
  }
  // Same metadata as the main playlist, but additionally requires a live
  // (unexpired) streaming session bound to the asset; returns undefined when
  // the session is missing, expired, or belongs to another asset.
  @GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
  async getForMediaPlaylist(id: string, sessionId: string) {
    return this.db
      .selectFrom('asset')
      .innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
      .innerJoin('video_stream_session', 'asset.id', 'video_stream_session.assetId')
      .where('asset.id', '=', id)
      .where('video_stream_session.id', '=', sessionId)
      .where('video_stream_session.expiresAt', '>', new Date())
      .innerJoin('asset_video', 'asset.id', 'asset_video.assetId')
      .innerJoin('asset_keyframe', 'asset.id', 'asset_keyframe.assetId')
      .select((eb) => withVideoStream(eb).$notNull().as('videoStream'))
      .select((eb) => withVideoPackets(eb).$notNull().as('packets'))
      .executeTakeFirst();
  }
  // Everything the transcoder needs in one query: source path, audio stream
  // (optional — hence the left join, audioStream may be null), video stream,
  // container format, and keyframe/packet stats.
  @GenerateSql({ params: [DummyValue.UUID] })
  async getForTranscoding(id: string) {
    return this.db
      .selectFrom('asset')
      .innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
      .where('asset.id', '=', id)
      .leftJoin('asset_audio', 'asset.id', 'asset_audio.assetId')
      .innerJoin('asset_video', 'asset.id', 'asset_video.assetId')
      .innerJoin('asset_keyframe', 'asset.id', 'asset_keyframe.assetId')
      .select('asset.originalPath')
      .select((eb) => withAudioStream(eb).as('audioStream'))
      .select((eb) => withVideoStream(eb).$notNull().as('videoStream'))
      .select((eb) => withVideoFormat(eb).$notNull().as('format'))
      .select((eb) => withVideoPackets(eb).$notNull().as('packets'))
      .executeTakeFirst();
  }
}
@@ -16,7 +16,16 @@ import { AppRestartEvent, ArgsOf, EventRepository } from 'src/repositories/event
import { LoggingRepository } from 'src/repositories/logging.repository';
import { handlePromiseError } from 'src/utils/misc';
// Names of server-to-server events, now including the HLS streaming events.
// (The diff view left both the removed two-element declaration and the added
// one — a duplicate `const` — so this keeps only the replacement.)
export const serverEvents = [
  'ConfigUpdate',
  'AppRestart',
  'HlsSegmentRequest',
  'HlsSegmentResult',
  'HlsHeartbeat',
  'HlsSessionRequest',
  'HlsSessionResult',
  'HlsSessionEnd',
] as const;
export type ServerEvents = (typeof serverEvents)[number];
export interface ClientEventMap {
+2 -9
View File
@@ -1,12 +1,5 @@
import { registerEnum } from '@immich/sql-tools';
// Keep only the post-change import: VideoSegmentCodec (removed enum) is
// replaced by referencing VideoCodec members directly.
import { AlbumUserRole, AssetStatus, AssetVisibility, ChecksumAlgorithm, SourceType, VideoCodec } from 'src/enum';
export const album_user_role_enum = registerEnum({
name: 'album_user_role_enum',
@@ -35,5 +28,5 @@ export const asset_checksum_algorithm_enum = registerEnum({
// Postgres enum backing the HLS variant codec column. Values are listed
// explicitly now that VideoSegmentCodec is a type alias (no runtime Object.values).
// (The diff view left both old and new `values:` lines; this keeps the replacement.)
export const video_stream_variant_codec_enum = registerEnum({
  name: 'video_stream_variant_codec_enum',
  values: [VideoCodec.Av1, VideoCodec.Hevc, VideoCodec.H264],
});
+333
View File
@@ -0,0 +1,333 @@
import { BadRequestException, NotFoundException } from '@nestjs/common';
import { TranscodeHardwareAcceleration } from 'src/enum';
import { HlsService } from 'src/services/hls.service';
import { eiffelTower, train, waterfall } from 'test/fixtures/media.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
// EXTINF values come from FFmpeg's playlist to enforce an exact match
const eiffelExpectedMediaPlaylist = `#EXTM3U
#EXT-X-VERSION:6
#EXT-X-TARGETDURATION:2
#EXT-X-MEDIA-SEQUENCE:0
#EXT-X-PLAYLIST-TYPE:VOD
#EXT-X-MAP:URI="init.mp4"
#EXTINF:2.007222,
seg_0.m4s
#EXTINF:2.007222,
seg_1.m4s
#EXTINF:2.007222,
seg_2.m4s
#EXTINF:2.007222,
seg_3.m4s
#EXTINF:2.007222,
seg_4.m4s
#EXTINF:2.007222,
seg_5.m4s
#EXTINF:2.007222,
seg_6.m4s
#EXTINF:2.007222,
seg_7.m4s
#EXTINF:2.007222,
seg_8.m4s
#EXTINF:2.007222,
seg_9.m4s
#EXTINF:2.007222,
seg_10.m4s
#EXTINF:0.281011,
seg_11.m4s
#EXT-X-ENDLIST
`;
const waterfallExpectedMediaPlaylist = `#EXTM3U
#EXT-X-VERSION:6
#EXT-X-TARGETDURATION:2
#EXT-X-MEDIA-SEQUENCE:0
#EXT-X-PLAYLIST-TYPE:VOD
#EXT-X-MAP:URI="init.mp4"
#EXTINF:2.011405,
seg_0.m4s
#EXTINF:2.011405,
seg_1.m4s
#EXTINF:2.011405,
seg_2.m4s
#EXTINF:2.011405,
seg_3.m4s
#EXTINF:2.011405,
seg_4.m4s
#EXTINF:0.301711,
seg_5.m4s
#EXT-X-ENDLIST
`;
const trainExpectedMediaPlaylist = `#EXTM3U
#EXT-X-VERSION:6
#EXT-X-TARGETDURATION:2
#EXT-X-MEDIA-SEQUENCE:0
#EXT-X-PLAYLIST-TYPE:VOD
#EXT-X-MAP:URI="init.mp4"
#EXTINF:2.000000,
seg_0.m4s
#EXTINF:2.000000,
seg_1.m4s
#EXTINF:2.000000,
seg_2.m4s
#EXTINF:2.000000,
seg_3.m4s
#EXTINF:2.000000,
seg_4.m4s
#EXTINF:2.000000,
seg_5.m4s
#EXTINF:2.000000,
seg_6.m4s
#EXTINF:2.000000,
seg_7.m4s
#EXTINF:2.000000,
seg_8.m4s
#EXTINF:2.000000,
seg_9.m4s
#EXTINF:1.733333,
seg_10.m4s
#EXT-X-ENDLIST
`;
const sessionId = '00000000-0000-0000-0000-000000000000';
const eiffelExpectedMasterDisabled = `#EXTM3U
#EXT-X-VERSION:6
#EXT-X-STREAM-INF:BANDWIDTH=1000000,RESOLUTION=480x852,CODECS="av01.0.04M.08,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/0/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=1200000,RESOLUTION=480x852,CODECS="hvc1.1.6.L90.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/1/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=480x852,CODECS="avc1.64001e,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/2/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2000000,RESOLUTION=720x1280,CODECS="av01.0.08M.08,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/3/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=720x1280,CODECS="hvc1.1.6.L93.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/4/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=5000000,RESOLUTION=720x1280,CODECS="avc1.64001f,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/5/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=4000000,RESOLUTION=1080x1920,CODECS="av01.0.09M.08,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/6/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=4500000,RESOLUTION=1080x1920,CODECS="hvc1.1.6.L120.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/7/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=8000000,RESOLUTION=1080x1920,CODECS="avc1.640028,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/8/playlist.m3u8
`;
const eiffelExpectedMasterRkmpp = `#EXTM3U
#EXT-X-VERSION:6
#EXT-X-STREAM-INF:BANDWIDTH=1200000,RESOLUTION=480x852,CODECS="hvc1.1.6.L90.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/1/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=480x852,CODECS="avc1.64001e,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/2/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=720x1280,CODECS="hvc1.1.6.L93.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/4/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=5000000,RESOLUTION=720x1280,CODECS="avc1.64001f,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/5/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=4500000,RESOLUTION=1080x1920,CODECS="hvc1.1.6.L120.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/7/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=8000000,RESOLUTION=1080x1920,CODECS="avc1.640028,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=24.910
${sessionId}/8/playlist.m3u8
`;
const waterfallExpectedMasterDisabled = `#EXTM3U
#EXT-X-VERSION:6
#EXT-X-STREAM-INF:BANDWIDTH=1000000,RESOLUTION=480x852,CODECS="av01.0.04M.08,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/0/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=1200000,RESOLUTION=480x852,CODECS="hvc1.1.6.L90.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/1/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=480x852,CODECS="avc1.64001e,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/2/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2000000,RESOLUTION=720x1280,CODECS="av01.0.08M.08,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/3/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=720x1280,CODECS="hvc1.1.6.L93.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/4/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=5000000,RESOLUTION=720x1280,CODECS="avc1.64001f,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/5/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=4000000,RESOLUTION=1080x1920,CODECS="av01.0.09M.08,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/6/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=4500000,RESOLUTION=1080x1920,CODECS="hvc1.1.6.L120.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/7/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=8000000,RESOLUTION=1080x1920,CODECS="avc1.640028,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/8/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=7000000,RESOLUTION=1440x2560,CODECS="av01.0.12M.08,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/9/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=8000000,RESOLUTION=1440x2560,CODECS="hvc1.2.4.L150.B0,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/10/playlist.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=16000000,RESOLUTION=1440x2560,CODECS="avc1.640032,mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=29.830
${sessionId}/11/playlist.m3u8
`;
describe(HlsService.name, () => {
let sut: HlsService;
let mocks: ServiceMocks;
beforeEach(() => {
({ sut, mocks } = newTestService(HlsService));
});
describe('getMainPlaylist', () => {
  const auth = factory.auth();
  const assetId = 'asset-1';
  // Arranges a happy-path session: access granted, realtime transcoding enabled with the
  // given acceleration, stream info available, and a deterministic session id. The websocket
  // mock simulates the microservices worker acking the session request on the next microtask,
  // which unblocks getMainPlaylist's internal wait.
  const setup = (asset: typeof eiffelTower | typeof waterfall, accel: TranscodeHardwareAcceleration) => {
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetId]));
    mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { realtime: { enabled: true }, accel } });
    mocks.videoStream.getForMainPlaylist.mockResolvedValue(asset);
    mocks.crypto.randomUUID.mockReturnValue(sessionId);
    mocks.websocket.serverSend.mockImplementation((event, ...rest) => {
      if (event === 'HlsSessionRequest') {
        const { sessionId: id } = rest[0] as { sessionId: string };
        // Defer the ack so the service is already awaiting the session result when it arrives.
        queueMicrotask(() => sut.onSessionResult({ sessionId: id }));
      }
    });
  };
  it('returns main playlist for eiffel-tower (1080p portrait, no acceleration)', async () => {
    setup(eiffelTower, TranscodeHardwareAcceleration.Disabled);
    await expect(sut.getMainPlaylist(auth, assetId)).resolves.toBe(eiffelExpectedMasterDisabled);
  });
  it('returns main playlist for eiffel-tower with RKMPP (no AV1 variants)', async () => {
    // RKMPP does not support AV1, so those variants must be filtered out of the master playlist.
    setup(eiffelTower, TranscodeHardwareAcceleration.Rkmpp);
    await expect(sut.getMainPlaylist(auth, assetId)).resolves.toBe(eiffelExpectedMasterRkmpp);
  });
  it('returns main playlist for waterfall (4K landscape) with no acceleration', async () => {
    setup(waterfall, TranscodeHardwareAcceleration.Disabled);
    await expect(sut.getMainPlaylist(auth, assetId)).resolves.toBe(waterfallExpectedMasterDisabled);
  });
  it('throws BadRequestException when realtime transcoding is disabled', async () => {
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetId]));
    mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { realtime: { enabled: false } } });
    await expect(sut.getMainPlaylist(auth, assetId)).rejects.toBeInstanceOf(BadRequestException);
  });
  it('throws NotFoundException when asset is not yet ready for streaming', async () => {
    // getForMainPlaylist is not mocked here, so the asset lookup resolves to undefined.
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetId]));
    mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { realtime: { enabled: true } } });
    await expect(sut.getMainPlaylist(auth, assetId)).rejects.toBeInstanceOf(NotFoundException);
  });
});
describe('getMediaPlaylist', () => {
  const auth = factory.auth();
  const assetId = 'asset-1';
  // Each fixture pairs probed stream/packet metadata with the media playlist FFmpeg itself
  // would produce for it, so the generated playlist is checked byte-for-byte.
  const fixtures = [
    { data: eiffelTower, playlist: eiffelExpectedMediaPlaylist },
    { data: waterfall, playlist: waterfallExpectedMediaPlaylist },
    { data: train, playlist: trainExpectedMediaPlaylist },
  ];
  it.each(fixtures)('matches FFmpeg for $data.originalPath', async ({ data, playlist }) => {
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetId]));
    mocks.videoStream.getForMediaPlaylist.mockResolvedValue(data);
    await expect(sut.getMediaPlaylist(auth, assetId, sessionId)).resolves.toBe(playlist);
  });
  it('throws NotFoundException when the session/asset cannot be loaded', async () => {
    // getForMediaPlaylist is not mocked, so the lookup resolves to undefined.
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetId]));
    await expect(sut.getMediaPlaylist(auth, assetId, sessionId)).rejects.toBeInstanceOf(NotFoundException);
  });
});
describe('getSegment', () => {
  const auth = factory.auth();
  const assetId = 'asset-1';
  const variantIndex = 0;
  beforeEach(() => {
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetId]));
    mocks.videoStream.getSession.mockResolvedValue({ id: sessionId, assetId } as never);
    // Segment already exists on disk, so getSegment returns without waiting on the transcoder.
    mocks.storage.checkFileExists.mockResolvedValue(true);
  });
  it('emits HlsHeartbeat with segmentIndex 0 for the first init.mp4 request', async () => {
    await sut.getSegment(auth, assetId, sessionId, variantIndex, 'init.mp4');
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsHeartbeat', {
      sessionId,
      variantIndex,
      segmentIndex: 0,
    });
  });
  it('emits HlsHeartbeat with the parsed segment number for seg_K.m4s', async () => {
    await sut.getSegment(auth, assetId, sessionId, variantIndex, 'seg_5.m4s');
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsHeartbeat', {
      sessionId,
      variantIndex,
      segmentIndex: 5,
    });
  });
  it('returns lastRequested + 1 for init.mp4 after a segment has been served', async () => {
    // init.mp4 carries no index of its own; the service infers the player's next segment.
    await sut.getSegment(auth, assetId, sessionId, variantIndex, 'seg_5.m4s');
    mocks.websocket.serverSend.mockClear();
    await sut.getSegment(auth, assetId, sessionId, variantIndex, 'init.mp4');
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsHeartbeat', {
      sessionId,
      variantIndex,
      segmentIndex: 6,
    });
  });
  it('updates lastRequested on a backward-seek segment request', async () => {
    // Requesting seg_3 after seg_5 must rewind the tracked position, not keep the high-water mark.
    await sut.getSegment(auth, assetId, sessionId, variantIndex, 'seg_5.m4s');
    await sut.getSegment(auth, assetId, sessionId, variantIndex, 'seg_3.m4s');
    mocks.websocket.serverSend.mockClear();
    await sut.getSegment(auth, assetId, sessionId, variantIndex, 'init.mp4');
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsHeartbeat', {
      sessionId,
      variantIndex,
      segmentIndex: 4,
    });
  });
  it('tracks segment state per session independently', async () => {
    await sut.getSegment(auth, assetId, 'session-a', variantIndex, 'seg_5.m4s');
    await sut.getSegment(auth, assetId, 'session-b', variantIndex, 'seg_2.m4s');
    mocks.websocket.serverSend.mockClear();
    await sut.getSegment(auth, assetId, 'session-a', variantIndex, 'init.mp4');
    await sut.getSegment(auth, assetId, 'session-b', variantIndex, 'init.mp4');
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsHeartbeat', {
      sessionId: 'session-a',
      variantIndex,
      segmentIndex: 6,
    });
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsHeartbeat', {
      sessionId: 'session-b',
      variantIndex,
      segmentIndex: 3,
    });
  });
  it('rejects pending waiters for the previous variant on variant change', async () => {
    // First request blocks (file missing); switching variants should fail that wait
    // instead of leaving it hanging until timeout.
    mocks.storage.checkFileExists.mockResolvedValueOnce(false);
    const pending = sut.getSegment(auth, assetId, sessionId, 0, 'seg_1.m4s');
    // Let the pending request register its waiter before switching variants.
    await new Promise((resolve) => setImmediate(resolve));
    await sut.getSegment(auth, assetId, sessionId, 1, 'seg_1.m4s');
    await expect(pending).rejects.toThrow('Variant changed');
  });
  it('throws NotFoundException when the session does not exist', async () => {
    mocks.videoStream.getSession.mockReset();
    await expect(sut.getSegment(auth, assetId, sessionId, variantIndex, 'init.mp4')).rejects.toBeInstanceOf(
      NotFoundException,
    );
  });
});
describe('endSession', () => {
  it('emits HlsSessionEnd', async () => {
    // Ending a session only needs asset access; the actual teardown is performed by
    // whichever microservices worker receives the broadcast event.
    const authDto = factory.auth();
    const ownedAssetId = 'asset-1';
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([ownedAssetId]));
    await sut.endSession(authDto, ownedAssetId, sessionId);
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsSessionEnd', { sessionId });
  });
});
});
+198
View File
@@ -0,0 +1,198 @@
import { BadRequestException, Injectable, NotFoundException } from '@nestjs/common';
import { constants } from 'node:fs';
import { join } from 'node:path';
import {
HLS_SEGMENT_DURATION,
HLS_SEGMENT_FILENAME_REGEX,
HLS_VARIANTS,
HLS_VERSION,
SUPPORTED_HWA_CODECS,
} from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import { CacheControl, ImmichWorker, Permission } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { VideoPacketInfo, VideoStreamInfo } from 'src/types';
import { PendingEvents } from 'src/utils/event';
import { ImmichFileResponse } from 'src/utils/file';
import { getOutputSize } from 'src/utils/media';
// Probed video stream metadata plus packet statistics, used to derive playlist timing.
type AssetWithStreamInfo = { videoStream: VideoStreamInfo & { timeBase: number }; packets: VideoPacketInfo };
// Per-session playback state kept on the API worker; fields are null until first observed.
type ApiSession = { lastRequestedSegment: number | null; lastVariantIndex: number | null };
@Injectable()
export class HlsService extends BaseService {
  // Waiters for in-flight segment transcodes, keyed by `sessionId:variantIndex:segmentIndex`.
  private pendingSegments = new PendingEvents<'HlsSegmentResult'>({ timeoutMs: 15_000 });
  // Waiters for session-creation acks from the microservices worker, keyed by sessionId.
  private pendingSessions = new PendingEvents<'HlsSessionResult'>({ timeoutMs: 5000 });
  // Per-session playback state tracked on this API worker.
  private sessions = new Map<string, ApiSession>();

  /** Resolves the pending session wait; on error, drops local state and fails that session's segment waiters. */
  @OnEvent({ name: 'HlsSessionResult', server: true, workers: [ImmichWorker.Api] })
  onSessionResult(event: ArgOf<'HlsSessionResult'>) {
    this.pendingSessions.complete(event.sessionId, event);
    if (event.error) {
      this.sessions.delete(event.sessionId);
      this.pendingSegments.rejectByPrefix(`${event.sessionId}:`, event.error);
    }
  }

  /** Clears session tracking and rejects any outstanding segment waiters for the ended session. */
  @OnEvent({ name: 'HlsSessionEnd', server: true, workers: [ImmichWorker.Api] })
  onSessionEnd({ sessionId }: ArgOf<'HlsSessionEnd'>) {
    this.sessions.delete(sessionId);
    this.pendingSegments.rejectByPrefix(`${sessionId}:`, 'Session ended');
  }

  /** Wakes the request waiting on exactly this session/variant/segment. */
  @OnEvent({ name: 'HlsSegmentResult', server: true, workers: [ImmichWorker.Api] })
  onSegmentResult(event: ArgOf<'HlsSegmentResult'>) {
    this.pendingSegments.complete(this.getSegmentKey(event), event);
  }

  /**
   * Creates a transcoding session and returns the master (multi-variant) playlist text.
   *
   * @throws BadRequestException when real-time transcoding is disabled in the system config.
   * @throws NotFoundException when the asset has no stream info yet, or no variant is supported.
   */
  async getMainPlaylist(auth: AuthDto, assetId: string) {
    await this.requireAccess({ auth, permission: Permission.AssetView, ids: [assetId] });
    const { ffmpeg } = await this.getConfig({ withCache: true });
    if (!ffmpeg.realtime.enabled) {
      throw new BadRequestException('Real-time transcoding is not enabled');
    }
    const asset = await this.videoStreamRepository.getForMainPlaylist(assetId);
    if (!asset) {
      throw new NotFoundException('Asset is not yet ready for streaming');
    }
    // Sharing the sessionId allows only one microservices worker to successfully insert to the session table.
    // The microservices worker that creates a session owns the transcoding lifecycle for it.
    const sessionId = this.cryptoRepository.randomUUID();
    this.websocketRepository.serverSend('HlsSessionRequest', { sessionId, assetId, ownerId: auth.user.id });
    await this.pendingSessions.wait(sessionId);
    this.trackSession(sessionId);
    return this.generateMainPlaylist(sessionId, ffmpeg, asset);
  }

  /**
   * Returns the media (per-variant) playlist for an existing session.
   *
   * @throws NotFoundException when the asset/session combination cannot be loaded.
   */
  async getMediaPlaylist(auth: AuthDto, assetId: string, sessionId: string) {
    await this.requireAccess({ auth, permission: Permission.AssetView, ids: [assetId] });
    const asset = await this.videoStreamRepository.getForMediaPlaylist(assetId, sessionId);
    if (!asset) {
      throw new NotFoundException('Asset not found or not yet ready for streaming');
    }
    return this.generateMediaPlaylist(asset);
  }

  /**
   * Serves an init.mp4 or seg_K.m4s file for a session's variant, waiting for the
   * transcoder to produce the segment when it is not on disk yet.
   *
   * @throws NotFoundException when the session does not exist.
   * @throws BadRequestException when the filename is not a recognized segment name.
   */
  async getSegment(auth: AuthDto, assetId: string, sessionId: string, variantIndex: number, filename: string) {
    await this.requireAccess({ auth, permission: Permission.AssetView, ids: [assetId] });
    const session = await this.videoStreamRepository.getSession(sessionId);
    if (!session) {
      throw new NotFoundException('Session not found');
    }
    const variantDir = StorageCore.getHlsVariantFolder({ ownerId: auth.user.id, sessionId, variantIndex });
    const path = join(variantDir, filename);
    const response = new ImmichFileResponse({
      path,
      contentType: 'video/mp4',
      cacheControl: CacheControl.PrivateWithCache,
    });
    const apiSession = this.trackSession(sessionId, variantIndex);
    const segmentIndex = this.getSegmentIndex(apiSession, filename);
    // Heartbeat on every request so the transcoder can extend its lease and manage backpressure.
    this.websocketRepository.serverSend('HlsHeartbeat', { sessionId, variantIndex, segmentIndex });
    if (await this.storageRepository.checkFileExists(path, constants.R_OK)) {
      return response;
    }
    // Not on disk yet: ask the transcoder for it and block until the segment-complete event arrives.
    this.websocketRepository.serverSend('HlsSegmentRequest', { sessionId, assetId, variantIndex, segmentIndex });
    await this.pendingSegments.wait(this.getSegmentKey({ sessionId, variantIndex, segmentIndex }));
    return response;
  }

  /** Broadcasts a session-end event; the owning microservices worker performs the teardown. */
  async endSession(auth: AuthDto, assetId: string, sessionId: string): Promise<void> {
    await this.requireAccess({ auth, permission: Permission.AssetView, ids: [assetId] });
    this.websocketRepository.serverSend('HlsSessionEnd', { sessionId });
  }

  /** Builds the master playlist, filtering variants by source resolution and hardware codec support. */
  private generateMainPlaylist(sessionId: string, ffmpeg: SystemConfigFFmpegDto, asset: AssetWithStreamInfo) {
    const fps = ((asset.packets.packetCount * asset.videoStream.timeBase) / asset.packets.totalDuration).toFixed(3);
    const sourceResolution = Math.min(asset.videoStream.height, asset.videoStream.width);
    // Always allow at least the lowest-resolution variant, even for tiny sources.
    const targetResolution = Math.max(sourceResolution, HLS_VARIANTS[0].resolution);
    const lines = ['#EXTM3U', `#EXT-X-VERSION:${HLS_VERSION}`];
    for (let i = 0; i < HLS_VARIANTS.length; i++) {
      const { resolution, bitrate, codec, codecString } = HLS_VARIANTS[i];
      // Skip variants that would upscale, or whose codec the configured acceleration cannot encode.
      if (resolution > targetResolution || !SUPPORTED_HWA_CODECS[ffmpeg.accel].includes(codec)) {
        continue;
      }
      const { width, height } = getOutputSize(asset.videoStream, resolution);
      lines.push(
        `#EXT-X-STREAM-INF:BANDWIDTH=${bitrate},RESOLUTION=${width}x${height},CODECS="${codecString},mp4a.40.2",VIDEO-RANGE=SDR,FRAME-RATE=${fps}`,
        `${sessionId}/${i}/playlist.m3u8`,
      );
    }
    lines.push('');
    // 2 header lines + the trailing '' = 3 means no variant survived the filter above.
    if (lines.length === 3) {
      throw new NotFoundException('No supported variants for this video');
    }
    return lines.join('\n');
  }

  /** Builds a VOD media playlist whose segment durations mirror what FFmpeg will emit. */
  private generateMediaPlaylist({ videoStream, packets }: AssetWithStreamInfo) {
    const fps = (packets.packetCount * videoStream.timeBase) / packets.totalDuration;
    // FFmpeg cuts on whole frames, so segments are a whole number of frames long.
    const framesPerSegment = Math.ceil(HLS_SEGMENT_DURATION * fps);
    const fullSegmentDuration = framesPerSegment / fps;
    const segmentCount = Math.ceil(packets.outputFrames / framesPerSegment);
    // The final segment holds whatever frames remain and is usually shorter.
    const lastSegmentFrames = packets.outputFrames - framesPerSegment * (segmentCount - 1);
    const lastSegmentDuration = lastSegmentFrames / fps;
    const lines = [
      '#EXTM3U',
      `#EXT-X-VERSION:${HLS_VERSION}`,
      `#EXT-X-TARGETDURATION:${HLS_SEGMENT_DURATION}`,
      '#EXT-X-MEDIA-SEQUENCE:0',
      '#EXT-X-PLAYLIST-TYPE:VOD',
      '#EXT-X-MAP:URI="init.mp4"',
    ];
    for (let i = 0; i < segmentCount - 1; i++) {
      lines.push(`#EXTINF:${fullSegmentDuration.toFixed(6)},`, `seg_${i}.m4s`);
    }
    lines.push(`#EXTINF:${lastSegmentDuration.toFixed(6)},`, `seg_${segmentCount - 1}.m4s`, '#EXT-X-ENDLIST', '');
    return lines.join('\n');
  }

  /** Key used to correlate segment requests with their completion events. */
  private getSegmentKey({ sessionId, variantIndex, segmentIndex }: ArgOf<'HlsSegmentResult'>) {
    return `${sessionId}:${variantIndex}:${segmentIndex}`;
  }

  /**
   * Maps a requested filename to a segment index. init.mp4 has no index, so the next
   * expected segment is inferred from the last one requested (or 0 on a fresh session).
   * seg_K.m4s requests also update the tracked position, covering backward seeks.
   */
  private getSegmentIndex(session: ApiSession, filename: string) {
    if (filename.endsWith('.mp4')) {
      return (session.lastRequestedSegment ?? -1) + 1;
    }
    // Guard the match instead of asserting non-null: a malformed filename now yields a
    // 400 rather than an unhandled TypeError.
    // NOTE(review): assumes HLS_SEGMENT_FILENAME_REGEX has no `g` flag (exec would be stateful) — confirm.
    const match = HLS_SEGMENT_FILENAME_REGEX.exec(filename);
    if (!match) {
      throw new BadRequestException('Invalid segment filename');
    }
    const segmentIndex = Number.parseInt(match[1], 10);
    session.lastRequestedSegment = segmentIndex;
    return segmentIndex;
  }

  /**
   * Returns (creating if needed) the local state for a session. When the requested variant
   * differs from the last one, any waiters still pending on the old variant are rejected so
   * players switching renditions do not hang on stale requests.
   */
  private trackSession(id: string, variantIndex: number | null = null) {
    const session = this.sessions.get(id);
    if (!session) {
      const newSession = { lastRequestedSegment: null, lastVariantIndex: variantIndex };
      this.sessions.set(id, newSession);
      return newSession;
    }
    if (session.lastVariantIndex !== null && session.lastVariantIndex !== variantIndex) {
      this.pendingSegments.rejectByPrefix(`${id}:${session.lastVariantIndex}:`, 'Variant changed');
    }
    session.lastVariantIndex = variantIndex;
    return session;
  }
}
+4
View File
@@ -11,6 +11,7 @@ import { DatabaseBackupService } from 'src/services/database-backup.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
import { DuplicateService } from 'src/services/duplicate.service';
import { HlsService } from 'src/services/hls.service';
import { JobService } from 'src/services/job.service';
import { LibraryService } from 'src/services/library.service';
import { MaintenanceService } from 'src/services/maintenance.service';
@@ -39,6 +40,7 @@ import { SystemMetadataService } from 'src/services/system-metadata.service';
import { TagService } from 'src/services/tag.service';
import { TelemetryService } from 'src/services/telemetry.service';
import { TimelineService } from 'src/services/timeline.service';
import { TranscodingService } from 'src/services/transcoding.service';
import { TrashService } from 'src/services/trash.service';
import { UserAdminService } from 'src/services/user-admin.service';
import { UserService } from 'src/services/user.service';
@@ -60,6 +62,7 @@ export const services = [
DatabaseService,
DownloadService,
DuplicateService,
HlsService,
JobService,
LibraryService,
MaintenanceService,
@@ -88,6 +91,7 @@ export const services = [
TagService,
TelemetryService,
TimelineService,
TranscodingService,
TrashService,
UserAdminService,
UserService,
+3 -25
View File
@@ -13,6 +13,7 @@ import {
AudioCodec,
Colorspace,
ImageFormat,
ImmichWorker,
JobName,
JobStatus,
QueueName,
@@ -60,10 +61,9 @@ type ThumbnailAsset = NonNullable<Awaited<ReturnType<AssetJobRepository['getForG
export class MediaService extends BaseService {
videoInterfaces: VideoInterfaces = { dri: [], mali: false };
@OnEvent({ name: 'AppBootstrap' })
@OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.Microservices] })
async onBootstrap() {
const [dri, mali] = await Promise.all([this.getDevices(), this.hasMaliOpenCL()]);
this.videoInterfaces = { dri, mali };
this.videoInterfaces = await this.storageCore.getVideoInterfaces();
}
@OnJob({ name: JobName.AssetGenerateThumbnailsQueueAll, queue: QueueName.ThumbnailGeneration })
@@ -789,28 +789,6 @@ export class MediaService extends BaseService {
return extractedSize >= targetSize;
}
// Lists DRI render nodes available for hardware transcoding; empty when /dev/dri is absent.
private async getDevices() {
  try {
    return await this.storageRepository.readdir('/dev/dri');
  } catch {
    // Directory missing or unreadable — treat as "no hardware devices".
    this.logger.debug('No devices found in /dev/dri.');
    return [];
  }
}
// Detects Mali OpenCL support by checking both the ICD registration file and the device node.
private async hasMaliOpenCL() {
  try {
    const [maliIcdStat, maliDeviceStat] = await Promise.all([
      this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd'),
      this.storageRepository.stat('/dev/mali0'),
    ]);
    return maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
  } catch {
    // Either path missing — fall back to CPU tonemapping for RKMPP.
    this.logger.debug('OpenCL not available for transcoding, so RKMPP acceleration will use CPU tonemapping');
    return false;
  }
}
private async syncFiles(
oldFiles: (AssetFile & { isProgressive: boolean; isTransparent: boolean })[],
newFiles: UpsertFileOptions[],
@@ -41,6 +41,7 @@ describe(QueueService.name, () => {
{ name: JobName.PersonCleanup },
{ name: JobName.MemoryCleanup },
{ name: JobName.SessionCleanup },
{ name: JobName.HlsSessionCleanup },
{ name: JobName.AuditTableCleanup },
{ name: JobName.MemoryGenerate },
{ name: JobName.UserSyncUsage },
+1
View File
@@ -269,6 +269,7 @@ export class QueueService extends BaseService {
{ name: JobName.PersonCleanup },
{ name: JobName.MemoryCleanup },
{ name: JobName.SessionCleanup },
{ name: JobName.HlsSessionCleanup },
{ name: JobName.AuditTableCleanup },
);
}
@@ -72,6 +72,9 @@ const updatedConfig = Object.freeze<SystemConfig>({
accel: TranscodeHardwareAcceleration.Disabled,
accelDecode: false,
tonemap: ToneMapping.Hable,
realtime: {
enabled: false,
},
},
logging: {
enabled: true,
@@ -0,0 +1,516 @@
import {
HLS_BACKPRESSURE_PAUSE_SEGMENTS,
HLS_BACKPRESSURE_RESUME_SEGMENTS,
HLS_CLEANUP_INTERVAL_MS,
HLS_INACTIVITY_TIMEOUT_MS,
HLS_LEASE_DURATION_MS,
} from 'src/constants';
import { TranscodingService } from 'src/services/transcoding.service';
import { VIDEO_STREAM_SESSION_PK_CONSTRAINT } from 'src/utils/database';
import { eiffelTower, train, waterfall } from 'test/fixtures/media.stub';
import { mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { vi } from 'vitest';
describe(TranscodingService.name, () => {
let sut: TranscodingService;
let mocks: ServiceMocks;
const sessionId = 'session-1';
const assetId = 'asset-1';
const ownerId = 'user-1';
const completeSegment = (index: number) => {
  // Fire the directory-watch callback as if FFmpeg had just renamed a finished segment into place.
  const watchCallback = vi.mocked(mocks.storage.watchDir).mock.lastCall?.[1];
  expect(watchCallback).toBeDefined();
  watchCallback!('rename', `seg_${index}.m4s`);
};
const completeSegmentsThrough = (first: number, last: number) => {
  // Simulate FFmpeg finishing segments first..last, inclusive and in order.
  let index = first;
  while (index <= last) {
    completeSegment(index);
    index += 1;
  }
};
beforeEach(() => {
({ sut, mocks } = newTestService(TranscodingService));
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { realtime: { enabled: true } } });
mocks.videoStream.getForTranscoding.mockResolvedValue(eiffelTower);
});
describe('onSessionRequest', () => {
  it('creates the session row and emits HlsSessionResult on success', async () => {
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    expect(mocks.videoStream.createSession).toHaveBeenCalledWith({
      id: sessionId,
      assetId,
      expiresAt: expect.any(Date),
    });
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsSessionResult', { sessionId });
  });
  it('treats a primary-key conflict as a no-op for replay tolerance', async () => {
    // Another worker already inserted this session id; the loser must stay silent so the
    // winner's ack is the only HlsSessionResult emitted.
    mocks.videoStream.createSession.mockRejectedValue({ constraint_name: VIDEO_STREAM_SESSION_PK_CONSTRAINT });
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    expect(mocks.websocket.serverSend).not.toHaveBeenCalled();
  });
  it('emits HlsSessionResult with an error on other DB failures', async () => {
    mocks.videoStream.createSession.mockRejectedValue(new Error('database is down'));
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsSessionResult', {
      sessionId,
      error: 'Failed to create HLS session',
    });
  });
});
describe('onSessionEnd', () => {
  it('removes the session, kills the transcode, and deletes the dir + DB row', async () => {
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    // Start a transcode so there is a live child process to kill on teardown.
    const process = mockSpawn(0, '', '');
    mocks.process.spawn.mockReturnValue(process);
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 0 });
    await sut.onSessionEnd({ sessionId });
    expect(process.kill).toHaveBeenCalled();
    expect(mocks.storage.unlinkDir).toHaveBeenCalled();
    expect(mocks.videoStream.deleteSession).toHaveBeenCalledWith(sessionId);
  });
  it('is a no-op when the session is unknown', async () => {
    await sut.onSessionEnd({ sessionId: 'never-created' });
    expect(mocks.videoStream.deleteSession).not.toHaveBeenCalled();
    expect(mocks.storage.unlinkDir).not.toHaveBeenCalled();
  });
});
describe('onHeartbeat', () => {
  it('extends the DB lease when remaining time falls below half', async () => {
    vi.useFakeTimers();
    try {
      await sut.onSessionRequest({ sessionId, assetId, ownerId });
      // Jump just past the half-life of the lease so the heartbeat triggers a renewal.
      vi.setSystemTime(Date.now() + HLS_LEASE_DURATION_MS / 2 + 1);
      await sut.onHeartbeat({ sessionId });
      expect(mocks.videoStream.extendSession).toHaveBeenCalledWith(sessionId, expect.any(Date));
    } finally {
      vi.useRealTimers();
    }
  });
  it('does not extend the lease while it is still fresh', async () => {
    // A heartbeat immediately after creation is well within the half-life — no DB write.
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    await sut.onHeartbeat({ sessionId });
    expect(mocks.videoStream.extendSession).not.toHaveBeenCalled();
  });
  it('is a no-op when the session is unknown', async () => {
    await sut.onHeartbeat({ sessionId: 'never-created' });
    expect(mocks.videoStream.extendSession).not.toHaveBeenCalled();
  });
});
describe('onSegmentRequest', () => {
  beforeEach(async () => {
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    // Clear the HlsSessionResult emission so assertions only see segment-driven sends.
    mocks.websocket.serverSend.mockClear();
  });
  it('spawns FFmpeg on the first request', async () => {
    mocks.process.spawn.mockReturnValue(mockSpawn(0, '', ''));
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 0 });
    expect(mocks.process.spawn).toHaveBeenCalledTimes(1);
    expect(mocks.process.spawn).toHaveBeenCalledWith('ffmpeg', expect.any(Array), expect.any(Object));
  });
  it('kills and respawns when the variant changes', async () => {
    // A rendition switch requires different encoder settings, so the old process must die.
    const first = mockSpawn(0, '', '');
    const second = mockSpawn(0, '', '');
    mocks.process.spawn.mockReturnValueOnce(first).mockReturnValueOnce(second);
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 0 });
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 1, segmentIndex: 0 });
    expect(first.kill).toHaveBeenCalled();
    expect(mocks.process.spawn).toHaveBeenCalledTimes(2);
  });
  it('kills and respawns when seeking before the start segment', async () => {
    // FFmpeg cannot produce output behind its seek point; a backward seek needs a restart.
    const first = mockSpawn(0, '', '');
    const second = mockSpawn(0, '', '');
    mocks.process.spawn.mockReturnValueOnce(first).mockReturnValueOnce(second);
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 5 });
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 2 });
    expect(first.kill).toHaveBeenCalled();
    expect(mocks.process.spawn).toHaveBeenCalledTimes(2);
  });
  it('kills and respawns when the requested segment is too far ahead', async () => {
    // Re-seeking forward beats transcoding everything in between.
    const first = mockSpawn(0, '', '');
    const second = mockSpawn(0, '', '');
    mocks.process.spawn.mockReturnValueOnce(first).mockReturnValueOnce(second);
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 0 });
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 5 });
    expect(first.kill).toHaveBeenCalled();
    expect(mocks.process.spawn).toHaveBeenCalledTimes(2);
  });
  it('does not spawn when the session is unknown', async () => {
    await sut.onSegmentRequest({ sessionId: 'never-created', assetId, variantIndex: 0, segmentIndex: 0 });
    expect(mocks.process.spawn).not.toHaveBeenCalled();
  });
});
describe('backpressure', () => {
  let proc: ReturnType<typeof mockSpawn>;
  beforeEach(async () => {
    proc = mockSpawn(0, '', '');
    mocks.process.spawn.mockReturnValue(proc);
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    // Start a transcode at segment 0; individual tests then complete segments to build up lead.
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 0 });
  });
  it('pauses the transcode once the lead exceeds HLS_BACKPRESSURE_PAUSE_SEGMENTS', async () => {
    completeSegmentsThrough(0, HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1);
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    // Pausing is done with SIGSTOP so FFmpeg keeps its state and can resume instantly.
    expect(proc.kill).toHaveBeenCalledWith('SIGSTOP');
  });
  it('does not pause when the lead equals the pause threshold', async () => {
    // The threshold is exclusive: lead must strictly exceed it before pausing.
    completeSegmentsThrough(0, HLS_BACKPRESSURE_PAUSE_SEGMENTS);
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    expect(proc.kill).not.toHaveBeenCalled();
  });
  it('resumes once the lead drops below HLS_BACKPRESSURE_RESUME_SEGMENTS', async () => {
    completeSegmentsThrough(0, HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1);
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    expect(proc.kill).toHaveBeenCalledWith('SIGSTOP');
    vi.mocked(proc.kill).mockClear();
    // Advance the player far enough that the remaining lead is below the resume threshold.
    const requested = HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1 - (HLS_BACKPRESSURE_RESUME_SEGMENTS - 1);
    await sut.onHeartbeat({ sessionId, segmentIndex: requested });
    expect(proc.kill).toHaveBeenCalledWith('SIGCONT');
  });
  it('stays paused while the lead is in the dead-band', async () => {
    // Between the resume and pause thresholds there is hysteresis: no signal either way.
    completeSegmentsThrough(0, HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1);
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    vi.mocked(proc.kill).mockClear();
    const requested = HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1 - HLS_BACKPRESSURE_RESUME_SEGMENTS;
    await sut.onHeartbeat({ sessionId, segmentIndex: requested });
    expect(proc.kill).not.toHaveBeenCalled();
  });
  it('is a no-op when no segment has completed yet', async () => {
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    expect(proc.kill).not.toHaveBeenCalled();
  });
  it('is a no-op when the heartbeat omits segmentIndex', async () => {
    // Heartbeats without a segment position (e.g. playlist requests) carry no lead information.
    completeSegmentsThrough(0, HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1);
    await sut.onHeartbeat({ sessionId });
    expect(proc.kill).not.toHaveBeenCalled();
  });
  it('resumes the paused transcode when the client requests the next in-range segment', async () => {
    completeSegmentsThrough(0, HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1);
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    expect(proc.kill).toHaveBeenCalledWith('SIGSTOP');
    vi.mocked(proc.kill).mockClear();
    // The request is within range, so the existing process is continued rather than respawned.
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex: 1 });
    expect(proc.kill).toHaveBeenCalledWith('SIGCONT');
    expect(mocks.process.spawn).toHaveBeenCalledTimes(1);
  });
  it('does not re-pause a freshly spawned transcode after a seek-driven restart', async () => {
    // Backpressure bookkeeping from the old process must not carry over to the new one.
    const newProc = mockSpawn(0, '', '');
    mocks.process.spawn.mockReturnValueOnce(newProc);
    completeSegmentsThrough(0, HLS_BACKPRESSURE_PAUSE_SEGMENTS + 1);
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    expect(proc.kill).toHaveBeenCalledWith('SIGSTOP');
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 1, segmentIndex: 0 });
    vi.mocked(newProc.kill).mockClear();
    await sut.onHeartbeat({ sessionId, segmentIndex: 0 });
    expect(newProc.kill).not.toHaveBeenCalled();
  });
  it('ignores stale segment events from the prior transcode after a backward seek', async () => {
    const newProc = mockSpawn(0, '', '');
    mocks.process.spawn.mockReturnValueOnce(newProc);
    const completedAhead = HLS_BACKPRESSURE_PAUSE_SEGMENTS + 5;
    completeSegmentsThrough(1, completedAhead); // seg_0 was emitted in beforeEach
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 1, segmentIndex: 0 });
    vi.mocked(newProc.kill).mockClear();
    mocks.websocket.serverSend.mockClear();
    // A straggler event from the killed process must not produce a result or a signal…
    completeSegment(completedAhead + 1);
    expect(mocks.websocket.serverSend).not.toHaveBeenCalledWith(
      'HlsSegmentResult',
      expect.objectContaining({ segmentIndex: completedAhead + 1 }),
    );
    expect(newProc.kill).not.toHaveBeenCalled();
    // …while events from the new process flow through normally.
    completeSegment(0);
    expect(mocks.websocket.serverSend).toHaveBeenCalledWith(
      'HlsSegmentResult',
      expect.objectContaining({ segmentIndex: 0 }),
    );
  });
});
describe('inactivity sweeper', () => {
  it('reaps a session whose last activity exceeds the inactivity timeout', async () => {
    vi.useFakeTimers();
    try {
      await sut.onSessionRequest({ sessionId, assetId, ownerId });
      mocks.websocket.serverSend.mockClear();
      // Advance past the timeout plus one sweep interval so the periodic cleanup fires.
      await vi.advanceTimersByTimeAsync(HLS_INACTIVITY_TIMEOUT_MS + HLS_CLEANUP_INTERVAL_MS);
      expect(mocks.websocket.serverSend).toHaveBeenCalledWith('HlsSessionEnd', { sessionId });
      expect(mocks.videoStream.deleteSession).toHaveBeenCalledWith(sessionId);
    } finally {
      vi.useRealTimers();
    }
  });
});
describe('onShutdown', () => {
  it('ends every active session', async () => {
    // Start two independent sessions, then shut the service down — both must be deleted.
    const activeSessionIds = ['session-a', 'session-b'];
    for (const id of activeSessionIds) {
      await sut.onSessionRequest({ sessionId: id, assetId, ownerId });
    }
    await sut.onShutdown();
    for (const id of activeSessionIds) {
      expect(mocks.videoStream.deleteSession).toHaveBeenCalledWith(id);
    }
  });
});
describe('onHlsSessionCleanup', () => {
  it('reaps DB-expired sessions under a database lock', async () => {
    // Run the locked callback inline so the cleanup body actually executes.
    mocks.database.withLock.mockImplementation(async (_, fn) => fn());
    mocks.videoStream.getExpiredSessions.mockResolvedValue([
      { id: 'expired-1', ownerId: 'user-a' },
      { id: 'expired-2', ownerId: 'user-b' },
    ]);
    await sut.onHlsSessionCleanup();
    expect(mocks.videoStream.deleteSession).toHaveBeenCalledWith('expired-1');
    expect(mocks.videoStream.deleteSession).toHaveBeenCalledWith('expired-2');
    // One on-disk directory removal per expired session.
    expect(mocks.storage.unlinkDir).toHaveBeenCalledTimes(2);
  });
});
describe('FFmpeg full command', () => {
  // Arguments shared by every variant: input mapping, GOP sizing, timestamp handling,
  // and the fMP4 HLS muxer configuration.
  const baseCommand = [
    '-nostdin',
    '-nostats',
    '-i',
    'eiffel-tower.mp4',
    '-map',
    '0:0',
    '-map_metadata',
    '-1',
    '-map',
    '0:1',
    '-g',
    '50',
    '-keyint_min',
    '50',
    '-crf',
    '23',
    '-copyts',
    '-r',
    '50130000/2012441',
    '-avoid_negative_ts',
    'disabled',
    '-f',
    'hls',
    '-hls_time',
    '2',
    '-hls_list_size',
    '0',
    '-hls_segment_type',
    'fmp4',
    '-hls_fmp4_init_filename',
    'init.mp4',
    '-hls_segment_options',
    'movflags=+frag_discont',
    '-hls_flags',
    'temp_file',
    '-start_number',
    '0',
  ];
  // Each case carries a `codec` field so the `$codec` placeholder in the test title resolves
  // (previously absent, so every title rendered as "for undefined").
  it.each([
    {
      codec: 'libsvtav1',
      variantIndex: 6,
      expected: [
        ...baseCommand,
        '-c:v',
        'libsvtav1',
        '-c:a',
        'aac',
        '-preset',
        '12',
        '-svtav1-params',
        'hierarchical-levels=3:lookahead=0:enable-tf=0:mbr=4000k',
        '-hls_segment_filename',
        '/data/encoded-video/user-1/se/ss/session-1/6/seg_%d.m4s',
        '/data/encoded-video/user-1/se/ss/session-1/6/playlist.m3u8',
      ].sort(),
    },
    {
      codec: 'hevc',
      variantIndex: 4,
      expected: [
        ...baseCommand,
        '-c:v',
        'hevc',
        '-c:a',
        'aac',
        '-tag:v',
        'hvc1',
        '-preset',
        'ultrafast',
        '-maxrate',
        '2500k',
        '-bufsize',
        '5000k',
        '-x265-params',
        'no-scenecut=1:no-open-gop=1',
        '-vf',
        'scale=720:-2',
        '-hls_segment_filename',
        '/data/encoded-video/user-1/se/ss/session-1/4/seg_%d.m4s',
        '/data/encoded-video/user-1/se/ss/session-1/4/playlist.m3u8',
      ].sort(),
    },
    {
      codec: 'h264',
      variantIndex: 2,
      expected: [
        ...baseCommand,
        '-c:v',
        'h264',
        '-c:a',
        'aac',
        '-preset',
        'ultrafast',
        '-maxrate',
        '2500k',
        '-bufsize',
        '5000k',
        '-sc_threshold:v',
        '0',
        '-vf',
        'scale=480:-2',
        '-hls_segment_filename',
        '/data/encoded-video/user-1/se/ss/session-1/2/seg_%d.m4s',
        '/data/encoded-video/user-1/se/ss/session-1/2/playlist.m3u8',
      ].sort(),
    },
  ])('builds the expected FFmpeg command for $codec (variant $variantIndex)', async ({ variantIndex, expected }) => {
    mocks.process.spawn.mockReturnValue(mockSpawn(0, '', ''));
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex, segmentIndex: 0 });
    // Compare order-insensitively: argument grouping may vary, the set must not.
    expect(mocks.process.spawn.mock.calls[0][1].toSorted()).toEqual(expected);
  });
});
describe('FFmpeg seek per segment', () => {
  // Expected -ss values per segment index K, precomputed from each fixture's frame rate so
  // that seeks land exactly on segment boundaries (segment 0 needs no seek at all).
  const eiffelSeeks = [
    0, 1.987_15, 3.994_372_222_222_222, 6.001_594_444_444_444, 8.008_816_666_666_666, 10.016_038_888_888_888,
    12.023_261_111_111_111, 14.030_483_333_333_333, 16.037_705_555_555_554, 18.044_927_777_777_776,
    20.052_149_999_999_997, 22.059_372_222_222_223,
  ];
  const waterfallSeeks = [
    0, 1.994_642_826_321_467, 4.006_047_357_065_803, 6.017_451_887_810_139_5, 8.028_856_418_554_476,
    10.040_260_949_298_812,
  ];
  const trainSeeks = [
    0, 1.991_666_666_666_666_7, 3.991_666_666_666_666_7, 5.991_666_666_666_666, 7.991_666_666_666_666,
    9.991_666_666_666_667, 11.991_666_666_666_667, 13.991_666_666_666_667, 15.991_666_666_666_667,
    17.991_666_666_666_667, 19.991_666_666_666_667,
  ];
  // Flatten fixtures x segment indices into one parameterized case list.
  const cases = [
    ...eiffelSeeks.map((expected, segmentIndex) => ({
      name: `${eiffelTower.originalPath} K=${segmentIndex}`,
      fixture: eiffelTower,
      segmentIndex,
      expected,
    })),
    ...waterfallSeeks.map((expected, segmentIndex) => ({
      name: `${waterfall.originalPath} K=${segmentIndex}`,
      fixture: waterfall,
      segmentIndex,
      expected,
    })),
    ...trainSeeks.map((expected, segmentIndex) => ({
      name: `${train.originalPath} K=${segmentIndex}`,
      fixture: train,
      segmentIndex,
      expected,
    })),
  ];
  it.each(cases)('$name', async ({ fixture, segmentIndex, expected }) => {
    mocks.videoStream.getForTranscoding.mockResolvedValue(fixture);
    mocks.process.spawn.mockReturnValue(mockSpawn(0, '', ''));
    await sut.onSessionRequest({ sessionId, assetId, ownerId });
    await sut.onSegmentRequest({ sessionId, assetId, variantIndex: 0, segmentIndex });
    const args = mocks.process.spawn.mock.calls[0][1] as string[];
    if (expected === 0) {
      // Segment 0 starts at the beginning: timestamp flags present, but no -ss seek.
      expect(args).toEqual(expect.arrayContaining(['-copyts', '-avoid_negative_ts', 'disabled']));
      expect(args).not.toContain('-ss');
    } else {
      expect(args).toEqual(
        expect.arrayContaining(['-ss', String(expected), '-copyts', '-avoid_negative_ts', 'disabled']),
      );
    }
  });
});
});
+386
View File
@@ -0,0 +1,386 @@
import { Injectable } from '@nestjs/common';
import { ChildProcess } from 'node:child_process';
import { join } from 'node:path';
import {
HLS_BACKPRESSURE_PAUSE_SEGMENTS,
HLS_BACKPRESSURE_RESUME_SEGMENTS,
HLS_CLEANUP_INTERVAL_MS,
HLS_INACTIVITY_TIMEOUT_MS,
HLS_LEASE_DURATION_MS,
HLS_SEGMENT_DURATION,
HLS_SEGMENT_FILENAME_REGEX,
HLS_VARIANTS,
} from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseLock, ImmichWorker, JobName, QueueName, TranscodeTarget } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { VideoInterfaces } from 'src/types';
import { isVideoStreamSessionPkConstraint } from 'src/utils/database';
import { BaseConfig } from 'src/utils/media';
// In-memory state for one live HLS playback session owned by this worker.
type Session = {
  // Asset being transcoded for this session.
  assetId: string;
  // DB lease expiry; extended by heartbeats once less than half the lease remains.
  expiresAt: Date;
  // Session ID; doubles as the map key and the on-disk session folder name.
  id: string;
  // Last heartbeat/segment-request time; used by the inactivity sweep.
  lastActivityTime: Date;
  // Most recent segment index the client asked for (via heartbeat); null until first report.
  lastClientRequestedSegment: number | null;
  // Highest segment index the encoder has fully written (observed via directory rename events).
  lastCompletedSegment: number | null;
  // Owner user ID; used to build the per-user storage path.
  ownerId: string;
  // True while the ffmpeg process is suspended with SIGSTOP (backpressure).
  paused: boolean;
  // Running ffmpeg process, or null when no transcode is active.
  process: ChildProcess | null;
  // First segment of the current transcode run (set on start, reset on seek-restart).
  startSegment: number | null;
  // Variant currently being encoded; switching variants restarts the transcode.
  variantIndex: number | null;
};
@Injectable()
export class TranscodingService extends BaseService {
  // All live sessions on this worker, keyed by session ID.
  private sessions = new Map<string, Session>();
  // Hardware video interfaces probed at bootstrap; feeds HW-accel config selection.
  private videoInterfaces: VideoInterfaces = { dri: [], mali: false };
  // Periodic sweep timer for sessions whose clients stopped heartbeating; lazily
  // created on first session, torn down when the last session ends.
  private cleanupInterval: NodeJS.Timeout | null = null;

  @OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.Microservices] })
  async onBootstrap() {
    // Probe hardware and clean up sessions left over from a previous run, in parallel.
    const [videoInterfaces] = await Promise.all([this.storageCore.getVideoInterfaces(), this.removeExpiredSessions()]);
    this.videoInterfaces = videoInterfaces;
  }

  @OnEvent({ name: 'AppShutdown', workers: [ImmichWorker.Microservices] })
  onShutdown() {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null;
    }
    // End every session: kills ffmpeg, removes the session dir, deletes the DB row.
    return Promise.all([...this.sessions.values()].map(({ id }) => this.onSessionEnd({ sessionId: id })));
  }

  @OnJob({ name: JobName.HlsSessionCleanup, queue: QueueName.BackgroundTask })
  onHlsSessionCleanup() {
    return this.removeExpiredSessions();
  }

  @OnEvent({ name: 'HlsSessionRequest', server: true, workers: [ImmichWorker.Microservices] })
  async onSessionRequest({ assetId, sessionId, ownerId }: ArgOf<'HlsSessionRequest'>) {
    try {
      const expiresAt = new Date(Date.now() + HLS_LEASE_DURATION_MS);
      // The DB row is the cross-worker claim on this session ID (PK-guarded below).
      await this.videoStreamRepository.createSession({ id: sessionId, assetId, expiresAt });
      this.sessions.set(sessionId, {
        assetId,
        expiresAt,
        id: sessionId,
        lastActivityTime: new Date(),
        lastClientRequestedSegment: null,
        lastCompletedSegment: null,
        ownerId,
        paused: false,
        process: null,
        startSegment: null,
        variantIndex: null,
      });
      // Start the inactivity sweep on the first session only.
      this.cleanupInterval ??= setInterval(() => void this.removeInactiveSessions(), HLS_CLEANUP_INTERVAL_MS);
      this.websocketRepository.serverSend('HlsSessionResult', { sessionId });
    } catch (error) {
      // If insertion failed due to a PK constraint, another worker has already created a session for this ID.
      if (!isVideoStreamSessionPkConstraint(error)) {
        this.logger.error(`Failed to create HLS session ${sessionId}: ${error}`);
        this.websocketRepository.serverSend('HlsSessionResult', { sessionId, error: 'Failed to create HLS session' });
      }
    }
  }

  @OnEvent({ name: 'HlsSessionEnd', server: true, workers: [ImmichWorker.Microservices] })
  async onSessionEnd({ sessionId }: ArgOf<'HlsSessionEnd'>) {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return;
    }
    this.sessions.delete(sessionId);
    // Stop the sweep when this was the last session.
    if (this.cleanupInterval && this.sessions.size === 0) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null;
    }
    this.stopTranscode(session);
    await this.removeSessionDir(session);
    await this.videoStreamRepository.deleteSession(sessionId);
  }

  @OnEvent({ name: 'HlsHeartbeat', server: true, workers: [ImmichWorker.Microservices] })
  async onHeartbeat({ sessionId, segmentIndex }: ArgOf<'HlsHeartbeat'>) {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return;
    }
    session.lastActivityTime = new Date();
    if (segmentIndex !== undefined) {
      // Heartbeats carry the client's playhead; use it to pause/resume the encoder.
      session.lastClientRequestedSegment = segmentIndex;
      this.applyBackpressure(session);
    }
    // Renew the DB lease once less than half of it remains.
    const remaining = session.expiresAt.getTime() - Date.now();
    if (remaining < HLS_LEASE_DURATION_MS / 2) {
      session.expiresAt = new Date(Date.now() + HLS_LEASE_DURATION_MS);
      await this.videoStreamRepository.extendSession(sessionId, session.expiresAt);
    }
  }

  @OnEvent({ name: 'HlsSegmentRequest', server: true, workers: [ImmichWorker.Microservices] })
  async onSegmentRequest({ sessionId, variantIndex, segmentIndex }: ArgOf<'HlsSegmentRequest'>) {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return;
    }
    session.variantIndex ??= variantIndex;
    session.startSegment ??= segmentIndex;
    // Next segment the running encoder will complete.
    const curSegment = session.lastCompletedSegment === null ? session.startSegment : session.lastCompletedSegment + 1;
    // Restart on a variant switch, a seek behind the run's start, or a seek more
    // than one segment ahead of the encoder.
    const needsRestart =
      session.variantIndex !== variantIndex || segmentIndex < session.startSegment || segmentIndex > curSegment + 1;
    if (needsRestart) {
      this.stopTranscode(session);
      session.variantIndex = variantIndex;
      session.startSegment = segmentIndex;
    } else if (session.process) {
      // Request is within the active window; just make sure the encoder isn't paused.
      this.resumeTranscode(session);
      return;
    }
    // NOTE(review): on the no-restart path with no live process, startTranscode's
    // guard returns early whenever segmentIndex !== session.startSegment — confirm
    // that is intended (e.g. because a cleanly finished run already wrote the files).
    const process = await this.startTranscode(session, variantIndex, segmentIndex);
    if (process) {
      session.process = process;
    }
  }

  // Suspend the encoder when it runs too far ahead of the client, resume when
  // the client catches back up (hysteresis between the two thresholds).
  private applyBackpressure(session: Session) {
    if (session.lastCompletedSegment === null || session.lastClientRequestedSegment === null) {
      return;
    }
    const lead = session.lastCompletedSegment - session.lastClientRequestedSegment;
    this.logger.debug(`Session ${session.id} lead is ${lead} segments`);
    if (!session.paused && lead > HLS_BACKPRESSURE_PAUSE_SEGMENTS) {
      this.pauseTranscode(session);
    } else if (session.paused && lead < HLS_BACKPRESSURE_RESUME_SEGMENTS) {
      this.resumeTranscode(session);
    }
  }

  // Spawn ffmpeg for one variant starting at startSegment. Returns the child
  // process, or undefined when the session changed under us or setup failed
  // (failures also end the session via failSession).
  private async startTranscode(session: Session, variantIndex: number, startSegment: number) {
    const { ffmpeg } = await this.getConfig({ withCache: true });
    const asset = await this.videoStreamRepository.getForTranscoding(session.assetId);
    if (!asset) {
      this.logger.error(`Asset ${session.assetId} not found for HLS transcoding`);
      return;
    }
    // The awaits above can interleave with newer segment requests; only proceed
    // if this call still matches the session's current variant/start.
    if (session.variantIndex !== variantIndex || session.startSegment !== startSegment) {
      return;
    }
    const variant = HLS_VARIANTS[variantIndex];
    if (!variant) {
      this.logger.error(`Variant ${variantIndex} out of range for asset ${session.assetId}`);
      await this.failSession(session, `Invalid variant index ${variantIndex}`);
      return;
    }
    const variantDir = StorageCore.getHlsVariantFolder({
      ownerId: session.ownerId,
      sessionId: session.id,
      variantIndex,
    });
    this.storageRepository.mkdirSync(variantDir);
    // Encoder runs at fps = packetCount × timeBase / totalDuration with
    // gop = ceil(SEGMENT_DURATION × fps). To start segment K's content at
    // exactly cfr slot K × gop, seek to the midpoint between slots K×gop−1 and
    // K×gop. accurate_seek's "discard < target" then keeps the source frame
    // that quantizes to slot K×gop and discards the one quantizing to K×gop−1.
    const fps = (asset.packets.packetCount * asset.videoStream.timeBase) / asset.packets.totalDuration;
    const gop = Math.ceil(HLS_SEGMENT_DURATION * fps);
    const seekSeconds = startSegment > 0 ? (startSegment * gop - 0.5) / fps : 0;
    let config;
    try {
      config = BaseConfig.create(
        {
          ...ffmpeg,
          targetVideoCodec: variant.codec,
          targetResolution: String(variant.resolution),
          maxBitrate: `${Math.round(variant.bitrate / 1000)}k`,
          gopSize: gop,
        },
        this.videoInterfaces,
        { strictGop: true, lowLatency: true },
      );
    } catch (error: any) {
      // e.g. the configured accelerator does not support the variant's codec.
      this.logger.error(
        `Failed to create transcode config for variant ${variantIndex} asset ${session.assetId}: ${error?.message ?? error}`,
      );
      await this.failSession(session, `Failed to start transcode: ${error?.message ?? 'unknown error'}`);
      return;
    }
    const args = config.getHlsCommand(
      {
        initFilename: 'init.mp4',
        inputPath: asset.originalPath,
        packetCount: asset.packets.packetCount,
        playlistFilename: join(variantDir, 'playlist.m3u8'),
        seekSeconds,
        segmentDuration: HLS_SEGMENT_DURATION,
        segmentFilename: join(variantDir, 'seg_%d.m4s'),
        startSegment,
        target: TranscodeTarget.All,
        timeBase: asset.videoStream.timeBase,
        totalDuration: asset.packets.totalDuration,
      },
      asset.videoStream,
      asset.audioStream ?? undefined,
    );
    this.logger.log(
      `Starting HLS transcode for asset ${session.assetId} variant ${variantIndex} with command: ffmpeg ${args.join(' ')}`,
    );
    const process = this.processRepository.spawn('ffmpeg', args, { stdio: ['ignore', 'ignore', 'pipe'] });
    this.attachProcessHandlers(process, session, variantIndex);
    return process;
  }

  // Report a fatal error to the client, then tear the session down.
  private failSession(session: Session, error: string) {
    this.websocketRepository.serverSend('HlsSessionResult', { sessionId: session.id, error });
    return this.onSessionEnd({ sessionId: session.id });
  }

  // Wire up the directory watcher (segment completion), stderr capture, and
  // exit handling for a freshly spawned ffmpeg process.
  private attachProcessHandlers(process: ChildProcess, session: Session, variantIndex: number) {
    let stderr = '';
    const variantDir = StorageCore.getHlsVariantFolder({
      ownerId: session.ownerId,
      sessionId: session.id,
      variantIndex,
    });
    // hlsenc writes each segment as `seg_K.m4s.tmp` then renames to
    // `seg_K.m4s`. The rename event fires the moment the renamed file is
    // observable — the only signal we need to tell the API worker the
    // segment is ready to serve.
    const watcher = this.storageRepository.watchDir(variantDir, (eventType, filename) => {
      if (eventType !== 'rename' || !filename || session.process !== process) {
        return;
      }
      const match = HLS_SEGMENT_FILENAME_REGEX.exec(filename);
      if (!match) {
        return;
      }
      const segmentIndex = Number.parseInt(match[1]);
      const expected = session.lastCompletedSegment === null ? session.startSegment : session.lastCompletedSegment + 1;
      // Ignore stale events from old process after seek
      if (expected === null || segmentIndex !== expected) {
        return;
      }
      session.lastCompletedSegment = segmentIndex;
      this.websocketRepository.serverSend('HlsSegmentResult', {
        sessionId: session.id,
        variantIndex,
        segmentIndex,
      });
      // The encoder just got one segment further ahead; maybe pause it.
      this.applyBackpressure(session);
    });
    watcher.on('error', (error) => {
      this.logger.error(`watcher error for ${variantDir}: ${error}`);
    });
    process.stderr!.on('data', (chunk: Buffer) => {
      // Only accumulate stderr while this process is still the session's current one.
      if (session.process !== process) {
        return;
      }
      stderr += chunk.toString();
    });
    process.on('exit', (code) => {
      watcher.close();
      // Ignore exits of superseded processes (after a seek/variant restart).
      if (session.process !== process || session.variantIndex !== variantIndex) {
        return;
      }
      session.paused = false;
      session.process = null;
      // Non-zero exit is fatal for the session; code 0 means the encode finished.
      if (code) {
        this.logger.error(
          `FFmpeg exited with code ${code} for variant ${variantIndex} asset ${session.assetId}\n${stderr}`,
        );
        void this.failSession(session, `Transcoding process exited unexpectedly with code ${code}`).catch((error) =>
          this.logger.error(`Failed to end session ${session.id} after ffmpeg exit: ${error}`),
        );
      }
    });
  }

  private stopTranscode(session: Session) {
    if (!session.process) {
      return;
    }
    // SIGKILL on purpose: ffmpeg's graceful (SIGTERM) shutdown renames in-flight
    // .tmp segments to .m4s even when incomplete, which would let truncated
    // segments be served. SIGKILL is also delivered to a SIGSTOP-paused process.
    session.process.kill('SIGKILL');
    session.process = null;
    session.lastCompletedSegment = null;
    session.paused = false;
    this.logger.debug(`Stopped transcoding for session ${session.id}`);
  }

  // Suspend ffmpeg without losing its state; resumeTranscode undoes this.
  private pauseTranscode(session: Session) {
    if (session.paused || !session.process) {
      return;
    }
    session.process.kill('SIGSTOP');
    session.paused = true;
    this.logger.debug(`Paused transcoding for session ${session.id}`);
  }

  private resumeTranscode(session: Session) {
    if (!session.paused || !session.process) {
      return;
    }
    session.process.kill('SIGCONT');
    session.paused = false;
    this.logger.debug(`Resumed transcoding for session ${session.id}`);
  }

  // Delete the session's on-disk folder; a missing folder is only a warning.
  private async removeSessionDir(session: { ownerId: string; id: string }) {
    const dir = StorageCore.getHlsSessionFolder({ ownerId: session.ownerId, sessionId: session.id });
    try {
      await this.storageRepository.unlinkDir(dir, { recursive: true, force: true });
    } catch (error) {
      if ((error as NodeJS.ErrnoException)?.code !== 'ENOENT') {
        throw error;
      }
      this.logger.warn(`Session dir ${dir} does not exist.`);
    }
  }

  // End sessions whose clients have not heartbeated within the inactivity window.
  private removeInactiveSessions() {
    const cutoff = Date.now() - HLS_INACTIVITY_TIMEOUT_MS;
    const inactiveSessions = [...this.sessions.values()].filter((s) => s.lastActivityTime.getTime() < cutoff);
    return Promise.all(
      inactiveSessions.map(async (session) => {
        try {
          // Broadcast the end first so other workers/clients drop the session too.
          this.websocketRepository.serverSend('HlsSessionEnd', { sessionId: session.id });
          await this.onSessionEnd({ sessionId: session.id });
        } catch (error) {
          this.logger.error(`Failed to sweep inactive HLS session ${session.id}: ${error}`);
        }
      }),
    );
  }

  // Cross-worker cleanup of lease-expired sessions; the DB lock ensures only
  // one worker sweeps at a time.
  private removeExpiredSessions() {
    return this.databaseRepository.withLock(DatabaseLock.HlsSessionCleanup, async () => {
      const expiredSessions = await this.videoStreamRepository.getExpiredSessions();
      await Promise.all(
        expiredSessions.map(async (session) => {
          await this.removeSessionDir(session);
          await this.videoStreamRepository.deleteSession(session.id);
        }),
      );
    });
  }
}
+23 -7
View File
@@ -29,7 +29,6 @@ import {
SystemMetadataKey,
TranscodeTarget,
UserMetadataKey,
VideoCodec,
} from 'src/enum';
export type DeepPartial<T> =
@@ -160,6 +159,25 @@ export interface TranscodeCommand {
};
}
export interface VideoTuning {
strictGop: boolean;
lowLatency: boolean;
}
export interface HlsCommandOptions {
initFilename: string;
inputPath: string;
packetCount: number;
playlistFilename: string;
seekSeconds?: number;
segmentDuration: number;
segmentFilename: string;
startSegment: number;
target: TranscodeTarget;
timeBase: number;
totalDuration: number;
}
export interface BitrateDistribution {
max: number;
target: number;
@@ -175,14 +193,11 @@ export interface ImageBuffer {
export interface VideoCodecSWConfig {
getCommand(
target: TranscodeTarget,
videoStream: VideoStreamInfo,
audioStream?: AudioStreamInfo,
video: VideoStreamInfo,
audio?: AudioStreamInfo,
format?: VideoFormat,
): TranscodeCommand;
}
export interface VideoCodecHWConfig extends VideoCodecSWConfig {
getSupportedCodecs(): Array<VideoCodec>;
getHlsCommand(options: HlsCommandOptions, video: VideoStreamInfo, audio?: AudioStreamInfo): string[];
}
export interface ProbeOptions {
@@ -380,6 +395,7 @@ export type JobItem =
// Cleanup
| { name: JobName.SessionCleanup; data?: IBaseJob }
| { name: JobName.HlsSessionCleanup; data?: IBaseJob }
// Tags
| { name: JobName.TagCleanup; data?: IBaseJob }
+6 -3
View File
@@ -71,10 +71,13 @@ export const removeUndefinedKeys = <T extends object>(update: T, template: unkno
};
export const ASSET_CHECKSUM_CONSTRAINT = 'UQ_assets_owner_checksum';
export const VIDEO_STREAM_SESSION_PK_CONSTRAINT = 'video_stream_session_pkey';
export const isAssetChecksumConstraint = (error: unknown) => {
return (error as PostgresError)?.constraint_name === 'UQ_assets_owner_checksum';
};
export const isAssetChecksumConstraint = (error: unknown) =>
(error as PostgresError)?.constraint_name === ASSET_CHECKSUM_CONSTRAINT;
export const isVideoStreamSessionPkConstraint = (error: unknown) =>
(error as PostgresError)?.constraint_name === VIDEO_STREAM_SESSION_PK_CONSTRAINT;
export function withDefaultVisibility<O>(qb: SelectQueryBuilder<DB, 'asset', O>) {
return qb.where('asset.visibility', 'in', [sql.lit(AssetVisibility.Archive), sql.lit(AssetVisibility.Timeline)]);
+50
View File
@@ -0,0 +1,50 @@
import { ArgOf, EmitEvent } from 'src/repositories/event.repository';
/**
 * Tracks in-flight request/response exchanges keyed by string. Multiple callers
 * may wait on the same key; all of them settle together when the key completes,
 * is rejected, or the shared per-key timeout fires.
 */
export class PendingEvents<T extends { [T in EmitEvent]: ArgOf<T> extends { error?: string } ? T : never }[EmitEvent]> {
  // One entry per outstanding key: every waiter's resolvers plus the shared timeout.
  private pending = new Map<string, { completers: PromiseWithResolvers<ArgOf<T>>[]; timeout: NodeJS.Timeout }>();
  private timeoutMs: number;

  constructor({ timeoutMs }: { timeoutMs: number }) {
    this.timeoutMs = timeoutMs;
  }

  /** Returns a promise that settles when `key` completes (or times out). */
  wait(key: string): Promise<ArgOf<T>> {
    const waiter = Promise.withResolvers<ArgOf<T>>();
    const entry = this.pending.get(key);
    if (entry === undefined) {
      // First waiter for this key starts the shared timeout clock.
      this.pending.set(key, {
        completers: [waiter],
        timeout: setTimeout(() => this.complete(key, { error: 'Request timed out' }), this.timeoutMs),
      });
    } else {
      entry.completers.push(waiter);
    }
    return waiter.promise;
  }

  /** Settles every waiter on `key`: rejects if `value` carries an error, resolves otherwise. */
  complete(key: string, value: ArgOf<T> | { error: string }) {
    const entry = this.pending.get(key);
    if (!entry) {
      return;
    }
    clearTimeout(entry.timeout);
    this.pending.delete(key);
    if ('error' in value) {
      const failure = new Error(value.error);
      for (const { reject } of entry.completers) {
        reject(failure);
      }
      return;
    }
    for (const { resolve } of entry.completers) {
      resolve(value);
    }
  }

  /** Rejects every pending key that starts with `prefix` with the given error message. */
  rejectByPrefix(prefix: string, error: string) {
    const matching = [...this.pending.keys()].filter((key) => key.startsWith(prefix));
    for (const key of matching) {
      this.complete(key, { error });
    }
  }
}
+184 -136
View File
@@ -1,4 +1,4 @@
import { AUDIO_ENCODER } from 'src/constants';
import { AUDIO_ENCODER, SUPPORTED_HWA_CODECS } from 'src/constants';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
ColorMatrix,
@@ -13,38 +13,56 @@ import {
import {
AudioStreamInfo,
BitrateDistribution,
HlsCommandOptions,
TranscodeCommand,
VideoCodecHWConfig,
VideoCodecSWConfig,
VideoFormat,
VideoInterfaces,
VideoStreamInfo,
VideoTuning,
} from 'src/types';
export const isVideoRotated = (videoStream: VideoStreamInfo): boolean => Math.abs(videoStream.rotation) === 90;
export const isVideoVertical = (videoStream: VideoStreamInfo): boolean =>
videoStream.height > videoStream.width || isVideoRotated(videoStream);
export const getOutputSize = (videoStream: VideoStreamInfo, targetRes: number) => {
const factor = Math.max(videoStream.height, videoStream.width) / Math.min(videoStream.height, videoStream.width);
let larger = Math.round(targetRes * factor);
if (larger % 2 !== 0) {
larger -= 1;
}
return isVideoVertical(videoStream) ? { width: targetRes, height: larger } : { width: larger, height: targetRes };
};
export class BaseConfig implements VideoCodecSWConfig {
readonly presets = ['veryslow', 'slower', 'slow', 'medium', 'fast', 'faster', 'veryfast', 'superfast', 'ultrafast'];
protected constructor(protected config: SystemConfigFFmpegDto) {}
protected constructor(
protected config: SystemConfigFFmpegDto,
protected tune: VideoTuning = { strictGop: false, lowLatency: false },
) {}
static create(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces): VideoCodecSWConfig {
static create(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces, tune?: VideoTuning) {
if (config.accel === TranscodeHardwareAcceleration.Disabled) {
return this.getSWCodecConfig(config);
return this.getSWCodecConfig(config, tune);
}
return this.getHWCodecConfig(config, interfaces);
return this.getHWCodecConfig(config, interfaces, tune);
}
private static getSWCodecConfig(config: SystemConfigFFmpegDto) {
private static getSWCodecConfig(config: SystemConfigFFmpegDto, tune?: VideoTuning): VideoCodecSWConfig {
switch (config.targetVideoCodec) {
case VideoCodec.H264: {
return new H264Config(config);
return new H264Config(config, tune);
}
case VideoCodec.Hevc: {
return new HEVCConfig(config);
return new HEVCConfig(config, tune);
}
case VideoCodec.Vp9: {
return new VP9Config(config);
return new VP9Config(config, tune);
}
case VideoCodec.Av1: {
return new AV1Config(config);
return new AV1Config(config, tune);
}
default: {
throw new Error(`Codec '${config.targetVideoCodec}' is unsupported`);
@@ -52,72 +70,122 @@ export class BaseConfig implements VideoCodecSWConfig {
}
}
private static getHWCodecConfig(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces) {
let handler: VideoCodecHWConfig;
private static getHWCodecConfig(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces, tune?: VideoTuning) {
if (!SUPPORTED_HWA_CODECS[config.accel].includes(config.targetVideoCodec)) {
throw new Error(
`${config.accel.toUpperCase()} acceleration does not support codec '${config.targetVideoCodec.toUpperCase()}'. Supported codecs: ${SUPPORTED_HWA_CODECS[config.accel]}`,
);
}
let handler: VideoCodecSWConfig;
switch (config.accel) {
case TranscodeHardwareAcceleration.Nvenc: {
handler = config.accelDecode
? new NvencHwDecodeConfig(config, interfaces)
: new NvencSwDecodeConfig(config, interfaces);
? new NvencHwDecodeConfig(config, interfaces, tune)
: new NvencSwDecodeConfig(config, interfaces, tune);
break;
}
case TranscodeHardwareAcceleration.Qsv: {
handler = config.accelDecode
? new QsvHwDecodeConfig(config, interfaces)
: new QsvSwDecodeConfig(config, interfaces);
? new QsvHwDecodeConfig(config, interfaces, tune)
: new QsvSwDecodeConfig(config, interfaces, tune);
break;
}
case TranscodeHardwareAcceleration.Vaapi: {
handler = config.accelDecode
? new VaapiHwDecodeConfig(config, interfaces)
: new VaapiSwDecodeConfig(config, interfaces);
? new VaapiHwDecodeConfig(config, interfaces, tune)
: new VaapiSwDecodeConfig(config, interfaces, tune);
break;
}
case TranscodeHardwareAcceleration.Rkmpp: {
handler = config.accelDecode
? new RkmppHwDecodeConfig(config, interfaces)
: new RkmppSwDecodeConfig(config, interfaces);
? new RkmppHwDecodeConfig(config, interfaces, tune)
: new RkmppSwDecodeConfig(config, interfaces, tune);
break;
}
default: {
throw new Error(`${config.accel.toUpperCase()} acceleration is unsupported`);
}
}
if (!handler.getSupportedCodecs().includes(config.targetVideoCodec)) {
throw new Error(
`${config.accel.toUpperCase()} acceleration does not support codec '${config.targetVideoCodec.toUpperCase()}'. Supported codecs: ${handler.getSupportedCodecs()}`,
);
}
return handler;
}
getCommand(
target: TranscodeTarget,
videoStream: VideoStreamInfo,
audioStream?: AudioStreamInfo,
format?: VideoFormat,
) {
getCommand(target: TranscodeTarget, video: VideoStreamInfo, audio?: AudioStreamInfo, format?: VideoFormat) {
const options = {
inputOptions: this.getBaseInputOptions(videoStream, format),
outputOptions: [...this.getBaseOutputOptions(target, videoStream, audioStream), '-v', 'verbose'],
inputOptions: this.getBaseInputOptions(video, format),
outputOptions: [
...this.getBaseOutputOptions(target, video, audio),
...this.getPresetOptions(),
...this.getBitrateOptions(),
...this.getEncoderOptions(),
'-movflags',
'faststart',
'-fps_mode',
'passthrough',
'-v',
'verbose',
],
twoPass: this.eligibleForTwoPass(),
progress: { frameCount: videoStream.frameCount, percentInterval: 5 },
progress: { frameCount: video.frameCount, percentInterval: 5 },
} as TranscodeCommand;
if ([TranscodeTarget.All, TranscodeTarget.Video].includes(target)) {
const filters = this.getFilterOptions(videoStream);
const filters = this.getFilterOptions(video);
if (filters.length > 0) {
options.outputOptions.push('-vf', filters.join(','));
}
}
options.outputOptions.push(
return options;
}
getHlsCommand(options: HlsCommandOptions, video: VideoStreamInfo, audio?: AudioStreamInfo) {
const args: string[] = this.getBaseInputOptions(video);
if (options.seekSeconds) {
args.push('-ss', String(options.seekSeconds));
}
args.push(
'-nostdin',
'-nostats',
'-i',
options.inputPath,
...this.getBaseOutputOptions(options.target, video, audio),
...this.getPresetOptions(),
...this.getOutputThreadOptions(),
...this.getBitrateOptions(),
...this.getEncoderOptions(),
'-copyts',
'-r',
`${options.packetCount * options.timeBase}/${options.totalDuration}`,
'-avoid_negative_ts',
'disabled',
'-f',
'hls',
'-hls_time',
String(options.segmentDuration),
'-hls_list_size',
'0',
'-hls_segment_type',
'fmp4',
'-hls_fmp4_init_filename',
options.initFilename,
'-hls_segment_options',
'movflags=+frag_discont',
'-hls_flags',
'temp_file',
'-hls_segment_filename',
options.segmentFilename,
'-start_number',
String(options.startSegment),
);
return options;
if ([TranscodeTarget.All, TranscodeTarget.Video].includes(options.target)) {
const filters = this.getFilterOptions(video);
if (filters.length > 0) {
args.push('-vf', filters.join(','));
}
}
args.push(options.playlistFilename);
return args;
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
@@ -129,23 +197,7 @@ export class BaseConfig implements VideoCodecSWConfig {
const videoCodec = [TranscodeTarget.All, TranscodeTarget.Video].includes(target) ? this.getVideoCodec() : 'copy';
const audioCodec = [TranscodeTarget.All, TranscodeTarget.Audio].includes(target) ? this.getAudioEncoder() : 'copy';
const options = [
'-c:v',
videoCodec,
'-c:a',
audioCodec,
// Makes a second pass moving the moov atom to the
// beginning of the file for improved playback speed.
'-movflags',
'faststart',
'-fps_mode',
'passthrough',
'-map',
`0:${videoStream.index}`,
'-map_metadata',
'-1',
];
const options = ['-c:v', videoCodec, '-c:a', audioCodec, '-map', `0:${videoStream.index}`, '-map_metadata', '-1'];
if (audioStream) {
options.push('-map', `0:${audioStream.index}`);
}
@@ -157,18 +209,22 @@ export class BaseConfig implements VideoCodecSWConfig {
}
if (this.getGopSize() > 0) {
options.push('-g', `${this.getGopSize()}`);
if (this.tune.strictGop) {
options.push('-keyint_min', `${this.getGopSize()}`);
}
}
if (
this.config.targetVideoCodec === VideoCodec.Hevc &&
(videoCodec !== 'copy' || videoStream.codecName === 'hevc')
) {
const isHvc = (videoCodec === 'copy' ? videoStream.codecName : videoCodec) === VideoCodec.Hevc;
if (isHvc) {
options.push('-tag:v', 'hvc1');
}
return options;
}
getEncoderOptions(): string[] {
return [];
}
getFilterOptions(videoStream: VideoStreamInfo) {
const options = [];
if (this.shouldScale(videoStream)) {
@@ -272,25 +328,7 @@ export class BaseConfig implements VideoCodecSWConfig {
getScaling(videoStream: VideoStreamInfo, mult = 2) {
const targetResolution = this.getTargetResolution(videoStream);
return this.isVideoVertical(videoStream) ? `${targetResolution}:-${mult}` : `-${mult}:${targetResolution}`;
}
getSize(videoStream: VideoStreamInfo) {
const smaller = this.getTargetResolution(videoStream);
const factor = Math.max(videoStream.height, videoStream.width) / Math.min(videoStream.height, videoStream.width);
let larger = Math.round(smaller * factor);
if (larger % 2 !== 0) {
larger -= 1;
}
return this.isVideoVertical(videoStream) ? { width: smaller, height: larger } : { width: larger, height: smaller };
}
isVideoRotated(videoStream: VideoStreamInfo) {
return Math.abs(videoStream.rotation) === 90;
}
isVideoVertical(videoStream: VideoStreamInfo) {
return videoStream.height > videoStream.width || this.isVideoRotated(videoStream);
return isVideoVertical(videoStream) ? `${targetResolution}:-${mult}` : `-${mult}:${targetResolution}`;
}
isBitrateConstrained() {
@@ -353,23 +391,18 @@ export class BaseConfig implements VideoCodecSWConfig {
}
}
export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
export class BaseHWConfig extends BaseConfig {
protected device: string;
protected interfaces: VideoInterfaces;
constructor(
protected config: SystemConfigFFmpegDto,
interfaces: VideoInterfaces,
protected interfaces: VideoInterfaces,
tune?: VideoTuning,
) {
super(config);
this.interfaces = interfaces;
super(config, tune);
this.device = this.getDevice(interfaces);
}
getSupportedCodecs() {
return [VideoCodec.H264, VideoCodec.Hevc];
}
validateDevices(devices: string[]) {
if (devices.length === 0) {
throw new Error('No /dev/dri devices found. If using Docker, make sure at least one /dev/dri device is mounted');
@@ -474,24 +507,32 @@ export class ThumbnailConfig extends BaseConfig {
}
export class H264Config extends BaseConfig {
getOutputThreadOptions() {
const options = super.getOutputThreadOptions();
if (this.config.threads === 1) {
options.push('-x264-params', 'frame-threads=1:pools=none');
getEncoderOptions(): string[] {
const out = this.getOutputThreadOptions();
if (this.tune.strictGop) {
out.push('-sc_threshold:v', '0');
}
return options;
if (this.config.threads === 1) {
out.push('-x264-params', 'frame-threads=1:pools=none');
}
return out;
}
}
export class HEVCConfig extends BaseConfig {
getOutputThreadOptions() {
const options = super.getOutputThreadOptions();
if (this.config.threads === 1) {
options.push('-x265-params', 'frame-threads=1:pools=none');
getEncoderOptions(): string[] {
const out: string[] = this.getOutputThreadOptions();
const params: string[] = [];
if (this.tune.strictGop) {
params.push('no-scenecut=1', 'no-open-gop=1');
}
return options;
if (this.config.threads === 1) {
params.push('frame-threads=1', 'pools=none');
}
if (params.length > 0) {
out.push('-x265-params', params.join(':'));
}
return out;
}
}
@@ -520,8 +561,8 @@ export class VP9Config extends BaseConfig {
return [`-${this.useCQP() ? 'q:v' : 'crf'}`, `${this.config.crf}`, '-b:v', `${bitrates.max}${bitrates.unit}`];
}
getOutputThreadOptions() {
return ['-row-mt', '1', ...super.getOutputThreadOptions()];
getEncoderOptions(): string[] {
return ['-row-mt', '1', ...this.getOutputThreadOptions()];
}
eligibleForTwoPass() {
@@ -543,23 +584,22 @@ export class AV1Config extends BaseConfig {
}
getBitrateOptions() {
const options = ['-crf', `${this.config.crf}`];
const bitrates = this.getBitrateDistribution();
const svtparams = [];
if (this.config.threads > 0) {
svtparams.push(`lp=${this.config.threads}`);
}
if (bitrates.max > 0) {
svtparams.push(`mbr=${bitrates.max}${bitrates.unit}`);
}
if (svtparams.length > 0) {
options.push('-svtav1-params', svtparams.join(':'));
}
return options;
return ['-crf', `${this.config.crf}`];
}
getOutputThreadOptions() {
return []; // Already set above with svtav1-params
getEncoderOptions(): string[] {
const params: string[] = [];
if (this.tune.lowLatency) {
params.push('hierarchical-levels=3', 'lookahead=0', 'enable-tf=0');
}
if (this.config.threads > 0) {
params.push(`lp=${this.config.threads}`);
}
const bitrates = this.getBitrateDistribution();
if (bitrates.max > 0) {
params.push(`mbr=${bitrates.max}${bitrates.unit}`);
}
return params.length > 0 ? ['-svtav1-params', params.join(':')] : [];
}
eligibleForTwoPass() {
@@ -572,10 +612,6 @@ export class NvencSwDecodeConfig extends BaseHWConfig {
return '0';
}
getSupportedCodecs() {
return [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Av1];
}
getBaseInputOptions() {
return ['-init_hw_device', `cuda=cuda:${this.device}`, '-filter_hw_device', 'cuda'];
}
@@ -652,6 +688,14 @@ export class NvencSwDecodeConfig extends BaseHWConfig {
return [];
}
getEncoderOptions(): string[] {
const out = this.getOutputThreadOptions();
if (this.tune.strictGop) {
out.push('-forced-idr', '1');
}
return out;
}
getRefs() {
const bframes = this.getBFrames();
if (bframes > 0 && bframes < 3 && this.config.refs < 3) {
@@ -703,8 +747,8 @@ export class NvencHwDecodeConfig extends NvencSwDecodeConfig {
return ['-threads', '1'];
}
getOutputThreadOptions() {
return [];
getEncoderOptions(): string[] {
return this.tune.strictGop ? ['-forced-idr', '1'] : [];
}
}
@@ -749,10 +793,6 @@ export class QsvSwDecodeConfig extends BaseHWConfig {
return options;
}
getSupportedCodecs() {
return [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Vp9, VideoCodec.Av1];
}
// recommended from https://github.com/intel/media-delivery/blob/master/doc/benchmarks/intel-iris-xe-max-graphics/intel-iris-xe-max-graphics.md
getBFrames() {
if (this.config.bframes < 0) {
@@ -775,6 +815,14 @@ export class QsvSwDecodeConfig extends BaseHWConfig {
getScaling(videoStream: VideoStreamInfo): string {
return super.getScaling(videoStream, 1);
}
getEncoderOptions(): string[] {
  // Shared output-thread flags plus, under strict GOP, `-idr_interval 0`
  // so that every keyframe is an IDR frame.
  const options = [...this.getOutputThreadOptions()];
  if (this.tune.strictGop) {
    options.push('-idr_interval', '0');
  }
  return options;
}
}
export class QsvHwDecodeConfig extends QsvSwDecodeConfig {
@@ -888,13 +936,17 @@ export class VaapiSwDecodeConfig extends BaseHWConfig {
return options;
}
getSupportedCodecs() {
  // Target codecs this VAAPI configuration is able to encode.
  const codecs = [VideoCodec.H264, VideoCodec.Hevc, VideoCodec.Vp9, VideoCodec.Av1];
  return codecs;
}
useCQP() {
  // VP9 always uses CQP regardless of the configured quality mode;
  // otherwise CQP applies whenever ICQ was not selected.
  if (this.config.targetVideoCodec === VideoCodec.Vp9) {
    return true;
  }
  return this.config.cqMode !== CQMode.Icq;
}
getEncoderOptions(): string[] {
  // Combine the shared output-thread flags with the strict-GOP flag
  // (`-idr_interval 0` makes every keyframe an IDR frame).
  const strictGopFlags = this.tune.strictGop ? ['-idr_interval', '0'] : [];
  return [...this.getOutputThreadOptions(), ...strictGopFlags];
}
}
export class VaapiHwDecodeConfig extends VaapiSwDecodeConfig {
@@ -988,10 +1040,6 @@ export class RkmppSwDecodeConfig extends BaseHWConfig {
return ['-rc_mode', 'CQP', '-qp_init', `${this.config.crf}`];
}
getSupportedCodecs() {
  // Only H.264 and HEVC are listed as encodable for this RKMPP configuration.
  const codecs = [VideoCodec.H264, VideoCodec.Hevc];
  return codecs;
}
getVideoCodec(): string {
  // ffmpeg names the rkmpp encoders `<codec>_rkmpp` (e.g. `hevc_rkmpp`).
  return [this.config.targetVideoCodec, 'rkmpp'].join('_');
}
+1 -1
View File
@@ -597,7 +597,7 @@ export const train = {
packets: {
totalDuration: 12_290,
packetCount: 1229,
outputFrames: 1303,
outputFrames: 1304,
keyframePts: [
0, 601, 1201, 1802, 2402, 3003, 3604, 4204, 4805, 5405, 6006, 6607, 7207, 7808, 8408, 9009, 9609, 10_210, 10_811,
11_411, 12_062, 12_703,
@@ -74,5 +74,6 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
copyFile: vitest.fn(),
utimes: vitest.fn(),
watch: vitest.fn().mockImplementation(makeMockWatcher({})),
watchDir: vitest.fn().mockImplementation(() => ({ close: vitest.fn(), on: vitest.fn() })),
};
};
+7 -2
View File
@@ -181,7 +181,11 @@ export const automock = <T>(
const mocks: Mock[] = [];
const instance = new Dependency(...args);
for (const property of Object.getOwnPropertyNames(Dependency.prototype)) {
const propertyNames = new Set([
...Object.getOwnPropertyNames(Dependency.prototype),
...Object.getOwnPropertyNames(instance),
]);
for (const property of propertyNames) {
if (property === 'constructor') {
continue;
}
@@ -346,7 +350,7 @@ export const getMocks = () => {
trash: automock(TrashRepository),
user: automock(UserRepository, { strict: false }),
versionHistory: automock(VersionHistoryRepository),
videoStream: automock(VideoStreamRepository),
videoStream: automock(VideoStreamRepository, { strict: false }),
view: automock(ViewRepository),
// eslint-disable-next-line no-sparse-arrays
websocket: automock(WebsocketRepository, { args: [, loggerMock], strict: false }),
@@ -500,6 +504,7 @@ export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: st
callback(exitCode);
}
}),
kill: vitest.fn(),
} as unknown as ChildProcessWithoutNullStreams;
});
+2 -2
View File
@@ -8,9 +8,9 @@
"experimentalDecorators": true,
"allowSyntheticDefaultImports": true,
"resolveJsonModule": true,
"target": "es2022",
"target": "es2024",
"moduleResolution": "node16",
"lib": ["dom", "es2023"],
"lib": ["dom", "es2024"],
"sourceMap": true,
"outDir": "./dist",
"incremental": true,
@@ -388,6 +388,22 @@
/>
</div>
</SettingAccordion>
<SettingAccordion
key="realtime-transcoding"
title={$t('admin.transcoding_realtime')}
subtitle={$t('admin.transcoding_realtime_description')}
>
<div class="ms-4 mt-4 flex flex-col gap-4">
<!-- NOTE(review): isEdited compares configToEdit.ffmpeg.realtime.enabled to
     itself and is therefore always false; it should compare the edited value
     against the saved/original config (variable not visible in this diff —
     confirm the name used by the sibling accordions). -->
<SettingSwitch
title={$t('admin.transcoding_realtime_enabled')}
subtitle={$t('admin.transcoding_realtime_enabled_description')}
bind:checked={configToEdit.ffmpeg.realtime.enabled}
isEdited={configToEdit.ffmpeg.realtime.enabled !== configToEdit.ffmpeg.realtime.enabled}
{disabled}
/>
</div>
</SettingAccordion>
</div>
<div class="ms-4">