diff --git a/Emby.Server.Implementations/ApplicationHost.cs b/Emby.Server.Implementations/ApplicationHost.cs index 25ee7e9ec0..c177537b89 100644 --- a/Emby.Server.Implementations/ApplicationHost.cs +++ b/Emby.Server.Implementations/ApplicationHost.cs @@ -46,6 +46,7 @@ using Emby.Server.Implementations.Session; using Emby.Server.Implementations.TV; using Emby.Server.Implementations.Updates; using Emby.Server.Implementations.SyncPlay; +using Jellyfin.Api.Helpers; using MediaBrowser.Api; using MediaBrowser.Common; using MediaBrowser.Common.Configuration; @@ -637,6 +638,8 @@ namespace Emby.Server.Implementations serviceCollection.AddSingleton(); serviceCollection.AddSingleton(); + + serviceCollection.AddSingleton(); } /// diff --git a/Jellyfin.Api/Controllers/AudioController.cs b/Jellyfin.Api/Controllers/AudioController.cs index 39df1e1b13..4d29d38807 100644 --- a/Jellyfin.Api/Controllers/AudioController.cs +++ b/Jellyfin.Api/Controllers/AudioController.cs @@ -3,97 +3,277 @@ using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; using Jellyfin.Api.Helpers; +using MediaBrowser.Common.Configuration; +using MediaBrowser.Controller.Configuration; +using MediaBrowser.Controller.Devices; using MediaBrowser.Controller.Dlna; +using MediaBrowser.Controller.Library; using MediaBrowser.Controller.MediaEncoding; using MediaBrowser.Controller.Net; +using MediaBrowser.Model.Dlna; +using MediaBrowser.Model.IO; using MediaBrowser.Model.MediaInfo; using MediaBrowser.Model.Net; +using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Logging; -using Microsoft.Net.Http.Headers; +using Microsoft.Extensions.Configuration; namespace Jellyfin.Api.Controllers { - /// /// The audio controller. /// + // TODO: In order to autheneticate this in the future, Dlna playback will require updating public class AudioController : BaseJellyfinApiController { private readonly IDlnaManager _dlnaManager; - private readonly ILogger _logger; + private readonly IAuthorizationContext _authContext; + private readonly IUserManager _userManager; + private readonly ILibraryManager _libraryManager; + private readonly IMediaSourceManager _mediaSourceManager; + private readonly IServerConfigurationManager _serverConfigurationManager; + private readonly IMediaEncoder _mediaEncoder; + private readonly IStreamHelper _streamHelper; + private readonly IFileSystem _fileSystem; + private readonly ISubtitleEncoder _subtitleEncoder; + private readonly IConfiguration _configuration; + private readonly IDeviceManager _deviceManager; + private readonly TranscodingJobHelper _transcodingJobHelper; + + private readonly TranscodingJobType _transcodingJobType = TranscodingJobType.Progressive; /// /// Initializes a new instance of the class. /// /// Instance of the interface. - /// Instance of the interface. - public AudioController(IDlnaManager dlnaManager, ILogger logger) + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// The singleton. 
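A note on the ApplicationHost.cs hunk above: the generic type argument of the new AddSingleton call is not visible here, but the constructor changes below (AudioController and PlaystateController now both take a TranscodingJobHelper) imply the registration is for that helper. A minimal sketch, assuming that reading:

    // Presumed shape of the new registration in ApplicationHost (generic argument inferred
    // from the TranscodingJobHelper constructor injection added in the controllers below).
    serviceCollection.AddSingleton<TranscodingJobHelper>();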
+ public AudioController( + IDlnaManager dlnaManager, + IUserManager userManger, + IAuthorizationContext authorizationContext, + ILibraryManager libraryManager, + IMediaSourceManager mediaSourceManager, + IServerConfigurationManager serverConfigurationManager, + IMediaEncoder mediaEncoder, + IStreamHelper streamHelper, + IFileSystem fileSystem, + ISubtitleEncoder subtitleEncoder, + IConfiguration configuration, + IDeviceManager deviceManager, + TranscodingJobHelper transcodingJobHelper) { _dlnaManager = dlnaManager; - _logger = logger; + _authContext = authorizationContext; + _userManager = userManger; + _libraryManager = libraryManager; + _mediaSourceManager = mediaSourceManager; + _serverConfigurationManager = serverConfigurationManager; + _mediaEncoder = mediaEncoder; + _streamHelper = streamHelper; + _fileSystem = fileSystem; + _subtitleEncoder = subtitleEncoder; + _configuration = configuration; + _deviceManager = deviceManager; + _transcodingJobHelper = transcodingJobHelper; } - [HttpGet("{id}/stream.{container}")] - [HttpGet("{id}/stream")] - [HttpHead("{id}/stream.{container}")] - [HttpGet("{id}/stream")] + /// + /// Gets an audio stream. + /// + /// The item id. + /// The audio container. + /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. + /// The streaming parameters. + /// The tag. + /// Optional. The dlna device profile id to utilize. + /// The play session id. + /// The segment container. + /// The segment lenght. + /// The minimum number of segments. + /// The media version id, if playing an alternate version. + /// The device id of the client requesting. Used to stop encoding processes when needed. + /// Optional. Specify a audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. + /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. + /// Whether or not to allow copying of the video stream url. + /// Whether or not to allow copying of the audio stream url. + /// Optional. Whether to break on non key frames. + /// Optional. Specify a specific audio sample rate, e.g. 44100. + /// Optional. The maximum audio bit depth. + /// Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. + /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. + /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. + /// Optional. Specify a specific an encoder profile (varies by encoder), e.g. main, baseline, high. + /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. + /// Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. + /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. + /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. + /// Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms. + /// Optional. The fixed horizontal resolution of the encoded video. + /// Optional. The fixed vertical resolution of the encoded video. + /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. + /// Optional. 
The index of the subtitle stream to use. If omitted no subtitles will be used. + /// Optional. Specify the subtitle delivery method. + /// Optional. + /// Optional. The maximum video bit depth. + /// Optional. Whether to require avc. + /// Optional. Whether to deinterlace the video. + /// Optional. Whether to require a non anamporphic stream. + /// Optional. The maximum number of audio channels to transcode. + /// Optional. The limit of how many cpu cores to use. + /// The live stream id. + /// Optional. Whether to enable the MpegtsM2Ts mode. + /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. + /// Optional. Specify a subtitle codec to encode to. + /// Optional. The transcoding reason. + /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. + /// Optional. The index of the video stream to use. If omitted the first video stream will be used. + /// Optional. The . + /// Optional. The streaming options. + /// A containing the audio file. + [HttpGet("{itemId}/stream.{container}")] + [HttpGet("{itemId}/stream")] + [HttpHead("{itemId}/stream.{container}")] + [HttpGet("{itemId}/stream")] + [ProducesResponseType(StatusCodes.Status200OK)] public async Task GetAudioStream( - [FromRoute] string id, - [FromRoute] string container, - [FromQuery] bool Static, - [FromQuery] string tag) + [FromRoute] Guid itemId, + [FromRoute] string? container, + [FromQuery] bool? @static, + [FromQuery] string? @params, + [FromQuery] string? tag, + [FromQuery] string? deviceProfileId, + [FromQuery] string? playSessionId, + [FromQuery] string? segmentContainer, + [FromQuery] int? segmentLength, + [FromQuery] int? minSegments, + [FromQuery] string? mediaSourceId, + [FromQuery] string? deviceId, + [FromQuery] string? audioCodec, + [FromQuery] bool? enableAutoStreamCopy, + [FromQuery] bool? allowVideoStreamCopy, + [FromQuery] bool? allowAudioStreamCopy, + [FromQuery] bool? breakOnNonKeyFrames, + [FromQuery] int? audioSampleRate, + [FromQuery] int? maxAudioBitDepth, + [FromQuery] int? audioBitRate, + [FromQuery] int? audioChannels, + [FromQuery] int? maxAudioChannels, + [FromQuery] string? profile, + [FromQuery] string? level, + [FromQuery] float? framerate, + [FromQuery] float? maxFramerate, + [FromQuery] bool? copyTimestamps, + [FromQuery] long? startTimeTicks, + [FromQuery] int? width, + [FromQuery] int? height, + [FromQuery] int? videoBitRate, + [FromQuery] int? subtitleStreamIndex, + [FromQuery] SubtitleDeliveryMethod subtitleMethod, + [FromQuery] int? maxRefFrames, + [FromQuery] int? maxVideoBitDepth, + [FromQuery] bool? requireAvc, + [FromQuery] bool? deInterlace, + [FromQuery] bool? requireNonAnamorphic, + [FromQuery] int? transcodingMaxAudioChannels, + [FromQuery] int? cpuCoreLimit, + [FromQuery] string? liveStreamId, + [FromQuery] bool? enableMpegtsM2TsMode, + [FromQuery] string? videoCodec, + [FromQuery] string? subtitleCodec, + [FromQuery] string? transcodingReasons, + [FromQuery] int? audioStreamIndex, + [FromQuery] int? 
videoStreamIndex, + [FromQuery] EncodingContext context, + [FromQuery] Dictionary streamOptions) { bool isHeadRequest = Request.Method == System.Net.WebRequestMethods.Http.Head; var cancellationTokenSource = new CancellationTokenSource(); - var state = await GetState(request, cancellationTokenSource.Token).ConfigureAwait(false); + var state = await StreamingHelpers.GetStreamingState( + itemId, + startTimeTicks, + audioCodec, + subtitleCodec, + videoCodec, + @params, + @static, + container, + liveStreamId, + playSessionId, + mediaSourceId, + deviceId, + deviceProfileId, + audioBitRate, + Request, + _authContext, + _mediaSourceManager, + _userManager, + _libraryManager, + _serverConfigurationManager, + _mediaEncoder, + _fileSystem, + _subtitleEncoder, + _configuration, + _dlnaManager, + _deviceManager, + _transcodingJobHelper, + _transcodingJobType, + false, + cancellationTokenSource.Token) + .ConfigureAwait(false); - if (Static && state.DirectStreamProvider != null) + if (@static.HasValue && @static.Value && state.DirectStreamProvider != null) { - StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, Request, _dlnaManager); + StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, startTimeTicks, Request, _dlnaManager); using (state) { - var outputHeaders = new Dictionary(StringComparer.OrdinalIgnoreCase); + // TODO AllowEndOfFile = false + await new ProgressiveFileCopier(_streamHelper, state.DirectStreamProvider).WriteToAsync(Response.Body, CancellationToken.None).ConfigureAwait(false); - // TODO: Don't hardcode this - outputHeaders[HeaderNames.ContentType] = MimeTypes.GetMimeType("file.ts"); - - return new ProgressiveFileCopier(state.DirectStreamProvider, outputHeaders, null, _logger, CancellationToken.None) - { - AllowEndOfFile = false - }; + // TODO (moved from MediaBrowser.Api): Don't hardcode contentType + return File(Response.Body, MimeTypes.GetMimeType("file.ts")!); } } // Static remote stream - if (Static && state.InputProtocol == MediaProtocol.Http) + if (@static.HasValue && @static.Value && state.InputProtocol == MediaProtocol.Http) { - StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, Request, _dlnaManager); + StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, startTimeTicks, Request, _dlnaManager); using (state) { - return await GetStaticRemoteStreamResult(state, responseHeaders, isHeadRequest, cancellationTokenSource).ConfigureAwait(false); + return await FileStreamResponseHelpers.GetStaticRemoteStreamResult(state, isHeadRequest, this, cancellationTokenSource).ConfigureAwait(false); } } - if (Static && state.InputProtocol != MediaProtocol.File) + if (@static.HasValue && @static.Value && state.InputProtocol != MediaProtocol.File) { - throw new ArgumentException(string.Format($"Input protocol {state.InputProtocol} cannot be streamed statically.")); + return BadRequest($"Input protocol {state.InputProtocol} cannot be streamed statically"); } var outputPath = state.OutputFilePath; - var outputPathExists = File.Exists(outputPath); + var outputPathExists = System.IO.File.Exists(outputPath); - var transcodingJob = TranscodingJobHelper.GetTranscodingJob(outputPath, TranscodingJobType.Progressive); + var transcodingJob = _transcodingJobHelper.GetTranscodingJob(outputPath, TranscodingJobType.Progressive); var isTranscodeCached = outputPathExists && transcodingJob != null; - StreamingHelpers.AddDlnaHeaders(state, Response.Headers, Static || isTranscodeCached, Request, _dlnaManager); + StreamingHelpers.AddDlnaHeaders(state, Response.Headers, 
(@static.HasValue && @static.Value) || isTranscodeCached, startTimeTicks, Request, _dlnaManager); // Static stream - if (Static) + if (@static.HasValue && @static.Value) { var contentType = state.GetMimeType("." + state.OutputContainer, false) ?? state.GetMimeType(state.MediaPath); @@ -101,16 +281,10 @@ namespace Jellyfin.Api.Controllers { if (state.MediaSource.IsInfiniteStream) { - var outputHeaders = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - [HeaderNames.ContentType] = contentType - }; + // TODO AllowEndOfFile = false + await new ProgressiveFileCopier(_streamHelper, state.MediaPath).WriteToAsync(Response.Body, CancellationToken.None).ConfigureAwait(false); - - return new ProgressiveFileCopier(FileSystem, state.MediaPath, outputHeaders, null, _logger, CancellationToken.None) - { - AllowEndOfFile = false - }; + return File(Response.Body, contentType); } TimeSpan? cacheDuration = null; @@ -120,57 +294,65 @@ namespace Jellyfin.Api.Controllers cacheDuration = TimeSpan.FromDays(365); } + return FileStreamResponseHelpers.GetStaticFileResult( + state.MediaPath, + contentType, + _fileSystem.GetLastWriteTimeUtc(state.MediaPath), + cacheDuration, + isHeadRequest, + this); + } + } + + /* + // Not static but transcode cache file exists + if (isTranscodeCached && state.VideoRequest == null) + { + var contentType = state.GetMimeType(outputPath) + try + { + if (transcodingJob != null) + { + ApiEntryPoint.Instance.OnTranscodeBeginRequest(transcodingJob); + } return await ResultFactory.GetStaticFileResult(Request, new StaticFileResultOptions { ResponseHeaders = responseHeaders, ContentType = contentType, IsHeadRequest = isHeadRequest, - Path = state.MediaPath, - CacheDuration = cacheDuration - + Path = outputPath, + FileShare = FileShare.ReadWrite, + OnComplete = () => + { + if (transcodingJob != null) + { + ApiEntryPoint.Instance.OnTranscodeEndRequest(transcodingJob); + } }).ConfigureAwait(false); } + finally + { + state.Dispose(); + } } + */ - //// Not static but transcode cache file exists - //if (isTranscodeCached && state.VideoRequest == null) - //{ - // var contentType = state.GetMimeType(outputPath); - - // try - // { - // if (transcodingJob != null) - // { - // ApiEntryPoint.Instance.OnTranscodeBeginRequest(transcodingJob); - // } - - // return await ResultFactory.GetStaticFileResult(Request, new StaticFileResultOptions - // { - // ResponseHeaders = responseHeaders, - // ContentType = contentType, - // IsHeadRequest = isHeadRequest, - // Path = outputPath, - // FileShare = FileShare.ReadWrite, - // OnComplete = () => - // { - // if (transcodingJob != null) - // { - // ApiEntryPoint.Instance.OnTranscodeEndRequest(transcodingJob); - // } - // } - - // }).ConfigureAwait(false); - // } - // finally - // { - // state.Dispose(); - // } - //} - - // Need to start ffmpeg + // Need to start ffmpeg (because media can't be returned directly) try { - return await GetStreamResult(request, state, responseHeaders, isHeadRequest, cancellationTokenSource).ConfigureAwait(false); + var encodingOptions = _serverConfigurationManager.GetEncodingOptions(); + var encodingHelper = new EncodingHelper(_mediaEncoder, _fileSystem, _subtitleEncoder, _configuration); + var ffmpegCommandLineArguments = encodingHelper.GetProgressiveAudioFullCommandLine(state, encodingOptions, outputPath); + return await FileStreamResponseHelpers.GetTranscodedFile( + state, + isHeadRequest, + _streamHelper, + this, + _transcodingJobHelper, + ffmpegCommandLineArguments, + Request, + _transcodingJobType, + 
cancellationTokenSource).ConfigureAwait(false); } catch { diff --git a/Jellyfin.Api/Controllers/PlaystateController.cs b/Jellyfin.Api/Controllers/PlaystateController.cs index 05a6edf4ed..da69ca72c9 100644 --- a/Jellyfin.Api/Controllers/PlaystateController.cs +++ b/Jellyfin.Api/Controllers/PlaystateController.cs @@ -8,7 +8,6 @@ using MediaBrowser.Controller.Library; using MediaBrowser.Controller.Net; using MediaBrowser.Controller.Session; using MediaBrowser.Model.Dto; -using MediaBrowser.Model.IO; using MediaBrowser.Model.Session; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Http; @@ -40,8 +39,7 @@ namespace Jellyfin.Api.Controllers /// Instance of the interface. /// Instance of the interface. /// Instance of the interface. - /// Instance of the interface. - /// Instance of the interface. + /// Th singleton. public PlaystateController( IUserManager userManager, IUserDataManager userDataRepository, @@ -49,8 +47,7 @@ namespace Jellyfin.Api.Controllers ISessionManager sessionManager, IAuthorizationContext authContext, ILoggerFactory loggerFactory, - IMediaSourceManager mediaSourceManager, - IFileSystem fileSystem) + TranscodingJobHelper transcodingJobHelper) { _userManager = userManager; _userDataRepository = userDataRepository; @@ -59,10 +56,7 @@ namespace Jellyfin.Api.Controllers _authContext = authContext; _logger = loggerFactory.CreateLogger(); - _transcodingJobHelper = new TranscodingJobHelper( - loggerFactory.CreateLogger(), - mediaSourceManager, - fileSystem); + _transcodingJobHelper = transcodingJobHelper; } /// diff --git a/Jellyfin.Api/Helpers/FileStreamResponseHelpers.cs b/Jellyfin.Api/Helpers/FileStreamResponseHelpers.cs new file mode 100644 index 0000000000..e03cafe35d --- /dev/null +++ b/Jellyfin.Api/Helpers/FileStreamResponseHelpers.cs @@ -0,0 +1,236 @@ +using System; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Jellyfin.Api.Models.StreamingDtos; +using MediaBrowser.Controller.MediaEncoding; +using MediaBrowser.Model.IO; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Primitives; +using Microsoft.Net.Http.Headers; + +namespace Jellyfin.Api.Helpers +{ + /// + /// The stream response helpers. + /// + public static class FileStreamResponseHelpers + { + /// + /// Returns a static file from a remote source. + /// + /// The current . + /// Whether the current request is a HTTP HEAD request so only the headers get returned. + /// The managing the response. + /// The . + /// A containing the API response. 
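Side note on the method that follows: it constructs a new HttpClient for every remote stream request. A common alternative is to resolve the client from an injected IHttpClientFactory (Microsoft.Extensions.Http package); this is only a sketch of that option, not what this PR does, and it assumes the factory could be threaded through from the controller:

    // Hypothetical variant: reuse pooled handlers via IHttpClientFactory instead of
    // new-ing an HttpClient per request.
    private static HttpClient CreateRemoteStreamClient(IHttpClientFactory httpClientFactory, StreamState state)
    {
        var httpClient = httpClientFactory.CreateClient();
        if (state.RemoteHttpHeaders.TryGetValue(HeaderNames.UserAgent, out var userAgent))
        {
            httpClient.DefaultRequestHeaders.Add(HeaderNames.UserAgent, userAgent);
        }

        return httpClient;
    }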
+ public static async Task GetStaticRemoteStreamResult( + StreamState state, + bool isHeadRequest, + ControllerBase controller, + CancellationTokenSource cancellationTokenSource) + { + HttpClient httpClient = new HttpClient(); + var responseHeaders = controller.Response.Headers; + + if (state.RemoteHttpHeaders.TryGetValue(HeaderNames.UserAgent, out var useragent)) + { + httpClient.DefaultRequestHeaders.Add(HeaderNames.UserAgent, useragent); + } + + var response = await httpClient.GetAsync(state.MediaPath).ConfigureAwait(false); + var contentType = response.Content.Headers.ContentType.ToString(); + + responseHeaders[HeaderNames.AcceptRanges] = "none"; + + // Seeing cases of -1 here + if (response.Content.Headers.ContentLength.HasValue && response.Content.Headers.ContentLength.Value >= 0) + { + responseHeaders[HeaderNames.ContentLength] = response.Content.Headers.ContentLength.Value.ToString(CultureInfo.InvariantCulture); + } + + if (isHeadRequest) + { + using (response) + { + return controller.File(Array.Empty(), contentType); + } + } + + return controller.File(await response.Content.ReadAsStreamAsync().ConfigureAwait(false), contentType); + } + + /// + /// Returns a static file from the server. + /// + /// The path to the file. + /// The content type of the file. + /// The of the last modification of the file. + /// The cache duration of the file. + /// Whether the current request is a HTTP HEAD request so only the headers get returned. + /// The managing the response. + /// An the file. + // TODO: caching doesn't work + public static ActionResult GetStaticFileResult( + string path, + string contentType, + DateTime dateLastModified, + TimeSpan? cacheDuration, + bool isHeadRequest, + ControllerBase controller) + { + bool disableCaching = false; + if (controller.Request.Headers.TryGetValue(HeaderNames.CacheControl, out StringValues headerValue)) + { + disableCaching = headerValue.FirstOrDefault().Contains("no-cache", StringComparison.InvariantCulture); + } + + bool parsingSuccessful = DateTime.TryParseExact(controller.Request.Headers[HeaderNames.IfModifiedSince], "ddd, dd MMM yyyy HH:mm:ss \"GMT\"", new CultureInfo("en-US", false), DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out DateTime ifModifiedSinceHeader); + + // if the parsing of the IfModifiedSince header was not successfull, disable caching + if (!parsingSuccessful) + { + disableCaching = true; + } + + controller.Response.ContentType = contentType; + controller.Response.Headers.Add(HeaderNames.Age, Convert.ToInt64((DateTime.UtcNow - dateLastModified).TotalSeconds).ToString(CultureInfo.InvariantCulture)); + controller.Response.Headers.Add(HeaderNames.Vary, HeaderNames.Accept); + + if (disableCaching) + { + controller.Response.Headers.Add(HeaderNames.CacheControl, "no-cache, no-store, must-revalidate"); + controller.Response.Headers.Add(HeaderNames.Pragma, "no-cache, no-store, must-revalidate"); + } + else + { + if (cacheDuration.HasValue) + { + controller.Response.Headers.Add(HeaderNames.CacheControl, "public, max-age=" + cacheDuration.Value.TotalSeconds); + } + else + { + controller.Response.Headers.Add(HeaderNames.CacheControl, "public"); + } + + controller.Response.Headers.Add(HeaderNames.LastModified, dateLastModified.ToUniversalTime().ToString("ddd, dd MMM yyyy HH:mm:ss \"GMT\"", new CultureInfo("en-US", false))); + + // if the image was not modified since "ifModifiedSinceHeader"-header, return a HTTP status code 304 not modified + if (!(dateLastModified > ifModifiedSinceHeader)) + { + if 
(ifModifiedSinceHeader.Add(cacheDuration!.Value) < DateTime.UtcNow) + { + controller.Response.StatusCode = StatusCodes.Status304NotModified; + return new ContentResult(); + } + } + } + + // if the request is a head request, return a NoContent result with the same headers as it would with a GET request + if (isHeadRequest) + { + return controller.NoContent(); + } + + var stream = new FileStream(path, FileMode.Open, FileAccess.Read); + return controller.File(stream, contentType); + } + + /// + /// Returns a transcoded file from the server. + /// + /// The current . + /// Whether the current request is a HTTP HEAD request so only the headers get returned. + /// Instance of the interface. + /// The managing the response. + /// The singleton. + /// The command line arguments to start ffmpeg. + /// The starting the transcoding. + /// The . + /// The . + /// A containing the transcoded file. + public static async Task GetTranscodedFile( + StreamState state, + bool isHeadRequest, + IStreamHelper streamHelper, + ControllerBase controller, + TranscodingJobHelper transcodingJobHelper, + string ffmpegCommandLineArguments, + HttpRequest request, + TranscodingJobType transcodingJobType, + CancellationTokenSource cancellationTokenSource) + { + IHeaderDictionary responseHeaders = controller.Response.Headers; + // Use the command line args with a dummy playlist path + var outputPath = state.OutputFilePath; + + responseHeaders[HeaderNames.AcceptRanges] = "none"; + + var contentType = state.GetMimeType(outputPath); + + // TODO: The isHeadRequest is only here because ServiceStack will add Content-Length=0 to the response + // TODO (from api-migration): Investigate if this is still neccessary as we migrated away from ServiceStack + var contentLength = state.EstimateContentLength || isHeadRequest ? GetEstimatedContentLength(state) : null; + + if (contentLength.HasValue) + { + responseHeaders[HeaderNames.ContentLength] = contentLength.Value.ToString(CultureInfo.InvariantCulture); + } + else + { + responseHeaders.Remove(HeaderNames.ContentLength); + } + + // Headers only + if (isHeadRequest) + { + return controller.File(Array.Empty(), contentType); + } + + var transcodingLock = transcodingJobHelper.GetTranscodingLock(outputPath); + await transcodingLock.WaitAsync(cancellationTokenSource.Token).ConfigureAwait(false); + try + { + if (!File.Exists(outputPath)) + { + await transcodingJobHelper.StartFfMpeg(state, outputPath, ffmpegCommandLineArguments, request, transcodingJobType, cancellationTokenSource).ConfigureAwait(false); + } + else + { + transcodingJobHelper.OnTranscodeBeginRequest(outputPath, TranscodingJobType.Progressive); + state.Dispose(); + } + + Stream stream = new MemoryStream(); + + await new ProgressiveFileCopier(streamHelper, outputPath).WriteToAsync(stream, CancellationToken.None).ConfigureAwait(false); + return controller.File(stream, contentType); + } + finally + { + transcodingLock.Release(); + } + } + + /// + /// Gets the length of the estimated content. + /// + /// The state. + /// System.Nullable{System.Int64}. + private static long? GetEstimatedContentLength(StreamState state) + { + var totalBitrate = state.TotalOutputBitrate ?? 
0; + + if (totalBitrate > 0 && state.RunTimeTicks.HasValue) + { + return Convert.ToInt64(totalBitrate * TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalSeconds / 8); + } + + return null; + } + } +} diff --git a/Jellyfin.Api/Helpers/StreamingHelpers.cs b/Jellyfin.Api/Helpers/StreamingHelpers.cs index 4cebf40f6d..c88ec0b2f2 100644 --- a/Jellyfin.Api/Helpers/StreamingHelpers.cs +++ b/Jellyfin.Api/Helpers/StreamingHelpers.cs @@ -1,32 +1,255 @@ using System; using System.Collections.Generic; using System.Globalization; +using System.IO; using System.Linq; -using Jellyfin.Api.Models; +using System.Threading; +using System.Threading.Tasks; +using Jellyfin.Api.Models.StreamingDtos; +using MediaBrowser.Common.Configuration; +using MediaBrowser.Common.Extensions; +using MediaBrowser.Controller.Configuration; +using MediaBrowser.Controller.Devices; using MediaBrowser.Controller.Dlna; +using MediaBrowser.Controller.Library; +using MediaBrowser.Controller.MediaEncoding; +using MediaBrowser.Controller.Net; using MediaBrowser.Model.Dlna; +using MediaBrowser.Model.Dto; +using MediaBrowser.Model.Entities; +using MediaBrowser.Model.IO; using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Primitives; +using Microsoft.Net.Http.Headers; namespace Jellyfin.Api.Helpers { /// - /// The streaming helpers + /// The streaming helpers. /// - public class StreamingHelpers + public static class StreamingHelpers { + public static async Task GetStreamingState( + Guid itemId, + long? startTimeTicks, + string? audioCodec, + string? subtitleCodec, + string? videoCodec, + string? @params, + bool? @static, + string? container, + string? liveStreamId, + string? playSessionId, + string? mediaSourceId, + string? deviceId, + string? deviceProfileId, + int? audioBitRate, + HttpRequest request, + IAuthorizationContext authorizationContext, + IMediaSourceManager mediaSourceManager, + IUserManager userManager, + ILibraryManager libraryManager, + IServerConfigurationManager serverConfigurationManager, + IMediaEncoder mediaEncoder, + IFileSystem fileSystem, + ISubtitleEncoder subtitleEncoder, + IConfiguration configuration, + IDlnaManager dlnaManager, + IDeviceManager deviceManager, + TranscodingJobHelper transcodingJobHelper, + TranscodingJobType transcodingJobType, + bool isVideoRequest, + CancellationToken cancellationToken) + { + EncodingHelper encodingHelper = new EncodingHelper(mediaEncoder, fileSystem, subtitleEncoder, configuration); + // Parse the DLNA time seek header + if (!startTimeTicks.HasValue) + { + var timeSeek = request.Headers["TimeSeekRange.dlna.org"]; + + startTimeTicks = ParseTimeSeekHeader(timeSeek); + } + + if (!string.IsNullOrWhiteSpace(@params)) + { + // What is this? 
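                // Answer to the TODO above: ParseParams (defined later in this file) unpacks the
                // semicolon-delimited, position-based "params" value into the individual request
                // fields (device profile id, device id, media source id, static flag, codecs, and so on).
                // Note that as written ParseParams takes a StreamRequest, while this call passes the
                // ASP.NET Core HttpRequest, so the two signatures still need to be reconciled.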
+ ParseParams(request); + } + + var streamOptions = ParseStreamOptions(request.Query); + + var url = request.Path.Value.Split('.').Last(); + + if (string.IsNullOrEmpty(audioCodec)) + { + audioCodec = encodingHelper.InferAudioCodec(url); + } + + var enableDlnaHeaders = !string.IsNullOrWhiteSpace(@params) || + string.Equals(request.Headers["GetContentFeatures.DLNA.ORG"], "1", StringComparison.OrdinalIgnoreCase); + + var state = new StreamState(mediaSourceManager, transcodingJobType, transcodingJobHelper) + { + // TODO request was the StreamingRequest living in MediaBrowser.Api.Playback.Progressive + Request = request, + RequestedUrl = url, + UserAgent = request.Headers[HeaderNames.UserAgent], + EnableDlnaHeaders = enableDlnaHeaders + }; + + var auth = authorizationContext.GetAuthorizationInfo(request); + if (!auth.UserId.Equals(Guid.Empty)) + { + state.User = userManager.GetUserById(auth.UserId); + } + + /* + if ((Request.UserAgent ?? string.Empty).IndexOf("iphone", StringComparison.OrdinalIgnoreCase) != -1 || + (Request.UserAgent ?? string.Empty).IndexOf("ipad", StringComparison.OrdinalIgnoreCase) != -1 || + (Request.UserAgent ?? string.Empty).IndexOf("ipod", StringComparison.OrdinalIgnoreCase) != -1) + { + state.SegmentLength = 6; + } + */ + + if (state.VideoRequest != null && !string.IsNullOrWhiteSpace(state.VideoRequest.VideoCodec)) + { + state.SupportedVideoCodecs = state.VideoRequest.VideoCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray(); + state.VideoRequest.VideoCodec = state.SupportedVideoCodecs.FirstOrDefault(); + } + + if (!string.IsNullOrWhiteSpace(audioCodec)) + { + state.SupportedAudioCodecs = audioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray(); + state.Request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault(i => mediaEncoder.CanEncodeToAudioCodec(i)) + ?? state.SupportedAudioCodecs.FirstOrDefault(); + } + + if (!string.IsNullOrWhiteSpace(subtitleCodec)) + { + state.SupportedSubtitleCodecs = subtitleCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToArray(); + state.Request.SubtitleCodec = state.SupportedSubtitleCodecs.FirstOrDefault(i => mediaEncoder.CanEncodeToSubtitleCodec(i)) + ?? state.SupportedSubtitleCodecs.FirstOrDefault(); + } + + var item = libraryManager.GetItemById(itemId); + + state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase); + + /* + var primaryImage = item.GetImageInfo(ImageType.Primary, 0) ?? + item.Parents.Select(i => i.GetImageInfo(ImageType.Primary, 0)).FirstOrDefault(i => i != null); + if (primaryImage != null) + { + state.AlbumCoverPath = primaryImage.Path; + } + */ + + MediaSourceInfo? mediaSource = null; + if (string.IsNullOrWhiteSpace(liveStreamId)) + { + var currentJob = !string.IsNullOrWhiteSpace(playSessionId) + ? transcodingJobHelper.GetTranscodingJob(playSessionId) + : null; + + if (currentJob != null) + { + mediaSource = currentJob.MediaSource; + } + + if (mediaSource == null) + { + var mediaSources = await mediaSourceManager.GetPlaybackMediaSources(libraryManager.GetItemById(itemId), null, false, false, cancellationToken).ConfigureAwait(false); + + mediaSource = string.IsNullOrEmpty(mediaSourceId) + ? 
mediaSources[0] + : mediaSources.Find(i => string.Equals(i.Id, mediaSourceId, StringComparison.InvariantCulture)); + + if (mediaSource == null && Guid.Parse(mediaSourceId) == itemId) + { + mediaSource = mediaSources[0]; + } + } + } + else + { + var liveStreamInfo = await mediaSourceManager.GetLiveStreamWithDirectStreamProvider(liveStreamId, cancellationToken).ConfigureAwait(false); + mediaSource = liveStreamInfo.Item1; + state.DirectStreamProvider = liveStreamInfo.Item2; + } + + encodingHelper.AttachMediaSourceInfo(state, mediaSource, url); + + var containerInternal = Path.GetExtension(state.RequestedUrl); + + if (string.IsNullOrEmpty(container)) + { + containerInternal = container; + } + + if (string.IsNullOrEmpty(containerInternal)) + { + containerInternal = (@static.HasValue && @static.Value) ? StreamBuilder.NormalizeMediaSourceFormatIntoSingleContainer(state.InputContainer, state.MediaPath, null, DlnaProfileType.Audio) : GetOutputFileExtension(state); + } + + state.OutputContainer = (containerInternal ?? string.Empty).TrimStart('.'); + + state.OutputAudioBitrate = encodingHelper.GetAudioBitrateParam(audioBitRate, state.AudioStream); + + state.OutputAudioCodec = audioCodec; + + state.OutputAudioChannels = encodingHelper.GetNumAudioChannelsParam(state, state.AudioStream, state.OutputAudioCodec); + + if (isVideoRequest) + { + state.OutputVideoCodec = state.VideoRequest.VideoCodec; + state.OutputVideoBitrate = EncodingHelper.GetVideoBitrateParamValue(state.VideoRequest, state.VideoStream, state.OutputVideoCodec); + + encodingHelper.TryStreamCopy(state); + + if (state.OutputVideoBitrate.HasValue && !EncodingHelper.IsCopyCodec(state.OutputVideoCodec)) + { + var resolution = ResolutionNormalizer.Normalize( + state.VideoStream?.BitRate, + state.VideoStream?.Width, + state.VideoStream?.Height, + state.OutputVideoBitrate.Value, + state.VideoStream?.Codec, + state.OutputVideoCodec, + videoRequest.MaxWidth, + videoRequest.MaxHeight); + + videoRequest.MaxWidth = resolution.MaxWidth; + videoRequest.MaxHeight = resolution.MaxHeight; + } + } + + ApplyDeviceProfileSettings(state, dlnaManager, deviceManager, request, deviceProfileId, @static); + + var ext = string.IsNullOrWhiteSpace(state.OutputContainer) + ? GetOutputFileExtension(state) + : ('.' + state.OutputContainer); + + state.OutputFilePath = GetOutputFilePath(state, ext!, serverConfigurationManager, deviceId, playSessionId); + + return state; + } + /// /// Adds the dlna headers. /// /// The state. /// The response headers. /// if set to true [is statically streamed]. + /// The start time in ticks. /// The . /// Instance of the interface. public static void AddDlnaHeaders( StreamState state, IHeaderDictionary responseHeaders, bool isStaticallyStreamed, + long? 
startTimeTicks, HttpRequest request, IDlnaManager dlnaManager) { @@ -54,7 +277,7 @@ namespace Jellyfin.Api.Helpers if (!isStaticallyStreamed && profile != null) { - AddTimeSeekResponseHeaders(state, responseHeaders); + AddTimeSeekResponseHeaders(state, responseHeaders, startTimeTicks); } } @@ -82,51 +305,18 @@ namespace Jellyfin.Api.Helpers { var videoCodec = state.ActualOutputVideoCodec; - responseHeaders.Add("contentFeatures.dlna.org", new ContentFeatureBuilder(profile).BuildVideoHeader( - state.OutputContainer, - videoCodec, - audioCodec, - state.OutputWidth, - state.OutputHeight, - state.TargetVideoBitDepth, - state.OutputVideoBitrate, - state.TargetTimestamp, - isStaticallyStreamed, - state.RunTimeTicks, - state.TargetVideoProfile, - state.TargetVideoLevel, - state.TargetFramerate, - state.TargetPacketLength, - state.TranscodeSeekInfo, - state.IsTargetAnamorphic, - state.IsTargetInterlaced, - state.TargetRefFrames, - state.TargetVideoStreamCount, - state.TargetAudioStreamCount, - state.TargetVideoCodecTag, - state.IsTargetAVC).FirstOrDefault() ?? string.Empty); - } - } - - /// - /// Parses the dlna headers. - /// - /// The start time ticks. - /// The . - public void ParseDlnaHeaders(long? startTimeTicks, HttpRequest request) - { - if (!startTimeTicks.HasValue) - { - var timeSeek = request.Headers["TimeSeekRange.dlna.org"]; - - startTimeTicks = ParseTimeSeekHeader(timeSeek); + responseHeaders.Add( + "contentFeatures.dlna.org", + new ContentFeatureBuilder(profile).BuildVideoHeader(state.OutputContainer, videoCodec, audioCodec, state.OutputWidth, state.OutputHeight, state.TargetVideoBitDepth, state.OutputVideoBitrate, state.TargetTimestamp, isStaticallyStreamed, state.RunTimeTicks, state.TargetVideoProfile, state.TargetVideoLevel, state.TargetFramerate, state.TargetPacketLength, state.TranscodeSeekInfo, state.IsTargetAnamorphic, state.IsTargetInterlaced, state.TargetRefFrames, state.TargetVideoStreamCount, state.TargetAudioStreamCount, state.TargetVideoCodecTag, state.IsTargetAVC).FirstOrDefault() ?? string.Empty); } } /// /// Parses the time seek header. /// - public long? ParseTimeSeekHeader(string value) + /// The time seek header string. + /// A nullable representing the seek time in ticks. + public static long? ParseTimeSeekHeader(string value) { if (string.IsNullOrWhiteSpace(value)) { @@ -138,12 +328,13 @@ namespace Jellyfin.Api.Helpers { throw new ArgumentException("Invalid timeseek header"); } - int index = value.IndexOf('-'); + + int index = value.IndexOf('-', StringComparison.InvariantCulture); value = index == -1 ? value.Substring(Npt.Length) : value.Substring(Npt.Length, index - Npt.Length); - if (value.IndexOf(':') == -1) + if (value.IndexOf(':', StringComparison.InvariantCulture) == -1) { // Parses npt times in the format of '417.33' if (double.TryParse(value, NumberStyles.Any, CultureInfo.InvariantCulture, out var seconds)) @@ -169,15 +360,45 @@ namespace Jellyfin.Api.Helpers { throw new ArgumentException("Invalid timeseek header"); } + timeFactor /= 60; } + return TimeSpan.FromSeconds(secondsSum).Ticks; } - public void AddTimeSeekResponseHeaders(StreamState state, IHeaderDictionary responseHeaders) + /// + /// Parses query parameters as StreamOptions. + /// + /// The query string. + /// A containing the stream options. 
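To make ParseTimeSeekHeader above concrete: the TimeSeekRange.dlna.org value uses npt (normal play time), either as plain seconds or as colon-separated hours:minutes:seconds, and the method returns the start position in ticks (100 ns units). Illustrative values, assuming the usual "npt=" prefix:

    // Both headers describe a start offset of 417.33 seconds:
    // "npt=417.33-"       -> 417.33 s                 -> 4_173_300_000 ticks
    // "npt=00:06:57.33-"  -> 0*3600 + 6*60 + 57.33 s  -> 4_173_300_000 ticks
    long? startTicks = StreamingHelpers.ParseTimeSeekHeader("npt=417.33-");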
+ public static Dictionary ParseStreamOptions(IQueryCollection queryString) { - var runtimeSeconds = TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalSeconds.ToString(CultureInfo.InvariantCulture); - var startSeconds = TimeSpan.FromTicks(state.Request.StartTimeTicks ?? 0).TotalSeconds.ToString(CultureInfo.InvariantCulture); + Dictionary streamOptions = new Dictionary(); + foreach (var param in queryString) + { + if (char.IsLower(param.Key[0])) + { + // This was probably not parsed initially and should be a StreamOptions + // or the generated URL should correctly serialize it + // TODO: This should be incorporated either in the lower framework for parsing requests + streamOptions[param.Key] = param.Value; + } + } + + return streamOptions; + } + + /// + /// Adds the dlna time seek headers to the response. + /// + /// The current . + /// The of the response. + /// The start time in ticks. + public static void AddTimeSeekResponseHeaders(StreamState state, IHeaderDictionary responseHeaders, long? startTimeTicks) + { + var runtimeSeconds = TimeSpan.FromTicks(state.RunTimeTicks!.Value).TotalSeconds.ToString(CultureInfo.InvariantCulture); + var startSeconds = TimeSpan.FromTicks(startTimeTicks ?? 0).TotalSeconds.ToString(CultureInfo.InvariantCulture); responseHeaders.Add("TimeSeekRange.dlna.org", string.Format( CultureInfo.InvariantCulture, @@ -190,5 +411,369 @@ namespace Jellyfin.Api.Helpers startSeconds, runtimeSeconds)); } + + /// + /// Gets the output file extension. + /// + /// The state. + /// System.String. + public static string? GetOutputFileExtension(StreamState state) + { + var ext = Path.GetExtension(state.RequestedUrl); + + if (!string.IsNullOrEmpty(ext)) + { + return ext; + } + + var isVideoRequest = state.VideoRequest != null; + + // Try to infer based on the desired video codec + if (isVideoRequest) + { + var videoCodec = state.VideoRequest.VideoCodec; + + if (string.Equals(videoCodec, "h264", StringComparison.OrdinalIgnoreCase) || + string.Equals(videoCodec, "h265", StringComparison.OrdinalIgnoreCase)) + { + return ".ts"; + } + + if (string.Equals(videoCodec, "theora", StringComparison.OrdinalIgnoreCase)) + { + return ".ogv"; + } + + if (string.Equals(videoCodec, "vpx", StringComparison.OrdinalIgnoreCase)) + { + return ".webm"; + } + + if (string.Equals(videoCodec, "wmv", StringComparison.OrdinalIgnoreCase)) + { + return ".asf"; + } + } + + // Try to infer based on the desired audio codec + if (!isVideoRequest) + { + var audioCodec = state.Request.AudioCodec; + + if (string.Equals("aac", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".aac"; + } + + if (string.Equals("mp3", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".mp3"; + } + + if (string.Equals("vorbis", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".ogg"; + } + + if (string.Equals("wma", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".wma"; + } + } + + return null; + } + + /// + /// Gets the output file path for transcoding. + /// + /// The current . + /// The file extension of the output file. + /// Instance of the interface. + /// The device id. + /// The play session id. + /// The complete file path, including the folder, for the transcoding file. + private static string GetOutputFilePath(StreamState state, string outputFileExtension, IServerConfigurationManager serverConfigurationManager, string? deviceId, string? 
playSessionId) + { + var data = $"{state.MediaPath}-{state.UserAgent}-{deviceId!}-{playSessionId!}"; + + var filename = data.GetMD5().ToString("N", CultureInfo.InvariantCulture); + var ext = outputFileExtension?.ToLowerInvariant(); + var folder = serverConfigurationManager.GetTranscodePath(); + + return Path.Combine(folder, filename + ext); + } + + private static void ApplyDeviceProfileSettings(StreamState state, IDlnaManager dlnaManager, IDeviceManager deviceManager, HttpRequest request, string? deviceProfileId, bool? @static) + { + var headers = request.Headers; + + if (!string.IsNullOrWhiteSpace(deviceProfileId)) + { + state.DeviceProfile = dlnaManager.GetProfile(deviceProfileId); + } + else if (!string.IsNullOrWhiteSpace(deviceProfileId)) + { + var caps = deviceManager.GetCapabilities(deviceProfileId); + + state.DeviceProfile = caps == null ? dlnaManager.GetProfile(headers) : caps.DeviceProfile; + } + + var profile = state.DeviceProfile; + + if (profile == null) + { + // Don't use settings from the default profile. + // Only use a specific profile if it was requested. + return; + } + + var audioCodec = state.ActualOutputAudioCodec; + var videoCodec = state.ActualOutputVideoCodec; + + var mediaProfile = state.VideoRequest == null + ? profile.GetAudioMediaProfile(state.OutputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate, state.OutputAudioSampleRate, state.OutputAudioBitDepth) + : profile.GetVideoMediaProfile( + state.OutputContainer, + audioCodec, + videoCodec, + state.OutputWidth, + state.OutputHeight, + state.TargetVideoBitDepth, + state.OutputVideoBitrate, + state.TargetVideoProfile, + state.TargetVideoLevel, + state.TargetFramerate, + state.TargetPacketLength, + state.TargetTimestamp, + state.IsTargetAnamorphic, + state.IsTargetInterlaced, + state.TargetRefFrames, + state.TargetVideoStreamCount, + state.TargetAudioStreamCount, + state.TargetVideoCodecTag, + state.IsTargetAVC); + + if (mediaProfile != null) + { + state.MimeType = mediaProfile.MimeType; + } + + if (!(@static.HasValue && @static.Value)) + { + var transcodingProfile = state.VideoRequest == null ? profile.GetAudioTranscodingProfile(state.OutputContainer, audioCodec) : profile.GetVideoTranscodingProfile(state.OutputContainer, audioCodec, videoCodec); + + if (transcodingProfile != null) + { + state.EstimateContentLength = transcodingProfile.EstimateContentLength; + // state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode; + state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo; + + if (state.VideoRequest != null) + { + state.VideoRequest.CopyTimestamps = transcodingProfile.CopyTimestamps; + state.VideoRequest.EnableSubtitlesInManifest = transcodingProfile.EnableSubtitlesInManifest; + } + } + } + } + + /// + /// Parses the parameters. + /// + /// The request. 
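One observation on ApplyDeviceProfileSettings above: the else-if repeats the deviceProfileId check from the if, so the capabilities branch can never run, and GetCapabilities is handed a profile id rather than a device id. Judging from the GetCapabilities call, it presumably should key off the device id; a hedged sketch of that reading (deviceId is not currently a parameter of the method, so it would need to be passed in):

    if (!string.IsNullOrWhiteSpace(deviceProfileId))
    {
        state.DeviceProfile = dlnaManager.GetProfile(deviceProfileId);
    }
    else if (!string.IsNullOrWhiteSpace(deviceId))
    {
        var caps = deviceManager.GetCapabilities(deviceId);
        state.DeviceProfile = caps == null ? dlnaManager.GetProfile(headers) : caps.DeviceProfile;
    }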
+ private void ParseParams(StreamRequest request) + { + var vals = request.Params.Split(';'); + + var videoRequest = request as VideoStreamRequest; + + for (var i = 0; i < vals.Length; i++) + { + var val = vals[i]; + + if (string.IsNullOrWhiteSpace(val)) + { + continue; + } + + switch (i) + { + case 0: + request.DeviceProfileId = val; + break; + case 1: + request.DeviceId = val; + break; + case 2: + request.MediaSourceId = val; + break; + case 3: + request.Static = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + break; + case 4: + if (videoRequest != null) + { + videoRequest.VideoCodec = val; + } + + break; + case 5: + request.AudioCodec = val; + break; + case 6: + if (videoRequest != null) + { + videoRequest.AudioStreamIndex = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 7: + if (videoRequest != null) + { + videoRequest.SubtitleStreamIndex = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 8: + if (videoRequest != null) + { + videoRequest.VideoBitRate = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 9: + request.AudioBitRate = int.Parse(val, CultureInfo.InvariantCulture); + break; + case 10: + request.MaxAudioChannels = int.Parse(val, CultureInfo.InvariantCulture); + break; + case 11: + if (videoRequest != null) + { + videoRequest.MaxFramerate = float.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 12: + if (videoRequest != null) + { + videoRequest.MaxWidth = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 13: + if (videoRequest != null) + { + videoRequest.MaxHeight = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 14: + request.StartTimeTicks = long.Parse(val, CultureInfo.InvariantCulture); + break; + case 15: + if (videoRequest != null) + { + videoRequest.Level = val; + } + + break; + case 16: + if (videoRequest != null) + { + videoRequest.MaxRefFrames = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 17: + if (videoRequest != null) + { + videoRequest.MaxVideoBitDepth = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 18: + if (videoRequest != null) + { + videoRequest.Profile = val; + } + + break; + case 19: + // cabac no longer used + break; + case 20: + request.PlaySessionId = val; + break; + case 21: + // api_key + break; + case 22: + request.LiveStreamId = val; + break; + case 23: + // Duplicating ItemId because of MediaMonkey + break; + case 24: + if (videoRequest != null) + { + videoRequest.CopyTimestamps = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 25: + if (!string.IsNullOrWhiteSpace(val) && videoRequest != null) + { + if (Enum.TryParse(val, out SubtitleDeliveryMethod method)) + { + videoRequest.SubtitleMethod = method; + } + } + + break; + case 26: + request.TranscodingMaxAudioChannels = int.Parse(val, CultureInfo.InvariantCulture); + break; + case 27: + if (videoRequest != null) + { + videoRequest.EnableSubtitlesInManifest = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 28: + request.Tag = val; + break; + case 29: + if (videoRequest != null) + { + videoRequest.RequireAvc = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 30: + request.SubtitleCodec = val; + break; + case 31: + if (videoRequest != null) + { + videoRequest.RequireNonAnamorphic = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 32: + if (videoRequest != null) + { + 
videoRequest.DeInterlace = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 33: + request.TranscodeReasons = val; + break; + } + } + } } } diff --git a/Jellyfin.Api/Helpers/TranscodingJobHelper.cs b/Jellyfin.Api/Helpers/TranscodingJobHelper.cs index 7db75387a1..9fbd5ec2db 100644 --- a/Jellyfin.Api/Helpers/TranscodingJobHelper.cs +++ b/Jellyfin.Api/Helpers/TranscodingJobHelper.cs @@ -1,16 +1,28 @@ using System; using System.Collections.Generic; using System.Diagnostics; +using System.Globalization; using System.IO; using System.Linq; +using System.Text; +using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using Jellyfin.Api.Models; using Jellyfin.Api.Models.PlaybackDtos; +using Jellyfin.Api.Models.StreamingDtos; +using Jellyfin.Data.Enums; +using MediaBrowser.Common.Configuration; +using MediaBrowser.Controller.Configuration; using MediaBrowser.Controller.Library; using MediaBrowser.Controller.MediaEncoding; +using MediaBrowser.Controller.Net; +using MediaBrowser.Controller.Session; +using MediaBrowser.Model.Entities; using MediaBrowser.Model.IO; +using MediaBrowser.Model.MediaInfo; using MediaBrowser.Model.Session; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; namespace Jellyfin.Api.Helpers @@ -30,9 +42,17 @@ namespace Jellyfin.Api.Helpers /// private static readonly Dictionary _transcodingLocks = new Dictionary(); - private readonly ILogger _logger; - private readonly IMediaSourceManager _mediaSourceManager; + private readonly IAuthorizationContext _authorizationContext; + private readonly EncodingHelper _encodingHelper; private readonly IFileSystem _fileSystem; + private readonly IIsoManager _isoManager; + + private readonly ILogger _logger; + private readonly IMediaEncoder _mediaEncoder; + private readonly IMediaSourceManager _mediaSourceManager; + private readonly IServerConfigurationManager _serverConfigurationManager; + private readonly ISessionManager _sessionManager; + private readonly ILoggerFactory _loggerFactory; /// /// Initializes a new instance of the class. @@ -40,14 +60,40 @@ namespace Jellyfin.Api.Helpers /// Instance of the interface. /// Instance of the interface. /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. 
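For reference, the positional format decoded by StreamingHelpers.ParseParams above, with made-up illustrative values; empty positions are simply skipped:

    // e.g. request.Params.Split(';') on "profileA;device1;mediaSource1;false;h264;aac;1;3;2000000;128000" yields:
    //   [0] DeviceProfileId   [1] DeviceId             [2] MediaSourceId
    //   [3] Static            [4] VideoCodec           [5] AudioCodec
    //   [6] AudioStreamIndex  [7] SubtitleStreamIndex  [8] VideoBitRate
    //   [9] AudioBitRate      [10] MaxAudioChannels    ... through [33] TranscodeReasons.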
public TranscodingJobHelper( ILogger logger, IMediaSourceManager mediaSourceManager, - IFileSystem fileSystem) + IFileSystem fileSystem, + IMediaEncoder mediaEncoder, + IServerConfigurationManager serverConfigurationManager, + ISessionManager sessionManager, + IAuthorizationContext authorizationContext, + IIsoManager isoManager, + ISubtitleEncoder subtitleEncoder, + IConfiguration configuration, + ILoggerFactory loggerFactory) { _logger = logger; _mediaSourceManager = mediaSourceManager; _fileSystem = fileSystem; + _mediaEncoder = mediaEncoder; + _serverConfigurationManager = serverConfigurationManager; + _sessionManager = sessionManager; + _authorizationContext = authorizationContext; + _isoManager = isoManager; + _loggerFactory = loggerFactory; + + _encodingHelper = new EncodingHelper(mediaEncoder, fileSystem, subtitleEncoder, configuration); + + DeleteEncodedMediaCache(); } /// @@ -63,7 +109,13 @@ namespace Jellyfin.Api.Helpers } } - public static TranscodingJobDto GetTranscodingJob(string path, TranscodingJobType type) + /// + /// Get transcoding job. + /// + /// Path to the transcoding file. + /// The . + /// The transcoding job. + public TranscodingJobDto GetTranscodingJob(string path, TranscodingJobType type) { lock (_activeTranscodingJobs) { @@ -361,14 +413,24 @@ namespace Jellyfin.Api.Helpers } } + /// + /// Report the transcoding progress to the session manager. + /// + /// The of which the progress will be reported. + /// The of the current transcoding job. + /// The current transcoding position. + /// The framerate of the transcoding job. + /// The completion percentage of the transcode. + /// The number of bytes transcoded. + /// The bitrate of the transcoding job. public void ReportTranscodingProgress( - TranscodingJob job, - StreamState state, - TimeSpan? transcodingPosition, - float? framerate, - double? percentComplete, - long? bytesTranscoded, - int? bitRate) + TranscodingJobDto job, + StreamState state, + TimeSpan? transcodingPosition, + float? framerate, + double? percentComplete, + long? bytesTranscoded, + int? bitRate) { var ticks = transcodingPosition?.Ticks; @@ -405,5 +467,374 @@ namespace Jellyfin.Api.Helpers }); } } + + /// + /// Starts the FFMPEG. + /// + /// The state. + /// The output path. + /// The command line arguments for ffmpeg. + /// The . + /// The . + /// The cancellation token source. + /// The working directory. + /// Task. 
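For context, this is how StartFfMpeg below is driven from FileStreamResponseHelpers.GetTranscodedFile earlier in this diff, condensed into a usage sketch (no new behaviour): the caller serialises on the per-output-path lock, starts ffmpeg only when the output file does not exist yet, and otherwise registers another request against the running job.

    var transcodingLock = transcodingJobHelper.GetTranscodingLock(outputPath);
    await transcodingLock.WaitAsync(cancellationTokenSource.Token).ConfigureAwait(false);
    try
    {
        if (!File.Exists(outputPath))
        {
            // Spawns ffmpeg, creates the TranscodingJobDto and waits for the output file to appear.
            await transcodingJobHelper.StartFfMpeg(state, outputPath, ffmpegCommandLineArguments, request, transcodingJobType, cancellationTokenSource).ConfigureAwait(false);
        }
        else
        {
            // A job for this output already exists; bump its active request count instead.
            transcodingJobHelper.OnTranscodeBeginRequest(outputPath, TranscodingJobType.Progressive);
        }
    }
    finally
    {
        transcodingLock.Release();
    }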
+ public async Task StartFfMpeg( + StreamState state, + string outputPath, + string commandLineArguments, + HttpRequest request, + TranscodingJobType transcodingJobType, + CancellationTokenSource cancellationTokenSource, + string workingDirectory = null) + { + Directory.CreateDirectory(Path.GetDirectoryName(outputPath)); + + await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false); + + if (state.VideoRequest != null && !EncodingHelper.IsCopyCodec(state.OutputVideoCodec)) + { + var auth = _authorizationContext.GetAuthorizationInfo(request); + if (auth.User != null && !auth.User.HasPermission(PermissionKind.EnableVideoPlaybackTranscoding)) + { + this.OnTranscodeFailedToStart(outputPath, transcodingJobType, state); + + throw new ArgumentException("User does not have access to video transcoding"); + } + } + + var process = new Process() + { + StartInfo = new ProcessStartInfo() + { + WindowStyle = ProcessWindowStyle.Hidden, + CreateNoWindow = true, + UseShellExecute = false, + + // Must consume both stdout and stderr or deadlocks may occur + // RedirectStandardOutput = true, + RedirectStandardError = true, + RedirectStandardInput = true, + FileName = _mediaEncoder.EncoderPath, + Arguments = commandLineArguments, + WorkingDirectory = string.IsNullOrWhiteSpace(workingDirectory) ? null : workingDirectory, + ErrorDialog = false + }, + EnableRaisingEvents = true + }; + + var transcodingJob = this.OnTranscodeBeginning( + outputPath, + state.Request.PlaySessionId, + state.MediaSource.LiveStreamId, + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + transcodingJobType, + process, + state.Request.DeviceId, + state, + cancellationTokenSource); + + var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments; + _logger.LogInformation(commandLineLogMessage); + + var logFilePrefix = "ffmpeg-transcode"; + if (state.VideoRequest != null + && EncodingHelper.IsCopyCodec(state.OutputVideoCodec)) + { + logFilePrefix = EncodingHelper.IsCopyCodec(state.OutputAudioCodec) + ? "ffmpeg-remux" + : "ffmpeg-directstream"; + } + + var logFilePath = Path.Combine(_serverConfigurationManager.ApplicationPaths.LogDirectoryPath, logFilePrefix + "-" + Guid.NewGuid() + ".txt"); + + // FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory. 
+ Stream logStream = new FileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, IODefaults.FileStreamBufferSize, true); + + var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(request.Path + Environment.NewLine + Environment.NewLine + JsonSerializer.Serialize(state.MediaSource) + Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine); + await logStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false); + + process.Exited += (sender, args) => OnFfMpegProcessExited(process, transcodingJob, state); + + try + { + process.Start(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error starting ffmpeg"); + + this.OnTranscodeFailedToStart(outputPath, transcodingJobType, state); + + throw; + } + + _logger.LogDebug("Launched ffmpeg process"); + state.TranscodingJob = transcodingJob; + + // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback + _ = new JobLogger(_logger).StartStreamingLog(state, process.StandardError.BaseStream, logStream); + + // Wait for the file to exist before proceeeding + var ffmpegTargetFile = state.WaitForPath ?? outputPath; + _logger.LogDebug("Waiting for the creation of {0}", ffmpegTargetFile); + while (!File.Exists(ffmpegTargetFile) && !transcodingJob.HasExited) + { + await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false); + } + + _logger.LogDebug("File {0} created or transcoding has finished", ffmpegTargetFile); + + if (state.IsInputVideo && transcodingJob.Type == TranscodingJobType.Progressive && !transcodingJob.HasExited) + { + await Task.Delay(1000, cancellationTokenSource.Token).ConfigureAwait(false); + + if (state.ReadInputAtNativeFramerate && !transcodingJob.HasExited) + { + await Task.Delay(1500, cancellationTokenSource.Token).ConfigureAwait(false); + } + } + + if (!transcodingJob.HasExited) + { + StartThrottler(state, transcodingJob); + } + + _logger.LogDebug("StartFfMpeg() finished successfully"); + + return transcodingJob; + } + + private void StartThrottler(StreamState state, TranscodingJobDto transcodingJob) + { + if (EnableThrottling(state)) + { + transcodingJob.TranscodingThrottler = state.TranscodingThrottler = new TranscodingThrottler(transcodingJob, new Logger(new LoggerFactory()), _serverConfigurationManager, _fileSystem); + state.TranscodingThrottler.Start(); + } + } + + private bool EnableThrottling(StreamState state) + { + var encodingOptions = _serverConfigurationManager.GetEncodingOptions(); + + // enable throttling when NOT using hardware acceleration + if (string.IsNullOrEmpty(encodingOptions.HardwareAccelerationType)) + { + return state.InputProtocol == MediaProtocol.File && + state.RunTimeTicks.HasValue && + state.RunTimeTicks.Value >= TimeSpan.FromMinutes(5).Ticks && + state.IsInputVideo && + state.VideoType == VideoType.VideoFile && + !EncodingHelper.IsCopyCodec(state.OutputVideoCodec); + } + + return false; + } + + /// + /// Called when [transcode beginning]. + /// + /// The path. + /// The play session identifier. + /// The live stream identifier. + /// The transcoding job identifier. + /// The type. + /// The process. + /// The device id. + /// The state. + /// The cancellation token source. + /// TranscodingJob. 
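A small note on StartThrottler above: it builds the throttler's logger from a brand-new LoggerFactory, even though the class already holds the injected _loggerFactory. A sketch of the alternative, assuming TranscodingThrottler accepts an ILogger<TranscodingThrottler> (the generic arguments are not visible in this rendering of the diff):

    transcodingJob.TranscodingThrottler = state.TranscodingThrottler = new TranscodingThrottler(
        transcodingJob,
        _loggerFactory.CreateLogger<TranscodingThrottler>(),
        _serverConfigurationManager,
        _fileSystem);
    state.TranscodingThrottler.Start();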
+
+        /// <summary>
+        /// Called when transcoding begins.
+        /// </summary>
+        /// <param name="path">The path.</param>
+        /// <param name="playSessionId">The play session identifier.</param>
+        /// <param name="liveStreamId">The live stream identifier.</param>
+        /// <param name="transcodingJobId">The transcoding job identifier.</param>
+        /// <param name="type">The type.</param>
+        /// <param name="process">The process.</param>
+        /// <param name="deviceId">The device id.</param>
+        /// <param name="state">The state.</param>
+        /// <param name="cancellationTokenSource">The cancellation token source.</param>
+        /// <returns>The transcoding job.</returns>
+        public TranscodingJobDto OnTranscodeBeginning(
+            string path,
+            string playSessionId,
+            string liveStreamId,
+            string transcodingJobId,
+            TranscodingJobType type,
+            Process process,
+            string deviceId,
+            StreamState state,
+            CancellationTokenSource cancellationTokenSource)
+        {
+            lock (_activeTranscodingJobs)
+            {
+                var job = new TranscodingJobDto(_loggerFactory.CreateLogger<TranscodingJobDto>())
+                {
+                    Type = type,
+                    Path = path,
+                    Process = process,
+                    ActiveRequestCount = 1,
+                    DeviceId = deviceId,
+                    CancellationTokenSource = cancellationTokenSource,
+                    Id = transcodingJobId,
+                    PlaySessionId = playSessionId,
+                    LiveStreamId = liveStreamId,
+                    MediaSource = state.MediaSource
+                };
+
+                _activeTranscodingJobs.Add(job);
+
+                ReportTranscodingProgress(job, state, null, null, null, null, null);
+
+                return job;
+            }
+        }
+
+        /// <summary>
+        /// Called when a transcode fails to start.
+        /// </summary>
+        /// <param name="path">The path.</param>
+        /// <param name="type">The type.</param>
+        /// <param name="state">The state.</param>
+        public void OnTranscodeFailedToStart(string path, TranscodingJobType type, StreamState state)
+        {
+            lock (_activeTranscodingJobs)
+            {
+                var job = _activeTranscodingJobs.FirstOrDefault(j => j.Type == type && string.Equals(j.Path, path, StringComparison.OrdinalIgnoreCase));
+
+                if (job != null)
+                {
+                    _activeTranscodingJobs.Remove(job);
+                }
+            }
+
+            lock (_transcodingLocks)
+            {
+                _transcodingLocks.Remove(path);
+            }
+
+            if (!string.IsNullOrWhiteSpace(state.Request.DeviceId))
+            {
+                _sessionManager.ClearTranscodingInfo(state.Request.DeviceId);
+            }
+        }
+
+        /// <summary>
+        /// Called when the ffmpeg process exits.
+        /// </summary>
+        /// <param name="process">The process.</param>
+        /// <param name="job">The job.</param>
+        /// <param name="state">The state.</param>
+        private void OnFfMpegProcessExited(Process process, TranscodingJobDto job, StreamState state)
+        {
+            if (job != null)
+            {
+                job.HasExited = true;
+            }
+
+            _logger.LogDebug("Disposing stream resources");
+            state.Dispose();
+
+            if (process.ExitCode == 0)
+            {
+                _logger.LogInformation("FFMpeg exited with code 0");
+            }
+            else
+            {
+                _logger.LogError("FFMpeg exited with code {0}", process.ExitCode);
+            }
+
+            process.Dispose();
+        }
+
+        private async Task AcquireResources(StreamState state, CancellationTokenSource cancellationTokenSource)
+        {
+            if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && _isoManager.CanMount(state.MediaPath))
+            {
+                state.IsoMount = await _isoManager.Mount(state.MediaPath, cancellationTokenSource.Token).ConfigureAwait(false);
+            }
+
+            if (state.MediaSource.RequiresOpening && string.IsNullOrWhiteSpace(state.Request.LiveStreamId))
+            {
+                var liveStreamResponse = await _mediaSourceManager.OpenLiveStream(
+                        new LiveStreamRequest { OpenToken = state.MediaSource.OpenToken },
+                        cancellationTokenSource.Token)
+                    .ConfigureAwait(false);
+
+                _encodingHelper.AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.RequestedUrl);
+
+                if (state.VideoRequest != null)
+                {
+                    _encodingHelper.TryStreamCopy(state);
+                }
+            }
+
+            if (state.MediaSource.BufferMs.HasValue)
+            {
+                await Task.Delay(state.MediaSource.BufferMs.Value, cancellationTokenSource.Token).ConfigureAwait(false);
+            }
+        }
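OnFfMpegProcessExited above only runs because StartFfMpeg sets EnableRaisingEvents and subscribes to Process.Exited before calling Start(). A minimal, self-contained illustration of that pattern follows; the ffmpeg arguments and class name are placeholders rather than code from this change:

using System;
using System.Diagnostics;
using System.Threading;

internal static class ProcessExitSketch
{
    public static void Run()
    {
        using var exited = new ManualResetEventSlim(false);

        var process = new Process
        {
            StartInfo = new ProcessStartInfo
            {
                FileName = "ffmpeg",
                Arguments = "-version", // placeholder arguments
                UseShellExecute = false,
                CreateNoWindow = true
            },
            EnableRaisingEvents = true // required, otherwise Exited never fires
        };

        // Exited is raised on a thread-pool thread; ExitCode is safe to read there
        // as long as the Process has not been disposed yet.
        process.Exited += (sender, args) =>
        {
            Console.WriteLine($"ffmpeg exited with code {process.ExitCode}");
            exited.Set();
        };

        process.Start();
        exited.Wait();
        process.Dispose();
    }
}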
+
+        /// <summary>
+        /// Called when a transcode request begins.
+        /// </summary>
+        /// <param name="path">The path.</param>
+        /// <param name="type">The type.</param>
+        /// <returns>The <see cref="TranscodingJobDto"/>.</returns>
+        public TranscodingJobDto? OnTranscodeBeginRequest(string path, TranscodingJobType type)
+        {
+            lock (_activeTranscodingJobs)
+            {
+                var job = _activeTranscodingJobs.FirstOrDefault(j => j.Type == type && string.Equals(j.Path, path, StringComparison.OrdinalIgnoreCase));
+
+                if (job == null)
+                {
+                    return null;
+                }
+
+                OnTranscodeBeginRequest(job);
+
+                return job;
+            }
+        }
+
+        private void OnTranscodeBeginRequest(TranscodingJobDto job)
+        {
+            job.ActiveRequestCount++;
+
+            if (string.IsNullOrWhiteSpace(job.PlaySessionId) || job.Type == TranscodingJobType.Progressive)
+            {
+                job.StopKillTimer();
+            }
+        }
+
+        /// <summary>
+        /// Gets the transcoding lock.
+        /// </summary>
+        /// <param name="outputPath">The output path of the transcoded file.</param>
+        /// <returns>A <see cref="SemaphoreSlim"/>.</returns>
+        public SemaphoreSlim GetTranscodingLock(string outputPath)
+        {
+            lock (_transcodingLocks)
+            {
+                if (!_transcodingLocks.TryGetValue(outputPath, out SemaphoreSlim result))
+                {
+                    result = new SemaphoreSlim(1, 1);
+                    _transcodingLocks[outputPath] = result;
+                }
+
+                return result;
+            }
+        }
+
+        /// <summary>
+        /// Deletes the encoded media cache.
+        /// </summary>
+        private void DeleteEncodedMediaCache()
+        {
+            var path = _serverConfigurationManager.GetTranscodePath();
+            if (!Directory.Exists(path))
+            {
+                return;
+            }
+
+            foreach (var file in _fileSystem.GetFilePaths(path, true))
+            {
+                _fileSystem.DeleteFile(file);
+            }
+        }
     }
 }
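GetTranscodingLock above hands out one SemaphoreSlim per output path, so concurrent requests for the same transcode serialize on that file while different outputs stay independent. A rough caller sketch follows; the wrapper method is hypothetical and not part of this PR:

using System;
using System.Threading;
using System.Threading.Tasks;
using Jellyfin.Api.Helpers;

internal static class TranscodingLockSketch
{
    // Hypothetical helper: run an action while holding the per-path transcoding lock.
    public static async Task WithTranscodingLockAsync(
        TranscodingJobHelper transcodingJobHelper,
        string outputPath,
        Func<Task> action,
        CancellationToken cancellationToken)
    {
        SemaphoreSlim transcodingLock = transcodingJobHelper.GetTranscodingLock(outputPath);
        await transcodingLock.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            await action().ConfigureAwait(false);
        }
        finally
        {
            transcodingLock.Release();
        }
    }
}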
diff --git a/Jellyfin.Api/Models/StreamState.cs b/Jellyfin.Api/Models/StreamingDtos/StreamState.cs
similarity index 51%
rename from Jellyfin.Api/Models/StreamState.cs
rename to Jellyfin.Api/Models/StreamingDtos/StreamState.cs
index 9fe5f52c3e..b962e0ac79 100644
--- a/Jellyfin.Api/Models/StreamState.cs
+++ b/Jellyfin.Api/Models/StreamingDtos/StreamState.cs
@@ -5,36 +5,77 @@ using MediaBrowser.Controller.Library;
 using MediaBrowser.Controller.MediaEncoding;
 using MediaBrowser.Model.Dlna;
 
-namespace Jellyfin.Api.Models
+namespace Jellyfin.Api.Models.StreamingDtos
 {
+    /// <summary>
+    /// The stream state dto.
+    /// </summary>
     public class StreamState : EncodingJobInfo, IDisposable
     {
         private readonly IMediaSourceManager _mediaSourceManager;
-        private bool _disposed = false;
+        private readonly TranscodingJobHelper _transcodingJobHelper;
+        private bool _disposed;
 
-        public string RequestedUrl { get; set; }
-
-        public StreamRequest Request
+        /// <summary>
+        /// Initializes a new instance of the <see cref="StreamState"/> class.
+        /// </summary>
+        /// <param name="mediaSourceManager">Instance of the <see cref="IMediaSourceManager"/> interface.</param>
+        /// <param name="transcodingType">The <see cref="TranscodingJobType"/>.</param>
+        /// <param name="transcodingJobHelper">The <see cref="TranscodingJobHelper"/> singleton.</param>
+        public StreamState(IMediaSourceManager mediaSourceManager, TranscodingJobType transcodingType, TranscodingJobHelper transcodingJobHelper)
+            : base(transcodingType)
         {
-            get => (StreamRequest)BaseRequest;
-            set
-            {
-                BaseRequest = value;
-
-                IsVideoRequest = VideoRequest != null;
-            }
+            _mediaSourceManager = mediaSourceManager;
+            _transcodingJobHelper = transcodingJobHelper;
         }
 
-        public TranscodingThrottler TranscodingThrottler { get; set; }
+        /// <summary>
+        /// Gets or sets the requested url.
+        /// </summary>
+        public string? RequestedUrl { get; set; }
+        // /// <summary>
+        // /// Gets or sets the request.
+        // /// </summary>
+        // public StreamRequest Request
+        // {
+        //     get => (StreamRequest)BaseRequest;
+        //     set
+        //     {
+        //         BaseRequest = value;
+        //
+        //         IsVideoRequest = VideoRequest != null;
+        //     }
+        // }
+
+        /// <summary>
+        /// Gets or sets the transcoding throttler.
+        /// </summary>
+        public TranscodingThrottler? TranscodingThrottler { get; set; }
+
+        /// <summary>
+        /// Gets the video request.
+        /// </summary>
         public VideoStreamRequest VideoRequest => Request as VideoStreamRequest;
 
-        public IDirectStreamProvider DirectStreamProvider { get; set; }
+        /// <summary>
+        /// Gets or sets the direct stream provider.
+        /// </summary>
+        public IDirectStreamProvider? DirectStreamProvider { get; set; }
 
-        public string WaitForPath { get; set; }
+        /// <summary>
+        /// Gets or sets the path to wait for.
+        /// </summary>
+        public string? WaitForPath { get; set; }
 
+        /// <summary>
+        /// Gets a value indicating whether the request outputs video.
+        /// </summary>
         public bool IsOutputVideo => Request is VideoStreamRequest;
 
+        /// <summary>
+        /// Gets the segment length.
+        /// </summary>
         public int SegmentLength
         {
             get
@@ -74,6 +115,9 @@ namespace Jellyfin.Api.Models
             }
         }
 
+        /// <summary>
+        /// Gets the minimum number of segments.
+        /// </summary>
         public int MinSegments
         {
             get
@@ -87,35 +131,53 @@ namespace Jellyfin.Api.Models
             }
         }
 
-        public string UserAgent { get; set; }
+        /// <summary>
+        /// Gets or sets the user agent.
+        /// </summary>
+        public string? UserAgent { get; set; }
 
+        /// <summary>
+        /// Gets or sets a value indicating whether to estimate the content length.
+        /// </summary>
         public bool EstimateContentLength { get; set; }
 
+        /// <summary>
+        /// Gets or sets the transcode seek info.
+        /// </summary>
         public TranscodeSeekInfo TranscodeSeekInfo { get; set; }
 
+        /// <summary>
+        /// Gets or sets a value indicating whether to enable dlna headers.
+        /// </summary>
         public bool EnableDlnaHeaders { get; set; }
 
-        public DeviceProfile DeviceProfile { get; set; }
+        /// <summary>
+        /// Gets or sets the device profile.
+        /// </summary>
+        public DeviceProfile? DeviceProfile { get; set; }
 
-        public TranscodingJobDto TranscodingJob { get; set; }
-
-        public StreamState(IMediaSourceManager mediaSourceManager, TranscodingJobType transcodingType)
-            : base(transcodingType)
-        {
-            _mediaSourceManager = mediaSourceManager;
-        }
-
-        public override void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate)
-        {
-            TranscodingJobHelper.ReportTranscodingProgress(TranscodingJob, this, transcodingPosition, framerate, percentComplete, bytesTranscoded, bitRate);
-        }
+        /// <summary>
+        /// Gets or sets the transcoding job.
+        /// </summary>
+        public TranscodingJobDto? TranscodingJob { get; set; }
 
+        /// <inheritdoc />
         public void Dispose()
         {
             Dispose(true);
             GC.SuppressFinalize(this);
         }
 
+        /// <inheritdoc />
+        public override void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate)
+        {
+            _transcodingJobHelper.ReportTranscodingProgress(TranscodingJob!, this, transcodingPosition, framerate, percentComplete, bytesTranscoded, bitRate);
+        }
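StreamState follows the standard two-method dispose pattern: the parameterless Dispose() forwards to Dispose(bool) and suppresses finalization, and the _disposed flag makes repeated calls harmless. Reduced to its skeleton (illustrative only; the real Dispose(bool) continues in the hunk below):

using System;

public class DisposePatternSketch : IDisposable
{
    private bool _disposed;

    /// <inheritdoc />
    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposing)
    {
        if (_disposed)
        {
            return;
        }

        if (disposing)
        {
            // Release managed resources here.
        }

        _disposed = true;
    }
}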
+
+        /// <summary>
+        /// Disposes the stream state.
+        /// </summary>
+        /// <param name="disposing">Whether the object is currently being disposed.</param>
         protected virtual void Dispose(bool disposing)
         {
             if (_disposed)
diff --git a/MediaBrowser.Api/Playback/Progressive/AudioService.cs b/MediaBrowser.Api/Playback/Progressive/AudioService.cs
index 34c7986ca5..ef639851bd 100644
--- a/MediaBrowser.Api/Playback/Progressive/AudioService.cs
+++ b/MediaBrowser.Api/Playback/Progressive/AudioService.cs
@@ -17,10 +17,6 @@ namespace MediaBrowser.Api.Playback.Progressive
     /// <summary>
     /// Class GetAudioStream
     /// </summary>
-    [Route("/Audio/{Id}/stream.{Container}", "GET", Summary = "Gets an audio stream")]
-    [Route("/Audio/{Id}/stream", "GET", Summary = "Gets an audio stream")]
-    [Route("/Audio/{Id}/stream.{Container}", "HEAD", Summary = "Gets an audio stream")]
-    [Route("/Audio/{Id}/stream", "HEAD", Summary = "Gets an audio stream")]
     public class GetAudioStream : StreamRequest
     {
     }
diff --git a/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs b/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
index 8ce106469e..8cfe562b38 100644
--- a/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
+++ b/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
@@ -1263,6 +1263,17 @@ namespace MediaBrowser.Controller.MediaEncoding
             return null;
         }
 
+        public int? GetAudioBitrateParam(int? audioBitRate, MediaStream audioStream)
+        {
+            if (audioBitRate.HasValue)
+            {
+                // Don't encode any higher than this
+                return Math.Min(384000, audioBitRate.Value);
+            }
+
+            return null;
+        }
+
         public string GetAudioFilterParam(EncodingJobInfo state, EncodingOptions encodingOptions, bool isHls)
         {
             var channels = state.OutputAudioChannels;
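The new GetAudioBitrateParam overload simply caps any requested audio bitrate at 384000 bps and returns null when no bitrate was requested, leaving the choice to encoder defaults. A hypothetical usage illustration (the wrapper class and variable names are assumptions, not code from this change):

using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Entities;

internal static class AudioBitrateSketch
{
    public static void Demo(EncodingHelper encodingHelper, MediaStream audioStream)
    {
        int? capped = encodingHelper.GetAudioBitrateParam(512000, audioStream); // 384000
        int? kept = encodingHelper.GetAudioBitrateParam(128000, audioStream);   // 128000
        int? unset = encodingHelper.GetAudioBitrateParam(null, audioStream);    // null
    }
}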