using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Jellyfin.Api.Attributes;
using Jellyfin.Api.Extensions;
using Jellyfin.Api.Helpers;
using Jellyfin.Api.ModelBinders;
using Jellyfin.Api.Models.StreamingDtos;
using MediaBrowser.Common.Extensions;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.MediaInfo;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace Jellyfin.Api.Controllers;
/// <summary>
/// The universal audio controller.
/// </summary>
[Route("")]
public class UniversalAudioController : BaseJellyfinApiController
{
private readonly ILibraryManager _libraryManager;
private readonly ILogger<UniversalAudioController> _logger;
private readonly MediaInfoHelper _mediaInfoHelper;
private readonly AudioHelper _audioHelper;
private readonly DynamicHlsHelper _dynamicHlsHelper;
/// <summary>
/// Initializes a new instance of the <see cref="UniversalAudioController"/> class.
/// </summary>
/// <param name="libraryManager">Instance of the <see cref="ILibraryManager"/> interface.</param>
/// <param name="logger">Instance of the <see cref="ILogger{UniversalAudioController}"/> interface.</param>
/// <param name="mediaInfoHelper">Instance of <see cref="MediaInfoHelper"/>.</param>
/// <param name="audioHelper">Instance of <see cref="AudioHelper"/>.</param>
/// <param name="dynamicHlsHelper">Instance of <see cref="DynamicHlsHelper"/>.</param>
public UniversalAudioController(
ILibraryManager libraryManager,
ILogger<UniversalAudioController> logger,
MediaInfoHelper mediaInfoHelper,
AudioHelper audioHelper,
DynamicHlsHelper dynamicHlsHelper)
{
_libraryManager = libraryManager;
_logger = logger;
_mediaInfoHelper = mediaInfoHelper;
_audioHelper = audioHelper;
_dynamicHlsHelper = dynamicHlsHelper;
}
/// <summary>
/// Gets an audio stream.
/// </summary>
/// <param name="itemId">The item id.</param>
/// <param name="container">Optional. The audio container.</param>
/// <param name="mediaSourceId">The media version id, if playing an alternate version.</param>
/// <param name="deviceId">The device id of the client requesting. Used to stop encoding processes when needed.</param>
/// <param name="userId">Optional. The user id.</param>
/// <param name="audioCodec">Optional. The audio codec to transcode to.</param>
/// <param name="maxAudioChannels">Optional. The maximum number of audio channels.</param>
/// <param name="transcodingAudioChannels">Optional. The number of audio channels to transcode to.</param>
/// <param name="maxStreamingBitrate">Optional. The maximum streaming bitrate.</param>
/// <param name="audioBitRate">Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.</param>
/// <param name="startTimeTicks">Optional. Specify a starting offset, in ticks. 1 tick = 100 ns (10,000 ticks = 1 ms).</param>
/// <param name="transcodingContainer">Optional. The container to transcode to.</param>
/// <param name="transcodingProtocol">Optional. The transcoding protocol.</param>
/// <param name="maxAudioSampleRate">Optional. The maximum audio sample rate.</param>
/// <param name="maxAudioBitDepth">Optional. The maximum audio bit depth.</param>
/// <param name="enableRemoteMedia">Optional. Whether to enable remote media.</param>
/// <param name="breakOnNonKeyFrames">Optional. Whether to break on non key frames.</param>
/// <param name="enableRedirection">Whether to enable redirection. Defaults to true.</param>
/// <response code="200">Audio stream returned.</response>
/// <response code="302">Redirected to remote audio stream.</response>
/// <returns>A <see cref="Task"/> containing the audio file.</returns>
[HttpGet("Audio/{itemId}/universal")]
[HttpHead("Audio/{itemId}/universal", Name = "HeadUniversalAudioStream")]
[Authorize]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status302Found)]
[ProducesAudioFile]
public async Task<ActionResult> GetUniversalAudioStream(
[FromRoute, Required] Guid itemId,
[FromQuery, ModelBinder(typeof(CommaDelimitedArrayModelBinder))] string[] container,
[FromQuery] string? mediaSourceId,
[FromQuery] string? deviceId,
[FromQuery] Guid? userId,
[FromQuery] string? audioCodec,
[FromQuery] int? maxAudioChannels,
[FromQuery] int? transcodingAudioChannels,
[FromQuery] int? maxStreamingBitrate,
[FromQuery] int? audioBitRate,
[FromQuery] long? startTimeTicks,
[FromQuery] string? transcodingContainer,
[FromQuery] string? transcodingProtocol,
[FromQuery] int? maxAudioSampleRate,
[FromQuery] int? maxAudioBitDepth,
[FromQuery] bool? enableRemoteMedia,
[FromQuery] bool breakOnNonKeyFrames = false,
[FromQuery] bool enableRedirection = true)
{
var deviceProfile = GetDeviceProfile(container, transcodingContainer, audioCodec, transcodingProtocol, breakOnNonKeyFrames, transcodingAudioChannels, maxAudioSampleRate, maxAudioBitDepth, maxAudioChannels);
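// Build an ad-hoc device profile from the query parameters so the playback info logic can choose between direct play and transcoding.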
userId = RequestHelpers.GetUserId(User, userId);
_logger.LogInformation("GetUniversalAudioStream profile: {@Profile}", deviceProfile);
var info = await _mediaInfoHelper.GetPlaybackInfo(
itemId,
userId,
mediaSourceId)
.ConfigureAwait(false);
// set device specific data
var item = _libraryManager.GetItemById(itemId);
foreach (var sourceInfo in info.MediaSources)
{
_mediaInfoHelper.SetDeviceSpecificData(
item,
sourceInfo,
deviceProfile,
User,
maxStreamingBitrate ?? deviceProfile.MaxStreamingBitrate,
startTimeTicks ?? 0,
mediaSourceId ?? string.Empty,
null,
null,
maxAudioChannels,
info.PlaySessionId!,
userId ?? Guid.Empty,
true,
true,
true,
true,
true,
Request.HttpContext.GetNormalizedRemoteIp());
}
_mediaInfoHelper.SortMediaSources(info, maxStreamingBitrate);
foreach (var source in info.MediaSources)
{
_mediaInfoHelper.NormalizeMediaSourceContainer(source, deviceProfile, DlnaProfileType.Video);
}
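// The sources were sorted above, so the first entry is the preferred one for playback.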
var mediaSource = info.MediaSources[0];
if (mediaSource.SupportsDirectPlay && mediaSource.Protocol == MediaProtocol.Http && enableRedirection && mediaSource.IsRemote && enableRemoteMedia.HasValue && enableRemoteMedia.Value)
{
return Redirect(mediaSource.Path);
}
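// Direct streaming serves the existing streams without transcoding; anything else requires a transcode.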
var isStatic = mediaSource.SupportsDirectStream;
if (!isStatic && string.Equals(mediaSource.TranscodingSubProtocol, "hls", StringComparison.OrdinalIgnoreCase))
{
// hls segment container can only be mpegts or fmp4 per ffmpeg documentation
// ffmpeg option -> file extension
// mpegts -> ts
// fmp4 -> mp4
// TODO: remove this when we switch back to the segment muxer
var supportedHlsContainers = new[] { "ts", "mp4" };
var dynamicHlsRequestDto = new HlsAudioRequestDto
{
Id = itemId,
Container = ".m3u8",
Static = isStatic,
PlaySessionId = info.PlaySessionId,
// fallback to mpegts if device reports some weird value unsupported by hls
SegmentContainer = Array.Exists(supportedHlsContainers, element => element == transcodingContainer) ? transcodingContainer : "ts",
MediaSourceId = mediaSourceId,
DeviceId = deviceId,
AudioCodec = audioCodec,
EnableAutoStreamCopy = true,
AllowAudioStreamCopy = true,
AllowVideoStreamCopy = true,
BreakOnNonKeyFrames = breakOnNonKeyFrames,
AudioSampleRate = maxAudioSampleRate,
MaxAudioChannels = maxAudioChannels,
MaxAudioBitDepth = maxAudioBitDepth,
AudioBitRate = audioBitRate ?? maxStreamingBitrate,
StartTimeTicks = startTimeTicks,
SubtitleMethod = SubtitleDeliveryMethod.Hls,
RequireAvc = false,
DeInterlace = false,
RequireNonAnamorphic = false,
EnableMpegtsM2TsMode = false,
TranscodeReasons = mediaSource.TranscodeReasons == 0 ? null : mediaSource.TranscodeReasons.ToString(),
Context = EncodingContext.Static,
StreamOptions = new Dictionary<string, string>(),
EnableAdaptiveBitrateStreaming = true
};
return await _dynamicHlsHelper.GetMasterHlsPlaylist(TranscodingJobType.Hls, dynamicHlsRequestDto, true)
.ConfigureAwait(false);
}
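// Direct streams and non-HLS transcodes are served as a single progressive response.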
var audioStreamingDto = new StreamingRequestDto
{
Id = itemId,
Container = isStatic ? null : ("." + mediaSource.TranscodingContainer),
Static = isStatic,
PlaySessionId = info.PlaySessionId,
MediaSourceId = mediaSourceId,
DeviceId = deviceId,
AudioCodec = audioCodec,
EnableAutoStreamCopy = true,
AllowAudioStreamCopy = true,
AllowVideoStreamCopy = true,
BreakOnNonKeyFrames = breakOnNonKeyFrames,
AudioSampleRate = maxAudioSampleRate,
MaxAudioChannels = maxAudioChannels,
AudioBitRate = isStatic ? null : (audioBitRate ?? maxStreamingBitrate),
MaxAudioBitDepth = maxAudioBitDepth,
AudioChannels = maxAudioChannels,
CopyTimestamps = true,
StartTimeTicks = startTimeTicks,
SubtitleMethod = SubtitleDeliveryMethod.Embed,
TranscodeReasons = mediaSource.TranscodeReasons == 0 ? null : mediaSource.TranscodeReasons.ToString(),
Context = EncodingContext.Static
};
return await _audioHelper.GetAudioStream(TranscodingJobType.Progressive, audioStreamingDto).ConfigureAwait(false);
}
private DeviceProfile GetDeviceProfile(
string[] containers,
string? transcodingContainer,
string? audioCodec,
string? transcodingProtocol,
bool? breakOnNonKeyFrames,
int? transcodingAudioChannels,
int? maxAudioSampleRate,
int? maxAudioBitDepth,
int? maxAudioChannels)
{
var deviceProfile = new DeviceProfile();
int len = containers.Length;
var directPlayProfiles = new DirectPlayProfile[len];
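// Each requested container entry has the form "container|codec[|codec...]";
// everything after the first '|' becomes the direct play audio codec list.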
for (int i = 0; i < len; i++)
{
var parts = containers[i].Split('|', StringSplitOptions.RemoveEmptyEntries);
var audioCodecs = parts.Length == 1 ? null : string.Join(',', parts.Skip(1));
directPlayProfiles[i] = new DirectPlayProfile
{
Type = DlnaProfileType.Audio,
Container = parts[0],
AudioCodec = audioCodecs
};
}
deviceProfile.DirectPlayProfiles = directPlayProfiles;
deviceProfile.TranscodingProfiles = new[]
{
new TranscodingProfile
{
Type = DlnaProfileType.Audio,
Context = EncodingContext.Streaming,
Container = transcodingContainer ?? "mp3",
AudioCodec = audioCodec ?? "mp3",
Protocol = transcodingProtocol ?? "http",
BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
MaxAudioChannels = transcodingAudioChannels?.ToString(CultureInfo.InvariantCulture)
}
};
var codecProfiles = new List<CodecProfile>();
var conditions = new List<ProfileCondition>();
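// Translate the client's reported limits into upper-bound codec profile conditions.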
if (maxAudioSampleRate.HasValue)
{
// codec profile
conditions.Add(
new ProfileCondition
{
Condition = ProfileConditionType.LessThanEqual,
IsRequired = false,
Property = ProfileConditionValue.AudioSampleRate,
Value = maxAudioSampleRate.Value.ToString(CultureInfo.InvariantCulture)
});
}
if (maxAudioBitDepth.HasValue)
{
// codec profile
conditions.Add(
new ProfileCondition
{
Condition = ProfileConditionType.LessThanEqual,
IsRequired = false,
Property = ProfileConditionValue.AudioBitDepth,
Value = maxAudioBitDepth.Value.ToString(CultureInfo.InvariantCulture)
});
}
if (maxAudioChannels.HasValue)
{
// codec profile
conditions.Add(
new ProfileCondition
{
Condition = ProfileConditionType.LessThanEqual,
IsRequired = false,
Property = ProfileConditionValue.AudioChannels,
Value = maxAudioChannels.Value.ToString(CultureInfo.InvariantCulture)
});
}
if (conditions.Count > 0)
{
// codec profile
codecProfiles.Add(
new CodecProfile
{
Type = CodecType.Audio,
Container = string.Join(',', containers),
Conditions = conditions.ToArray()
});
}
deviceProfile.CodecProfiles = codecProfiles.ToArray();
return deviceProfile;
}
}