jellyfin-server/MediaBrowser.MediaEncoding/Encoder/EncodingJobFactory.cs

using MediaBrowser.Common.Configuration;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Configuration;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace MediaBrowser.MediaEncoding.Encoder
{
public class EncodingJobFactory
{
private readonly ILogger _logger;
private readonly ILibraryManager _libraryManager;
private readonly IMediaSourceManager _mediaSourceManager;
private readonly IConfigurationManager _config;
protected static readonly CultureInfo UsCulture = new CultureInfo("en-US");
public EncodingJobFactory(ILogger logger, ILibraryManager libraryManager, IMediaSourceManager mediaSourceManager, IConfigurationManager config)
{
_logger = logger;
_libraryManager = libraryManager;
_mediaSourceManager = mediaSourceManager;
_config = config;
}
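/// <summary>
/// Creates an <see cref="EncodingJob"/> for the given options: resolves the item and media source,
/// attaches stream information and computes the output audio/video parameters.
/// </summary>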
public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
{
var request = options;
if (string.IsNullOrEmpty(request.AudioCodec))
{
request.AudioCodec = InferAudioCodec(request.OutputContainer);
}
var state = new EncodingJob(_logger, _mediaSourceManager)
{
Options = options,
IsVideoRequest = isVideoRequest,
Progress = progress
};
if (!string.IsNullOrWhiteSpace(request.AudioCodec))
{
state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
}
var item = _libraryManager.GetItemById(request.ItemId);
state.ItemType = item.GetType().Name;
state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
var primaryImage = item.GetImageInfo(ImageType.Primary, 0) ??
item.Parents.Select(i => i.GetImageInfo(ImageType.Primary, 0)).FirstOrDefault(i => i != null);
if (primaryImage != null)
{
state.AlbumCoverPath = primaryImage.Path;
}
var mediaSources = await _mediaSourceManager.GetPlayackMediaSources(request.ItemId, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false);
var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
? mediaSources.First()
: mediaSources.First(i => string.Equals(i.Id, request.MediaSourceId));
var videoRequest = state.Options;
AttachMediaSourceInfo(state, mediaSource, videoRequest);
//var container = Path.GetExtension(state.RequestedUrl);
//if (string.IsNullOrEmpty(container))
//{
// container = request.Static ?
// state.InputContainer :
// (Path.GetExtension(GetOutputFilePath(state)) ?? string.Empty).TrimStart('.');
//}
//state.OutputContainer = (container ?? string.Empty).TrimStart('.');
state.OutputAudioBitrate = GetAudioBitrateParam(state.Options, state.AudioStream);
state.OutputAudioSampleRate = request.AudioSampleRate;
state.OutputAudioCodec = state.Options.AudioCodec;
state.OutputAudioChannels = GetNumAudioChannelsParam(state.Options, state.AudioStream, state.OutputAudioCodec);
if (videoRequest != null)
{
state.OutputVideoCodec = state.Options.VideoCodec;
state.OutputVideoBitrate = GetVideoBitrateParamValue(state.Options, state.VideoStream, state.OutputVideoCodec);
if (state.OutputVideoBitrate.HasValue)
{
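// Bring the resolution ceiling in line with the target bitrate and codecs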
var resolution = ResolutionNormalizer.Normalize(
state.VideoStream == null ? (int?)null : state.VideoStream.BitRate,
state.OutputVideoBitrate.Value,
state.VideoStream == null ? null : state.VideoStream.Codec,
state.OutputVideoCodec,
videoRequest.MaxWidth,
videoRequest.MaxHeight);
videoRequest.MaxWidth = resolution.MaxWidth;
videoRequest.MaxHeight = resolution.MaxHeight;
}
}
ApplyDeviceProfileSettings(state);
if (videoRequest != null)
{
TryStreamCopy(state, videoRequest);
}
//state.OutputFilePath = GetOutputFilePath(state);
return state;
}
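/// <summary>
/// Switches the output video/audio codecs to "copy" when the source streams already satisfy the request.
/// </summary>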
internal static void TryStreamCopy(EncodingJob state,
EncodingJobOptions videoRequest)
{
if (state.IsVideoRequest)
{
if (state.VideoStream != null && CanStreamCopyVideo(videoRequest, state.VideoStream))
{
state.OutputVideoCodec = "copy";
}
if (state.AudioStream != null && CanStreamCopyAudio(videoRequest, state.AudioStream, state.SupportedAudioCodecs))
{
state.OutputAudioCodec = "copy";
}
}
}
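/// <summary>
/// Copies the media source details (path, container, bitrate, timestamps) onto the job state
/// and selects the video, audio and subtitle streams to use.
/// </summary>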
internal static void AttachMediaSourceInfo(EncodingJob state,
MediaSourceInfo mediaSource,
EncodingJobOptions videoRequest)
{
state.MediaPath = mediaSource.Path;
state.InputProtocol = mediaSource.Protocol;
state.InputContainer = mediaSource.Container;
state.InputFileSize = mediaSource.Size;
state.InputBitrate = mediaSource.Bitrate;
state.RunTimeTicks = mediaSource.RunTimeTicks;
state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
if (mediaSource.VideoType.HasValue)
{
state.VideoType = mediaSource.VideoType.Value;
}
state.IsoType = mediaSource.IsoType;
state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();
if (mediaSource.Timestamp.HasValue)
{
state.InputTimestamp = mediaSource.Timestamp.Value;
}
state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
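// Sources that must be read at native framerate (and wtv files) need explicit audio/video sync settings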
if (state.ReadInputAtNativeFramerate ||
mediaSource.Protocol == MediaProtocol.File && string.Equals(mediaSource.Container, "wtv", StringComparison.OrdinalIgnoreCase))
{
state.OutputAudioSync = "1000";
state.InputVideoSync = "-1";
state.InputAudioSync = "1";
}
if (string.Equals(mediaSource.Container, "wma", StringComparison.OrdinalIgnoreCase))
{
// Seeing some stuttering when transcoding wma to audio-only HLS
state.InputAudioSync = "1";
}
var mediaStreams = mediaSource.MediaStreams;
if (videoRequest != null)
{
if (string.IsNullOrEmpty(videoRequest.VideoCodec))
{
videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
}
state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);
if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
{
state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
}
if (state.VideoStream != null && state.VideoStream.IsInterlaced)
{
state.DeInterlace = true;
}
EnforceResolutionLimit(state, videoRequest);
}
else
{
state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
}
state.MediaSource = mediaSource;
}
protected EncodingOptions GetEncodingOptions()
{
return _config.GetConfiguration<EncodingOptions>("encoding");
}
/// <summary>
/// Infers the video codec.
/// </summary>
/// <param name="container">The container.</param>
/// <returns>System.String.</returns>
private static string InferVideoCodec(string container)
{
var ext = "." + (container ?? string.Empty);
if (string.Equals(ext, ".asf", StringComparison.OrdinalIgnoreCase))
{
return "wmv";
}
if (string.Equals(ext, ".webm", StringComparison.OrdinalIgnoreCase))
{
return "vpx";
}
if (string.Equals(ext, ".ogg", StringComparison.OrdinalIgnoreCase) || string.Equals(ext, ".ogv", StringComparison.OrdinalIgnoreCase))
{
return "theora";
}
if (string.Equals(ext, ".m3u8", StringComparison.OrdinalIgnoreCase) || string.Equals(ext, ".ts", StringComparison.OrdinalIgnoreCase))
{
return "h264";
}
return "copy";
}
private string InferAudioCodec(string container)
{
var ext = "." + (container ?? string.Empty);
if (string.Equals(ext, ".mp3", StringComparison.OrdinalIgnoreCase))
{
return "mp3";
}
if (string.Equals(ext, ".aac", StringComparison.OrdinalIgnoreCase))
{
return "aac";
}
if (string.Equals(ext, ".wma", StringComparison.OrdinalIgnoreCase))
{
return "wma";
}
if (string.Equals(ext, ".ogg", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".oga", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".ogv", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".webm", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".webma", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
return "copy";
}
/// <summary>
/// Determines which stream will be used for playback
/// </summary>
/// <param name="allStream">All stream.</param>
/// <param name="desiredIndex">Index of the desired.</param>
/// <param name="type">The type.</param>
/// <param name="returnFirstIfNoIndex">if set to <c>true</c> [return first if no index].</param>
/// <returns>MediaStream.</returns>
private static MediaStream GetMediaStream(IEnumerable<MediaStream> allStream, int? desiredIndex, MediaStreamType type, bool returnFirstIfNoIndex = true)
{
var streams = allStream.Where(s => s.Type == type).OrderBy(i => i.Index).ToList();
if (desiredIndex.HasValue)
{
var stream = streams.FirstOrDefault(s => s.Index == desiredIndex.Value);
if (stream != null)
{
return stream;
}
}
if (type == MediaStreamType.Video)
{
streams = streams.Where(i => !string.Equals(i.Codec, "mjpeg", StringComparison.OrdinalIgnoreCase)).ToList();
}
if (returnFirstIfNoIndex && type == MediaStreamType.Audio)
{
return streams.FirstOrDefault(i => i.Channels.HasValue && i.Channels.Value > 0) ??
streams.FirstOrDefault();
}
// Just return the first one
return returnFirstIfNoIndex ? streams.FirstOrDefault() : null;
}
/// <summary>
/// Enforces the resolution limit.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoRequest">The video request.</param>
private static void EnforceResolutionLimit(EncodingJob state, EncodingJobOptions videoRequest)
{
// Switch the incoming params to be ceilings rather than fixed values
videoRequest.MaxWidth = videoRequest.MaxWidth ?? videoRequest.Width;
videoRequest.MaxHeight = videoRequest.MaxHeight ?? videoRequest.Height;
videoRequest.Width = null;
videoRequest.Height = null;
}
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
var inputChannels = audioStream == null
? null
: audioStream.Channels;
if (inputChannels <= 0)
{
inputChannels = null;
}
var codec = outputAudioCodec ?? string.Empty;
if (codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
{
// wmav2 currently only supports two channel output
return Math.Min(2, inputChannels ?? 2);
}
if (request.MaxAudioChannels.HasValue)
{
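// libmp3lame only supports up to 2 channels; other codecs are capped at 6 (5.1)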
var channelLimit = codec.IndexOf("mp3", StringComparison.OrdinalIgnoreCase) != -1
? 2
: 6;
if (inputChannels.HasValue)
{
channelLimit = Math.Min(channelLimit, inputChannels.Value);
}
// If we don't have any media info then fall back to the codec channel limit to prevent encoding errors due to asking for too many channels
return Math.Min(request.MaxAudioChannels.Value, channelLimit);
}
return request.AudioChannels;
}
private int? GetVideoBitrateParamValue(EncodingJobOptions request, MediaStream videoStream, string outputVideoCodec)
{
var bitrate = request.VideoBitRate;
if (videoStream != null)
{
var isUpscaling = request.Height.HasValue && videoStream.Height.HasValue &&
request.Height.Value > videoStream.Height.Value;
if (request.Width.HasValue && videoStream.Width.HasValue &&
request.Width.Value > videoStream.Width.Value)
{
isUpscaling = true;
}
// Don't allow bitrate increases unless upscaling
if (!isUpscaling)
{
if (bitrate.HasValue && videoStream.BitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
}
}
}
if (bitrate.HasValue)
{
var inputVideoCodec = videoStream == null ? null : videoStream.Codec;
bitrate = ResolutionNormalizer.ScaleBitrate(bitrate.Value, inputVideoCodec, outputVideoCodec);
// If a max bitrate was requested, don't let the scaled bitrate exceed it
if (request.VideoBitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, request.VideoBitRate.Value);
}
}
return bitrate;
}
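/// <summary>
/// Builds the ffmpeg video bitrate arguments (-b:v, -maxrate, -bufsize) for the selected video codec.
/// </summary>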
protected string GetVideoBitrateParam(EncodingJob state, string videoCodec, bool isHls)
{
var bitrate = state.OutputVideoBitrate;
if (bitrate.HasValue)
{
if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// With vpx when crf is used, b:v becomes a max rate
// https://trac.ffmpeg.org/wiki/vpxEncodingGuide. But with higher bitrate source files -b:v causes judder, so limit the bitrate without letting it "saturate"; constrain it upward, not downward.
return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// h264
return string.Format(" -b:v {0} -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
return string.Empty;
}
private int? GetAudioBitrateParam(EncodingJobOptions request, MediaStream audioStream)
{
if (request.AudioBitRate.HasValue)
{
// Make sure we don't request a bitrate higher than the source
var currentBitrate = audioStream == null ? request.AudioBitRate.Value : audioStream.BitRate ?? request.AudioBitRate.Value;
// Note: the clamp to the source bitrate is currently disabled, so the requested bitrate is returned as-is
return request.AudioBitRate.Value;
//return Math.Min(currentBitrate, request.AudioBitRate.Value);
}
return null;
}
/// <summary>
/// Determines whether the specified stream is H264.
/// </summary>
/// <param name="stream">The stream.</param>
/// <returns><c>true</c> if the specified stream is H264; otherwise, <c>false</c>.</returns>
protected bool IsH264(MediaStream stream)
{
var codec = stream.Codec ?? string.Empty;
return codec.IndexOf("264", StringComparison.OrdinalIgnoreCase) != -1 ||
codec.IndexOf("avc", StringComparison.OrdinalIgnoreCase) != -1;
}
/// <summary>
/// Gets the name of the output audio codec
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
internal static string GetAudioEncoder(EncodingJob state)
{
var codec = state.OutputAudioCodec;
if (string.Equals(codec, "aac", StringComparison.OrdinalIgnoreCase))
{
return "aac -strict experimental";
}
if (string.Equals(codec, "mp3", StringComparison.OrdinalIgnoreCase))
{
return "libmp3lame";
}
if (string.Equals(codec, "vorbis", StringComparison.OrdinalIgnoreCase))
{
return "libvorbis";
}
if (string.Equals(codec, "wma", StringComparison.OrdinalIgnoreCase))
{
return "wmav2";
}
return codec.ToLower();
}
/// <summary>
/// Gets the name of the output video codec
/// </summary>
/// <returns>System.String.</returns>
internal static string GetVideoEncoder(IMediaEncoder mediaEncoder, EncodingJob state, EncodingOptions options)
{
var codec = state.OutputVideoCodec;
if (!string.IsNullOrEmpty(codec))
{
if (string.Equals(codec, "h264", StringComparison.OrdinalIgnoreCase))
{
return GetH264Encoder(mediaEncoder, state, options);
}
if (string.Equals(codec, "vpx", StringComparison.OrdinalIgnoreCase))
{
return "libvpx";
}
if (string.Equals(codec, "wmv", StringComparison.OrdinalIgnoreCase))
{
return "wmv2";
}
if (string.Equals(codec, "theora", StringComparison.OrdinalIgnoreCase))
{
return "libtheora";
}
return codec.ToLower();
}
return "copy";
}
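/// <summary>
/// Returns the preferred encoder when the media encoder reports support for it, otherwise the default encoder.
/// </summary>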
private static string GetAvailableEncoder(IMediaEncoder mediaEncoder, string preferredEncoder, string defaultEncoder)
{
if (mediaEncoder.SupportsEncoder(preferredEncoder))
{
return preferredEncoder;
}
return defaultEncoder;
}
internal static string GetH264Encoder(IMediaEncoder mediaEncoder, EncodingJob state, EncodingOptions options)
{
var defaultEncoder = "libx264";
// Only use alternative encoders for video files.
// When using concat with folder rips, if the mfx session fails to initialize, ffmpeg will be stuck retrying and will not exit gracefully
// Since transcoding of folder rips is experimental anyway, it's not worth adding additional variables such as this.
if (state.VideoType == VideoType.VideoFile)
{
var hwType = options.HardwareAccelerationType;
if (string.Equals(hwType, "qsv", StringComparison.OrdinalIgnoreCase) ||
string.Equals(hwType, "h264_qsv", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder(mediaEncoder, "h264_qsv", defaultEncoder);
}
if (string.Equals(hwType, "nvenc", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder(mediaEncoder, "h264_nvenc", defaultEncoder);
}
if (string.Equals(hwType, "h264_omx", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder(mediaEncoder, "h264_omx", defaultEncoder);
}
if (string.Equals(hwType, "vaapi", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrWhiteSpace(options.VaapiDevice))
{
if (IsVaapiSupported(state))
{
return GetAvailableEncoder(mediaEncoder, "h264_vaapi", defaultEncoder);
}
}
}
return defaultEncoder;
}
private static bool IsVaapiSupported(EncodingJob state)
{
var videoStream = state.VideoStream;
if (videoStream != null)
{
// vaapi will throw an error with this input
// [vaapi @ 0x7faed8000960] No VAAPI support for codec mpeg4 profile -99.
if (string.Equals(videoStream.Codec, "mpeg4", StringComparison.OrdinalIgnoreCase))
{
if (videoStream.Level == -99 || videoStream.Level == 15)
{
return false;
}
}
}
return true;
}
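/// <summary>
/// Determines whether the source video stream can be copied as-is for this request.
/// </summary>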
internal static bool CanStreamCopyVideo(EncodingJobOptions request, MediaStream videoStream)
{
if (videoStream.IsInterlaced)
{
return false;
}
if (videoStream.IsAnamorphic ?? false)
{
return false;
}
// Can't stream copy if we're burning in subtitles
if (request.SubtitleStreamIndex.HasValue)
{
if (request.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
return false;
}
}
// Source and target codecs must match
if (!string.Equals(request.VideoCodec, videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return false;
}
if (string.Equals("h264", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
if (videoStream.IsAVC.HasValue && !videoStream.IsAVC.Value)
{
return false;
}
}
// If client is requesting a specific video profile, it must match the source
if (!string.IsNullOrEmpty(request.Profile))
{
if (string.IsNullOrEmpty(videoStream.Profile))
{
return false;
}
if (!string.Equals(request.Profile, videoStream.Profile, StringComparison.OrdinalIgnoreCase))
{
var currentScore = GetVideoProfileScore(videoStream.Profile);
var requestedScore = GetVideoProfileScore(request.Profile);
if (currentScore == -1 || currentScore > requestedScore)
{
return false;
}
}
}
// Video width must fall within requested value
if (request.MaxWidth.HasValue)
{
if (!videoStream.Width.HasValue || videoStream.Width.Value > request.MaxWidth.Value)
{
return false;
}
}
// Video height must fall within requested value
if (request.MaxHeight.HasValue)
{
if (!videoStream.Height.HasValue || videoStream.Height.Value > request.MaxHeight.Value)
{
return false;
}
}
// Video framerate must fall within requested value
var requestedFramerate = request.MaxFramerate ?? request.Framerate;
if (requestedFramerate.HasValue)
{
var videoFrameRate = videoStream.AverageFrameRate ?? videoStream.RealFrameRate;
if (!videoFrameRate.HasValue || videoFrameRate.Value > requestedFramerate.Value)
{
return false;
}
}
// Video bitrate must fall within requested value
if (request.VideoBitRate.HasValue)
{
if (!videoStream.BitRate.HasValue || videoStream.BitRate.Value > request.VideoBitRate.Value)
{
return false;
}
}
if (request.MaxVideoBitDepth.HasValue)
{
if (videoStream.BitDepth.HasValue && videoStream.BitDepth.Value > request.MaxVideoBitDepth.Value)
{
return false;
}
}
if (request.MaxRefFrames.HasValue)
{
if (videoStream.RefFrames.HasValue && videoStream.RefFrames.Value > request.MaxRefFrames.Value)
{
return false;
}
}
// If a specific level was requested, the source must match or be less than
if (request.Level.HasValue)
{
if (!videoStream.Level.HasValue)
{
return false;
}
if (videoStream.Level.Value > request.Level.Value)
{
return false;
}
}
return request.EnableAutoStreamCopy;
}
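/// <summary>
/// Scores an H.264 profile by its position in the list below (higher means a more demanding profile); -1 means unknown.
/// </summary>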
private static int GetVideoProfileScore(string profile)
{
var list = new List<string>
{
"Constrained Baseline",
"Baseline",
"Extended",
"Main",
"High",
"Progressive High",
"Constrained High"
};
return Array.FindIndex(list.ToArray(), t => string.Equals(t, profile, StringComparison.OrdinalIgnoreCase));
}
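/// <summary>
/// Determines whether the source audio stream can be copied as-is for this request.
/// </summary>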
internal static bool CanStreamCopyAudio(EncodingJobOptions request, MediaStream audioStream, List<string> supportedAudioCodecs)
{
// Source and target codecs must match
if (string.IsNullOrEmpty(audioStream.Codec) || !supportedAudioCodecs.Contains(audioStream.Codec, StringComparer.OrdinalIgnoreCase))
{
return false;
}
// Audio bitrate must fall within requested value
if (request.AudioBitRate.HasValue)
{
if (!audioStream.BitRate.HasValue || audioStream.BitRate.Value <= 0)
{
return false;
}
if (audioStream.BitRate.Value > request.AudioBitRate.Value)
{
return false;
}
}
// Channels must fall within requested value
var channels = request.AudioChannels ?? request.MaxAudioChannels;
if (channels.HasValue)
{
if (!audioStream.Channels.HasValue || audioStream.Channels.Value <= 0)
{
return false;
}
if (audioStream.Channels.Value > channels.Value)
{
return false;
}
}
// Sample rate must fall within requested value
if (request.AudioSampleRate.HasValue)
{
if (!audioStream.SampleRate.HasValue || audioStream.SampleRate.Value <= 0)
{
return false;
}
if (audioStream.SampleRate.Value > request.AudioSampleRate.Value)
{
return false;
}
}
return request.EnableAutoStreamCopy;
}
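/// <summary>
/// Applies media and transcoding profile settings from the requested device profile to the job state.
/// </summary>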
private void ApplyDeviceProfileSettings(EncodingJob state)
{
var profile = state.Options.DeviceProfile;
if (profile == null)
{
// Don't use settings from the default profile.
// Only use a specific profile if it was requested.
return;
}
var audioCodec = state.ActualOutputAudioCodec;
var videoCodec = state.ActualOutputVideoCodec;
var outputContainer = state.Options.OutputContainer;
// A video request should use the video media profile; an audio-only request should use the audio media profile
var mediaProfile = state.IsVideoRequest ?
profile.GetVideoMediaProfile(outputContainer,
audioCodec,
videoCodec,
state.OutputWidth,
state.OutputHeight,
state.TargetVideoBitDepth,
state.OutputVideoBitrate,
state.TargetVideoProfile,
state.TargetVideoLevel,
state.TargetFramerate,
state.TargetPacketLength,
state.TargetTimestamp,
state.IsTargetAnamorphic,
state.TargetRefFrames,
state.TargetVideoStreamCount,
state.TargetAudioStreamCount,
state.TargetVideoCodecTag,
state.IsTargetAVC) :
profile.GetAudioMediaProfile(outputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate);
if (mediaProfile != null)
{
state.MimeType = mediaProfile.MimeType;
}
var transcodingProfile = state.IsVideoRequest ?
profile.GetVideoTranscodingProfile(outputContainer, audioCodec, videoCodec) :
profile.GetAudioTranscodingProfile(outputContainer, audioCodec);
if (transcodingProfile != null)
{
state.EstimateContentLength = transcodingProfile.EstimateContentLength;
state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;
state.Options.CopyTimestamps = transcodingProfile.CopyTimestamps;
}
}
}
}