// jellyfin-server/MediaBrowser.MediaEncoding/Encoder/BaseEncoder.cs

using MediaBrowser.Common.Configuration;
using MediaBrowser.Common.IO;
using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Controller.Session;
using MediaBrowser.MediaEncoding.Subtitles;
using MediaBrowser.Model.Configuration;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using CommonIO;
using MediaBrowser.Model.Dlna;
namespace MediaBrowser.MediaEncoding.Encoder
{
public abstract class BaseEncoder
{
protected readonly MediaEncoder MediaEncoder;
protected readonly ILogger Logger;
protected readonly IServerConfigurationManager ConfigurationManager;
protected readonly IFileSystem FileSystem;
protected readonly IIsoManager IsoManager;
protected readonly ILibraryManager LibraryManager;
protected readonly ISessionManager SessionManager;
protected readonly ISubtitleEncoder SubtitleEncoder;
protected readonly IMediaSourceManager MediaSourceManager;
protected readonly CultureInfo UsCulture = new CultureInfo("en-US");
protected BaseEncoder(MediaEncoder mediaEncoder,
ILogger logger,
IServerConfigurationManager configurationManager,
IFileSystem fileSystem,
IIsoManager isoManager,
ILibraryManager libraryManager,
ISessionManager sessionManager,
ISubtitleEncoder subtitleEncoder,
IMediaSourceManager mediaSourceManager)
{
MediaEncoder = mediaEncoder;
Logger = logger;
ConfigurationManager = configurationManager;
FileSystem = fileSystem;
IsoManager = isoManager;
LibraryManager = libraryManager;
SessionManager = sessionManager;
SubtitleEncoder = subtitleEncoder;
MediaSourceManager = mediaSourceManager;
}
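/// <summary>
/// Creates the encoding job, launches ffmpeg, begins streaming its stderr to a transcode log file,
/// and returns once the output file exists or the process has exited.
/// </summary>
/// <param name="options">The encoding options.</param>
/// <param name="progress">The progress reporter.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task&lt;EncodingJob&gt;.</returns>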
public async Task<EncodingJob> Start(EncodingJobOptions options,
IProgress<double> progress,
CancellationToken cancellationToken)
{
var encodingJob = await new EncodingJobFactory(Logger, LibraryManager, MediaSourceManager, ConfigurationManager)
.CreateJob(options, IsVideoEncoder, progress, cancellationToken).ConfigureAwait(false);
encodingJob.OutputFilePath = GetOutputFilePath(encodingJob);
FileSystem.CreateDirectory(Path.GetDirectoryName(encodingJob.OutputFilePath));
encodingJob.ReadInputAtNativeFramerate = options.ReadInputAtNativeFramerate;
await AcquireResources(encodingJob, cancellationToken).ConfigureAwait(false);
var commandLineArgs = GetCommandLineArguments(encodingJob);
if (GetEncodingOptions().EnableDebugLogging)
{
commandLineArgs = "-loglevel debug " + commandLineArgs;
}
var process = new Process
{
StartInfo = new ProcessStartInfo
{
CreateNoWindow = true,
UseShellExecute = false,
// Must consume both stdout and stderr or deadlocks may occur
RedirectStandardOutput = true,
RedirectStandardError = true,
RedirectStandardInput = true,
FileName = MediaEncoder.EncoderPath,
Arguments = commandLineArgs,
WindowStyle = ProcessWindowStyle.Hidden,
ErrorDialog = false
},
EnableRaisingEvents = true
};
var workingDirectory = GetWorkingDirectory(options);
if (!string.IsNullOrWhiteSpace(workingDirectory))
{
process.StartInfo.WorkingDirectory = workingDirectory;
}
OnTranscodeBeginning(encodingJob);
var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
Logger.Info(commandLineLogMessage);
var logFilePath = Path.Combine(ConfigurationManager.CommonApplicationPaths.LogDirectoryPath, "transcode-" + Guid.NewGuid() + ".txt");
FileSystem.CreateDirectory(Path.GetDirectoryName(logFilePath));
// FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
encodingJob.LogFileStream = FileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);
var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(commandLineLogMessage + Environment.NewLine + Environment.NewLine);
await encodingJob.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationToken).ConfigureAwait(false);
process.Exited += (sender, args) => OnFfMpegProcessExited(process, encodingJob);
try
{
process.Start();
}
catch (Exception ex)
{
Logger.ErrorException("Error starting ffmpeg", ex);
OnTranscodeFailedToStart(encodingJob.OutputFilePath, encodingJob);
throw;
}
cancellationToken.Register(() => Cancel(process, encodingJob));
// MUST read both stdout and stderr asynchronously or a deadlock may occur
process.BeginOutputReadLine();
// Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
new JobLogger(Logger).StartStreamingLog(encodingJob, process.StandardError.BaseStream, encodingJob.LogFileStream);
// Wait for the file to exist before proceeding
while (!FileSystem.FileExists(encodingJob.OutputFilePath) && !encodingJob.HasExited)
{
await Task.Delay(100, cancellationToken).ConfigureAwait(false);
}
return encodingJob;
}
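/// <summary>
/// Requests a graceful shutdown by writing "q" to ffmpeg's stdin instead of killing the process.
/// </summary>
/// <param name="process">The ffmpeg process.</param>
/// <param name="job">The job.</param>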
private void Cancel(Process process, EncodingJob job)
{
Logger.Info("Killing ffmpeg process for {0}", job.OutputFilePath);
//process.Kill();
process.StandardInput.WriteLine("q");
job.IsCancelled = true;
}
/// <summary>
/// Called when the ffmpeg process exits.
/// </summary>
/// <param name="process">The process.</param>
/// <param name="job">The job.</param>
private void OnFfMpegProcessExited(Process process, EncodingJob job)
{
job.HasExited = true;
Logger.Debug("Disposing stream resources");
job.Dispose();
var isSuccessful = false;
try
{
var exitCode = process.ExitCode;
Logger.Info("FFMpeg exited with code {0}", exitCode);
isSuccessful = exitCode == 0;
}
catch
{
Logger.Error("FFMpeg exited with an error.");
}
if (isSuccessful && !job.IsCancelled)
{
job.TaskCompletionSource.TrySetResult(true);
}
else if (job.IsCancelled)
{
try
{
DeleteFiles(job);
}
catch
{
}
try
{
job.TaskCompletionSource.TrySetException(new OperationCanceledException());
}
catch
{
}
}
else
{
try
{
DeleteFiles(job);
}
catch
{
}
try
{
job.TaskCompletionSource.TrySetException(new ApplicationException("Encoding failed"));
}
catch
{
}
}
// This would cause the Exited handler to be called twice:
//try
//{
// // Dispose the process
// process.Dispose();
//}
//catch (Exception ex)
//{
// Logger.ErrorException("Error disposing ffmpeg.", ex);
//}
}
protected virtual void DeleteFiles(EncodingJob job)
{
FileSystem.DeleteFile(job.OutputFilePath);
}
private void OnTranscodeBeginning(EncodingJob job)
{
job.ReportTranscodingProgress(null, null, null, null);
}
private void OnTranscodeFailedToStart(string path, EncodingJob job)
{
if (!string.IsNullOrWhiteSpace(job.Options.DeviceId))
{
SessionManager.ClearTranscodingInfo(job.Options.DeviceId);
}
}
protected abstract bool IsVideoEncoder { get; }
protected virtual string GetWorkingDirectory(EncodingJobOptions options)
{
return null;
}
protected EncodingOptions GetEncodingOptions()
{
return ConfigurationManager.GetConfiguration<EncodingOptions>("encoding");
}
protected abstract string GetCommandLineArguments(EncodingJob job);
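/// <summary>
/// Builds the output path: the job id plus the container extension, under the requested
/// output directory or the server's transcoding temp path.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>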
private string GetOutputFilePath(EncodingJob state)
{
var folder = string.IsNullOrWhiteSpace(state.Options.OutputDirectory) ?
ConfigurationManager.ApplicationPaths.TranscodingTempPath :
state.Options.OutputDirectory;
var outputFileExtension = GetOutputFileExtension(state);
var filename = state.Id + (outputFileExtension ?? string.Empty).ToLower();
return Path.Combine(folder, filename);
}
protected virtual string GetOutputFileExtension(EncodingJob state)
{
if (!string.IsNullOrWhiteSpace(state.Options.OutputContainer))
{
return "." + state.Options.OutputContainer;
}
return null;
}
/// <summary>
/// Gets the number of threads.
/// </summary>
/// <returns>System.Int32.</returns>
protected int GetNumberOfThreads(EncodingJob job, bool isWebm)
{
return job.Options.CpuCoreLimit ?? 0;
}
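/// <summary>
/// Builds the arguments that precede -i: probe size, user agent, fast seek (-ss),
/// -fflags +genpts, input A/V sync, -re when reading at native framerate, and any hardware decoder.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="genPts">if set to <c>true</c>, add -fflags +genpts for video requests.</param>
/// <returns>System.String.</returns>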
protected string GetInputModifier(EncodingJob state, bool genPts = true)
{
var inputModifier = string.Empty;
var probeSize = GetProbeSizeArgument(state);
inputModifier += " " + probeSize;
inputModifier = inputModifier.Trim();
var userAgentParam = GetUserAgentParam(state);
if (!string.IsNullOrWhiteSpace(userAgentParam))
{
inputModifier += " " + userAgentParam;
}
inputModifier = inputModifier.Trim();
inputModifier += " " + GetFastSeekCommandLineParameter(state.Options);
inputModifier = inputModifier.Trim();
if (state.IsVideoRequest && genPts)
{
inputModifier += " -fflags +genpts";
}
if (!string.IsNullOrEmpty(state.InputAudioSync))
{
inputModifier += " -async " + state.InputAudioSync;
}
if (!string.IsNullOrEmpty(state.InputVideoSync))
{
inputModifier += " -vsync " + state.InputVideoSync;
}
if (state.ReadInputAtNativeFramerate)
{
inputModifier += " -re";
}
var videoDecoder = GetVideoDecoder(state);
if (!string.IsNullOrWhiteSpace(videoDecoder))
{
inputModifier += " " + videoDecoder;
}
//if (state.IsVideoRequest)
//{
// if (string.Equals(state.OutputContainer, "mkv", StringComparison.OrdinalIgnoreCase))
// {
// //inputModifier += " -noaccurate_seek";
// }
//}
return inputModifier;
}
/// <summary>
/// Gets the ffmpeg video decoder argument when hardware-accelerated decoding (e.g. QSV) applies.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetVideoDecoder(EncodingJob state)
{
if (string.Equals(GetEncodingOptions().HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase))
{
if (state.VideoStream != null && !string.IsNullOrWhiteSpace(state.VideoStream.Codec))
{
switch (state.VideoStream.Codec.ToLower())
{
case "avc":
case "h264":
if (MediaEncoder.SupportsDecoder("h264_qsv"))
{
return "-c:v h264_qsv ";
}
break;
case "mpeg2video":
if (MediaEncoder.SupportsDecoder("mpeg2_qsv"))
{
return "-c:v mpeg2_qsv ";
}
break;
case "vc1":
if (MediaEncoder.SupportsDecoder("vc1_qsv"))
{
return "-c:v vc1_qsv ";
}
break;
}
}
}
// leave blank so ffmpeg will decide
return null;
}
private string GetUserAgentParam(EncodingJob state)
{
string useragent = null;
state.RemoteHttpHeaders.TryGetValue("User-Agent", out useragent);
if (!string.IsNullOrWhiteSpace(useragent))
{
return "-user-agent \"" + useragent + "\"";
}
return string.Empty;
}
/// <summary>
/// Gets the probe size argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetProbeSizeArgument(EncodingJob state)
{
if (state.PlayableStreamFileNames.Count > 0)
{
return MediaEncoder.GetProbeSizeArgument(state.PlayableStreamFileNames.ToArray(), state.InputProtocol);
}
return MediaEncoder.GetProbeSizeArgument(new[] { state.MediaPath }, state.InputProtocol);
}
/// <summary>
/// Gets the fast seek command line parameter.
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
/// <value>The fast seek command line parameter.</value>
protected string GetFastSeekCommandLineParameter(EncodingJobOptions request)
{
var time = request.StartTimeTicks ?? 0;
if (time > 0)
{
return string.Format("-ss {0}", MediaEncoder.GetTimeParameter(time));
}
return string.Empty;
}
/// <summary>
/// Gets the input argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetInputArgument(EncodingJob state)
{
var arg = string.Format("-i {0}", GetInputPathArgument(state));
if (state.SubtitleStream != null && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
{
if (state.VideoStream != null && state.VideoStream.Width.HasValue)
{
// This is hacky but not sure how to get the exact subtitle resolution
double height = state.VideoStream.Width.Value;
height /= 16;
height *= 9;
arg += string.Format(" -canvas_size {0}:{1}", state.VideoStream.Width.Value.ToString(CultureInfo.InvariantCulture), Convert.ToInt32(height).ToString(CultureInfo.InvariantCulture));
}
arg += " -i \"" + state.SubtitleStream.Path + "\"";
}
}
return arg.Trim();
}
private string GetInputPathArgument(EncodingJob state)
{
var protocol = state.InputProtocol;
var mediaPath = state.MediaPath ?? string.Empty;
var inputPath = new[] { mediaPath };
if (state.IsInputVideo)
{
if (!(state.VideoType == VideoType.Iso && state.IsoMount == null))
{
inputPath = MediaEncoderHelpers.GetInputArgument(FileSystem, mediaPath, state.InputProtocol, state.IsoMount, state.PlayableStreamFileNames);
}
}
return MediaEncoder.GetInputArgument(inputPath, protocol);
}
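/// <summary>
/// Prepares external resources before encoding: mounts ISO images when possible,
/// opens live streams that require opening, and waits out any configured buffer delay.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task.</returns>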
private async Task AcquireResources(EncodingJob state, CancellationToken cancellationToken)
{
if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
{
state.IsoMount = await IsoManager.Mount(state.MediaPath, cancellationToken).ConfigureAwait(false);
}
if (state.MediaSource.RequiresOpening && string.IsNullOrWhiteSpace(state.LiveStreamId))
{
var liveStreamResponse = await MediaSourceManager.OpenLiveStream(new LiveStreamRequest
{
OpenToken = state.MediaSource.OpenToken
}, false, cancellationToken).ConfigureAwait(false);
AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.Options);
if (state.IsVideoRequest)
{
EncodingJobFactory.TryStreamCopy(state, state.Options);
}
}
if (state.MediaSource.BufferMs.HasValue)
{
await Task.Delay(state.MediaSource.BufferMs.Value, cancellationToken).ConfigureAwait(false);
}
}
private void AttachMediaSourceInfo(EncodingJob state,
MediaSourceInfo mediaSource,
EncodingJobOptions videoRequest)
{
EncodingJobFactory.AttachMediaSourceInfo(state, mediaSource, videoRequest);
}
/// <summary>
/// Gets the internal graphical subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <returns>System.String.</returns>
protected string GetGraphicalSubtitleParam(EncodingJob state, string outputVideoCodec)
{
var outputSizeParam = string.Empty;
var request = state.Options;
// Add resolution params, if specified
if (request.Width.HasValue || request.Height.HasValue || request.MaxHeight.HasValue || request.MaxWidth.HasValue)
{
outputSizeParam = GetOutputSizeParam(state, outputVideoCodec).TrimEnd('"');
outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("scale", StringComparison.OrdinalIgnoreCase));
}
var videoSizeParam = string.Empty;
if (state.VideoStream != null && state.VideoStream.Width.HasValue && state.VideoStream.Height.HasValue)
{
videoSizeParam = string.Format(",scale={0}:{1}", state.VideoStream.Width.Value.ToString(UsCulture), state.VideoStream.Height.Value.ToString(UsCulture));
}
var mapPrefix = state.SubtitleStream.IsExternal ?
1 :
0;
var subtitleStreamIndex = state.SubtitleStream.IsExternal
? 0
: state.SubtitleStream.Index;
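// For illustration: with an external graphical subtitle, a 1920x1080 source (video stream index 0)
// and no resize requested, this produces roughly:
// -filter_complex "[1:0]format=yuva444p,scale=1920:1080,lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:0] [sub] overlay"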
return string.Format(" -filter_complex \"[{0}:{1}]format=yuva444p{4},lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:{2}] [sub] overlay{3}\"",
mapPrefix.ToString(UsCulture),
subtitleStreamIndex.ToString(UsCulture),
state.VideoStream.Index.ToString(UsCulture),
outputSizeParam,
videoSizeParam);
}
/// <summary>
/// Gets the video quality arguments (preset, CRF, bitrate, profile and level) for the command line
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoCodec">The video codec.</param>
/// <returns>System.String.</returns>
protected string GetVideoQualityParam(EncodingJob state, string videoCodec)
{
var param = string.Empty;
var isVc1 = state.VideoStream != null &&
string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);
if (string.Equals(videoCodec, "libx264", StringComparison.OrdinalIgnoreCase))
{
param = "-preset superfast";
param += " -crf 23";
}
else if (string.Equals(videoCodec, "libx265", StringComparison.OrdinalIgnoreCase))
{
param = "-preset fast";
param += " -crf 28";
}
// h264 (h264_qsv)
else if (string.Equals(videoCodec, "h264_qsv", StringComparison.OrdinalIgnoreCase))
{
param = "-preset 7 -look_ahead 0";
}
// h264 (libnvenc)
else if (string.Equals(videoCodec, "libnvenc", StringComparison.OrdinalIgnoreCase))
{
param = "-preset high-performance";
}
// webm
else if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// Values 0-3, 0 being highest quality but slower
var profileScore = 0;
string crf;
var qmin = "0";
var qmax = "50";
crf = "10";
if (isVc1)
{
profileScore++;
}
// Max of 2
profileScore = Math.Min(profileScore, 2);
// http://www.webmproject.org/docs/encoder-parameters/
param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1} -qmin {2} -qmax {3}",
profileScore.ToString(UsCulture),
crf,
qmin,
qmax);
}
else if (string.Equals(videoCodec, "mpeg4", StringComparison.OrdinalIgnoreCase))
{
param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
}
// asf/wmv
else if (string.Equals(videoCodec, "wmv2", StringComparison.OrdinalIgnoreCase))
{
param = "-qmin 2";
}
else if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
param = "-mbd 2";
}
param += GetVideoBitrateParam(state, videoCodec);
var framerate = GetFramerateParam(state);
if (framerate.HasValue)
{
param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
}
if (!string.IsNullOrEmpty(state.OutputVideoSync))
{
param += " -vsync " + state.OutputVideoSync;
}
if (!string.IsNullOrEmpty(state.Options.Profile))
{
param += " -profile:v " + state.Options.Profile;
}
var levelString = state.Options.Level.HasValue ? state.Options.Level.Value.ToString(CultureInfo.InvariantCulture) : null;
if (!string.IsNullOrEmpty(levelString))
{
var h264Encoder = EncodingJobFactory.GetH264Encoder(state, GetEncodingOptions());
// h264_qsv and libnvenc expect levels to be expressed as a decimal. libx264 supports decimal and non-decimal format
if (String.Equals(h264Encoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) || String.Equals(h264Encoder, "libnvenc", StringComparison.OrdinalIgnoreCase))
{
switch (levelString)
{
case "30":
param += " -level 3";
break;
case "31":
param += " -level 3.1";
break;
case "32":
param += " -level 3.2";
break;
case "40":
param += " -level 4";
break;
case "41":
param += " -level 4.1";
break;
case "42":
param += " -level 4.2";
break;
case "50":
param += " -level 5";
break;
case "51":
param += " -level 5.1";
break;
case "52":
param += " -level 5.2";
break;
default:
param += " -level " + levelString;
break;
}
}
else
{
param += " -level " + levelString;
}
}
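// For illustration: a libx264 request capped at 3000000 bps with level 4.1 yields roughly
// "-pix_fmt yuv420p -preset superfast -crf 23 -b:v 3000000 -maxrate 3000000 -bufsize 6000000 -level 4.1"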
return "-pix_fmt yuv420p " + param;
}
protected string GetVideoBitrateParam(EncodingJob state, string videoCodec)
{
var bitrate = state.OutputVideoBitrate;
if (bitrate.HasValue)
{
if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// With vpx, when crf is used, -b:v becomes a maximum rate (https://trac.ffmpeg.org/wiki/vpxEncodingGuide).
// Higher-bitrate source files can judder when -b:v is used alone, so cap the bitrate on the way up without constraining it downward.
return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// h264
return string.Format(" -b:v {0} -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
return string.Empty;
}
protected double? GetFramerateParam(EncodingJob state)
{
if (state.Options != null)
{
if (state.Options.Framerate.HasValue)
{
return state.Options.Framerate.Value;
}
var maxrate = state.Options.MaxFramerate;
if (maxrate.HasValue && state.VideoStream != null)
{
var contentRate = state.VideoStream.AverageFrameRate ?? state.VideoStream.RealFrameRate;
if (contentRate.HasValue && contentRate.Value > maxrate.Value)
{
return maxrate;
}
}
}
return null;
}
/// <summary>
/// Gets the map args.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetMapArgs(EncodingJob state)
{
// If we don't have known media info
// If input is video, use -sn to drop subtitles
// Otherwise just return empty
if (state.VideoStream == null && state.AudioStream == null)
{
return state.IsInputVideo ? "-sn" : string.Empty;
}
// We have media info, but we don't know the stream indexes
if (state.VideoStream != null && state.VideoStream.Index == -1)
{
return "-sn";
}
// We have media info, but we don't know the stream indexes
if (state.AudioStream != null && state.AudioStream.Index == -1)
{
return state.IsInputVideo ? "-sn" : string.Empty;
}
var args = string.Empty;
if (state.VideoStream != null)
{
args += string.Format("-map 0:{0}", state.VideoStream.Index);
}
else
{
args += "-map -0:v";
}
if (state.AudioStream != null)
{
args += string.Format(" -map 0:{0}", state.AudioStream.Index);
}
else
{
args += " -map -0:a";
}
if (state.SubtitleStream == null || state.Options.SubtitleMethod == SubtitleDeliveryMethod.Hls)
{
args += " -map -0:s";
}
else if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
{
args += " -map 1:0 -sn";
}
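// For illustration: video at stream index 0 and audio at index 1 with no burned-in subtitle gives "-map 0:0 -map 0:1 -map -0:s"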
return args;
}
/// <summary>
/// Determines whether the specified stream is H264.
/// </summary>
/// <param name="stream">The stream.</param>
/// <returns><c>true</c> if the specified stream is H264; otherwise, <c>false</c>.</returns>
protected bool IsH264(MediaStream stream)
{
var codec = stream.Codec ?? string.Empty;
return codec.IndexOf("264", StringComparison.OrdinalIgnoreCase) != -1 ||
codec.IndexOf("avc", StringComparison.OrdinalIgnoreCase) != -1;
}
/// <summary>
/// If we're going to put a fixed size on the command line, this will calculate it
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
/// <returns>System.String.</returns>
protected string GetOutputSizeParam(EncodingJob state,
string outputVideoCodec,
bool allowTimeStampCopy = true)
{
// http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/
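// For illustration: MaxWidth=1280 and MaxHeight=720 produce
// scale=trunc(min(max(iw\,ih*dar)\,min(1280\,720*dar))/2)*2:trunc(min(max(iw/dar\,ih)\,min(1280/dar\,720))/2)*2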
var request = state.Options;
var filters = new List<string>();
if (state.DeInterlace)
{
filters.Add("yadif=0:-1:0");
}
// If fixed dimensions were supplied
if (request.Width.HasValue && request.Height.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
}
// If max dimensions were supplied, scale to fit within them while preserving the display aspect ratio, rounding both dimensions down to even numbers
else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/2)*2:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2", maxWidthParam, maxHeightParam));
}
// If a fixed width was requested
else if (request.Width.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
}
// If a fixed height was requested
else if (request.Height.HasValue)
{
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a/2)*2:{0}", heightParam));
}
// If a max width was requested
else if (request.MaxWidth.HasValue)
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,{0})/2)*2:trunc(ow/dar/2)*2", maxWidthParam));
}
// If a max height was requested
else if (request.MaxHeight.HasValue)
{
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a/2)*2:min(ih\\,{0})", maxHeightParam));
}
if (string.Equals(outputVideoCodec, "h264_qsv", StringComparison.OrdinalIgnoreCase))
{
if (filters.Count > 1)
{
//filters[filters.Count - 1] += ":flags=fast_bilinear";
}
}
var output = string.Empty;
if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
var subParam = GetTextSubtitleParam(state);
filters.Add(subParam);
if (allowTimeStampCopy)
{
output += " -copyts";
}
}
if (filters.Count > 0)
{
output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
}
return output;
}
/// <summary>
/// Gets the text subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetTextSubtitleParam(EncodingJob state)
{
var seconds = Math.Round(TimeSpan.FromTicks(state.Options.StartTimeTicks ?? 0).TotalSeconds);
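// For illustration: an external UTF-8 subtitle file (path shown here is hypothetical) with a ten-minute seek gives roughly
// subtitles=filename='/path/to/file.srt':charenc=UTF-8,setpts=PTS -600/TB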
if (state.SubtitleStream.IsExternal)
{
var subtitlePath = state.SubtitleStream.Path;
var charsetParam = string.Empty;
if (!string.IsNullOrEmpty(state.SubtitleStream.Language))
{
var charenc = SubtitleEncoder.GetSubtitleFileCharacterSet(subtitlePath, state.MediaSource.Protocol, CancellationToken.None).Result;
if (!string.IsNullOrEmpty(charenc))
{
charsetParam = ":charenc=" + charenc;
}
}
// TODO: Perhaps also use original_size=1920x800 ??
return string.Format("subtitles=filename='{0}'{1},setpts=PTS -{2}/TB",
MediaEncoder.EscapeSubtitleFilterPath(subtitlePath),
charsetParam,
seconds.ToString(UsCulture));
}
var mediaPath = state.MediaPath ?? string.Empty;
return string.Format("subtitles='{0}:si={1}',setpts=PTS -{2}/TB",
MediaEncoder.EscapeSubtitleFilterPath(mediaPath),
state.InternalSubtitleStreamOffset.ToString(UsCulture),
seconds.ToString(UsCulture));
}
protected string GetAudioFilterParam(EncodingJob state, bool isHls)
{
var volParam = string.Empty;
var audioSampleRate = string.Empty;
var channels = state.OutputAudioChannels;
// Boost volume (by the configured DownMixAudioBoost) when downmixing from 5.1 or more channels to stereo
if (channels.HasValue && channels.Value <= 2)
{
if (state.AudioStream != null && state.AudioStream.Channels.HasValue && state.AudioStream.Channels.Value > 5)
{
volParam = ",volume=" + GetEncodingOptions().DownMixAudioBoost.ToString(UsCulture);
}
}
if (state.OutputAudioSampleRate.HasValue)
{
audioSampleRate = state.OutputAudioSampleRate.Value + ":";
}
var adelay = isHls ? "adelay=1," : string.Empty;
var pts = string.Empty;
if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode && !state.Options.CopyTimestamps)
{
var seconds = TimeSpan.FromTicks(state.Options.StartTimeTicks ?? 0).TotalSeconds;
pts = string.Format(",asetpts=PTS-{0}/TB", Math.Round(seconds).ToString(UsCulture));
}
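// For illustration: an HLS request resampling to 44100 Hz with a downmix boost of 2 and audio sync of 1 gives roughly
// -af "adelay=1,aresample=44100:async=1,volume=2"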
return string.Format("-af \"{0}aresample={1}async={4}{2}{3}\"",
adelay,
audioSampleRate,
volParam,
pts,
state.OutputAudioSync);
}
}
}