Merge branch 'master' into populate-extras

This commit is contained in:
Liggy 2019-02-20 14:24:23 +01:00 committed by Torsten
commit 33171a58b5
69 changed files with 925 additions and 1782 deletions

View File

@ -20,6 +20,7 @@
- [RazeLighter777](https://github.com/RazeLighter777)
- [WillWill56](https://github.com/WillWill56)
- [Liggy](https://github.com/Liggy)
- [fruhnow](https://github.com/fruhnow)
# Emby Contributors

View File

@ -4,6 +4,7 @@ using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Xml;
using System.Xml.Linq;
using Emby.Dlna.Common;
using Emby.Dlna.Server;
@ -733,26 +734,21 @@ namespace Emby.Dlna.PlayTo
return (true, null);
}
XElement uPnpResponse;
XElement uPnpResponse = null;
// Handle different variations sent back by devices
try
{
uPnpResponse = XElement.Parse(trackString);
uPnpResponse = ParseResponse(trackString);
}
catch (Exception)
catch (Exception ex)
{
// first try to add a root node with a dlna namespace
try
{
uPnpResponse = XElement.Parse("<data xmlns:dlna=\"urn:schemas-dlna-org:device-1-0\">" + trackString + "</data>");
uPnpResponse = uPnpResponse.Descendants().First();
}
catch (Exception ex)
{
_logger.LogError(ex, "Unable to parse xml {0}", trackString);
return (true, null);
}
_logger.LogError(ex, "Uncaught exception while parsing xml");
}
if (uPnpResponse == null)
{
_logger.LogError("Failed to parse xml: \n {Xml}", trackString);
return (true, null);
}
var e = uPnpResponse.Element(uPnpNamespaces.items);
@ -762,6 +758,43 @@ namespace Emby.Dlna.PlayTo
return (true, uTrack);
}
private XElement ParseResponse(string xml)
{
// Handle different variations sent back by devices
try
{
return XElement.Parse(xml);
}
catch (XmlException)
{
}
// first try to add a root node with a dlna namespace
try
{
return XElement.Parse("<data xmlns:dlna=\"urn:schemas-dlna-org:device-1-0\">" + xml + "</data>")
.Descendants()
.First();
}
catch (XmlException)
{
}
// some devices send back invalid xml
try
{
return XElement.Parse(xml.Replace("&", "&amp;"));
}
catch (XmlException)
{
}
return null;
}
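For context, the fallback chain above can be reproduced as a small standalone sketch; the sample payloads below are hypothetical stand-ins for device responses, not captured traffic:

using System;
using System.Linq;
using System.Xml;
using System.Xml.Linq;

// Minimal sketch of the same three-step fallback: direct parse, then a wrapper
// root carrying the dlna namespace prefix, then escaping stray '&' characters.
static XElement TryParseDeviceXml(string xml)
{
    try { return XElement.Parse(xml); }
    catch (XmlException) { }

    try
    {
        return XElement.Parse("<data xmlns:dlna=\"urn:schemas-dlna-org:device-1-0\">" + xml + "</data>")
            .Descendants()
            .First();
    }
    catch (XmlException) { }

    try { return XElement.Parse(xml.Replace("&", "&amp;")); }
    catch (XmlException) { }

    return null;
}

// Each call exercises one branch of the chain (payloads are made up).
Console.WriteLine(TryParseDeviceXml("<item>A</item>")?.Name);                                  // direct parse
Console.WriteLine(TryParseDeviceXml("<item dlna:duration=\"0:01:00\">A</item>")?.Name);        // needs the dlna wrapper
Console.WriteLine(TryParseDeviceXml("<DIDL-Lite><item>Tom & Jerry</item></DIDL-Lite>")?.Name); // needs '&' escaped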
private static uBaseObject CreateUBaseObject(XElement container, string trackUri)
{
if (container == null)

View File

@ -11,101 +11,81 @@ namespace Emby.Notifications
public class CoreNotificationTypes : INotificationTypeFactory
{
private readonly ILocalizationManager _localization;
private readonly IServerApplicationHost _appHost;
public CoreNotificationTypes(ILocalizationManager localization, IServerApplicationHost appHost)
public CoreNotificationTypes(ILocalizationManager localization)
{
_localization = localization;
_appHost = appHost;
}
public IEnumerable<NotificationTypeInfo> GetNotificationTypes()
{
var knownTypes = new List<NotificationTypeInfo>
var knownTypes = new NotificationTypeInfo[]
{
new NotificationTypeInfo
{
Type = NotificationType.ApplicationUpdateInstalled.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.InstallationFailed.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.PluginInstalled.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.PluginError.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.PluginUninstalled.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.PluginUpdateInstalled.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.ServerRestartRequired.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.TaskFailed.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.NewLibraryContent.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.AudioPlayback.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.VideoPlayback.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.AudioPlaybackStopped.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.VideoPlaybackStopped.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.CameraImageUploaded.ToString()
},
new NotificationTypeInfo
{
Type = NotificationType.UserLockedOut.ToString()
}
};
if (!_appHost.CanSelfUpdate)
{
knownTypes.Add(new NotificationTypeInfo
},
new NotificationTypeInfo
{
Type = NotificationType.ApplicationUpdateAvailable.ToString()
});
}
}
};
foreach (var type in knownTypes)
{

View File

@ -5,21 +5,17 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MediaBrowser.Common.Configuration;
using MediaBrowser.Common.Updates;
using MediaBrowser.Controller;
using MediaBrowser.Controller.Devices;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Controller.Entities.Audio;
using MediaBrowser.Controller.Entities.TV;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.Notifications;
using MediaBrowser.Controller.Plugins;
using MediaBrowser.Controller.Session;
using MediaBrowser.Model.Activity;
using MediaBrowser.Model.Events;
using MediaBrowser.Model.Globalization;
using MediaBrowser.Model.Notifications;
using MediaBrowser.Model.Tasks;
using Microsoft.Extensions.Logging;
namespace Emby.Notifications
@ -29,43 +25,40 @@ namespace Emby.Notifications
/// </summary>
public class Notifications : IServerEntryPoint
{
private readonly IInstallationManager _installationManager;
private readonly IUserManager _userManager;
private readonly ILogger _logger;
private readonly ITaskManager _taskManager;
private readonly INotificationManager _notificationManager;
private readonly ILibraryManager _libraryManager;
private readonly ISessionManager _sessionManager;
private readonly IServerApplicationHost _appHost;
private Timer LibraryUpdateTimer { get; set; }
private readonly object _libraryChangedSyncLock = new object();
private readonly IConfigurationManager _config;
private readonly IDeviceManager _deviceManager;
private readonly ILocalizationManager _localization;
private readonly IActivityManager _activityManager;
private string[] _coreNotificationTypes;
public Notifications(IInstallationManager installationManager, IActivityManager activityManager, ILocalizationManager localization, IUserManager userManager, ILogger logger, ITaskManager taskManager, INotificationManager notificationManager, ILibraryManager libraryManager, ISessionManager sessionManager, IServerApplicationHost appHost, IConfigurationManager config, IDeviceManager deviceManager)
public Notifications(
IActivityManager activityManager,
ILocalizationManager localization,
ILogger logger,
INotificationManager notificationManager,
ILibraryManager libraryManager,
IServerApplicationHost appHost,
IConfigurationManager config)
{
_installationManager = installationManager;
_userManager = userManager;
_logger = logger;
_taskManager = taskManager;
_notificationManager = notificationManager;
_libraryManager = libraryManager;
_sessionManager = sessionManager;
_appHost = appHost;
_config = config;
_deviceManager = deviceManager;
_localization = localization;
_activityManager = activityManager;
_coreNotificationTypes = new CoreNotificationTypes(localization, appHost).GetNotificationTypes().Select(i => i.Type).ToArray();
_coreNotificationTypes = new CoreNotificationTypes(localization).GetNotificationTypes().Select(i => i.Type).ToArray();
}
public Task RunAsync()
@ -124,10 +117,9 @@ namespace Emby.Notifications
return _config.GetConfiguration<NotificationOptions>("notifications");
}
async void _appHost_HasUpdateAvailableChanged(object sender, EventArgs e)
private async void _appHost_HasUpdateAvailableChanged(object sender, EventArgs e)
{
// This notification is for users who can't auto-update (aka running as service)
if (!_appHost.HasUpdateAvailable || _appHost.CanSelfUpdate)
if (!_appHost.HasUpdateAvailable)
{
return;
}
@ -145,7 +137,7 @@ namespace Emby.Notifications
}
private readonly List<BaseItem> _itemsAdded = new List<BaseItem>();
void _libraryManager_ItemAdded(object sender, ItemChangeEventArgs e)
private void _libraryManager_ItemAdded(object sender, ItemChangeEventArgs e)
{
if (!FilterItem(e.Item))
{

View File

@ -72,12 +72,6 @@ namespace Emby.Server.Implementations.AppBase
/// <value>The plugin configurations path.</value>
public string PluginConfigurationsPath => Path.Combine(PluginsPath, "configurations");
/// <summary>
/// Gets the path to where temporary update files will be stored
/// </summary>
/// <value>The plugin configurations path.</value>
public string TempUpdatePath => Path.Combine(ProgramDataPath, "updates");
/// <summary>
/// Gets the path to the log directory
/// </summary>

View File

@ -123,12 +123,6 @@ namespace Emby.Server.Implementations
/// <value><c>true</c> if this instance can self restart; otherwise, <c>false</c>.</value>
public abstract bool CanSelfRestart { get; }
/// <summary>
/// Gets or sets a value indicating whether this instance can self update.
/// </summary>
/// <value><c>true</c> if this instance can self update; otherwise, <c>false</c>.</value>
public virtual bool CanSelfUpdate => false;
public virtual bool CanLaunchWebBrowser
{
get
@ -1456,7 +1450,6 @@ namespace Emby.Server.Implementations
OperatingSystem = EnvironmentInfo.OperatingSystem.ToString(),
OperatingSystemDisplayName = EnvironmentInfo.OperatingSystemName,
CanSelfRestart = CanSelfRestart,
CanSelfUpdate = CanSelfUpdate,
CanLaunchWebBrowser = CanLaunchWebBrowser,
WanAddress = wanAddress,
HasUpdateAvailable = HasUpdateAvailable,
@ -1755,21 +1748,6 @@ namespace Emby.Server.Implementations
Plugins = list.ToArray();
}
/// <summary>
/// Updates the application.
/// </summary>
/// <param name="package">The package that contains the update</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="progress">The progress.</param>
public async Task UpdateApplication(PackageVersionInfo package, CancellationToken cancellationToken, IProgress<double> progress)
{
await InstallationManager.InstallPackage(package, false, progress, cancellationToken).ConfigureAwait(false);
HasUpdateAvailable = false;
OnApplicationUpdated(package);
}
/// <summary>
/// This returns localhost in the case of no external dns, and the hostname if the
/// dns is prefixed with a valid Uri prefix.

View File

@ -224,7 +224,7 @@ namespace Emby.Server.Implementations.Data
});
}
db.ExecuteAll(string.Join(";", queries.ToArray()));
db.ExecuteAll(string.Join(";", queries));
Logger.LogInformation("PRAGMA synchronous=" + db.Query("PRAGMA synchronous").SelectScalarString().First());
}
@ -232,23 +232,6 @@ namespace Emby.Server.Implementations.Data
protected virtual int? CacheSize => null;
internal static void CheckOk(int rc)
{
string msg = "";
if (raw.SQLITE_OK != rc)
{
throw CreateException((ErrorCode)rc, msg);
}
}
internal static Exception CreateException(ErrorCode rc, string msg)
{
var exp = new Exception(msg);
return exp;
}
private bool _disposed;
protected void CheckDisposed()
{
@ -375,13 +358,6 @@ namespace Emby.Server.Implementations.Data
}
}
public class DummyToken : IDisposable
{
public void Dispose()
{
}
}
public static IDisposable Read(this ReaderWriterLockSlim obj)
{
//if (BaseSqliteRepository.ThreadSafeMode > 0)
@ -390,6 +366,7 @@ namespace Emby.Server.Implementations.Data
//}
return new WriteLockToken(obj);
}
public static IDisposable Write(this ReaderWriterLockSlim obj)
{
//if (BaseSqliteRepository.ThreadSafeMode > 0)

View File

@ -536,7 +536,7 @@ namespace Emby.Server.Implementations.Data
throw new ArgumentNullException(nameof(item));
}
SaveItems(new List<BaseItem> { item }, cancellationToken);
SaveItems(new [] { item }, cancellationToken);
}
public void SaveImages(BaseItem item)
@ -576,7 +576,7 @@ namespace Emby.Server.Implementations.Data
/// or
/// cancellationToken
/// </exception>
public void SaveItems(List<BaseItem> items, CancellationToken cancellationToken)
public void SaveItems(IEnumerable<BaseItem> items, CancellationToken cancellationToken)
{
if (items == null)
{
@ -587,7 +587,7 @@ namespace Emby.Server.Implementations.Data
CheckDisposed();
var tuples = new List<Tuple<BaseItem, List<Guid>, BaseItem, string, List<string>>>();
var tuples = new List<(BaseItem, List<Guid>, BaseItem, string, List<string>)>();
foreach (var item in items)
{
var ancestorIds = item.SupportsAncestors ?
@ -599,7 +599,7 @@ namespace Emby.Server.Implementations.Data
var userdataKey = item.GetUserDataKeys().FirstOrDefault();
var inheritedTags = item.GetInheritedTags();
tuples.Add(new Tuple<BaseItem, List<Guid>, BaseItem, string, List<string>>(item, ancestorIds, topParent, userdataKey, inheritedTags));
tuples.Add((item, ancestorIds, topParent, userdataKey, inheritedTags));
}
using (WriteLock.Write())
@ -615,7 +615,7 @@ namespace Emby.Server.Implementations.Data
}
}
private void SaveItemsInTranscation(IDatabaseConnection db, List<Tuple<BaseItem, List<Guid>, BaseItem, string, List<string>>> tuples)
private void SaveItemsInTranscation(IDatabaseConnection db, IEnumerable<(BaseItem, List<Guid>, BaseItem, string, List<string>)> tuples)
{
var statements = PrepareAllSafe(db, new string[]
{
@ -966,7 +966,7 @@ namespace Emby.Server.Implementations.Data
if (item.ExtraIds.Length > 0)
{
saveItemStatement.TryBind("@ExtraIds", string.Join("|", item.ExtraIds.ToArray()));
saveItemStatement.TryBind("@ExtraIds", string.Join("|", item.ExtraIds));
}
else
{
@ -1183,9 +1183,9 @@ namespace Emby.Server.Implementations.Data
/// <exception cref="ArgumentException"></exception>
public BaseItem RetrieveItem(Guid id)
{
if (id.Equals(Guid.Empty))
if (id == Guid.Empty)
{
throw new ArgumentNullException(nameof(id));
throw new ArgumentException("Guid can't be empty", nameof(id));
}
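Note on the guard above (and the identical guards later in this diff): ArgumentException's two-string constructor takes the message first and the parameter name second, the reverse of ArgumentNullException, so the throw reads:

// (message, paramName) — order differs from ArgumentNullException(paramName, message).
throw new ArgumentException("Guid can't be empty", nameof(id));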
CheckDisposed();
@ -2079,14 +2079,14 @@ namespace Emby.Server.Implementations.Data
return false;
}
var sortingFields = query.OrderBy.Select(i => i.Item1);
var sortingFields = new HashSet<string>(query.OrderBy.Select(i => i.Item1), StringComparer.OrdinalIgnoreCase);
return sortingFields.Contains(ItemSortBy.IsFavoriteOrLiked, StringComparer.OrdinalIgnoreCase)
|| sortingFields.Contains(ItemSortBy.IsPlayed, StringComparer.OrdinalIgnoreCase)
|| sortingFields.Contains(ItemSortBy.IsUnplayed, StringComparer.OrdinalIgnoreCase)
|| sortingFields.Contains(ItemSortBy.PlayCount, StringComparer.OrdinalIgnoreCase)
|| sortingFields.Contains(ItemSortBy.DatePlayed, StringComparer.OrdinalIgnoreCase)
|| sortingFields.Contains(ItemSortBy.SeriesDatePlayed, StringComparer.OrdinalIgnoreCase)
return sortingFields.Contains(ItemSortBy.IsFavoriteOrLiked)
|| sortingFields.Contains(ItemSortBy.IsPlayed)
|| sortingFields.Contains(ItemSortBy.IsUnplayed)
|| sortingFields.Contains(ItemSortBy.PlayCount)
|| sortingFields.Contains(ItemSortBy.DatePlayed)
|| sortingFields.Contains(ItemSortBy.SeriesDatePlayed)
|| query.IsFavoriteOrLiked.HasValue
|| query.IsFavorite.HasValue
|| query.IsResumable.HasValue
@ -2094,9 +2094,9 @@ namespace Emby.Server.Implementations.Data
|| query.IsLiked.HasValue;
}
private readonly List<ItemFields> allFields = Enum.GetNames(typeof(ItemFields))
private readonly ItemFields[] _allFields = Enum.GetNames(typeof(ItemFields))
.Select(i => (ItemFields)Enum.Parse(typeof(ItemFields), i, true))
.ToList();
.ToArray();
private string[] GetColumnNamesFromField(ItemFields field)
{
@ -2151,18 +2151,26 @@ namespace Emby.Server.Implementations.Data
}
}
private static readonly HashSet<string> _programExcludeParentTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"Series",
"Season",
"MusicAlbum",
"MusicArtist",
"PhotoAlbum"
};
private static readonly HashSet<string> _programTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"Program",
"TvChannel",
"LiveTvProgram",
"LiveTvTvChannel"
};
private bool HasProgramAttributes(InternalItemsQuery query)
{
var excludeParentTypes = new string[]
{
"Series",
"Season",
"MusicAlbum",
"MusicArtist",
"PhotoAlbum"
};
if (excludeParentTypes.Contains(query.ParentType ?? string.Empty, StringComparer.OrdinalIgnoreCase))
if (_programExcludeParentTypes.Contains(query.ParentType))
{
return false;
}
@ -2172,29 +2180,18 @@ namespace Emby.Server.Implementations.Data
return true;
}
var types = new string[]
{
"Program",
"TvChannel",
"LiveTvProgram",
"LiveTvTvChannel"
};
return types.Any(i => query.IncludeItemTypes.Contains(i, StringComparer.OrdinalIgnoreCase));
return query.IncludeItemTypes.Any(x => _programTypes.Contains(x));
}
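One subtlety the new checks lean on: HashSet<string>.Contains(null) returns false rather than throwing, so the old query.ParentType ?? string.Empty guard is no longer needed. A minimal sketch:

using System;
using System.Collections.Generic;

var set = new HashSet<string>(StringComparer.OrdinalIgnoreCase) { "Series", "Season" };
Console.WriteLine(set.Contains("season")); // True  (case-insensitive comparer)
Console.WriteLine(set.Contains(null));     // False (no exception for a null lookup)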
private static readonly HashSet<string> _serviceTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"TvChannel",
"LiveTvTvChannel"
};
private bool HasServiceName(InternalItemsQuery query)
{
var excludeParentTypes = new string[]
{
"Series",
"Season",
"MusicAlbum",
"MusicArtist",
"PhotoAlbum"
};
if (excludeParentTypes.Contains(query.ParentType ?? string.Empty, StringComparer.OrdinalIgnoreCase))
if (_programExcludeParentTypes.Contains(query.ParentType))
{
return false;
}
@ -2204,27 +2201,18 @@ namespace Emby.Server.Implementations.Data
return true;
}
var types = new string[]
{
"TvChannel",
"LiveTvTvChannel"
};
return types.Any(i => query.IncludeItemTypes.Contains(i, StringComparer.OrdinalIgnoreCase));
return query.IncludeItemTypes.Any(x => _serviceTypes.Contains(x));
}
private static readonly HashSet<string> _startDateTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"Program",
"LiveTvProgram"
};
private bool HasStartDate(InternalItemsQuery query)
{
var excludeParentTypes = new string[]
{
"Series",
"Season",
"MusicAlbum",
"MusicArtist",
"PhotoAlbum"
};
if (excludeParentTypes.Contains(query.ParentType ?? string.Empty, StringComparer.OrdinalIgnoreCase))
if (_programExcludeParentTypes.Contains(query.ParentType))
{
return false;
}
@ -2234,13 +2222,7 @@ namespace Emby.Server.Implementations.Data
return true;
}
var types = new string[]
{
"Program",
"LiveTvProgram"
};
return types.Any(i => query.IncludeItemTypes.Contains(i, StringComparer.OrdinalIgnoreCase));
return query.IncludeItemTypes.Any(x => _startDateTypes.Contains(x));
}
private bool HasEpisodeAttributes(InternalItemsQuery query)
@ -2263,16 +2245,26 @@ namespace Emby.Server.Implementations.Data
return query.IncludeItemTypes.Contains("Trailer", StringComparer.OrdinalIgnoreCase);
}
private static readonly HashSet<string> _artistExcludeParentTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"Series",
"Season",
"PhotoAlbum"
};
private static readonly HashSet<string> _artistsTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"Audio",
"MusicAlbum",
"MusicVideo",
"AudioBook",
"AudioPodcast"
};
private bool HasArtistFields(InternalItemsQuery query)
{
var excludeParentTypes = new string[]
{
"Series",
"Season",
"PhotoAlbum"
};
if (excludeParentTypes.Contains(query.ParentType ?? string.Empty, StringComparer.OrdinalIgnoreCase))
if (_artistExcludeParentTypes.Contains(query.ParentType))
{
return false;
}
@ -2282,18 +2274,18 @@ namespace Emby.Server.Implementations.Data
return true;
}
var types = new string[]
{
"Audio",
"MusicAlbum",
"MusicVideo",
"AudioBook",
"AudioPodcast"
};
return types.Any(i => query.IncludeItemTypes.Contains(i, StringComparer.OrdinalIgnoreCase));
return query.IncludeItemTypes.Any(x => _artistsTypes.Contains(x));
}
private static readonly HashSet<string> _seriesTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"Audio",
"MusicAlbum",
"MusicVideo",
"AudioBook",
"AudioPodcast"
};
private bool HasSeriesFields(InternalItemsQuery query)
{
if (string.Equals(query.ParentType, "PhotoAlbum", StringComparison.OrdinalIgnoreCase))
@ -2306,26 +2298,18 @@ namespace Emby.Server.Implementations.Data
return true;
}
var types = new string[]
{
"Book",
"AudioBook",
"Episode",
"Season"
};
return types.Any(i => query.IncludeItemTypes.Contains(i, StringComparer.OrdinalIgnoreCase));
return query.IncludeItemTypes.Any(x => _seriesTypes.Contains(x));
}
private string[] GetFinalColumnsToSelect(InternalItemsQuery query, string[] startColumns)
private List<string> GetFinalColumnsToSelect(InternalItemsQuery query, IEnumerable<string> startColumns)
{
var list = startColumns.ToList();
foreach (var field in allFields)
foreach (var field in _allFields)
{
if (!HasField(query, field))
{
foreach (var fieldToRemove in GetColumnNamesFromField(field).ToList())
foreach (var fieldToRemove in GetColumnNamesFromField(field))
{
list.Remove(fieldToRemove);
}
@ -2419,11 +2403,14 @@ namespace Emby.Server.Implementations.Data
list.Add(builder.ToString());
var excludeIds = query.ExcludeItemIds.ToList();
excludeIds.Add(item.Id);
excludeIds.AddRange(item.ExtraIds);
var oldLen = query.ExcludeItemIds.Length;
var newLen = oldLen + item.ExtraIds.Length + 1;
var excludeIds = new Guid[newLen];
query.ExcludeItemIds.CopyTo(excludeIds, 0);
excludeIds[oldLen] = item.Id;
item.ExtraIds.CopyTo(excludeIds, oldLen + 1);
query.ExcludeItemIds = excludeIds.ToArray();
query.ExcludeItemIds = excludeIds;
query.ExcludeProviderIds = item.ProviderIds;
}
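The Guid[] construction above replaces an intermediate List<Guid> plus ToArray() with a single allocation; a standalone sketch of the same copy pattern (the helper name is made up):

using System;

// Append one id plus a range of extra ids to an existing array in one allocation.
static Guid[] AppendIds(Guid[] existing, Guid single, Guid[] extras)
{
    var result = new Guid[existing.Length + extras.Length + 1];
    existing.CopyTo(result, 0);
    result[existing.Length] = single;
    extras.CopyTo(result, existing.Length + 1);
    return result;
}

var merged = AppendIds(new[] { Guid.NewGuid() }, Guid.NewGuid(), new[] { Guid.NewGuid(), Guid.NewGuid() });
Console.WriteLine(merged.Length); // 4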
@ -2444,7 +2431,7 @@ namespace Emby.Server.Implementations.Data
list.Add(builder.ToString());
}
return list.ToArray();
return list;
}
private void BindSearchParams(InternalItemsQuery query, IStatement statement)
@ -2723,18 +2710,17 @@ namespace Emby.Server.Implementations.Data
private void AddItem(List<BaseItem> items, BaseItem newItem)
{
var providerIds = newItem.ProviderIds.ToList();
for (var i = 0; i < items.Count; i++)
{
var item = items[i];
foreach (var providerId in providerIds)
foreach (var providerId in newItem.ProviderIds)
{
if (providerId.Key == MetadataProviders.TmdbCollection.ToString())
{
continue;
}
if (item.GetProviderId(providerId.Key) == providerId.Value)
{
if (newItem.SourceType == SourceType.Library)
@ -2753,10 +2739,10 @@ namespace Emby.Server.Implementations.Data
{
var elapsed = (DateTime.UtcNow - startDate).TotalMilliseconds;
int slowThreshold = 1000;
int slowThreshold = 100;
#if DEBUG
slowThreshold = 250;
slowThreshold = 10;
#endif
if (elapsed >= slowThreshold)
@ -2806,7 +2792,7 @@ namespace Emby.Server.Implementations.Data
var whereText = whereClauses.Count == 0 ?
string.Empty :
" where " + string.Join(" AND ", whereClauses.ToArray());
" where " + string.Join(" AND ", whereClauses);
commandText += whereText
+ GetGroupBy(query)
@ -2930,25 +2916,31 @@ namespace Emby.Server.Implementations.Data
private string GetOrderByText(InternalItemsQuery query)
{
var orderBy = query.OrderBy.ToList();
var enableOrderInversion = false;
if (query.SimilarTo != null && orderBy.Count == 0)
if (string.IsNullOrEmpty(query.SearchTerm))
{
orderBy.Add(new ValueTuple<string, SortOrder>("SimilarityScore", SortOrder.Descending));
orderBy.Add(new ValueTuple<string, SortOrder>(ItemSortBy.Random, SortOrder.Ascending));
int oldLen = query.OrderBy.Length;
if (query.SimilarTo != null && oldLen == 0)
{
var arr = new (string, SortOrder)[oldLen + 2];
query.OrderBy.CopyTo(arr, 0);
arr[oldLen] = ("SimilarityScore", SortOrder.Descending);
arr[oldLen + 1] = (ItemSortBy.Random, SortOrder.Ascending);
query.OrderBy = arr;
}
}
else
{
query.OrderBy = new []
{
("SearchScore", SortOrder.Descending),
(ItemSortBy.SortName, SortOrder.Ascending)
};
}
if (!string.IsNullOrEmpty(query.SearchTerm))
{
orderBy = new List<(string, SortOrder)>();
orderBy.Add(new ValueTuple<string, SortOrder>("SearchScore", SortOrder.Descending));
orderBy.Add(new ValueTuple<string, SortOrder>(ItemSortBy.SortName, SortOrder.Ascending));
}
var orderBy = query.OrderBy;
query.OrderBy = orderBy.ToArray();
if (orderBy.Count == 0)
if (orderBy.Length == 0)
{
return string.Empty;
}
@ -2957,6 +2949,7 @@ namespace Emby.Server.Implementations.Data
{
var columnMap = MapOrderByField(i.Item1, query);
var columnAscending = i.Item2 == SortOrder.Ascending;
const bool enableOrderInversion = false;
if (columnMap.Item2 && enableOrderInversion)
{
columnAscending = !columnAscending;
@ -2968,7 +2961,7 @@ namespace Emby.Server.Implementations.Data
}));
}
private ValueTuple<string, bool> MapOrderByField(string name, InternalItemsQuery query)
private (string, bool) MapOrderByField(string name, InternalItemsQuery query)
{
if (string.Equals(name, ItemSortBy.AirTime, StringComparison.OrdinalIgnoreCase))
{
@ -3218,7 +3211,7 @@ namespace Emby.Server.Implementations.Data
var whereText = whereClauses.Count == 0 ?
string.Empty :
" where " + string.Join(" AND ", whereClauses.ToArray());
" where " + string.Join(" AND ", whereClauses);
commandText += whereText
+ GetGroupBy(query)
@ -4378,7 +4371,7 @@ namespace Emby.Server.Implementations.Data
}
else if (query.Years.Length > 1)
{
var val = string.Join(",", query.Years.ToArray());
var val = string.Join(",", query.Years);
whereClauses.Add("ProductionYear in (" + val + ")");
}
@ -4952,7 +4945,12 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
return result;
}
return new[] { value }.Where(IsValidType);
if (IsValidType(value))
{
return new[] { value };
}
return Array.Empty<string>();
}
public void DeleteItem(Guid id, CancellationToken cancellationToken)
@ -5215,32 +5213,32 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
}
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetAllArtists(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetAllArtists(InternalItemsQuery query)
{
return GetItemValues(query, new[] { 0, 1 }, typeof(MusicArtist).FullName);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetArtists(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetArtists(InternalItemsQuery query)
{
return GetItemValues(query, new[] { 0 }, typeof(MusicArtist).FullName);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetAlbumArtists(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetAlbumArtists(InternalItemsQuery query)
{
return GetItemValues(query, new[] { 1 }, typeof(MusicArtist).FullName);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetStudios(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetStudios(InternalItemsQuery query)
{
return GetItemValues(query, new[] { 3 }, typeof(Studio).FullName);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetGenres(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetGenres(InternalItemsQuery query)
{
return GetItemValues(query, new[] { 2 }, typeof(Genre).FullName);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetMusicGenres(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetMusicGenres(InternalItemsQuery query)
{
return GetItemValues(query, new[] { 2 }, typeof(MusicGenre).FullName);
}
@ -5317,7 +5315,7 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
}
}
private QueryResult<Tuple<BaseItem, ItemCounts>> GetItemValues(InternalItemsQuery query, int[] itemValueTypes, string returnType)
private QueryResult<(BaseItem, ItemCounts)> GetItemValues(InternalItemsQuery query, int[] itemValueTypes, string returnType)
{
if (query == null)
{
@ -5335,7 +5333,7 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
var typeClause = itemValueTypes.Length == 1 ?
("Type=" + itemValueTypes[0].ToString(CultureInfo.InvariantCulture)) :
("Type in (" + string.Join(",", itemValueTypes.Select(i => i.ToString(CultureInfo.InvariantCulture)).ToArray()) + ")");
("Type in (" + string.Join(",", itemValueTypes.Select(i => i.ToString(CultureInfo.InvariantCulture))) + ")");
InternalItemsQuery typeSubQuery = null;
@ -5363,11 +5361,7 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
whereClauses.Add("guid in (select ItemId from ItemValues where ItemValues.CleanValue=A.CleanName AND " + typeClause + ")");
var typeWhereText = whereClauses.Count == 0 ?
string.Empty :
" where " + string.Join(" AND ", whereClauses);
itemCountColumnQuery += typeWhereText;
itemCountColumnQuery += " where " + string.Join(" AND ", whereClauses);
itemCountColumns = new Dictionary<string, string>()
{
@ -5400,7 +5394,7 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
IsSeries = query.IsSeries
};
columns = GetFinalColumnsToSelect(query, columns.ToArray()).ToList();
columns = GetFinalColumnsToSelect(query, columns);
var commandText = "select "
+ string.Join(",", columns)
@ -5492,8 +5486,8 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
{
return connection.RunInTransaction(db =>
{
var list = new List<Tuple<BaseItem, ItemCounts>>();
var result = new QueryResult<Tuple<BaseItem, ItemCounts>>();
var list = new List<(BaseItem, ItemCounts)>();
var result = new QueryResult<(BaseItem, ItemCounts)>();
var statements = PrepareAllSafe(db, statementTexts);
@ -5531,7 +5525,7 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
{
var countStartColumn = columns.Count - 1;
list.Add(new Tuple<BaseItem, ItemCounts>(item, GetItemCounts(row, countStartColumn, typesToCount)));
list.Add((item, GetItemCounts(row, countStartColumn, typesToCount)));
}
}
@ -6198,6 +6192,5 @@ where AncestorIdText not null and ItemValues.Value not null and ItemValues.Type
return item;
}
}
}

View File

@ -90,7 +90,7 @@ namespace Emby.Server.Implementations.HttpServer
/// </summary>
private IHasHeaders GetHttpResult(IRequest requestContext, Stream content, string contentType, bool addCachePrevention, IDictionary<string, string> responseHeaders = null)
{
var result = new StreamWriter(content, contentType, _logger);
var result = new StreamWriter(content, contentType);
if (responseHeaders == null)
{
@ -131,7 +131,7 @@ namespace Emby.Server.Implementations.HttpServer
content = Array.Empty<byte>();
}
result = new StreamWriter(content, contentType, contentLength, _logger);
result = new StreamWriter(content, contentType, contentLength);
}
else
{
@ -143,7 +143,7 @@ namespace Emby.Server.Implementations.HttpServer
responseHeaders = new Dictionary<string, string>();
}
if (addCachePrevention && !responseHeaders.TryGetValue("Expires", out string expires))
if (addCachePrevention && !responseHeaders.TryGetValue("Expires", out string _))
{
responseHeaders["Expires"] = "-1";
}
@ -175,7 +175,7 @@ namespace Emby.Server.Implementations.HttpServer
bytes = Array.Empty<byte>();
}
result = new StreamWriter(bytes, contentType, contentLength, _logger);
result = new StreamWriter(bytes, contentType, contentLength);
}
else
{
@ -187,7 +187,7 @@ namespace Emby.Server.Implementations.HttpServer
responseHeaders = new Dictionary<string, string>();
}
if (addCachePrevention && !responseHeaders.TryGetValue("Expires", out string expires))
if (addCachePrevention && !responseHeaders.TryGetValue("Expires", out string _))
{
responseHeaders["Expires"] = "-1";
}
@ -277,9 +277,10 @@ namespace Emby.Server.Implementations.HttpServer
private object ToOptimizedResultInternal<T>(IRequest request, T dto, IDictionary<string, string> responseHeaders = null)
{
var contentType = request.ResponseContentType;
// TODO: @bond use Span and .Equals
var contentType = request.ResponseContentType?.Split(';')[0].Trim().ToLowerInvariant();
switch (GetRealContentType(contentType))
switch (contentType)
{
case "application/xml":
case "text/xml":
@ -333,13 +334,13 @@ namespace Emby.Server.Implementations.HttpServer
if (isHeadRequest)
{
var result = new StreamWriter(Array.Empty<byte>(), contentType, contentLength, _logger);
var result = new StreamWriter(Array.Empty<byte>(), contentType, contentLength);
AddResponseHeaders(result, responseHeaders);
return result;
}
else
{
var result = new StreamWriter(content, contentType, contentLength, _logger);
var result = new StreamWriter(content, contentType, contentLength);
AddResponseHeaders(result, responseHeaders);
return result;
}
@ -348,13 +349,19 @@ namespace Emby.Server.Implementations.HttpServer
private byte[] Compress(byte[] bytes, string compressionType)
{
if (string.Equals(compressionType, "br", StringComparison.OrdinalIgnoreCase))
{
return CompressBrotli(bytes);
}
if (string.Equals(compressionType, "deflate", StringComparison.OrdinalIgnoreCase))
{
return Deflate(bytes);
}
if (string.Equals(compressionType, "gzip", StringComparison.OrdinalIgnoreCase))
{
return GZip(bytes);
}
throw new NotSupportedException(compressionType);
}
@ -390,13 +397,6 @@ namespace Emby.Server.Implementations.HttpServer
}
}
public static string GetRealContentType(string contentType)
{
return contentType == null
? null
: contentType.Split(';')[0].ToLowerInvariant().Trim();
}
private static string SerializeToXmlString(object from)
{
using (var ms = new MemoryStream())
@ -603,7 +603,7 @@ namespace Emby.Server.Implementations.HttpServer
}
}
var hasHeaders = new StreamWriter(stream, contentType, _logger)
var hasHeaders = new StreamWriter(stream, contentType)
{
OnComplete = options.OnComplete,
OnError = options.OnError

View File

@ -14,8 +14,6 @@ namespace Emby.Server.Implementations.HttpServer
/// </summary>
public class StreamWriter : IAsyncStreamWriter, IHasHeaders
{
private ILogger Logger { get; set; }
private static readonly CultureInfo UsCulture = new CultureInfo("en-US");
/// <summary>
@ -45,7 +43,7 @@ namespace Emby.Server.Implementations.HttpServer
/// <param name="source">The source.</param>
/// <param name="contentType">Type of the content.</param>
/// <param name="logger">The logger.</param>
public StreamWriter(Stream source, string contentType, ILogger logger)
public StreamWriter(Stream source, string contentType)
{
if (string.IsNullOrEmpty(contentType))
{
@ -53,7 +51,6 @@ namespace Emby.Server.Implementations.HttpServer
}
SourceStream = source;
Logger = logger;
Headers["Content-Type"] = contentType;
@ -69,7 +66,7 @@ namespace Emby.Server.Implementations.HttpServer
/// <param name="source">The source.</param>
/// <param name="contentType">Type of the content.</param>
/// <param name="logger">The logger.</param>
public StreamWriter(byte[] source, string contentType, int contentLength, ILogger logger)
public StreamWriter(byte[] source, string contentType, int contentLength)
{
if (string.IsNullOrEmpty(contentType))
{
@ -77,7 +74,6 @@ namespace Emby.Server.Implementations.HttpServer
}
SourceBytes = source;
Logger = logger;
Headers["Content-Type"] = contentType;

View File

@ -1225,9 +1225,9 @@ namespace Emby.Server.Implementations.Library
/// <exception cref="ArgumentNullException">id</exception>
public BaseItem GetItemById(Guid id)
{
if (id.Equals(Guid.Empty))
if (id == Guid.Empty)
{
throw new ArgumentNullException(nameof(id));
throw new ArgumentException("Guid can't be empty", nameof(id));
}
if (LibraryItemsCache.TryGetValue(id, out BaseItem item))
@ -1237,8 +1237,6 @@ namespace Emby.Server.Implementations.Library
item = RetrieveItem(id);
//_logger.LogDebug("GetitemById {0}", id);
if (item != null)
{
RegisterItem(item);
@ -1333,7 +1331,7 @@ namespace Emby.Server.Implementations.Library
return ItemRepository.GetItemIdsList(query);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetStudios(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetStudios(InternalItemsQuery query)
{
if (query.User != null)
{
@ -1344,7 +1342,7 @@ namespace Emby.Server.Implementations.Library
return ItemRepository.GetStudios(query);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetGenres(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetGenres(InternalItemsQuery query)
{
if (query.User != null)
{
@ -1355,7 +1353,7 @@ namespace Emby.Server.Implementations.Library
return ItemRepository.GetGenres(query);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetMusicGenres(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetMusicGenres(InternalItemsQuery query)
{
if (query.User != null)
{
@ -1366,7 +1364,7 @@ namespace Emby.Server.Implementations.Library
return ItemRepository.GetMusicGenres(query);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetAllArtists(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetAllArtists(InternalItemsQuery query)
{
if (query.User != null)
{
@ -1377,7 +1375,7 @@ namespace Emby.Server.Implementations.Library
return ItemRepository.GetAllArtists(query);
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetArtists(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetArtists(InternalItemsQuery query)
{
if (query.User != null)
{
@ -1421,7 +1419,7 @@ namespace Emby.Server.Implementations.Library
}
}
public QueryResult<Tuple<BaseItem, ItemCounts>> GetAlbumArtists(InternalItemsQuery query)
public QueryResult<(BaseItem, ItemCounts)> GetAlbumArtists(InternalItemsQuery query)
{
if (query.User != null)
{
@ -1808,18 +1806,16 @@ namespace Emby.Server.Implementations.Library
/// <returns>Task.</returns>
public void CreateItems(IEnumerable<BaseItem> items, BaseItem parent, CancellationToken cancellationToken)
{
var list = items.ToList();
ItemRepository.SaveItems(items, cancellationToken);
ItemRepository.SaveItems(list, cancellationToken);
foreach (var item in list)
foreach (var item in items)
{
RegisterItem(item);
}
if (ItemAdded != null)
{
foreach (var item in list)
foreach (var item in items)
{
// With the live tv guide this just creates too much noise
if (item.SourceType != SourceType.Library)
@ -1853,7 +1849,7 @@ namespace Emby.Server.Implementations.Library
/// <summary>
/// Updates the item.
/// </summary>
public void UpdateItems(List<BaseItem> items, BaseItem parent, ItemUpdateType updateReason, CancellationToken cancellationToken)
public void UpdateItems(IEnumerable<BaseItem> items, BaseItem parent, ItemUpdateType updateReason, CancellationToken cancellationToken)
{
foreach (var item in items)
{
@ -1908,7 +1904,7 @@ namespace Emby.Server.Implementations.Library
/// <returns>Task.</returns>
public void UpdateItem(BaseItem item, BaseItem parent, ItemUpdateType updateReason, CancellationToken cancellationToken)
{
UpdateItems(new List<BaseItem> { item }, parent, updateReason, cancellationToken);
UpdateItems(new [] { item }, parent, updateReason, cancellationToken);
}
/// <summary>
@ -2005,9 +2001,7 @@ namespace Emby.Server.Implementations.Library
.FirstOrDefault();
}
var options = collectionFolder == null ? new LibraryOptions() : collectionFolder.GetLibraryOptions();
return options;
return collectionFolder == null ? new LibraryOptions() : collectionFolder.GetLibraryOptions();
}
public string GetContentType(BaseItem item)
@ -2017,11 +2011,13 @@ namespace Emby.Server.Implementations.Library
{
return configuredContentType;
}
configuredContentType = GetConfiguredContentType(item, true);
if (!string.IsNullOrEmpty(configuredContentType))
{
return configuredContentType;
}
return GetInheritedContentType(item);
}
@ -2056,6 +2052,7 @@ namespace Emby.Server.Implementations.Library
{
return collectionFolder.CollectionType;
}
return GetContentTypeOverride(item.ContainingFolderPath, inheritConfiguredPath);
}
@ -2066,6 +2063,7 @@ namespace Emby.Server.Implementations.Library
{
return nameValuePair.Value;
}
return null;
}
@ -2108,9 +2106,9 @@ namespace Emby.Server.Implementations.Library
string viewType,
string sortName)
{
var path = Path.Combine(ConfigurationManager.ApplicationPaths.InternalMetadataPath, "views");
path = Path.Combine(path, _fileSystem.GetValidFilename(viewType));
var path = Path.Combine(ConfigurationManager.ApplicationPaths.InternalMetadataPath,
"views",
_fileSystem.GetValidFilename(viewType));
var id = GetNewItemId(path + "_namedview_" + name, typeof(UserView));

View File

@ -168,9 +168,9 @@ namespace Emby.Server.Implementations.Library
/// <exception cref="ArgumentNullException"></exception>
public User GetUserById(Guid id)
{
if (id.Equals(Guid.Empty))
if (id == Guid.Empty)
{
throw new ArgumentNullException(nameof(id));
throw new ArgumentException("Guid can't be empty", nameof(id));
}
return Users.FirstOrDefault(u => u.Id == id);

View File

@ -184,7 +184,7 @@ namespace Emby.Server.Implementations.LiveTv
public QueryResult<BaseItem> GetInternalChannels(LiveTvChannelQuery query, DtoOptions dtoOptions, CancellationToken cancellationToken)
{
var user = query.UserId.Equals(Guid.Empty) ? null : _userManager.GetUserById(query.UserId);
var user = query.UserId == Guid.Empty ? null : _userManager.GetUserById(query.UserId);
var topFolder = GetInternalLiveTvFolder(cancellationToken);

View File

@ -41,6 +41,27 @@ namespace Emby.Server.Implementations.Serialization
ServiceStack.Text.JsonSerializer.SerializeToStream(obj, obj.GetType(), stream);
}
/// <summary>
/// Serializes to stream.
/// </summary>
/// <param name="obj">The obj.</param>
/// <param name="stream">The stream.</param>
/// <exception cref="ArgumentNullException">obj</exception>
public void SerializeToStream<T>(T obj, Stream stream)
{
if (obj == null)
{
throw new ArgumentNullException(nameof(obj));
}
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
ServiceStack.Text.JsonSerializer.SerializeToStream<T>(obj, stream);
}
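A hedged usage sketch of the new generic overload (the Person DTO and file name are made up); the point is that the statically-typed call avoids the obj.GetType() dispatch of the non-generic path:

using System.IO;
using ServiceStack.Text;

public class Person
{
    public string Name { get; set; }
}

public static class Example
{
    public static void Main()
    {
        var person = new Person { Name = "Alice" };
        using (var stream = File.Create("person.json"))
        {
            // Statically typed: the serializer resolves Person at compile time.
            JsonSerializer.SerializeToStream<Person>(person, stream);
        }
    }
}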
/// <summary>
/// Serializes to file.
/// </summary>

View File

@ -187,26 +187,13 @@ namespace Jellyfin.Server
if (string.IsNullOrEmpty(dataDir))
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
dataDir = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData);
}
else
{
// $XDG_DATA_HOME defines the base directory relative to which user specific data files should be stored.
dataDir = Environment.GetEnvironmentVariable("XDG_DATA_HOME");
// If $XDG_DATA_HOME is either not set or empty, a default equal to $HOME/.local/share should be used.
if (string.IsNullOrEmpty(dataDir))
{
dataDir = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".local", "share");
}
}
dataDir = Path.Combine(dataDir, "jellyfin");
// LocalApplicationData follows the XDG spec on unix machines
dataDir = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "jellyfin");
}
}
Directory.CreateDirectory(dataDir);
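A hedged sketch of where the single SpecialFolder lookup resolves to, assuming .NET Core's documented folder mapping (the paths in the comments are the expected defaults, not verified output):

using System;
using System.IO;

// Windows: %LOCALAPPDATA%\jellyfin
// Linux:   $XDG_DATA_HOME/jellyfin, or ~/.local/share/jellyfin when XDG_DATA_HOME is unset
var dataDir = Path.Combine(
    Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
    "jellyfin");
Console.WriteLine(dataDir);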
// configDir
// IF --configdir
// ELSE IF $JELLYFIN_CONFIG_DIR
@ -216,7 +203,6 @@ namespace Jellyfin.Server
// ELSE IF $XDG_CONFIG_HOME use $XDG_CONFIG_HOME/jellyfin
// ELSE $HOME/.config/jellyfin
var configDir = options.ConfigDir;
if (string.IsNullOrEmpty(configDir))
{
configDir = Environment.GetEnvironmentVariable("JELLYFIN_CONFIG_DIR");
@ -300,7 +286,6 @@ namespace Jellyfin.Server
// Ensure the main folders exist before we continue
try
{
Directory.CreateDirectory(dataDir);
Directory.CreateDirectory(logDir);
Directory.CreateDirectory(configDir);
Directory.CreateDirectory(cacheDir);

View File

@ -6,9 +6,13 @@ namespace Jellyfin.Server.SocketSharp
public class HttpFile : IHttpFile
{
public string Name { get; set; }
public string FileName { get; set; }
public long ContentLength { get; set; }
public string ContentType { get; set; }
public Stream InputStream { get; set; }
}
}

View File

@ -0,0 +1,204 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using MediaBrowser.Model.Services;
public sealed class HttpPostedFile : IDisposable
{
private string _name;
private string _contentType;
private Stream _stream;
private bool _disposed = false;
internal HttpPostedFile(string name, string content_type, Stream base_stream, long offset, long length)
{
_name = name;
_contentType = content_type;
_stream = new ReadSubStream(base_stream, offset, length);
}
public string ContentType => _contentType;
public int ContentLength => (int)_stream.Length;
public string FileName => _name;
public Stream InputStream => _stream;
/// <summary>
/// Releases the unmanaged resources and disposes of the managed resources used.
/// </summary>
public void Dispose()
{
if (_disposed)
{
return;
}
_stream.Dispose();
_stream = null;
_name = null;
_contentType = null;
_disposed = true;
}
private class ReadSubStream : Stream
{
private Stream _stream;
private long _offset;
private long _end;
private long _position;
public ReadSubStream(Stream s, long offset, long length)
{
_stream = s;
_offset = offset;
_end = offset + length;
_position = offset;
}
public override void Flush()
{
}
public override int Read(byte[] buffer, int dest_offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (dest_offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(dest_offset), "< 0");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "< 0");
}
int len = buffer.Length;
if (dest_offset > len)
{
throw new ArgumentException("destination offset is beyond array size", nameof(dest_offset));
}
// reordered to avoid possible integer overflow
if (dest_offset > len - count)
{
throw new ArgumentException("Reading would overrun buffer", nameof(count));
}
if (count > _end - _position)
{
count = (int)(_end - _position);
}
if (count <= 0)
{
return 0;
}
_stream.Position = _position;
int result = _stream.Read(buffer, dest_offset, count);
if (result > 0)
{
_position += result;
}
else
{
_position = _end;
}
return result;
}
public override int ReadByte()
{
if (_position >= _end)
{
return -1;
}
_stream.Position = _position;
int result = _stream.ReadByte();
if (result < 0)
{
_position = _end;
}
else
{
_position++;
}
return result;
}
public override long Seek(long d, SeekOrigin origin)
{
long real;
switch (origin)
{
case SeekOrigin.Begin:
real = _offset + d;
break;
case SeekOrigin.End:
real = _end + d;
break;
case SeekOrigin.Current:
real = _position + d;
break;
default:
throw new ArgumentException("Unknown SeekOrigin value", nameof(origin));
}
long virt = real - _offset;
if (virt < 0 || virt > Length)
{
throw new ArgumentException("Invalid position", nameof(d));
}
_position = _stream.Seek(real, SeekOrigin.Begin);
return _position;
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => false;
public override long Length => _end - _offset;
public override long Position
{
get => _position - _offset;
set
{
if (value > Length)
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_position = Seek(value, SeekOrigin.Begin);
}
}
}
}

View File

@ -225,7 +225,7 @@ namespace Jellyfin.Server.SocketSharp
if (starts_with)
{
return StrUtils.StartsWith(ContentType, ct, true);
return ContentType.StartsWith(ct, StringComparison.OrdinalIgnoreCase);
}
return string.Equals(ContentType, ct, StringComparison.OrdinalIgnoreCase);
@ -324,215 +324,6 @@ namespace Jellyfin.Server.SocketSharp
return result.ToString();
}
}
public sealed class HttpPostedFile
{
private string name;
private string content_type;
private Stream stream;
private class ReadSubStream : Stream
{
private Stream s;
private long offset;
private long end;
private long position;
public ReadSubStream(Stream s, long offset, long length)
{
this.s = s;
this.offset = offset;
this.end = offset + length;
position = offset;
}
public override void Flush()
{
}
public override int Read(byte[] buffer, int dest_offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (dest_offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(dest_offset), "< 0");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "< 0");
}
int len = buffer.Length;
if (dest_offset > len)
{
throw new ArgumentException("destination offset is beyond array size", nameof(dest_offset));
}
// reordered to avoid possible integer overflow
if (dest_offset > len - count)
{
throw new ArgumentException("Reading would overrun buffer", nameof(count));
}
if (count > end - position)
{
count = (int)(end - position);
}
if (count <= 0)
{
return 0;
}
s.Position = position;
int result = s.Read(buffer, dest_offset, count);
if (result > 0)
{
position += result;
}
else
{
position = end;
}
return result;
}
public override int ReadByte()
{
if (position >= end)
{
return -1;
}
s.Position = position;
int result = s.ReadByte();
if (result < 0)
{
position = end;
}
else
{
position++;
}
return result;
}
public override long Seek(long d, SeekOrigin origin)
{
long real;
switch (origin)
{
case SeekOrigin.Begin:
real = offset + d;
break;
case SeekOrigin.End:
real = end + d;
break;
case SeekOrigin.Current:
real = position + d;
break;
default:
throw new ArgumentException("Unknown SeekOrigin value", nameof(origin));
}
long virt = real - offset;
if (virt < 0 || virt > Length)
{
throw new ArgumentException("Invalid position", nameof(d));
}
position = s.Seek(real, SeekOrigin.Begin);
return position;
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => false;
public override long Length => end - offset;
public override long Position
{
get => position - offset;
set
{
if (value > Length)
{
throw new ArgumentOutOfRangeException(nameof(value));
}
position = Seek(value, SeekOrigin.Begin);
}
}
}
internal HttpPostedFile(string name, string content_type, Stream base_stream, long offset, long length)
{
this.name = name;
this.content_type = content_type;
this.stream = new ReadSubStream(base_stream, offset, length);
}
public string ContentType => content_type;
public int ContentLength => (int)stream.Length;
public string FileName => name;
public Stream InputStream => stream;
}
internal static class StrUtils
{
public static bool StartsWith(string str1, string str2, bool ignore_case)
{
if (string.IsNullOrEmpty(str1))
{
return false;
}
var comparison = ignore_case ? StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal;
return str1.IndexOf(str2, comparison) == 0;
}
public static bool EndsWith(string str1, string str2, bool ignore_case)
{
int l2 = str2.Length;
if (l2 == 0)
{
return true;
}
int l1 = str1.Length;
if (l2 > l1)
{
return false;
}
var comparison = ignore_case ? StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal;
return str1.IndexOf(str2, comparison) == str1.Length - str2.Length - 1;
}
}
private class HttpMultipart
{
@ -606,12 +397,12 @@ namespace Jellyfin.Server.SocketSharp
string header;
while ((header = ReadHeaders()) != null)
{
if (StrUtils.StartsWith(header, "Content-Disposition:", true))
if (header.StartsWith("Content-Disposition:", StringComparison.OrdinalIgnoreCase))
{
elem.Name = GetContentDispositionAttribute(header, "name");
elem.Filename = StripPath(GetContentDispositionAttributeWithEncoding(header, "filename"));
}
else if (StrUtils.StartsWith(header, "Content-Type:", true))
else if (header.StartsWith("Content-Type:", StringComparison.OrdinalIgnoreCase))
{
elem.ContentType = header.Substring("Content-Type:".Length).Trim();
elem.Encoding = GetEncoding(elem.ContentType);
@ -730,13 +521,14 @@ namespace Jellyfin.Server.SocketSharp
return false;
}
if (!StrUtils.EndsWith(line, boundary, false))
if (!line.EndsWith(boundary, StringComparison.Ordinal))
{
return true;
}
}
catch
{
}
return false;

View File

@ -44,10 +44,11 @@ namespace Jellyfin.Server.SocketSharp
socket.OnMessage += OnSocketMessage;
socket.OnClose += OnSocketClose;
socket.OnError += OnSocketError;
WebSocket.ConnectAsServer();
}
public Task ConnectAsServerAsync()
=> WebSocket.ConnectAsServer();
public Task StartReceive()
{
return _taskCompletionSource.Task;
@ -133,7 +134,7 @@ namespace Jellyfin.Server.SocketSharp
_cancellationTokenSource.Cancel();
WebSocket.Close();
WebSocket.CloseAsync().GetAwaiter().GetResult();
}
_disposed = true;

View File

@ -69,7 +69,7 @@ namespace Jellyfin.Server.SocketSharp
{
if (_listener == null)
{
_listener = new HttpListener(_logger, _cryptoProvider, _socketFactory, _networkManager, _streamHelper, _fileSystem, _environment);
_listener = new HttpListener(_logger, _cryptoProvider, _socketFactory, _streamHelper, _fileSystem, _environment);
}
_listener.EnableDualMode = _enableDualMode;
@ -79,22 +79,14 @@ namespace Jellyfin.Server.SocketSharp
_listener.LoadCert(_certificate);
}
foreach (var prefix in urlPrefixes)
{
_logger.LogInformation("Adding HttpListener prefix " + prefix);
_listener.Prefixes.Add(prefix);
}
_logger.LogInformation("Adding HttpListener prefixes {Prefixes}", urlPrefixes);
_listener.Prefixes.AddRange(urlPrefixes);
_listener.OnContext = ProcessContext;
_listener.OnContext = async c => await InitTask(c, _disposeCancellationToken).ConfigureAwait(false);
_listener.Start();
}
private void ProcessContext(HttpListenerContext context)
{
_ = Task.Run(async () => await InitTask(context, _disposeCancellationToken).ConfigureAwait(false));
}
private static void LogRequest(ILogger logger, HttpListenerRequest request)
{
var url = request.Url.ToString();
@ -151,10 +143,7 @@ namespace Jellyfin.Server.SocketSharp
Endpoint = endpoint
};
if (WebSocketConnecting != null)
{
WebSocketConnecting(connectingArgs);
}
WebSocketConnecting?.Invoke(connectingArgs);
if (connectingArgs.AllowConnection)
{
@ -165,6 +154,7 @@ namespace Jellyfin.Server.SocketSharp
if (WebSocketConnected != null)
{
var socket = new SharpWebSocket(webSocketContext.WebSocket, _logger);
await socket.ConnectAsServerAsync().ConfigureAwait(false);
WebSocketConnected(new WebSocketConnectEventArgs
{
@ -174,7 +164,7 @@ namespace Jellyfin.Server.SocketSharp
Endpoint = endpoint
});
await ReceiveWebSocket(ctx, socket).ConfigureAwait(false);
await ReceiveWebSocketAsync(ctx, socket).ConfigureAwait(false);
}
}
else
@ -192,7 +182,7 @@ namespace Jellyfin.Server.SocketSharp
}
}
private async Task ReceiveWebSocket(HttpListenerContext ctx, SharpWebSocket socket)
private async Task ReceiveWebSocketAsync(HttpListenerContext ctx, SharpWebSocket socket)
{
try
{

View File

@ -28,30 +28,6 @@ namespace Jellyfin.Server.SocketSharp
// HandlerFactoryPath = GetHandlerPathIfAny(UrlPrefixes[0]);
}
private static string GetHandlerPathIfAny(string listenerUrl)
{
if (listenerUrl == null)
{
return null;
}
var pos = listenerUrl.IndexOf("://", StringComparison.OrdinalIgnoreCase);
if (pos == -1)
{
return null;
}
var startHostUrl = listenerUrl.Substring(pos + "://".Length);
var endPos = startHostUrl.IndexOf('/', StringComparison.Ordinal);
if (endPos == -1)
{
return null;
}
var endHostUrl = startHostUrl.Substring(endPos + 1);
return string.IsNullOrEmpty(endHostUrl) ? null : endHostUrl.TrimEnd('/');
}
public HttpListenerRequest HttpRequest => request;
public object OriginalRequest => request;
@ -102,7 +78,7 @@ namespace Jellyfin.Server.SocketSharp
name = name.Trim(HttpTrimCharacters);
// First, check for correctly formed multi-line value
// Second, check for absenece of CTL characters
// Second, check for absence of CTL characters
int crlf = 0;
for (int i = 0; i < name.Length; ++i)
{
@ -231,8 +207,15 @@ namespace Jellyfin.Server.SocketSharp
{
foreach (var acceptsType in acceptContentTypes)
{
var contentType = HttpResultFactory.GetRealContentType(acceptsType);
acceptsAnything = acceptsAnything || contentType == "*/*";
// TODO: @bond move to Span when Span.Split lands
// https://github.com/dotnet/corefx/issues/26528
var contentType = acceptsType?.Split(';')[0].Trim();
acceptsAnything = string.Equals(contentType, "*/*", StringComparison.OrdinalIgnoreCase);
if (acceptsAnything)
{
break;
}
}
if (acceptsAnything)
@ -241,7 +224,7 @@ namespace Jellyfin.Server.SocketSharp
{
return defaultContentType;
}
else if (serverDefaultContentType != null)
else
{
return serverDefaultContentType;
}
@ -284,11 +267,11 @@ namespace Jellyfin.Server.SocketSharp
private static string GetQueryStringContentType(IRequest httpReq)
{
var format = httpReq.QueryString["format"];
ReadOnlySpan<char> format = httpReq.QueryString["format"];
if (format == null)
{
const int formatMaxLength = 4;
var pi = httpReq.PathInfo;
ReadOnlySpan<char> pi = httpReq.PathInfo;
if (pi == null || pi.Length <= formatMaxLength)
{
return null;
@ -296,7 +279,7 @@ namespace Jellyfin.Server.SocketSharp
if (pi[0] == '/')
{
pi = pi.Substring(1);
pi = pi.Slice(1);
}
format = LeftPart(pi, '/');
@ -330,6 +313,17 @@ namespace Jellyfin.Server.SocketSharp
return pos == -1 ? strVal : strVal.Substring(0, pos);
}
public static ReadOnlySpan<char> LeftPart(ReadOnlySpan<char> strVal, char needle)
{
if (strVal == null)
{
return null;
}
var pos = strVal.IndexOf(needle);
return pos == -1 ? strVal : strVal.Slice(0, pos);
}
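A short sketch of how the span-based LeftPart overload is meant to be used in GetQueryStringContentType above — slicing the first path segment without allocating until the final ToString (the sample path is made up):

using System;

ReadOnlySpan<char> pi = "/json/System/Info".AsSpan();
if (pi[0] == '/')
{
    pi = pi.Slice(1);
}

// First segment before the next '/', still a slice over the original string.
ReadOnlySpan<char> format = LeftPart(pi, '/');
Console.WriteLine(format.ToString()); // json

// Same behavior as the overload added above, repeated here so the sketch is self-contained.
static ReadOnlySpan<char> LeftPart(ReadOnlySpan<char> strVal, char needle)
{
    var pos = strVal.IndexOf(needle);
    return pos == -1 ? strVal : strVal.Slice(0, pos);
}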
public static string HandlerFactoryPath;
private string pathInfo;
@ -341,7 +335,7 @@ namespace Jellyfin.Server.SocketSharp
{
var mode = HandlerFactoryPath;
var pos = request.RawUrl.IndexOf("?", StringComparison.Ordinal);
var pos = request.RawUrl.IndexOf('?', StringComparison.Ordinal);
if (pos != -1)
{
var path = request.RawUrl.Substring(0, pos);
@ -525,10 +519,13 @@ namespace Jellyfin.Server.SocketSharp
public static string NormalizePathInfo(string pathInfo, string handlerPath)
{
var trimmed = pathInfo.TrimStart('/');
if (handlerPath != null && trimmed.StartsWith(handlerPath, StringComparison.OrdinalIgnoreCase))
if (handlerPath != null)
{
return trimmed.Substring(handlerPath.Length);
var trimmed = pathInfo.TrimStart('/');
if (trimmed.StartsWith(handlerPath, StringComparison.OrdinalIgnoreCase))
{
return trimmed.Substring(handlerPath.Length);
}
}
return pathInfo;

View File

@ -55,6 +55,41 @@ namespace Jellyfin.Server.SocketSharp
public QueryParamCollection Headers => _response.Headers;
private static string AsHeaderValue(Cookie cookie)
{
DateTime defaultExpires = DateTime.MinValue;
var path = cookie.Expires == defaultExpires
? "/"
: cookie.Path ?? "/";
var sb = new StringBuilder();
sb.Append($"{cookie.Name}={cookie.Value};path={path}");
if (cookie.Expires != defaultExpires)
{
sb.Append($";expires={cookie.Expires:R}");
}
if (!string.IsNullOrEmpty(cookie.Domain))
{
sb.Append($";domain={cookie.Domain}");
}
if (cookie.Secure)
{
sb.Append(";Secure");
}
if (cookie.HttpOnly)
{
sb.Append(";HttpOnly");
}
return sb.ToString();
}
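For illustration, a sketch (cookie values made up) of the Set-Cookie string this helper builds:

using System;
using System.Net;

public static class CookieHeaderDemo
{
    public static void Main()
    {
        // Illustrative cookie; the header format matches the AsHeaderValue helper above.
        var cookie = new Cookie("session", "abc123", "/", "example.org") { Secure = true, HttpOnly = true };

        var header = $"{cookie.Name}={cookie.Value};path={cookie.Path}";
        if (!string.IsNullOrEmpty(cookie.Domain)) header += $";domain={cookie.Domain}";
        if (cookie.Secure) header += ";Secure";
        if (cookie.HttpOnly) header += ";HttpOnly";

        Console.WriteLine(header); // session=abc123;path=/;domain=example.org;Secure;HttpOnly
    }
}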
public void AddHeader(string name, string value)
{
if (string.Equals(name, "Content-Type", StringComparison.OrdinalIgnoreCase))
@ -126,41 +161,6 @@ namespace Jellyfin.Server.SocketSharp
_response.Headers.Add("Set-Cookie", cookieStr);
}
public static string AsHeaderValue(Cookie cookie)
{
var defaultExpires = DateTime.MinValue;
var path = cookie.Expires == defaultExpires
? "/"
: cookie.Path ?? "/";
var sb = new StringBuilder();
sb.Append($"{cookie.Name}={cookie.Value};path={path}");
if (cookie.Expires != defaultExpires)
{
sb.Append($";expires={cookie.Expires:R}");
}
if (!string.IsNullOrEmpty(cookie.Domain))
{
sb.Append($";domain={cookie.Domain}");
}
if (cookie.Secure)
{
sb.Append(";Secure");
}
if (cookie.HttpOnly)
{
sb.Append(";HttpOnly");
}
return sb.ToString();
}
public bool SendChunked
{
get => _response.SendChunked;

View File

@ -9,6 +9,7 @@ using MediaBrowser.Controller.Net;
using MediaBrowser.Controller.Session;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.Services;
using MediaBrowser.Model.Querying;
using Microsoft.Extensions.Logging;
namespace MediaBrowser.Api
@ -118,8 +119,7 @@ namespace MediaBrowser.Api
{
var options = new DtoOptions();
var hasFields = request as IHasItemFields;
if (hasFields != null)
if (request is IHasItemFields hasFields)
{
options.Fields = hasFields.GetItemFields();
}
@ -133,9 +133,11 @@ namespace MediaBrowser.Api
client.IndexOf("media center", StringComparison.OrdinalIgnoreCase) != -1 ||
client.IndexOf("classic", StringComparison.OrdinalIgnoreCase) != -1)
{
var list = options.Fields.ToList();
list.Add(Model.Querying.ItemFields.RecursiveItemCount);
options.Fields = list.ToArray();
int oldLen = options.Fields.Length;
var arr = new ItemFields[oldLen + 1];
options.Fields.CopyTo(arr, 0);
arr[oldLen] = Model.Querying.ItemFields.RecursiveItemCount;
options.Fields = arr;
}
if (client.IndexOf("kodi", StringComparison.OrdinalIgnoreCase) != -1 ||
@ -146,9 +148,12 @@ namespace MediaBrowser.Api
client.IndexOf("samsung", StringComparison.OrdinalIgnoreCase) != -1 ||
client.IndexOf("androidtv", StringComparison.OrdinalIgnoreCase) != -1)
{
var list = options.Fields.ToList();
list.Add(Model.Querying.ItemFields.ChildCount);
options.Fields = list.ToArray();
int oldLen = options.Fields.Length;
var arr = new ItemFields[oldLen + 1];
options.Fields.CopyTo(arr, 0);
arr[oldLen] = Model.Querying.ItemFields.ChildCount;
options.Fields = arr;
}
}
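The copy-and-append pattern used above, sketched generically (the string values are placeholders for the real ItemFields enum):

using System;

public static class ArrayAppendDemo
{
    // Appends one element to an array without going through List<T>.
    public static T[] Append<T>(T[] source, T item)
    {
        var result = new T[source.Length + 1];
        source.CopyTo(result, 0);
        result[source.Length] = item;
        return result;
    }

    public static void Main()
    {
        var fields = new[] { "Overview", "Genres" };
        fields = Append(fields, "ChildCount");
        Console.WriteLine(string.Join(", ", fields)); // Overview, Genres, ChildCount
    }
}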
@ -167,7 +172,16 @@ namespace MediaBrowser.Api
if (!string.IsNullOrWhiteSpace(hasDtoOptions.EnableImageTypes))
{
options.ImageTypes = (hasDtoOptions.EnableImageTypes ?? string.Empty).Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).Select(v => (ImageType)Enum.Parse(typeof(ImageType), v, true)).ToArray();
if (string.IsNullOrEmpty(hasDtoOptions.EnableImageTypes))
{
options.ImageTypes = Array.Empty<ImageType>();
}
else
{
options.ImageTypes = hasDtoOptions.EnableImageTypes.Split(new [] { ',' }, StringSplitOptions.RemoveEmptyEntries)
.Select(v => (ImageType)Enum.Parse(typeof(ImageType), v, true))
.ToArray();
}
}
}
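A small sketch of the comma-separated enum parsing (the input string and the local ImageType enum are stand-ins):

using System;
using System.Linq;

public static class ImageTypesDemo
{
    private enum ImageType { Primary, Backdrop, Logo }

    public static void Main()
    {
        var enableImageTypes = "Primary,Backdrop";

        var imageTypes = string.IsNullOrEmpty(enableImageTypes)
            ? Array.Empty<ImageType>()
            : enableImageTypes.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)
                .Select(v => (ImageType)Enum.Parse(typeof(ImageType), v, true))
                .ToArray();

        Console.WriteLine(string.Join(", ", imageTypes)); // Primary, Backdrop
    }
}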

View File

@ -197,16 +197,6 @@ namespace MediaBrowser.Api.ScheduledTasks
throw new ResourceNotFoundException("Task not found");
}
if (string.Equals(task.ScheduledTask.Key, "SystemUpdateTask", StringComparison.OrdinalIgnoreCase))
{
// This is a hack for now just to get the update application function to work when auto-update is disabled
if (!_config.Configuration.EnableAutoUpdate)
{
_config.Configuration.EnableAutoUpdate = true;
_config.SaveConfiguration();
}
}
TaskManager.Execute(task, new TaskOptions());
}
@ -238,16 +228,14 @@ namespace MediaBrowser.Api.ScheduledTasks
// https://code.google.com/p/servicestack/source/browse/trunk/Common/ServiceStack.Text/ServiceStack.Text/Controller/PathInfo.cs
var id = GetPathValue(1);
var task = TaskManager.ScheduledTasks.FirstOrDefault(i => string.Equals(i.Id, id));
var task = TaskManager.ScheduledTasks.FirstOrDefault(i => string.Equals(i.Id, id, StringComparison.Ordinal));
if (task == null)
{
throw new ResourceNotFoundException("Task not found");
}
var triggerInfos = request;
task.Triggers = triggerInfos.ToArray();
task.Triggers = request.ToArray();
}
}
}

View File

@ -112,7 +112,7 @@ namespace MediaBrowser.Api.UserLibrary
return ToOptimizedResult(result);
}
protected override QueryResult<Tuple<BaseItem, ItemCounts>> GetItems(GetItemsByName request, InternalItemsQuery query)
protected override QueryResult<(BaseItem, ItemCounts)> GetItems(GetItemsByName request, InternalItemsQuery query)
{
if (request is GetAlbumArtists)
{

View File

@ -209,9 +209,9 @@ namespace MediaBrowser.Api.UserLibrary
};
}
protected virtual QueryResult<Tuple<BaseItem, ItemCounts>> GetItems(GetItemsByName request, InternalItemsQuery query)
protected virtual QueryResult<(BaseItem, ItemCounts)> GetItems(GetItemsByName request, InternalItemsQuery query)
{
return new QueryResult<Tuple<BaseItem, ItemCounts>>();
return new QueryResult<(BaseItem, ItemCounts)>();
}
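The move from Tuple<,> to value tuples, sketched with simplified stand-in types (QueryResult, BaseItem and ItemCounts are reduced here for illustration):

using System;

public class QueryResultDemo
{
    private class BaseItem { public string Name { get; set; } }
    private class ItemCounts { public int SongCount { get; set; } }
    private class QueryResult<T> { public T[] Items { get; set; } = Array.Empty<T>(); public int TotalRecordCount { get; set; } }

    public static void Main()
    {
        // Value tuples are built with tuple literals instead of new Tuple<BaseItem, ItemCounts>(...).
        var result = new QueryResult<(BaseItem, ItemCounts)>
        {
            Items = new[] { (new BaseItem { Name = "Genre A" }, new ItemCounts { SongCount = 3 }) },
            TotalRecordCount = 1
        };

        var (item, counts) = result.Items[0];
        Console.WriteLine($"{item.Name}: {counts.SongCount}"); // Genre A: 3
    }
}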
private void SetItemCounts(BaseItemDto dto, ItemCounts counts)

View File

@ -396,14 +396,12 @@ namespace MediaBrowser.Api.UserLibrary
public VideoType[] GetVideoTypes()
{
var val = VideoTypes;
if (string.IsNullOrEmpty(val))
if (string.IsNullOrEmpty(VideoTypes))
{
return new VideoType[] { };
return Array.Empty<VideoType>();
}
return val.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(v => (VideoType)Enum.Parse(typeof(VideoType), v, true)).ToArray();
return VideoTypes.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(v => (VideoType)Enum.Parse(typeof(VideoType), v, true)).ToArray();
}
/// <summary>

View File

@ -92,7 +92,7 @@ namespace MediaBrowser.Api.UserLibrary
return ToOptimizedResult(result);
}
protected override QueryResult<Tuple<BaseItem, ItemCounts>> GetItems(GetItemsByName request, InternalItemsQuery query)
protected override QueryResult<(BaseItem, ItemCounts)> GetItems(GetItemsByName request, InternalItemsQuery query)
{
var viewType = GetParentItemViewType(request);

View File

@ -12,6 +12,7 @@ using MediaBrowser.Model.Entities;
using MediaBrowser.Model.Globalization;
using MediaBrowser.Model.Querying;
using MediaBrowser.Model.Services;
using Microsoft.Extensions.Logging;
namespace MediaBrowser.Api.UserLibrary
{
@ -90,7 +91,7 @@ namespace MediaBrowser.Api.UserLibrary
var options = GetDtoOptions(_authContext, request);
var ancestorIds = new List<Guid>();
var ancestorIds = Array.Empty<Guid>();
var excludeFolderIds = user.Configuration.LatestItemsExcludes;
if (parentIdGuid.Equals(Guid.Empty) && excludeFolderIds.Length > 0)
@ -99,12 +100,12 @@ namespace MediaBrowser.Api.UserLibrary
.Where(i => i is Folder)
.Where(i => !excludeFolderIds.Contains(i.Id.ToString("N")))
.Select(i => i.Id)
.ToList();
.ToArray();
}
var itemsResult = _libraryManager.GetItemsResult(new InternalItemsQuery(user)
{
OrderBy = new[] { ItemSortBy.DatePlayed }.Select(i => new ValueTuple<string, SortOrder>(i, SortOrder.Descending)).ToArray(),
OrderBy = new[] { (ItemSortBy.DatePlayed, SortOrder.Descending) },
IsResumable = true,
StartIndex = request.StartIndex,
Limit = request.Limit,
@ -115,7 +116,7 @@ namespace MediaBrowser.Api.UserLibrary
IsVirtualItem = false,
CollapseBoxSetItems = false,
EnableTotalRecordCount = request.EnableTotalRecordCount,
AncestorIds = ancestorIds.ToArray(),
AncestorIds = ancestorIds,
IncludeItemTypes = request.GetIncludeItemTypes(),
ExcludeItemTypes = request.GetExcludeItemTypes(),
SearchTerm = request.SearchTerm
@ -155,7 +156,7 @@ namespace MediaBrowser.Api.UserLibrary
/// <param name="request">The request.</param>
private QueryResult<BaseItemDto> GetItems(GetItems request)
{
var user = !request.UserId.Equals(Guid.Empty) ? _userManager.GetUserById(request.UserId) : null;
var user = request.UserId == Guid.Empty ? null : _userManager.GetUserById(request.UserId);
var dtoOptions = GetDtoOptions(_authContext, request);
@ -190,11 +191,8 @@ namespace MediaBrowser.Api.UserLibrary
/// </summary>
private QueryResult<BaseItem> GetQueryResult(GetItems request, DtoOptions dtoOptions, User user)
{
if (string.Equals(request.IncludeItemTypes, "Playlist", StringComparison.OrdinalIgnoreCase))
{
request.ParentId = null;
}
else if (string.Equals(request.IncludeItemTypes, "BoxSet", StringComparison.OrdinalIgnoreCase))
if (string.Equals(request.IncludeItemTypes, "Playlist", StringComparison.OrdinalIgnoreCase)
|| string.Equals(request.IncludeItemTypes, "BoxSet", StringComparison.OrdinalIgnoreCase))
{
request.ParentId = null;
}
@ -227,6 +225,16 @@ namespace MediaBrowser.Api.UserLibrary
request.IncludeItemTypes = "Playlist";
}
if (!user.Policy.EnableAllFolders && !user.Policy.EnabledFolders.Any(i => new Guid(i) == item.Id))
{
Logger.LogWarning("{UserName} is not permitted to access Library {ItemName}.", user.Name, item.Name);
return new QueryResult<BaseItem>
{
Items = Array.Empty<BaseItem>(),
TotalRecordCount = 0
};
}
if (request.Recursive || !string.IsNullOrEmpty(request.Ids) || user == null)
{
return folder.GetItems(GetItemsQuery(request, dtoOptions, user));

View File

@ -83,7 +83,7 @@ namespace MediaBrowser.Api.UserLibrary
return ToOptimizedResult(result);
}
protected override QueryResult<Tuple<BaseItem, ItemCounts>> GetItems(GetItemsByName request, InternalItemsQuery query)
protected override QueryResult<(BaseItem, ItemCounts)> GetItems(GetItemsByName request, InternalItemsQuery query)
{
return LibraryManager.GetMusicGenres(query);
}

View File

@ -101,7 +101,7 @@ namespace MediaBrowser.Api.UserLibrary
throw new NotImplementedException();
}
protected override QueryResult<Tuple<BaseItem, ItemCounts>> GetItems(GetItemsByName request, InternalItemsQuery query)
protected override QueryResult<(BaseItem, ItemCounts)> GetItems(GetItemsByName request, InternalItemsQuery query)
{
var items = LibraryManager.GetPeopleItems(new InternalPeopleQuery
{
@ -109,10 +109,10 @@ namespace MediaBrowser.Api.UserLibrary
NameContains = query.NameContains ?? query.SearchTerm
});
return new QueryResult<Tuple<BaseItem, ItemCounts>>
return new QueryResult<(BaseItem, ItemCounts)>
{
TotalRecordCount = items.Count,
Items = items.Take(query.Limit ?? int.MaxValue).Select(i => new Tuple<BaseItem, ItemCounts>(i, new ItemCounts())).ToArray()
Items = items.Take(query.Limit ?? int.MaxValue).Select(i => (i as BaseItem, new ItemCounts())).ToArray()
};
}

View File

@ -91,7 +91,7 @@ namespace MediaBrowser.Api.UserLibrary
return ToOptimizedResult(result);
}
protected override QueryResult<Tuple<BaseItem, ItemCounts>> GetItems(GetItemsByName request, InternalItemsQuery query)
protected override QueryResult<(BaseItem, ItemCounts)> GetItems(GetItemsByName request, InternalItemsQuery query)
{
return LibraryManager.GetStudios(query);
}

View File

@ -41,12 +41,6 @@ namespace MediaBrowser.Common.Configuration
/// <value>The plugin configurations path.</value>
string PluginConfigurationsPath { get; }
/// <summary>
/// Gets the path to where temporary update files will be stored
/// </summary>
/// <value>The plugin configurations path.</value>
string TempUpdatePath { get; }
/// <summary>
/// Gets the path to the log directory
/// </summary>

View File

@ -72,12 +72,6 @@ namespace MediaBrowser.Common
/// <value>The application user agent.</value>
string ApplicationUserAgent { get; }
/// <summary>
/// Gets or sets a value indicating whether this instance can self update.
/// </summary>
/// <value><c>true</c> if this instance can self update; otherwise, <c>false</c>.</value>
bool CanSelfUpdate { get; }
/// <summary>
/// Gets the exports.
/// </summary>
@ -86,12 +80,6 @@ namespace MediaBrowser.Common
/// <returns>IEnumerable{``0}.</returns>
IEnumerable<T> GetExports<T>(bool manageLifetime = true);
/// <summary>
/// Updates the application.
/// </summary>
/// <returns>Task.</returns>
Task UpdateApplication(PackageVersionInfo package, CancellationToken cancellationToken, IProgress<double> progress);
/// <summary>
/// Resolves this instance.
/// </summary>

View File

@ -193,7 +193,7 @@ namespace MediaBrowser.Controller.Library
/// <summary>
/// Updates the item.
/// </summary>
void UpdateItems(List<BaseItem> items, BaseItem parent, ItemUpdateType updateReason, CancellationToken cancellationToken);
void UpdateItems(IEnumerable<BaseItem> items, BaseItem parent, ItemUpdateType updateReason, CancellationToken cancellationToken);
void UpdateItem(BaseItem item, BaseItem parent, ItemUpdateType updateReason, CancellationToken cancellationToken);
/// <summary>
@ -520,12 +520,12 @@ namespace MediaBrowser.Controller.Library
void UpdateMediaPath(string virtualFolderName, MediaPathInfo path);
void RemoveMediaPath(string virtualFolderName, string path);
QueryResult<Tuple<BaseItem, ItemCounts>> GetGenres(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetMusicGenres(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetStudios(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetArtists(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetAlbumArtists(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetAllArtists(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetGenres(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetMusicGenres(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetStudios(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetArtists(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetAlbumArtists(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetAllArtists(InternalItemsQuery query);
int GetCount(InternalItemsQuery query);

View File

@ -32,7 +32,7 @@ namespace MediaBrowser.Controller.Persistence
/// </summary>
/// <param name="items">The items.</param>
/// <param name="cancellationToken">The cancellation token.</param>
void SaveItems(List<BaseItem> items, CancellationToken cancellationToken);
void SaveItems(IEnumerable<BaseItem> items, CancellationToken cancellationToken);
void SaveImages(BaseItem item);
@ -141,12 +141,12 @@ namespace MediaBrowser.Controller.Persistence
int GetCount(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetGenres(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetMusicGenres(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetStudios(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetArtists(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetAlbumArtists(InternalItemsQuery query);
QueryResult<Tuple<BaseItem, ItemCounts>> GetAllArtists(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetGenres(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetMusicGenres(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetStudios(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetArtists(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetAlbumArtists(InternalItemsQuery query);
QueryResult<(BaseItem, ItemCounts)> GetAllArtists(InternalItemsQuery query);
List<string> GetMusicGenreNames();
List<string> GetStudioNames();

View File

@ -7,20 +7,6 @@ namespace MediaBrowser.Model.Configuration
/// </summary>
public class BaseApplicationConfiguration
{
// TODO: @bond Remove?
/// <summary>
/// Gets or sets a value indicating whether [enable debug level logging].
/// </summary>
/// <value><c>true</c> if [enable debug level logging]; otherwise, <c>false</c>.</value>
public bool EnableDebugLevelLogging { get; set; }
/// <summary>
/// Enable automatically and silently updating of the application
/// </summary>
/// <value><c>true</c> if [enable auto update]; otherwise, <c>false</c>.</value>
public bool EnableAutoUpdate { get; set; }
// TODO: @bond Remove?
/// <summary>
/// The number of days we should retain log files
/// </summary>
@ -44,7 +30,6 @@ namespace MediaBrowser.Model.Configuration
/// </summary>
public BaseApplicationConfiguration()
{
EnableAutoUpdate = true;
LogFileRetentionDays = 3;
}
}

View File

@ -14,6 +14,14 @@ namespace MediaBrowser.Model.Serialization
/// <exception cref="ArgumentNullException">obj</exception>
void SerializeToStream(object obj, Stream stream);
/// <summary>
/// Serializes to stream.
/// </summary>
/// <param name="obj">The obj.</param>
/// <param name="stream">The stream.</param>
/// <exception cref="ArgumentNullException">obj</exception>
void SerializeToStream<T>(T obj, Stream stream);
/// <summary>
/// Serializes to file.
/// </summary>

View File

@ -60,12 +60,6 @@ namespace MediaBrowser.Model.System
/// <value><c>true</c> if this instance can self restart; otherwise, <c>false</c>.</value>
public bool CanSelfRestart { get; set; }
/// <summary>
/// Gets or sets a value indicating whether this instance can self update.
/// </summary>
/// <value><c>true</c> if this instance can self update; otherwise, <c>false</c>.</value>
public bool CanSelfUpdate { get; set; }
public bool CanLaunchWebBrowser { get; set; }
/// <summary>

View File

@ -1,402 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
//TODO Fix namespace or replace
namespace Priority_Queue
{
/// <summary>
/// Credit: https://github.com/BlueRaja/High-Speed-Priority-Queue-for-C-Sharp
/// A copy of StablePriorityQueue which also has generic priority-type
/// </summary>
/// <typeparam name="TItem">The values in the queue. Must extend the GenericPriorityQueue class</typeparam>
/// <typeparam name="TPriority">The priority-type. Must extend IComparable&lt;TPriority&gt;</typeparam>
public sealed class GenericPriorityQueue<TItem, TPriority> : IFixedSizePriorityQueue<TItem, TPriority>
where TItem : GenericPriorityQueueNode<TPriority>
where TPriority : IComparable<TPriority>
{
private int _numNodes;
private TItem[] _nodes;
private long _numNodesEverEnqueued;
/// <summary>
/// Instantiate a new Priority Queue
/// </summary>
/// <param name="maxNodes">The max nodes ever allowed to be enqueued (going over this will cause undefined behavior)</param>
public GenericPriorityQueue(int maxNodes)
{
#if DEBUG
if (maxNodes <= 0)
{
throw new InvalidOperationException("New queue size cannot be smaller than 1");
}
#endif
_numNodes = 0;
_nodes = new TItem[maxNodes + 1];
_numNodesEverEnqueued = 0;
}
/// <summary>
/// Returns the number of nodes in the queue.
/// O(1)
/// </summary>
public int Count => _numNodes;
/// <summary>
/// Returns the maximum number of items that can be enqueued at once in this queue. Once you hit this number (ie. once Count == MaxSize),
/// attempting to enqueue another item will cause undefined behavior. O(1)
/// </summary>
public int MaxSize => _nodes.Length - 1;
/// <summary>
/// Removes every node from the queue.
/// O(n) (So, don't do this often!)
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Clear()
{
Array.Clear(_nodes, 1, _numNodes);
_numNodes = 0;
}
/// <summary>
/// Returns (in O(1)!) whether the given node is in the queue. O(1)
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool Contains(TItem node)
{
#if DEBUG
if (node == null)
{
throw new ArgumentNullException(nameof(node));
}
if (node.QueueIndex < 0 || node.QueueIndex >= _nodes.Length)
{
throw new InvalidOperationException("node.QueueIndex has been corrupted. Did you change it manually? Or add this node to another queue?");
}
#endif
return (_nodes[node.QueueIndex] == node);
}
/// <summary>
/// Enqueue a node to the priority queue. Lower values are placed in front. Ties are broken by first-in-first-out.
/// If the queue is full, the result is undefined.
/// If the node is already enqueued, the result is undefined.
/// O(log n)
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Enqueue(TItem node, TPriority priority)
{
#if DEBUG
if (node == null)
{
throw new ArgumentNullException(nameof(node));
}
if (_numNodes >= _nodes.Length - 1)
{
throw new InvalidOperationException("Queue is full - node cannot be added: " + node);
}
if (Contains(node))
{
throw new InvalidOperationException("Node is already enqueued: " + node);
}
#endif
node.Priority = priority;
_numNodes++;
_nodes[_numNodes] = node;
node.QueueIndex = _numNodes;
node.InsertionIndex = _numNodesEverEnqueued++;
CascadeUp(_nodes[_numNodes]);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void Swap(TItem node1, TItem node2)
{
//Swap the nodes
_nodes[node1.QueueIndex] = node2;
_nodes[node2.QueueIndex] = node1;
//Swap their indices
int temp = node1.QueueIndex;
node1.QueueIndex = node2.QueueIndex;
node2.QueueIndex = temp;
}
//Performance appears to be slightly better when this is NOT inlined o_O
private void CascadeUp(TItem node)
{
//aka Heapify-up
int parent = node.QueueIndex / 2;
while (parent >= 1)
{
var parentNode = _nodes[parent];
if (HasHigherPriority(parentNode, node))
break;
//Node has lower priority value, so move it up the heap
Swap(node, parentNode); //For some reason, this is faster with Swap() rather than (less..?) individual operations, like in CascadeDown()
parent = node.QueueIndex / 2;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void CascadeDown(TItem node)
{
//aka Heapify-down
TItem newParent;
int finalQueueIndex = node.QueueIndex;
while (true)
{
newParent = node;
int childLeftIndex = 2 * finalQueueIndex;
//Check if the left-child is higher-priority than the current node
if (childLeftIndex > _numNodes)
{
//This could be placed outside the loop, but then we'd have to check newParent != node twice
node.QueueIndex = finalQueueIndex;
_nodes[finalQueueIndex] = node;
break;
}
var childLeft = _nodes[childLeftIndex];
if (HasHigherPriority(childLeft, newParent))
{
newParent = childLeft;
}
//Check if the right-child is higher-priority than either the current node or the left child
int childRightIndex = childLeftIndex + 1;
if (childRightIndex <= _numNodes)
{
var childRight = _nodes[childRightIndex];
if (HasHigherPriority(childRight, newParent))
{
newParent = childRight;
}
}
//If either of the children has higher (smaller) priority, swap and continue cascading
if (newParent != node)
{
//Move new parent to its new index. node will be moved once, at the end
//Doing it this way is one less assignment operation than calling Swap()
_nodes[finalQueueIndex] = newParent;
int temp = newParent.QueueIndex;
newParent.QueueIndex = finalQueueIndex;
finalQueueIndex = temp;
}
else
{
//See note above
node.QueueIndex = finalQueueIndex;
_nodes[finalQueueIndex] = node;
break;
}
}
}
/// <summary>
/// Returns true if 'higher' has higher priority than 'lower', false otherwise.
/// Note that calling HasHigherPriority(node, node) (ie. both arguments the same node) will return false
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool HasHigherPriority(TItem higher, TItem lower)
{
var cmp = higher.Priority.CompareTo(lower.Priority);
return (cmp < 0 || (cmp == 0 && higher.InsertionIndex < lower.InsertionIndex));
}
/// <summary>
/// Removes the head of the queue (node with minimum priority; ties are broken by order of insertion), and returns it.
/// If queue is empty, result is undefined
/// O(log n)
/// </summary>
public bool TryDequeue(out TItem item)
{
if (_numNodes <= 0)
{
item = default(TItem);
return false;
}
#if DEBUG
if (!IsValidQueue())
{
throw new InvalidOperationException("Queue has been corrupted (Did you update a node priority manually instead of calling UpdatePriority()?" +
"Or add the same node to two different queues?)");
}
#endif
var returnMe = _nodes[1];
Remove(returnMe);
item = returnMe;
return true;
}
/// <summary>
/// Resize the queue so it can accept more nodes. All currently enqueued nodes remain.
/// Attempting to decrease the queue size to a size too small to hold the existing nodes results in undefined behavior
/// O(n)
/// </summary>
public void Resize(int maxNodes)
{
#if DEBUG
if (maxNodes <= 0)
{
throw new InvalidOperationException("Queue size cannot be smaller than 1");
}
if (maxNodes < _numNodes)
{
throw new InvalidOperationException("Called Resize(" + maxNodes + "), but current queue contains " + _numNodes + " nodes");
}
#endif
TItem[] newArray = new TItem[maxNodes + 1];
int highestIndexToCopy = Math.Min(maxNodes, _numNodes);
for (int i = 1; i <= highestIndexToCopy; i++)
{
newArray[i] = _nodes[i];
}
_nodes = newArray;
}
/// <summary>
/// Returns the head of the queue, without removing it (use Dequeue() for that).
/// If the queue is empty, behavior is undefined.
/// O(1)
/// </summary>
public TItem First
{
get
{
#if DEBUG
if (_numNodes <= 0)
{
throw new InvalidOperationException("Cannot call .First on an empty queue");
}
#endif
return _nodes[1];
}
}
/// <summary>
/// This method must be called on a node every time its priority changes while it is in the queue.
/// <b>Forgetting to call this method will result in a corrupted queue!</b>
/// Calling this method on a node not in the queue results in undefined behavior
/// O(log n)
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void UpdatePriority(TItem node, TPriority priority)
{
#if DEBUG
if (node == null)
{
throw new ArgumentNullException(nameof(node));
}
if (!Contains(node))
{
throw new InvalidOperationException("Cannot call UpdatePriority() on a node which is not enqueued: " + node);
}
#endif
node.Priority = priority;
OnNodeUpdated(node);
}
private void OnNodeUpdated(TItem node)
{
//Bubble the updated node up or down as appropriate
int parentIndex = node.QueueIndex / 2;
var parentNode = _nodes[parentIndex];
if (parentIndex > 0 && HasHigherPriority(node, parentNode))
{
CascadeUp(node);
}
else
{
//Note that CascadeDown will be called if parentNode == node (that is, node is the root)
CascadeDown(node);
}
}
/// <summary>
/// Removes a node from the queue. The node does not need to be the head of the queue.
/// If the node is not in the queue, the result is undefined. If unsure, check Contains() first
/// O(log n)
/// </summary>
public void Remove(TItem node)
{
#if DEBUG
if (node == null)
{
throw new ArgumentNullException(nameof(node));
}
if (!Contains(node))
{
throw new InvalidOperationException("Cannot call Remove() on a node which is not enqueued: " + node);
}
#endif
//If the node is already the last node, we can remove it immediately
if (node.QueueIndex == _numNodes)
{
_nodes[_numNodes] = null;
_numNodes--;
return;
}
//Swap the node with the last node
var formerLastNode = _nodes[_numNodes];
Swap(node, formerLastNode);
_nodes[_numNodes] = null;
_numNodes--;
//Now bubble formerLastNode (which is no longer the last node) up or down as appropriate
OnNodeUpdated(formerLastNode);
}
public IEnumerator<TItem> GetEnumerator()
{
for (int i = 1; i <= _numNodes; i++)
yield return _nodes[i];
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
/// <summary>
/// <b>Should not be called in production code.</b>
/// Checks to make sure the queue is still in a valid state. Used for testing/debugging the queue.
/// </summary>
public bool IsValidQueue()
{
for (int i = 1; i < _nodes.Length; i++)
{
if (_nodes[i] != null)
{
int childLeftIndex = 2 * i;
if (childLeftIndex < _nodes.Length && _nodes[childLeftIndex] != null && HasHigherPriority(_nodes[childLeftIndex], _nodes[i]))
return false;
int childRightIndex = childLeftIndex + 1;
if (childRightIndex < _nodes.Length && _nodes[childRightIndex] != null && HasHigherPriority(_nodes[childRightIndex], _nodes[i]))
return false;
}
}
return true;
}
}
}

View File

@ -1,22 +0,0 @@
namespace Priority_Queue
{
/// Credit: https://github.com/BlueRaja/High-Speed-Priority-Queue-for-C-Sharp
public class GenericPriorityQueueNode<TPriority>
{
/// <summary>
/// The Priority to insert this node at. Must be set BEFORE adding a node to the queue (ideally just once, in the node's constructor).
/// Should not be manually edited once the node has been enqueued - use queue.UpdatePriority() instead
/// </summary>
public TPriority Priority { get; protected internal set; }
/// <summary>
/// Represents the current position in the queue
/// </summary>
public int QueueIndex { get; internal set; }
/// <summary>
/// Represents the order the node was inserted in
/// </summary>
public long InsertionIndex { get; internal set; }
}
}

View File

@ -1,24 +0,0 @@
using System;
namespace Priority_Queue
{
/// <summary>
/// Credit: https://github.com/BlueRaja/High-Speed-Priority-Queue-for-C-Sharp
/// A helper-interface only needed to make writing unit tests a bit easier (hence the 'internal' access modifier)
/// </summary>
internal interface IFixedSizePriorityQueue<TItem, in TPriority> : IPriorityQueue<TItem, TPriority>
where TPriority : IComparable<TPriority>
{
/// <summary>
/// Resize the queue so it can accept more nodes. All currently enqueued nodes remain.
/// Attempting to decrease the queue size to a size too small to hold the existing nodes results in undefined behavior
/// </summary>
void Resize(int maxNodes);
/// <summary>
/// Returns the maximum number of items that can be enqueued at once in this queue. Once you hit this number (ie. once Count == MaxSize),
/// attempting to enqueue another item will cause undefined behavior.
/// </summary>
int MaxSize { get; }
}
}

View File

@ -1,56 +0,0 @@
using System;
using System.Collections.Generic;
namespace Priority_Queue
{
/// <summary>
/// Credit: https://github.com/BlueRaja/High-Speed-Priority-Queue-for-C-Sharp
/// The IPriorityQueue interface. This is mainly here for purists, and in case I decide to add more implementations later.
/// For speed purposes, it is actually recommended that you *don't* access the priority queue through this interface, since the JIT can
/// (theoretically?) optimize method calls from concrete-types slightly better.
/// </summary>
public interface IPriorityQueue<TItem, in TPriority> : IEnumerable<TItem>
where TPriority : IComparable<TPriority>
{
/// <summary>
/// Enqueue a node to the priority queue. Lower values are placed in front. Ties are broken by first-in-first-out.
/// See implementation for how duplicates are handled.
/// </summary>
void Enqueue(TItem node, TPriority priority);
/// <summary>
/// Removes the head of the queue (node with minimum priority; ties are broken by order of insertion), and returns it.
/// </summary>
bool TryDequeue(out TItem item);
/// <summary>
/// Removes every node from the queue.
/// </summary>
void Clear();
/// <summary>
/// Returns whether the given node is in the queue.
/// </summary>
bool Contains(TItem node);
/// <summary>
/// Removes a node from the queue. The node does not need to be the head of the queue.
/// </summary>
void Remove(TItem node);
/// <summary>
/// Call this method to change the priority of a node.
/// </summary>
void UpdatePriority(TItem node, TPriority priority);
/// <summary>
/// Returns the head of the queue, without removing it (use Dequeue() for that).
/// </summary>
TItem First { get; }
/// <summary>
/// Returns the number of nodes in the queue.
/// </summary>
int Count { get; }
}
}

View File

@ -1,247 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace Priority_Queue
{
/// <summary>
/// Credit: https://github.com/BlueRaja/High-Speed-Priority-Queue-for-C-Sharp
/// A simplified priority queue implementation. Is stable, auto-resizes, and thread-safe, at the cost of being slightly slower than
/// FastPriorityQueue
/// </summary>
/// <typeparam name="TItem">The type to enqueue</typeparam>
/// <typeparam name="TPriority">The priority-type to use for nodes. Must extend IComparable&lt;TPriority&gt;</typeparam>
public class SimplePriorityQueue<TItem, TPriority> : IPriorityQueue<TItem, TPriority>
where TPriority : IComparable<TPriority>
{
private class SimpleNode : GenericPriorityQueueNode<TPriority>
{
public TItem Data { get; private set; }
public SimpleNode(TItem data)
{
Data = data;
}
}
private const int INITIAL_QUEUE_SIZE = 10;
private readonly GenericPriorityQueue<SimpleNode, TPriority> _queue;
public SimplePriorityQueue()
{
_queue = new GenericPriorityQueue<SimpleNode, TPriority>(INITIAL_QUEUE_SIZE);
}
/// <summary>
/// Given an item of type T, returns the existing SimpleNode in the queue
/// </summary>
private SimpleNode GetExistingNode(TItem item)
{
var comparer = EqualityComparer<TItem>.Default;
foreach (var node in _queue)
{
if (comparer.Equals(node.Data, item))
{
return node;
}
}
throw new InvalidOperationException("Item cannot be found in queue: " + item);
}
/// <summary>
/// Returns the number of nodes in the queue.
/// O(1)
/// </summary>
public int Count
{
get
{
lock (_queue)
{
return _queue.Count;
}
}
}
/// <summary>
/// Returns the head of the queue, without removing it (use Dequeue() for that).
/// Throws an exception when the queue is empty.
/// O(1)
/// </summary>
public TItem First
{
get
{
lock (_queue)
{
if (_queue.Count <= 0)
{
throw new InvalidOperationException("Cannot call .First on an empty queue");
}
SimpleNode first = _queue.First;
return (first != null ? first.Data : default(TItem));
}
}
}
/// <summary>
/// Removes every node from the queue.
/// O(n)
/// </summary>
public void Clear()
{
lock (_queue)
{
_queue.Clear();
}
}
/// <summary>
/// Returns whether the given item is in the queue.
/// O(n)
/// </summary>
public bool Contains(TItem item)
{
lock (_queue)
{
var comparer = EqualityComparer<TItem>.Default;
foreach (var node in _queue)
{
if (comparer.Equals(node.Data, item))
{
return true;
}
}
return false;
}
}
/// <summary>
/// Removes the head of the queue (node with minimum priority; ties are broken by order of insertion), and returns it.
/// If queue is empty, throws an exception
/// O(log n)
/// </summary>
public bool TryDequeue(out TItem item)
{
lock (_queue)
{
if (_queue.Count <= 0)
{
item = default(TItem);
return false;
}
if (_queue.TryDequeue(out SimpleNode node))
{
item = node.Data;
return true;
}
item = default(TItem);
return false;
}
}
/// <summary>
/// Enqueue a node to the priority queue. Lower values are placed in front. Ties are broken by first-in-first-out.
/// This queue automatically resizes itself, so there's no concern of the queue becoming 'full'.
/// Duplicates are allowed.
/// O(log n)
/// </summary>
public void Enqueue(TItem item, TPriority priority)
{
lock (_queue)
{
var node = new SimpleNode(item);
if (_queue.Count == _queue.MaxSize)
{
_queue.Resize(_queue.MaxSize * 2 + 1);
}
_queue.Enqueue(node, priority);
}
}
/// <summary>
/// Removes an item from the queue. The item does not need to be the head of the queue.
/// If the item is not in the queue, an exception is thrown. If unsure, check Contains() first.
/// If multiple copies of the item are enqueued, only the first one is removed.
/// O(n)
/// </summary>
public void Remove(TItem item)
{
lock (_queue)
{
try
{
_queue.Remove(GetExistingNode(item));
}
catch (InvalidOperationException ex)
{
throw new InvalidOperationException("Cannot call Remove() on a node which is not enqueued: " + item, ex);
}
}
}
/// <summary>
/// Call this method to change the priority of an item.
/// Calling this method on an item not in the queue will throw an exception.
/// If the item is enqueued multiple times, only the first one will be updated.
/// (If your requirements are complex enough that you need to enqueue the same item multiple times <i>and</i> be able
/// to update all of them, please wrap your items in a wrapper class so they can be distinguished).
/// O(n)
/// </summary>
public void UpdatePriority(TItem item, TPriority priority)
{
lock (_queue)
{
try
{
SimpleNode updateMe = GetExistingNode(item);
_queue.UpdatePriority(updateMe, priority);
}
catch (InvalidOperationException ex)
{
throw new InvalidOperationException("Cannot call UpdatePriority() on a node which is not enqueued: " + item, ex);
}
}
}
public IEnumerator<TItem> GetEnumerator()
{
var queueData = new List<TItem>();
lock (_queue)
{
//Copy to a separate list because we don't want to 'yield return' inside a lock
foreach (var node in _queue)
{
queueData.Add(node.Data);
}
}
return queueData.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
public bool IsValidQueue()
{
lock (_queue)
{
return _queue.IsValidQueue();
}
}
}
/// <summary>
/// A simplified priority queue implementation. Is stable, auto-resizes, and thread-safe, at the cost of being slightly slower than
/// FastPriorityQueue
/// This class is kept here for backwards compatibility. It's recommended you use Simple
/// </summary>
/// <typeparam name="TItem">The type to enqueue</typeparam>
public class SimplePriorityQueue<TItem> : SimplePriorityQueue<TItem, float> { }
}

View File

@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<ItemGroup>
<ProjectReference Include="..\MediaBrowser.Controller\MediaBrowser.Controller.csproj" />
@ -11,6 +11,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="OptimizedPriorityQueue" Version="4.2.0" />
<PackageReference Include="PlaylistsNET" Version="1.0.2" />
</ItemGroup>

View File

@ -74,18 +74,20 @@ namespace SocketHttpListener
}
}
private static byte[] readBytes(this Stream stream, byte[] buffer, int offset, int length)
private static async Task<byte[]> ReadBytesAsync(this Stream stream, byte[] buffer, int offset, int length)
{
var len = stream.Read(buffer, offset, length);
var len = await stream.ReadAsync(buffer, offset, length).ConfigureAwait(false);
if (len < 1)
return buffer.SubArray(0, offset);
var tmp = 0;
while (len < length)
{
tmp = stream.Read(buffer, offset + len, length - len);
tmp = await stream.ReadAsync(buffer, offset + len, length - len).ConfigureAwait(false);
if (tmp < 1)
{
break;
}
len += tmp;
}
@ -95,10 +97,9 @@ namespace SocketHttpListener
: buffer;
}
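A standalone sketch of the same read-until-filled loop over a MemoryStream (buffer sizes are arbitrary):

using System;
using System.IO;
using System.Threading.Tasks;

public static class ReadBytesDemo
{
    // Keeps reading until 'length' bytes have arrived or the stream ends.
    private static async Task<int> ReadExactAsync(Stream stream, byte[] buffer, int offset, int length)
    {
        var total = 0;
        while (total < length)
        {
            var read = await stream.ReadAsync(buffer, offset + total, length - total).ConfigureAwait(false);
            if (read < 1)
            {
                break;
            }

            total += read;
        }

        return total;
    }

    public static async Task Main()
    {
        using (var source = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }))
        {
            var buffer = new byte[4];
            var read = await ReadExactAsync(source, buffer, 0, buffer.Length);
            Console.WriteLine(read); // 4
        }
    }
}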
private static bool readBytes(
this Stream stream, byte[] buffer, int offset, int length, Stream dest)
private static async Task<bool> ReadBytesAsync(this Stream stream, byte[] buffer, int offset, int length, Stream dest)
{
var bytes = stream.readBytes(buffer, offset, length);
var bytes = await stream.ReadBytesAsync(buffer, offset, length).ConfigureAwait(false);
var len = bytes.Length;
dest.Write(bytes, 0, len);
@ -109,16 +110,16 @@ namespace SocketHttpListener
#region Internal Methods
internal static byte[] Append(this ushort code, string reason)
internal static async Task<byte[]> AppendAsync(this ushort code, string reason)
{
using (var buffer = new MemoryStream())
{
var tmp = code.ToByteArrayInternally(ByteOrder.Big);
buffer.Write(tmp, 0, 2);
await buffer.WriteAsync(tmp, 0, 2).ConfigureAwait(false);
if (reason != null && reason.Length > 0)
{
tmp = Encoding.UTF8.GetBytes(reason);
buffer.Write(tmp, 0, tmp.Length);
await buffer.WriteAsync(tmp, 0, tmp.Length).ConfigureAwait(false);
}
return buffer.ToArray();
@ -331,12 +332,10 @@ namespace SocketHttpListener
: string.Format("\"{0}\"", value.Replace("\"", "\\\""));
}
internal static byte[] ReadBytes(this Stream stream, int length)
{
return stream.readBytes(new byte[length], 0, length);
}
internal static Task<byte[]> ReadBytesAsync(this Stream stream, int length)
=> stream.ReadBytesAsync(new byte[length], 0, length);
internal static byte[] ReadBytes(this Stream stream, long length, int bufferLength)
internal static async Task<byte[]> ReadBytesAsync(this Stream stream, long length, int bufferLength)
{
using (var result = new MemoryStream())
{
@ -347,7 +346,7 @@ namespace SocketHttpListener
var end = false;
for (long i = 0; i < count; i++)
{
if (!stream.readBytes(buffer, 0, bufferLength, result))
if (!await stream.ReadBytesAsync(buffer, 0, bufferLength, result).ConfigureAwait(false))
{
end = true;
break;
@ -355,26 +354,14 @@ namespace SocketHttpListener
}
if (!end && rem > 0)
stream.readBytes(new byte[rem], 0, rem, result);
{
await stream.ReadBytesAsync(new byte[rem], 0, rem, result).ConfigureAwait(false);
}
return result.ToArray();
}
}
internal static async Task<byte[]> ReadBytesAsync(this Stream stream, int length)
{
var buffer = new byte[length];
var len = await stream.ReadAsync(buffer, 0, length).ConfigureAwait(false);
var bytes = len < 1
? new byte[0]
: len < length
? stream.readBytes(buffer, len, length - len)
: buffer;
return bytes;
}
internal static string RemovePrefix(this string value, params string[] prefixes)
{
var i = 0;
@ -493,19 +480,16 @@ namespace SocketHttpListener
return string.Format("{0}; {1}", m, parameters.ToString("; "));
}
internal static List<TSource> ToList<TSource>(this IEnumerable<TSource> source)
{
return new List<TSource>(source);
}
internal static ushort ToUInt16(this byte[] src, ByteOrder srcOrder)
{
return BitConverter.ToUInt16(src.ToHostOrder(srcOrder), 0);
src.ToHostOrder(srcOrder);
return BitConverter.ToUInt16(src, 0);
}
internal static ulong ToUInt64(this byte[] src, ByteOrder srcOrder)
{
return BitConverter.ToUInt64(src.ToHostOrder(srcOrder), 0);
src.ToHostOrder(srcOrder);
return BitConverter.ToUInt64(src, 0);
}
internal static string TrimEndSlash(this string value)
@ -852,14 +836,17 @@ namespace SocketHttpListener
/// <exception cref="ArgumentNullException">
/// <paramref name="src"/> is <see langword="null"/>.
/// </exception>
public static byte[] ToHostOrder(this byte[] src, ByteOrder srcOrder)
public static void ToHostOrder(this byte[] src, ByteOrder srcOrder)
{
if (src == null)
{
throw new ArgumentNullException(nameof(src));
}
return src.Length > 1 && !srcOrder.IsHostOrder()
? src.Reverse()
: src;
if (src.Length > 1 && !srcOrder.IsHostOrder())
{
Array.Reverse(src);
}
}
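For illustration, how an in-place reversal interacts with BitConverter on a little-endian host (the values are chosen arbitrarily):

using System;

public static class ByteOrderDemo
{
    public static void Main()
    {
        // 0x0102 encoded big-endian (network order).
        var src = new byte[] { 0x01, 0x02 };

        // On a little-endian machine the bytes must be reversed in place
        // before BitConverter reads them in host order.
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(src);
        }

        Console.WriteLine(BitConverter.ToUInt16(src, 0)); // 258 (0x0102)
    }
}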
/// <summary>

View File

@ -3,7 +3,6 @@ using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Security.Cryptography.X509Certificates;
using MediaBrowser.Common.Net;
using MediaBrowser.Model.Cryptography;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Net;
@ -18,47 +17,55 @@ namespace SocketHttpListener.Net
internal ISocketFactory SocketFactory { get; private set; }
internal IFileSystem FileSystem { get; private set; }
internal IStreamHelper StreamHelper { get; private set; }
internal INetworkManager NetworkManager { get; private set; }
internal IEnvironmentInfo EnvironmentInfo { get; private set; }
public bool EnableDualMode { get; set; }
AuthenticationSchemes auth_schemes;
HttpListenerPrefixCollection prefixes;
AuthenticationSchemeSelector auth_selector;
string realm;
bool unsafe_ntlm_auth;
bool listening;
bool disposed;
private AuthenticationSchemes auth_schemes;
private HttpListenerPrefixCollection prefixes;
private AuthenticationSchemeSelector auth_selector;
private string realm;
private bool unsafe_ntlm_auth;
private bool listening;
private bool disposed;
Dictionary<HttpListenerContext, HttpListenerContext> registry; // Dictionary<HttpListenerContext,HttpListenerContext>
Dictionary<HttpConnection, HttpConnection> connections;
private Dictionary<HttpListenerContext, HttpListenerContext> registry;
private Dictionary<HttpConnection, HttpConnection> connections;
private ILogger _logger;
private X509Certificate _certificate;
public Action<HttpListenerContext> OnContext { get; set; }
public HttpListener(ILogger logger, ICryptoProvider cryptoProvider, ISocketFactory socketFactory,
INetworkManager networkManager, IStreamHelper streamHelper, IFileSystem fileSystem,
public HttpListener(
ILogger logger,
ICryptoProvider cryptoProvider,
ISocketFactory socketFactory,
IStreamHelper streamHelper,
IFileSystem fileSystem,
IEnvironmentInfo environmentInfo)
{
_logger = logger;
CryptoProvider = cryptoProvider;
SocketFactory = socketFactory;
NetworkManager = networkManager;
StreamHelper = streamHelper;
FileSystem = fileSystem;
EnvironmentInfo = environmentInfo;
prefixes = new HttpListenerPrefixCollection(logger, this);
registry = new Dictionary<HttpListenerContext, HttpListenerContext>();
connections = new Dictionary<HttpConnection, HttpConnection>();
auth_schemes = AuthenticationSchemes.Anonymous;
}
public HttpListener(ILogger logger, X509Certificate certificate, ICryptoProvider cryptoProvider,
ISocketFactory socketFactory, INetworkManager networkManager, IStreamHelper streamHelper,
IFileSystem fileSystem, IEnvironmentInfo environmentInfo)
: this(logger, cryptoProvider, socketFactory, networkManager, streamHelper, fileSystem, environmentInfo)
public HttpListener(
ILogger logger,
X509Certificate certificate,
ICryptoProvider cryptoProvider,
ISocketFactory socketFactory,
IStreamHelper streamHelper,
IFileSystem fileSystem,
IEnvironmentInfo environmentInfo)
: this(logger, cryptoProvider, socketFactory, streamHelper, fileSystem, environmentInfo)
{
_certificate = certificate;
}

View File

@ -7,18 +7,18 @@ namespace SocketHttpListener.Net
{
public class HttpListenerPrefixCollection : ICollection<string>, IEnumerable<string>, IEnumerable
{
List<string> prefixes = new List<string>();
HttpListener listener;
private List<string> _prefixes = new List<string>();
private HttpListener _listener;
private ILogger _logger;
internal HttpListenerPrefixCollection(ILogger logger, HttpListener listener)
{
_logger = logger;
this.listener = listener;
_listener = listener;
}
public int Count => prefixes.Count;
public int Count => _prefixes.Count;
public bool IsReadOnly => false;
@ -26,61 +26,90 @@ namespace SocketHttpListener.Net
public void Add(string uriPrefix)
{
listener.CheckDisposed();
_listener.CheckDisposed();
//ListenerPrefix.CheckUri(uriPrefix);
if (prefixes.Contains(uriPrefix))
if (_prefixes.Contains(uriPrefix))
{
return;
}
prefixes.Add(uriPrefix);
if (listener.IsListening)
HttpEndPointManager.AddPrefix(_logger, uriPrefix, listener);
_prefixes.Add(uriPrefix);
if (_listener.IsListening)
{
HttpEndPointManager.AddPrefix(_logger, uriPrefix, _listener);
}
}
public void AddRange(IEnumerable<string> uriPrefixes)
{
_listener.CheckDisposed();
foreach (var uriPrefix in uriPrefixes)
{
if (_prefixes.Contains(uriPrefix))
{
continue;
}
_prefixes.Add(uriPrefix);
if (_listener.IsListening)
{
HttpEndPointManager.AddPrefix(_logger, uriPrefix, _listener);
}
}
}
public void Clear()
{
listener.CheckDisposed();
prefixes.Clear();
if (listener.IsListening)
HttpEndPointManager.RemoveListener(_logger, listener);
_listener.CheckDisposed();
_prefixes.Clear();
if (_listener.IsListening)
{
HttpEndPointManager.RemoveListener(_logger, _listener);
}
}
public bool Contains(string uriPrefix)
{
listener.CheckDisposed();
return prefixes.Contains(uriPrefix);
_listener.CheckDisposed();
return _prefixes.Contains(uriPrefix);
}
public void CopyTo(string[] array, int offset)
{
listener.CheckDisposed();
prefixes.CopyTo(array, offset);
_listener.CheckDisposed();
_prefixes.CopyTo(array, offset);
}
public void CopyTo(Array array, int offset)
{
listener.CheckDisposed();
((ICollection)prefixes).CopyTo(array, offset);
_listener.CheckDisposed();
((ICollection)_prefixes).CopyTo(array, offset);
}
public IEnumerator<string> GetEnumerator()
{
return prefixes.GetEnumerator();
return _prefixes.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return prefixes.GetEnumerator();
return _prefixes.GetEnumerator();
}
public bool Remove(string uriPrefix)
{
listener.CheckDisposed();
_listener.CheckDisposed();
if (uriPrefix == null)
{
throw new ArgumentNullException(nameof(uriPrefix));
}
bool result = prefixes.Remove(uriPrefix);
if (result && listener.IsListening)
HttpEndPointManager.RemovePrefix(_logger, uriPrefix, listener);
bool result = _prefixes.Remove(uriPrefix);
if (result && _listener.IsListening)
{
HttpEndPointManager.RemovePrefix(_logger, uriPrefix, _listener);
}
return result;
}

View File

@ -30,9 +30,9 @@ namespace SocketHttpListener
private CookieCollection _cookies;
private AutoResetEvent _exitReceiving;
private object _forConn;
private object _forEvent;
private readonly SemaphoreSlim _forEvent = new SemaphoreSlim(1, 1);
private object _forMessageEventQueue;
private object _forSend;
private readonly SemaphoreSlim _forSend = new SemaphoreSlim(1, 1);
private const string _guid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
private Queue<MessageEventArgs> _messageEventQueue;
private string _protocol;
@ -109,12 +109,15 @@ namespace SocketHttpListener
#region Private Methods
private void close(CloseStatusCode code, string reason, bool wait)
private async Task CloseAsync(CloseStatusCode code, string reason, bool wait)
{
close(new PayloadData(((ushort)code).Append(reason)), !code.IsReserved(), wait);
await CloseAsync(new PayloadData(
await ((ushort)code).AppendAsync(reason).ConfigureAwait(false)),
!code.IsReserved(),
wait).ConfigureAwait(false);
}
private void close(PayloadData payload, bool send, bool wait)
private async Task CloseAsync(PayloadData payload, bool send, bool wait)
{
lock (_forConn)
{
@ -126,11 +129,12 @@ namespace SocketHttpListener
_readyState = WebSocketState.CloseSent;
}
var e = new CloseEventArgs(payload);
e.WasClean =
closeHandshake(
var e = new CloseEventArgs(payload)
{
WasClean = await CloseHandshakeAsync(
send ? WebSocketFrame.CreateCloseFrame(Mask.Unmask, payload).ToByteArray() : null,
wait ? 1000 : 0);
wait ? 1000 : 0).ConfigureAwait(false)
};
_readyState = WebSocketState.Closed;
try
@ -143,9 +147,9 @@ namespace SocketHttpListener
}
}
private bool closeHandshake(byte[] frameAsBytes, int millisecondsTimeout)
private async Task<bool> CloseHandshakeAsync(byte[] frameAsBytes, int millisecondsTimeout)
{
var sent = frameAsBytes != null && writeBytes(frameAsBytes);
var sent = frameAsBytes != null && await WriteBytesAsync(frameAsBytes).ConfigureAwait(false);
var received =
millisecondsTimeout == 0 ||
(sent && _exitReceiving != null && _exitReceiving.WaitOne(millisecondsTimeout));
@ -189,11 +193,11 @@ namespace SocketHttpListener
_context = null;
}
private bool concatenateFragmentsInto(Stream dest)
private async Task<bool> ConcatenateFragmentsIntoAsync(Stream dest)
{
while (true)
{
var frame = WebSocketFrame.Read(_stream, true);
var frame = await WebSocketFrame.ReadAsync(_stream, true).ConfigureAwait(false);
if (frame.IsFinal)
{
/* FINAL */
@ -221,7 +225,7 @@ namespace SocketHttpListener
// CLOSE
if (frame.IsClose)
return processCloseFrame(frame);
return await ProcessCloseFrameAsync(frame).ConfigureAwait(false);
}
else
{
@ -236,10 +240,10 @@ namespace SocketHttpListener
}
// ?
return processUnsupportedFrame(
return await ProcessUnsupportedFrameAsync(
frame,
CloseStatusCode.IncorrectData,
"An incorrect data has been received while receiving fragmented data.");
"An incorrect data has been received while receiving fragmented data.").ConfigureAwait(false);
}
return true;
@ -299,44 +303,42 @@ namespace SocketHttpListener
_compression = CompressionMethod.None;
_cookies = new CookieCollection();
_forConn = new object();
_forEvent = new object();
_forSend = new object();
_messageEventQueue = new Queue<MessageEventArgs>();
_forMessageEventQueue = ((ICollection)_messageEventQueue).SyncRoot;
_readyState = WebSocketState.Connecting;
}
private void open()
private async Task OpenAsync()
{
try
{
startReceiving();
lock (_forEvent)
{
try
{
if (OnOpen != null)
{
OnOpen(this, EventArgs.Empty);
}
}
catch (Exception ex)
{
processException(ex, "An exception has occurred while OnOpen.");
}
}
}
catch (Exception ex)
{
processException(ex, "An exception has occurred while opening.");
await ProcessExceptionAsync(ex, "An exception has occurred while opening.").ConfigureAwait(false);
}
await _forEvent.WaitAsync().ConfigureAwait(false);
try
{
OnOpen?.Invoke(this, EventArgs.Empty);
}
catch (Exception ex)
{
await ProcessExceptionAsync(ex, "An exception has occurred while OnOpen.").ConfigureAwait(false);
}
finally
{
_forEvent.Release();
}
}
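Since lock blocks cannot contain await, the SemaphoreSlim pattern used above can be sketched in isolation like this (the guarded work is illustrative):

using System;
using System.Threading;
using System.Threading.Tasks;

public static class AsyncLockDemo
{
    private static readonly SemaphoreSlim _gate = new SemaphoreSlim(1, 1);

    private static async Task DoGuardedWorkAsync(int id)
    {
        await _gate.WaitAsync().ConfigureAwait(false);
        try
        {
            // Only one caller at a time reaches this point, even across awaits.
            Console.WriteLine($"start {id}");
            await Task.Delay(10).ConfigureAwait(false);
            Console.WriteLine($"end {id}");
        }
        finally
        {
            _gate.Release();
        }
    }

    public static async Task Main()
    {
        await Task.WhenAll(DoGuardedWorkAsync(1), DoGuardedWorkAsync(2));
    }
}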
private bool processCloseFrame(WebSocketFrame frame)
private async Task<bool> ProcessCloseFrameAsync(WebSocketFrame frame)
{
var payload = frame.PayloadData;
close(payload, !payload.ContainsReservedCloseStatusCode, false);
await CloseAsync(payload, !payload.ContainsReservedCloseStatusCode, false).ConfigureAwait(false);
return false;
}
@ -352,7 +354,7 @@ namespace SocketHttpListener
return true;
}
private void processException(Exception exception, string message)
private async Task ProcessExceptionAsync(Exception exception, string message)
{
var code = CloseStatusCode.Abnormal;
var reason = message;
@ -365,25 +367,31 @@ namespace SocketHttpListener
error(message ?? code.GetMessage(), exception);
if (_readyState == WebSocketState.Connecting)
Close(HttpStatusCode.BadRequest);
{
await CloseAsync(HttpStatusCode.BadRequest).ConfigureAwait(false);
}
else
close(code, reason ?? code.GetMessage(), false);
{
await CloseAsync(code, reason ?? code.GetMessage(), false).ConfigureAwait(false);
}
}
private bool processFragmentedFrame(WebSocketFrame frame)
private Task<bool> ProcessFragmentedFrameAsync(WebSocketFrame frame)
{
return frame.IsContinuation // Not first fragment
? true
: processFragments(frame);
? Task.FromResult(true)
: ProcessFragmentsAsync(frame);
}
private bool processFragments(WebSocketFrame first)
private async Task<bool> ProcessFragmentsAsync(WebSocketFrame first)
{
using (var buff = new MemoryStream())
{
buff.WriteBytes(first.PayloadData.ApplicationData);
if (!concatenateFragmentsInto(buff))
if (!await ConcatenateFragmentsIntoAsync(buff).ConfigureAwait(false))
{
return false;
}
byte[] data;
if (_compression != CompressionMethod.None)
@ -412,36 +420,38 @@ namespace SocketHttpListener
return true;
}
private bool processUnsupportedFrame(WebSocketFrame frame, CloseStatusCode code, string reason)
private async Task<bool> ProcessUnsupportedFrameAsync(WebSocketFrame frame, CloseStatusCode code, string reason)
{
processException(new WebSocketException(code, reason), null);
await ProcessExceptionAsync(new WebSocketException(code, reason), null).ConfigureAwait(false);
return false;
}
private bool processWebSocketFrame(WebSocketFrame frame)
private Task<bool> ProcessWebSocketFrameAsync(WebSocketFrame frame)
{
// TODO: @bond change to if/else chain
return frame.IsCompressed && _compression == CompressionMethod.None
? processUnsupportedFrame(
? ProcessUnsupportedFrameAsync(
frame,
CloseStatusCode.IncorrectData,
"A compressed data has been received without available decompression method.")
: frame.IsFragmented
? processFragmentedFrame(frame)
? ProcessFragmentedFrameAsync(frame)
: frame.IsData
? processDataFrame(frame)
? Task.FromResult(processDataFrame(frame))
: frame.IsPing
? processPingFrame(frame)
? Task.FromResult(processPingFrame(frame))
: frame.IsPong
? processPongFrame(frame)
? Task.FromResult(processPongFrame(frame))
: frame.IsClose
? processCloseFrame(frame)
: processUnsupportedFrame(frame, CloseStatusCode.PolicyViolation, null);
? ProcessCloseFrameAsync(frame)
: ProcessUnsupportedFrameAsync(frame, CloseStatusCode.PolicyViolation, null);
}
private bool send(Opcode opcode, Stream stream)
private async Task<bool> SendAsync(Opcode opcode, Stream stream)
{
lock (_forSend)
await _forSend.WaitAsync().ConfigureAwait(false);
try
{
var src = stream;
var compressed = false;
@ -454,7 +464,7 @@ namespace SocketHttpListener
compressed = true;
}
sent = send(opcode, Mask.Unmask, stream, compressed);
sent = await SendAsync(opcode, Mask.Unmask, stream, compressed).ConfigureAwait(false);
if (!sent)
error("Sending a data has been interrupted.");
}
@ -472,16 +482,20 @@ namespace SocketHttpListener
return sent;
}
finally
{
_forSend.Release();
}
}
private bool send(Opcode opcode, Mask mask, Stream stream, bool compressed)
private async Task<bool> SendAsync(Opcode opcode, Mask mask, Stream stream, bool compressed)
{
var len = stream.Length;
/* Not fragmented */
if (len == 0)
return send(Fin.Final, opcode, mask, new byte[0], compressed);
return await SendAsync(Fin.Final, opcode, mask, new byte[0], compressed).ConfigureAwait(false);
var quo = len / FragmentLength;
var rem = (int)(len % FragmentLength);
@ -490,26 +504,26 @@ namespace SocketHttpListener
if (quo == 0)
{
buff = new byte[rem];
return stream.Read(buff, 0, rem) == rem &&
send(Fin.Final, opcode, mask, buff, compressed);
return await stream.ReadAsync(buff, 0, rem).ConfigureAwait(false) == rem &&
await SendAsync(Fin.Final, opcode, mask, buff, compressed).ConfigureAwait(false);
}
buff = new byte[FragmentLength];
if (quo == 1 && rem == 0)
return stream.Read(buff, 0, FragmentLength) == FragmentLength &&
send(Fin.Final, opcode, mask, buff, compressed);
return await stream.ReadAsync(buff, 0, FragmentLength).ConfigureAwait(false) == FragmentLength &&
await SendAsync(Fin.Final, opcode, mask, buff, compressed).ConfigureAwait(false);
/* Send fragmented */
// Begin
if (stream.Read(buff, 0, FragmentLength) != FragmentLength ||
!send(Fin.More, opcode, mask, buff, compressed))
if (await stream.ReadAsync(buff, 0, FragmentLength).ConfigureAwait(false) != FragmentLength ||
!await SendAsync(Fin.More, opcode, mask, buff, compressed).ConfigureAwait(false))
return false;
var n = rem == 0 ? quo - 2 : quo - 1;
for (long i = 0; i < n; i++)
if (stream.Read(buff, 0, FragmentLength) != FragmentLength ||
!send(Fin.More, Opcode.Cont, mask, buff, compressed))
if (await stream.ReadAsync(buff, 0, FragmentLength).ConfigureAwait(false) != FragmentLength ||
!await SendAsync(Fin.More, Opcode.Cont, mask, buff, compressed).ConfigureAwait(false))
return false;
// End
@ -518,98 +532,88 @@ namespace SocketHttpListener
else
buff = new byte[rem];
return stream.Read(buff, 0, rem) == rem &&
send(Fin.Final, Opcode.Cont, mask, buff, compressed);
return await stream.ReadAsync(buff, 0, rem).ConfigureAwait(false) == rem &&
await SendAsync(Fin.Final, Opcode.Cont, mask, buff, compressed).ConfigureAwait(false);
}
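// Descriptive note on the method above: payloads up to FragmentLength are sent as a
// single final frame; longer payloads are split into FragmentLength-sized frames, the
// first carrying the caller's opcode with Fin.More, the middle frames Opcode.Cont with
// Fin.More, and the last Opcode.Cont with Fin.Final.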
private bool send(Fin fin, Opcode opcode, Mask mask, byte[] data, bool compressed)
private Task<bool> SendAsync(Fin fin, Opcode opcode, Mask mask, byte[] data, bool compressed)
{
lock (_forConn)
{
if (_readyState != WebSocketState.Open)
{
return false;
return Task.FromResult(false);
}
return writeBytes(
return WriteBytesAsync(
WebSocketFrame.CreateWebSocketFrame(fin, opcode, mask, data, compressed).ToByteArray());
}
}
private Task sendAsync(Opcode opcode, Stream stream)
{
var completionSource = new TaskCompletionSource<bool>();
Task.Run(() =>
{
try
{
send(opcode, stream);
completionSource.TrySetResult(true);
}
catch (Exception ex)
{
completionSource.TrySetException(ex);
}
});
return completionSource.Task;
}
// As server
private bool sendHttpResponse(HttpResponse response)
{
return writeBytes(response.ToByteArray());
}
private Task<bool> SendHttpResponseAsync(HttpResponse response)
=> WriteBytesAsync(response.ToByteArray());
private void startReceiving()
{
if (_messageEventQueue.Count > 0)
{
_messageEventQueue.Clear();
}
_exitReceiving = new AutoResetEvent(false);
_receivePong = new AutoResetEvent(false);
Action receive = null;
receive = () => WebSocketFrame.ReadAsync(
_stream,
true,
frame =>
{
if (processWebSocketFrame(frame) && _readyState != WebSocketState.Closed)
{
receive();
receive = async () => await WebSocketFrame.ReadAsync(
_stream,
true,
async frame =>
{
if (await ProcessWebSocketFrameAsync(frame).ConfigureAwait(false) && _readyState != WebSocketState.Closed)
{
receive();
if (!frame.IsData)
return;
if (!frame.IsData)
{
return;
}
lock (_forEvent)
{
try
{
var e = dequeueFromMessageEventQueue();
if (e != null && _readyState == WebSocketState.Open)
OnMessage.Emit(this, e);
}
catch (Exception ex)
{
processException(ex, "An exception has occurred while OnMessage.");
}
}
}
else if (_exitReceiving != null)
{
_exitReceiving.Set();
}
},
ex => processException(ex, "An exception has occurred while receiving a message."));
await _forEvent.WaitAsync().ConfigureAwait(false);
try
{
var e = dequeueFromMessageEventQueue();
if (e != null && _readyState == WebSocketState.Open)
{
OnMessage.Emit(this, e);
}
}
catch (Exception ex)
{
await ProcessExceptionAsync(ex, "An exception has occurred while OnMessage.").ConfigureAwait(false);
}
finally
{
_forEvent.Release();
}
}
else if (_exitReceiving != null)
{
_exitReceiving.Set();
}
},
async ex => await ProcessExceptionAsync(ex, "An exception has occurred while receiving a message.")).ConfigureAwait(false);
receive();
}
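// Descriptive note on the loop above: each successfully processed frame re-arms
// receive(), so frames are read one after another until the frame handler reports
// failure or the state becomes Closed; message events are dequeued and raised under
// the _forEvent semaphore so OnMessage handlers are invoked one at a time.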
private bool writeBytes(byte[] data)
private async Task<bool> WriteBytesAsync(byte[] data)
{
try
{
_stream.Write(data, 0, data.Length);
await _stream.WriteAsync(data, 0, data.Length).ConfigureAwait(false);
return true;
}
catch (Exception)
@ -623,10 +627,10 @@ namespace SocketHttpListener
#region Internal Methods
// As server
internal void Close(HttpResponse response)
internal async Task CloseAsync(HttpResponse response)
{
_readyState = WebSocketState.CloseSent;
sendHttpResponse(response);
await SendHttpResponseAsync(response).ConfigureAwait(false);
closeServerResources();
@ -634,22 +638,20 @@ namespace SocketHttpListener
}
// As server
internal void Close(HttpStatusCode code)
{
Close(createHandshakeCloseResponse(code));
}
internal Task CloseAsync(HttpStatusCode code)
=> CloseAsync(createHandshakeCloseResponse(code));
// As server
public void ConnectAsServer()
public async Task ConnectAsServer()
{
try
{
_readyState = WebSocketState.Open;
open();
await OpenAsync().ConfigureAwait(false);
}
catch (Exception ex)
{
processException(ex, "An exception has occurred while connecting.");
await ProcessExceptionAsync(ex, "An exception has occurred while connecting.").ConfigureAwait(false);
}
}
@ -660,18 +662,18 @@ namespace SocketHttpListener
/// <summary>
/// Closes the WebSocket connection, and releases all associated resources.
/// </summary>
public void Close()
public Task CloseAsync()
{
var msg = _readyState.CheckIfClosable();
if (msg != null)
{
error(msg);
return;
return Task.CompletedTask;
}
var send = _readyState == WebSocketState.Open;
close(new PayloadData(), send, send);
return CloseAsync(new PayloadData(), send, send);
}
/// <summary>
@ -689,11 +691,11 @@ namespace SocketHttpListener
/// <param name="reason">
/// A <see cref="string"/> that represents the reason for the close.
/// </param>
public void Close(CloseStatusCode code, string reason)
public async Task CloseAsync(CloseStatusCode code, string reason)
{
byte[] data = null;
var msg = _readyState.CheckIfClosable() ??
(data = ((ushort)code).Append(reason)).CheckIfValidControlData("reason");
(data = await ((ushort)code).AppendAsync(reason).ConfigureAwait(false)).CheckIfValidControlData("reason");
if (msg != null)
{
@ -703,7 +705,7 @@ namespace SocketHttpListener
}
var send = _readyState == WebSocketState.Open && !code.IsReserved();
close(new PayloadData(data), send, send);
await CloseAsync(new PayloadData(data), send, send).ConfigureAwait(false);
}
/// <summary>
@ -728,7 +730,7 @@ namespace SocketHttpListener
throw new Exception(msg);
}
return sendAsync(Opcode.Binary, new MemoryStream(data));
return SendAsync(Opcode.Binary, new MemoryStream(data));
}
/// <summary>
@ -753,7 +755,7 @@ namespace SocketHttpListener
throw new Exception(msg);
}
return sendAsync(Opcode.Text, new MemoryStream(Encoding.UTF8.GetBytes(data)));
return SendAsync(Opcode.Text, new MemoryStream(Encoding.UTF8.GetBytes(data)));
}
#endregion
@ -768,7 +770,7 @@ namespace SocketHttpListener
/// </remarks>
void IDisposable.Dispose()
{
Close(CloseStatusCode.Away, null);
CloseAsync(CloseStatusCode.Away, null).GetAwaiter().GetResult();
}
#endregion
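// Caller-side sketch of the converted surface (consumer code assumed for illustration;
// the WebSocket type name and handler wiring are not shown in this change): public
// members now return Task, so a caller can await the close rather than relying on
// IDisposable.Dispose, which blocks on CloseAsync via GetAwaiter().GetResult().
internal static async Task ShutDownAsync(WebSocket socket)
{
    // CloseStatusCode.Away and the (code, reason) overload are taken from the change above.
    await socket.CloseAsync(CloseStatusCode.Away, null).ConfigureAwait(false);
}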

View File

@ -2,6 +2,7 @@ using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
namespace SocketHttpListener
{
@ -177,7 +178,7 @@ namespace SocketHttpListener
return opcode == Opcode.Text || opcode == Opcode.Binary;
}
private static WebSocketFrame read(byte[] header, Stream stream, bool unmask)
private static async Task<WebSocketFrame> ReadAsync(byte[] header, Stream stream, bool unmask)
{
/* Header */
@ -229,7 +230,7 @@ namespace SocketHttpListener
? 2
: 8;
var extPayloadLen = size > 0 ? stream.ReadBytes(size) : new byte[0];
var extPayloadLen = size > 0 ? await stream.ReadBytesAsync(size).ConfigureAwait(false) : Array.Empty<byte>();
if (size > 0 && extPayloadLen.Length != size)
throw new WebSocketException(
"The 'Extended Payload Length' of a frame cannot be read from the data source.");
@ -239,7 +240,7 @@ namespace SocketHttpListener
/* Masking Key */
var masked = mask == Mask.Mask;
var maskingKey = masked ? stream.ReadBytes(4) : new byte[0];
var maskingKey = masked ? await stream.ReadBytesAsync(4).ConfigureAwait(false) : Array.Empty<byte>();
if (masked && maskingKey.Length != 4)
throw new WebSocketException(
"The 'Masking Key' of a frame cannot be read from the data source.");
@ -264,8 +265,8 @@ namespace SocketHttpListener
"The length of 'Payload Data' of a frame is greater than the allowable length.");
data = payloadLen > 126
? stream.ReadBytes((long)len, 1024)
: stream.ReadBytes((int)len);
? await stream.ReadBytesAsync((long)len, 1024).ConfigureAwait(false)
: await stream.ReadBytesAsync((int)len).ConfigureAwait(false);
//if (data.LongLength != (long)len)
// throw new WebSocketException(
@ -273,7 +274,7 @@ namespace SocketHttpListener
}
else
{
data = new byte[0];
data = Array.Empty<byte>();
}
var payload = new PayloadData(data, masked);
@ -281,7 +282,7 @@ namespace SocketHttpListener
{
payload.Mask(maskingKey);
frame._mask = Mask.Unmask;
frame._maskingKey = new byte[0];
frame._maskingKey = Array.Empty<byte>();
}
frame._payloadData = payload;
@ -302,10 +303,10 @@ namespace SocketHttpListener
return new WebSocketFrame(Opcode.Close, mask, payload);
}
internal static WebSocketFrame CreateCloseFrame(Mask mask, CloseStatusCode code, string reason)
internal static async Task<WebSocketFrame> CreateCloseFrameAsync(Mask mask, CloseStatusCode code, string reason)
{
return new WebSocketFrame(
Opcode.Close, mask, new PayloadData(((ushort)code).Append(reason)));
Opcode.Close, mask, new PayloadData(await ((ushort)code).AppendAsync(reason).ConfigureAwait(false)));
}
internal static WebSocketFrame CreatePingFrame(Mask mask)
@ -329,41 +330,39 @@ namespace SocketHttpListener
return new WebSocketFrame(fin, opcode, mask, new PayloadData(data), compressed);
}
internal static WebSocketFrame Read(Stream stream)
{
return Read(stream, true);
}
internal static Task<WebSocketFrame> ReadAsync(Stream stream)
=> ReadAsync(stream, true);
internal static WebSocketFrame Read(Stream stream, bool unmask)
internal static async Task<WebSocketFrame> ReadAsync(Stream stream, bool unmask)
{
var header = stream.ReadBytes(2);
var header = await stream.ReadBytesAsync(2).ConfigureAwait(false);
if (header.Length != 2)
{
throw new WebSocketException(
"The header part of a frame cannot be read from the data source.");
}
return read(header, stream, unmask);
return await ReadAsync(header, stream, unmask).ConfigureAwait(false);
}
internal static async void ReadAsync(
internal static async Task ReadAsync(
Stream stream, bool unmask, Action<WebSocketFrame> completed, Action<Exception> error)
{
try
{
var header = await stream.ReadBytesAsync(2).ConfigureAwait(false);
if (header.Length != 2)
{
throw new WebSocketException(
"The header part of a frame cannot be read from the data source.");
}
var frame = read(header, stream, unmask);
if (completed != null)
completed(frame);
var frame = await ReadAsync(header, stream, unmask).ConfigureAwait(false);
completed?.Invoke(frame);
}
catch (Exception ex)
{
if (error != null)
{
error(ex);
}
error?.Invoke(ex);
}
}

View File

@ -1,7 +0,0 @@
#!/usr/bin/env bash
source ../common.build.sh
VERSION=`get_version ../..`
build_jellyfin ../../Jellyfin.Server Release debian-x64 `pwd`/dist/jellyfin_${VERSION}

View File

@ -1,7 +0,0 @@
#!/usr/bin/env bash
source ../common.build.sh
VERSION=`get_version ../..`
clean_jellyfin ../.. Release `pwd`/dist/jellyfin_${VERSION}

View File

@ -1,7 +0,0 @@
#!/usr/bin/env bash
source ../common.build.sh
VERSION=`get_version ../..`
package_portable ../.. `pwd`/dist/jellyfin_${VERSION}

View File

@ -1,7 +0,0 @@
#!/usr/bin/env bash
source ../common.build.sh
VERSION=`get_version ../..`
build_jellyfin ../../Jellyfin.Server Release ubuntu-x64 `pwd`/dist/jellyfin_${VERSION}

View File

@ -1,7 +0,0 @@
#!/usr/bin/env bash
source ../common.build.sh
VERSION=`get_version ../..`
clean_jellyfin ../.. Release `pwd`/dist/jellyfin_${VERSION}

View File

@ -1 +0,0 @@
dotnet

View File

@ -1,7 +0,0 @@
#!/usr/bin/env bash
source ../common.build.sh
VERSION=`get_version ../..`
package_portable ../.. `pwd`/dist/jellyfin_${VERSION}

View File

@ -1 +0,0 @@
dotnet