Simplify Folder Watcher (#1484)

* Refactored Library Watcher to use Hangfire under the hood.

* Support .kavitaignore at root level.

* Refactored a lot of the library watching code to process faster and handle when FileSystemWatcher runs out of internal buffer space. It's still not perfect, but good enough for basic use.

* Marked folder watching as experimental and defaulted it to off.

* Revert #1479

* Tweaked the messaging for OPDS to remove a note about download role.

Moved some code closer to where it's used.

* Cleaned up how the events widget reports

* Fixed a null issue when deleting series in the UI

* Cleaned up some debug code

* Added more information for when we skip a scan

* Cleaned up some logging messages in CoverGen tasks

* More log message tweaks

* Added some debug logging to help identify a rare issue.

* Fixed a bug where save bookmarks as webp could get reset to false when saving other server settings

* Updated some documentation on library watcher.

* Make LibraryWatcher fire every 5 mins
This commit is contained in:
Joseph Milazzo 2022-08-28 15:20:46 -05:00 committed by GitHub
parent b64ed6df8d
commit b07aaf1eb5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 187 additions and 259 deletions

View File

@ -103,7 +103,7 @@ namespace API.Data
new() {Key = ServerSettingKey.ConvertBookmarkToWebP, Value = "false"}, new() {Key = ServerSettingKey.ConvertBookmarkToWebP, Value = "false"},
new() {Key = ServerSettingKey.EnableSwaggerUi, Value = "false"}, new() {Key = ServerSettingKey.EnableSwaggerUi, Value = "false"},
new() {Key = ServerSettingKey.TotalBackups, Value = "30"}, new() {Key = ServerSettingKey.TotalBackups, Value = "30"},
new() {Key = ServerSettingKey.EnableFolderWatching, Value = "true"}, new() {Key = ServerSettingKey.EnableFolderWatching, Value = "false"},
}.ToArray()); }.ToArray());
foreach (var defaultSetting in DefaultSettings) foreach (var defaultSetting in DefaultSettings)

View File

@ -10,6 +10,7 @@ using API.DTOs.System;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using Kavita.Common.Helpers; using Kavita.Common.Helpers;
using Microsoft.Extensions.FileSystemGlobbing;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Services namespace API.Services
@ -64,14 +65,17 @@ namespace API.Services
SearchOption searchOption = SearchOption.TopDirectoryOnly); SearchOption searchOption = SearchOption.TopDirectoryOnly);
IEnumerable<string> GetDirectories(string folderPath); IEnumerable<string> GetDirectories(string folderPath);
IEnumerable<string> GetDirectories(string folderPath, GlobMatcher matcher);
string GetParentDirectoryName(string fileOrFolder); string GetParentDirectoryName(string fileOrFolder);
#nullable enable #nullable enable
IList<string> ScanFiles(string folderPath, GlobMatcher? matcher = null); IList<string> ScanFiles(string folderPath, GlobMatcher? matcher = null);
DateTime GetLastWriteTime(string folderPath); DateTime GetLastWriteTime(string folderPath);
GlobMatcher CreateMatcherFromFile(string filePath);
#nullable disable #nullable disable
} }
public class DirectoryService : IDirectoryService public class DirectoryService : IDirectoryService
{ {
public const string KavitaIgnoreFile = ".kavitaignore";
public IFileSystem FileSystem { get; } public IFileSystem FileSystem { get; }
public string CacheDirectory { get; } public string CacheDirectory { get; }
public string CoverImageDirectory { get; } public string CoverImageDirectory { get; }
@ -531,6 +535,21 @@ namespace API.Services
.Where(path => ExcludeDirectories.Matches(path).Count == 0); .Where(path => ExcludeDirectories.Matches(path).Count == 0);
} }
/// <summary>
/// Gets a set of directories from the folder path. Automatically excludes directories that shouldn't be in scope.
/// </summary>
/// <param name="folderPath"></param>
/// <param name="matcher">A set of glob rules that will filter directories out</param>
/// <returns>List of directory paths, empty if path doesn't exist</returns>
public IEnumerable<string> GetDirectories(string folderPath, GlobMatcher matcher)
{
if (matcher == null) return GetDirectories(folderPath);
return GetDirectories(folderPath)
.Where(folder => !matcher.ExcludeMatches(
$"{FileSystem.DirectoryInfo.FromDirectoryName(folder).Name}{FileSystem.Path.AltDirectorySeparatorChar}"));
}
/// <summary> /// <summary>
/// Returns all directories, including subdirectories. Automatically excludes directories that shouldn't be in scope. /// Returns all directories, including subdirectories. Automatically excludes directories that shouldn't be in scope.
/// </summary> /// </summary>
@ -580,7 +599,7 @@ namespace API.Services
var files = new List<string>(); var files = new List<string>();
if (!Exists(folderPath)) return files; if (!Exists(folderPath)) return files;
var potentialIgnoreFile = FileSystem.Path.Join(folderPath, ".kavitaignore"); var potentialIgnoreFile = FileSystem.Path.Join(folderPath, KavitaIgnoreFile);
if (matcher == null) if (matcher == null)
{ {
matcher = CreateMatcherFromFile(potentialIgnoreFile); matcher = CreateMatcherFromFile(potentialIgnoreFile);
@ -591,17 +610,7 @@ namespace API.Services
} }
IEnumerable<string> directories; var directories = GetDirectories(folderPath, matcher);
if (matcher == null)
{
directories = GetDirectories(folderPath);
}
else
{
directories = GetDirectories(folderPath)
.Where(folder => matcher != null &&
!matcher.ExcludeMatches($"{FileSystem.DirectoryInfo.FromDirectoryName(folder).Name}{FileSystem.Path.AltDirectorySeparatorChar}"));
}
foreach (var directory in directories) foreach (var directory in directories)
{ {
@ -640,8 +649,12 @@ namespace API.Services
return directories.Max(d => FileSystem.Directory.GetLastWriteTime(d)); return directories.Max(d => FileSystem.Directory.GetLastWriteTime(d));
} }
/// <summary>
private GlobMatcher CreateMatcherFromFile(string filePath) /// Generates a GlobMatcher from a .kavitaignore file found at path. Returns null otherwise.
/// </summary>
/// <param name="filePath"></param>
/// <returns></returns>
public GlobMatcher CreateMatcherFromFile(string filePath)
{ {
if (!FileSystem.File.Exists(filePath)) if (!FileSystem.File.Exists(filePath))
{ {

View File

@ -80,8 +80,8 @@ public class MetadataService : IMetadataService
_logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile.FilePath); _logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile.FilePath);
chapter.CoverImage = _readingItemService.GetCoverImage(firstFile.FilePath, ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId), firstFile.Format); chapter.CoverImage = _readingItemService.GetCoverImage(firstFile.FilePath, ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId), firstFile.Format);
_unitOfWork.ChapterRepository.Update(chapter); // BUG: CoverImage isn't saving for Monter Masume with new scan loop _unitOfWork.ChapterRepository.Update(chapter);
_updateEvents.Add(MessageFactory.CoverUpdateEvent(chapter.Id, MessageFactoryEntityTypes.Chapter)); // TODO: IDEA: Instead of firing here where it's not yet saved, maybe collect the ids and fire after save _updateEvents.Add(MessageFactory.CoverUpdateEvent(chapter.Id, MessageFactoryEntityTypes.Chapter));
return Task.FromResult(true); return Task.FromResult(true);
} }
@ -111,7 +111,6 @@ public class MetadataService : IMetadataService
if (firstChapter == null) return Task.FromResult(false); if (firstChapter == null) return Task.FromResult(false);
volume.CoverImage = firstChapter.CoverImage; volume.CoverImage = firstChapter.CoverImage;
//await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, MessageFactory.CoverUpdateEvent(volume.Id, MessageFactoryEntityTypes.Volume), false);
_updateEvents.Add(MessageFactory.CoverUpdateEvent(volume.Id, MessageFactoryEntityTypes.Volume)); _updateEvents.Add(MessageFactory.CoverUpdateEvent(volume.Id, MessageFactoryEntityTypes.Volume));
return Task.FromResult(true); return Task.FromResult(true);
@ -148,7 +147,6 @@ public class MetadataService : IMetadataService
} }
} }
series.CoverImage = firstCover?.CoverImage ?? coverImage; series.CoverImage = firstCover?.CoverImage ?? coverImage;
//await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, MessageFactory.CoverUpdateEvent(series.Id, MessageFactoryEntityTypes.Series), false);
_updateEvents.Add(MessageFactory.CoverUpdateEvent(series.Id, MessageFactoryEntityTypes.Series)); _updateEvents.Add(MessageFactory.CoverUpdateEvent(series.Id, MessageFactoryEntityTypes.Series));
return Task.CompletedTask; return Task.CompletedTask;
} }
@ -161,7 +159,7 @@ public class MetadataService : IMetadataService
/// <param name="forceUpdate"></param> /// <param name="forceUpdate"></param>
private async Task ProcessSeriesCoverGen(Series series, bool forceUpdate) private async Task ProcessSeriesCoverGen(Series series, bool forceUpdate)
{ {
_logger.LogDebug("[MetadataService] Generating cover images for series: {SeriesName}", series.OriginalName); _logger.LogDebug("[MetadataService] Processing cover image generation for series: {SeriesName}", series.OriginalName);
try try
{ {
var volumeIndex = 0; var volumeIndex = 0;
@ -195,7 +193,7 @@ public class MetadataService : IMetadataService
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "[MetadataService] There was an exception during updating metadata for {SeriesName} ", series.Name); _logger.LogError(ex, "[MetadataService] There was an exception during cover generation for {SeriesName} ", series.Name);
} }
} }
@ -211,14 +209,14 @@ public class MetadataService : IMetadataService
public async Task GenerateCoversForLibrary(int libraryId, bool forceUpdate = false) public async Task GenerateCoversForLibrary(int libraryId, bool forceUpdate = false)
{ {
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
_logger.LogInformation("[MetadataService] Beginning metadata refresh of {LibraryName}", library.Name); _logger.LogInformation("[MetadataService] Beginning cover generation refresh of {LibraryName}", library.Name);
_updateEvents.Clear(); _updateEvents.Clear();
var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id); var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
var stopwatch = Stopwatch.StartNew(); var stopwatch = Stopwatch.StartNew();
var totalTime = 0L; var totalTime = 0L;
_logger.LogInformation("[MetadataService] Refreshing Library {LibraryName}. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize); _logger.LogInformation("[MetadataService] Refreshing Library {LibraryName} for cover generation. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.CoverUpdateProgressEvent(library.Id, 0F, ProgressEventType.Started, $"Starting {library.Name}")); MessageFactory.CoverUpdateProgressEvent(library.Id, 0F, ProgressEventType.Started, $"Starting {library.Name}"));
@ -229,7 +227,7 @@ public class MetadataService : IMetadataService
totalTime += stopwatch.ElapsedMilliseconds; totalTime += stopwatch.ElapsedMilliseconds;
stopwatch.Restart(); stopwatch.Restart();
_logger.LogInformation("[MetadataService] Processing chunk {ChunkNumber} / {TotalChunks} with size {ChunkSize}. Series ({SeriesStart} - {SeriesEnd}", _logger.LogDebug("[MetadataService] Processing chunk {ChunkNumber} / {TotalChunks} with size {ChunkSize}. Series ({SeriesStart} - {SeriesEnd})",
chunk, chunkInfo.TotalChunks, chunkInfo.ChunkSize, chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize); chunk, chunkInfo.TotalChunks, chunkInfo.ChunkSize, chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize);
var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id, var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
@ -255,7 +253,7 @@ public class MetadataService : IMetadataService
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "[MetadataService] There was an exception during metadata refresh for {SeriesName}", series.Name); _logger.LogError(ex, "[MetadataService] There was an exception during cover generation refresh for {SeriesName}", series.Name);
} }
seriesIndex++; seriesIndex++;
} }
@ -272,7 +270,7 @@ public class MetadataService : IMetadataService
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.CoverUpdateProgressEvent(library.Id, 1F, ProgressEventType.Ended, $"Complete")); MessageFactory.CoverUpdateProgressEvent(library.Id, 1F, ProgressEventType.Ended, $"Complete"));
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime); _logger.LogInformation("[MetadataService] Updated covers for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime);
} }
@ -321,7 +319,7 @@ public class MetadataService : IMetadataService
if (_unitOfWork.HasChanges()) if (_unitOfWork.HasChanges())
{ {
await _unitOfWork.CommitAsync(); await _unitOfWork.CommitAsync();
_logger.LogInformation("[MetadataService] Updated cover images for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); _logger.LogInformation("[MetadataService] Updated covers for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
} }
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,

View File

@ -239,12 +239,12 @@ public class TaskScheduler : ITaskScheduler
public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false) public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
{ {
if (HasAlreadyEnqueuedTask("ScannerService", "ScanSeries", new object[] {seriesId, forceUpdate}, ScanQueue)) if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, forceUpdate}, ScanQueue))
{ {
_logger.LogInformation("A duplicate request to scan series occured. Skipping"); _logger.LogInformation("A duplicate request to scan series occured. Skipping");
return; return;
} }
if (RunningAnyTasksByMethod(new List<string>() {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue)) if (RunningAnyTasksByMethod(new List<string>() {ScannerService.Name, "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue))
{ {
_logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 minutes"); _logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 minutes");
BackgroundJob.Schedule(() => ScanSeries(libraryId, seriesId, forceUpdate), TimeSpan.FromMinutes(10)); BackgroundJob.Schedule(() => ScanSeries(libraryId, seriesId, forceUpdate), TimeSpan.FromMinutes(10));
@ -290,7 +290,7 @@ public class TaskScheduler : ITaskScheduler
/// <param name="args">object[] of arguments in the order they are passed to enqueued job</param> /// <param name="args">object[] of arguments in the order they are passed to enqueued job</param>
/// <param name="queue">Queue to check against. Defaults to "default"</param> /// <param name="queue">Queue to check against. Defaults to "default"</param>
/// <returns></returns> /// <returns></returns>
private static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue) public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue)
{ {
var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue); var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue);
return enqueuedJobs.Any(j => j.Value.InEnqueuedState && return enqueuedJobs.Any(j => j.Value.InEnqueuedState &&

View File

@ -1,8 +1,8 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data; using API.Data;
using Hangfire; using Hangfire;
@ -11,6 +11,52 @@ using Microsoft.Extensions.Logging;
namespace API.Services.Tasks.Scanner; namespace API.Services.Tasks.Scanner;
/// <summary>
/// Change information
/// </summary>
public class Change
{
/// <summary>
/// Gets or sets the type of the change.
/// </summary>
/// <value>
/// The type of the change.
/// </value>
public WatcherChangeTypes ChangeType { get; set; }
/// <summary>
/// Gets or sets the full path.
/// </summary>
/// <value>
/// The full path.
/// </value>
public string FullPath { get; set; }
/// <summary>
/// Gets or sets the name.
/// </summary>
/// <value>
/// The name.
/// </value>
public string Name { get; set; }
/// <summary>
/// Gets or sets the old full path.
/// </summary>
/// <value>
/// The old full path.
/// </value>
public string OldFullPath { get; set; }
/// <summary>
/// Gets or sets the old name.
/// </summary>
/// <value>
/// The old name.
/// </value>
public string OldName { get; set; }
}
public interface ILibraryWatcher public interface ILibraryWatcher
{ {
/// <summary> /// <summary>
@ -29,29 +75,6 @@ public interface ILibraryWatcher
Task RestartWatching(); Task RestartWatching();
} }
internal class FolderScanQueueable
{
public DateTime QueueTime { get; set; }
public string FolderPath { get; set; }
}
internal class FolderScanQueueableComparer : IEqualityComparer<FolderScanQueueable>
{
public bool Equals(FolderScanQueueable x, FolderScanQueueable y)
{
if (ReferenceEquals(x, y)) return true;
if (ReferenceEquals(x, null)) return false;
if (ReferenceEquals(y, null)) return false;
if (x.GetType() != y.GetType()) return false;
return x.FolderPath == y.FolderPath;
}
public int GetHashCode(FolderScanQueueable obj)
{
return HashCode.Combine(obj.FolderPath);
}
}
/// <summary> /// <summary>
/// Responsible for watching the file system and processing change events. This is mainly responsible for invoking /// Responsible for watching the file system and processing change events. This is mainly responsible for invoking
/// Scanner to quickly pickup on changes. /// Scanner to quickly pickup on changes.
@ -64,11 +87,13 @@ public class LibraryWatcher : ILibraryWatcher
private readonly IScannerService _scannerService; private readonly IScannerService _scannerService;
private readonly Dictionary<string, IList<FileSystemWatcher>> _watcherDictionary = new (); private readonly Dictionary<string, IList<FileSystemWatcher>> _watcherDictionary = new ();
/// <summary>
/// This is just here to prevent GC from Disposing our watchers
/// </summary>
private readonly IList<FileSystemWatcher> _fileWatchers = new List<FileSystemWatcher>();
private IList<string> _libraryFolders = new List<string>(); private IList<string> _libraryFolders = new List<string>();
private readonly Queue<FolderScanQueueable> _scanQueue = new Queue<FolderScanQueueable>();
private readonly TimeSpan _queueWaitTime; private readonly TimeSpan _queueWaitTime;
private readonly FolderScanQueueableComparer _folderScanQueueableComparer = new FolderScanQueueableComparer();
public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork, ILogger<LibraryWatcher> logger, IScannerService scannerService, IHostEnvironment environment) public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork, ILogger<LibraryWatcher> logger, IScannerService scannerService, IHostEnvironment environment)
@ -78,7 +103,7 @@ public class LibraryWatcher : ILibraryWatcher
_logger = logger; _logger = logger;
_scannerService = scannerService; _scannerService = scannerService;
_queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(10) : TimeSpan.FromMinutes(1); _queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(30) : TimeSpan.FromMinutes(5);
} }
@ -95,20 +120,16 @@ public class LibraryWatcher : ILibraryWatcher
{ {
_logger.LogDebug("Watching {FolderPath}", libraryFolder); _logger.LogDebug("Watching {FolderPath}", libraryFolder);
var watcher = new FileSystemWatcher(libraryFolder); var watcher = new FileSystemWatcher(libraryFolder);
watcher.NotifyFilter = NotifyFilters.CreationTime
| NotifyFilters.DirectoryName
| NotifyFilters.FileName
| NotifyFilters.LastWrite
| NotifyFilters.Size;
watcher.Changed += OnChanged; watcher.Changed += OnChanged;
watcher.Created += OnCreated; watcher.Created += OnCreated;
watcher.Deleted += OnDeleted; watcher.Deleted += OnDeleted;
watcher.Renamed += OnRenamed; watcher.Error += OnError;
watcher.Filter = "*.*"; watcher.Filter = "*.*";
watcher.IncludeSubdirectories = true; watcher.IncludeSubdirectories = true;
watcher.EnableRaisingEvents = true; watcher.EnableRaisingEvents = true;
_fileWatchers.Add(watcher);
if (!_watcherDictionary.ContainsKey(libraryFolder)) if (!_watcherDictionary.ContainsKey(libraryFolder))
{ {
_watcherDictionary.Add(libraryFolder, new List<FileSystemWatcher>()); _watcherDictionary.Add(libraryFolder, new List<FileSystemWatcher>());
@ -127,9 +148,9 @@ public class LibraryWatcher : ILibraryWatcher
fileSystemWatcher.Changed -= OnChanged; fileSystemWatcher.Changed -= OnChanged;
fileSystemWatcher.Created -= OnCreated; fileSystemWatcher.Created -= OnCreated;
fileSystemWatcher.Deleted -= OnDeleted; fileSystemWatcher.Deleted -= OnDeleted;
fileSystemWatcher.Renamed -= OnRenamed;
fileSystemWatcher.Dispose(); fileSystemWatcher.Dispose();
} }
_fileWatchers.Clear();
_watcherDictionary.Clear(); _watcherDictionary.Clear();
} }
@ -143,7 +164,7 @@ public class LibraryWatcher : ILibraryWatcher
{ {
if (e.ChangeType != WatcherChangeTypes.Changed) return; if (e.ChangeType != WatcherChangeTypes.Changed) return;
_logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name); _logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name);
ProcessChange(e.FullPath); ProcessChange(e.FullPath, string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name)));
} }
private void OnCreated(object sender, FileSystemEventArgs e) private void OnCreated(object sender, FileSystemEventArgs e)
@ -152,87 +173,77 @@ public class LibraryWatcher : ILibraryWatcher
ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name)); ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name));
} }
/// <summary>
/// From testing, on Deleted only needs to pass through the event when a folder is deleted. If a file is deleted, Changed will handle automatically.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void OnDeleted(object sender, FileSystemEventArgs e) { private void OnDeleted(object sender, FileSystemEventArgs e) {
var isDirectory = string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name));
if (!isDirectory) return;
_logger.LogDebug("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name); _logger.LogDebug("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name);
ProcessChange(e.FullPath, true);
// On deletion, we need another type of check. We need to check if e.Name has an extension or not
// NOTE: File deletion will trigger a folder change event, so this might not be needed
ProcessChange(e.FullPath, string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name)));
} }
private void OnError(object sender, ErrorEventArgs e)
private void OnRenamed(object sender, RenamedEventArgs e)
{ {
_logger.LogDebug($"[LibraryWatcher] Renamed:"); _logger.LogError(e.GetException(), "[LibraryWatcher] An error occured, likely too many watches occured at once. Restarting Watchers");
_logger.LogDebug(" Old: {OldFullPath}", e.OldFullPath); Task.Run(RestartWatching);
_logger.LogDebug(" New: {FullPath}", e.FullPath);
ProcessChange(e.FullPath, _directoryService.FileSystem.Directory.Exists(e.FullPath));
} }
/// <summary> /// <summary>
/// Processes the file or folder change. /// Processes the file or folder change. If the change is a file change and not from a supported extension, it will be ignored.
/// </summary> /// </summary>
/// <remarks>This will ignore image files that are added to the system. However, they may still trigger scans due to folder changes.</remarks>
/// <param name="filePath">File or folder that changed</param> /// <param name="filePath">File or folder that changed</param>
/// <param name="isDirectoryChange">If the change is on a directory and not a file</param> /// <param name="isDirectoryChange">If the change is on a directory and not a file</param>
private void ProcessChange(string filePath, bool isDirectoryChange = false) private void ProcessChange(string filePath, bool isDirectoryChange = false)
{ {
// We need to check if directory or not var sw = Stopwatch.StartNew();
if (!isDirectoryChange && !new Regex(Parser.Parser.SupportedExtensions).IsMatch(new FileInfo(filePath).Extension)) return; try
var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
if (string.IsNullOrEmpty(parentDirectory)) return;
// We need to find the library this creation belongs to
// Multiple libraries can point to the same base folder. In this case, we need use FirstOrDefault
var libraryFolder = _libraryFolders.FirstOrDefault(f => parentDirectory.Contains(f));
if (string.IsNullOrEmpty(libraryFolder)) return;
var rootFolder = _directoryService.GetFoldersTillRoot(libraryFolder, filePath).ToList();
if (!rootFolder.Any()) return;
// Select the first folder and join with library folder, this should give us the folder to scan.
var fullPath = Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder.First()));
var queueItem = new FolderScanQueueable()
{ {
FolderPath = fullPath, // We need to check if directory or not
QueueTime = DateTime.Now if (!isDirectoryChange &&
}; !(Parser.Parser.IsArchive(filePath) || Parser.Parser.IsBook(filePath))) return;
if (!_scanQueue.Contains(queueItem, _folderScanQueueableComparer))
{
_logger.LogDebug("[LibraryWatcher] Queuing job for {Folder} at {TimeStamp}", fullPath, DateTime.Now);
_scanQueue.Enqueue(queueItem);
}
ProcessQueue(); var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
} if (string.IsNullOrEmpty(parentDirectory)) return;
/// <summary> // We need to find the library this creation belongs to
/// Instead of making things complicated with a separate thread, this service will process the queue whenever a change occurs // Multiple libraries can point to the same base folder. In this case, we need use FirstOrDefault
/// </summary> var libraryFolder = _libraryFolders.FirstOrDefault(f => parentDirectory.Contains(f));
private void ProcessQueue() if (string.IsNullOrEmpty(libraryFolder)) return;
{
var i = 0; var rootFolder = _directoryService.GetFoldersTillRoot(libraryFolder, filePath).ToList();
while (i < _scanQueue.Count) if (!rootFolder.Any()) return;
{
var item = _scanQueue.Peek(); // Select the first folder and join with library folder, this should give us the folder to scan.
if (item.QueueTime < DateTime.Now.Subtract(_queueWaitTime)) var fullPath =
Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder.First()));
var alreadyScheduled =
TaskScheduler.HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {fullPath});
_logger.LogDebug("{FullPath} already enqueued: {Value}", fullPath, alreadyScheduled);
if (!alreadyScheduled)
{ {
_logger.LogDebug("[LibraryWatcher] Scheduling ScanSeriesFolder for {Folder}", item.FolderPath); _logger.LogDebug("[LibraryWatcher] Scheduling ScanFolder for {Folder}", fullPath);
BackgroundJob.Enqueue(() => _scannerService.ScanFolder(item.FolderPath)); BackgroundJob.Schedule(() => _scannerService.ScanFolder(fullPath), _queueWaitTime);
_scanQueue.Dequeue();
} }
else else
{ {
i++; _logger.LogDebug("[LibraryWatcher] Skipped scheduling ScanFolder for {Folder} as a job already queued",
fullPath);
} }
} }
catch (Exception ex)
if (_scanQueue.Count > 0)
{ {
Task.Delay(TimeSpan.FromSeconds(30)).ContinueWith(t=> ProcessQueue()); _logger.LogError(ex, "[LibraryWatcher] An error occured when processing a watch event");
} }
_logger.LogDebug("ProcessChange occured in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds);
} }
} }

View File

@ -80,7 +80,9 @@ namespace API.Services.Tasks.Scanner
string normalizedPath; string normalizedPath;
if (scanDirectoryByDirectory) if (scanDirectoryByDirectory)
{ {
var directories = _directoryService.GetDirectories(folderPath).ToList(); // This is used in library scan, so we should check first for a ignore file and use that here as well
var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(folderPath, DirectoryService.KavitaIgnoreFile);
var directories = _directoryService.GetDirectories(folderPath, _directoryService.CreateMatcherFromFile(potentialIgnoreFile)).ToList();
foreach (var directory in directories) foreach (var directory in directories)
{ {
@ -219,7 +221,7 @@ namespace API.Services.Tasks.Scanner
IDictionary<string, IList<SeriesModified>> seriesPaths, Action<Tuple<bool, IList<ParserInfo>>> processSeriesInfos, bool forceCheck = false) IDictionary<string, IList<SeriesModified>> seriesPaths, Action<Tuple<bool, IList<ParserInfo>>> processSeriesInfos, bool forceCheck = false)
{ {
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("Starting file scan", libraryName, ProgressEventType.Started)); await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", libraryName, ProgressEventType.Started));
foreach (var folderPath in folders) foreach (var folderPath in folders)
{ {
@ -284,7 +286,7 @@ namespace API.Services.Tasks.Scanner
} }
} }
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(string.Empty, libraryName, ProgressEventType.Ended)); await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", libraryName, ProgressEventType.Ended));
} }
private bool HasSeriesFolderNotChangedSinceLastScan(IDictionary<string, IList<SeriesModified>> seriesPaths, string normalizedFolder, bool forceCheck = false) private bool HasSeriesFolderNotChangedSinceLastScan(IDictionary<string, IList<SeriesModified>> seriesPaths, string normalizedFolder, bool forceCheck = false)

View File

@ -21,8 +21,6 @@ namespace API.Parser
public const string SupportedExtensions = public const string SupportedExtensions =
ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions; ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions;
public static readonly string[] SupportedGlobExtensions = new [] {@"**/*.png", @"**/*.cbz", @"**/*.pdf"};
private const RegexOptions MatchOptions = private const RegexOptions MatchOptions =
RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant; RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant;

View File

@ -1,111 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Helpers;
using API.Parser;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks.Scanner;
/// <summary>
/// This is responsible for scanning and updating a Library
/// </summary>
public class ScanLibrary
{
    private readonly IDirectoryService _directoryService;
    private readonly IUnitOfWork _unitOfWork;
    private readonly ILogger _logger;

    /// <summary>
    /// Creates a new <see cref="ScanLibrary"/>.
    /// </summary>
    /// <param name="directoryService">Abstraction over the filesystem used for directory/file enumeration</param>
    /// <param name="unitOfWork">Database unit of work (held for repository access)</param>
    /// <param name="logger">Logger for diagnostics</param>
    public ScanLibrary(IDirectoryService directoryService, IUnitOfWork unitOfWork, ILogger logger)
    {
        _directoryService = directoryService;
        _unitOfWork = unitOfWork;
        _logger = logger;
    }

    /// <summary>
    /// Gets the list of all parserInfos given a Series (Will match on Name, LocalizedName, OriginalName). If the series does not exist within, return empty list.
    /// </summary>
    /// <param name="parsedSeries">Map of parsed series to their infos</param>
    /// <param name="series">Series to match against (via <see cref="SeriesHelper.FindSeries"/>)</param>
    /// <returns>All infos whose key matches the series; empty list when none match</returns>
    public static IList<ParserInfo> GetInfosByName(Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Series series)
    {
        // Flatten the infos from every key that matches this series on Name/LocalizedName/OriginalName
        return parsedSeries.Keys
            .Where(ps => SeriesHelper.FindSeries(series, ps))
            .SelectMany(key => parsedSeries[key])
            .ToList();
    }

    /// <summary>
    /// This will Scan all files in a folder path. For each folder within the folderPath, FolderAction will be invoked for all files contained
    /// </summary>
    /// <param name="folderPath">A library folder or series folder</param>
    /// <param name="isLibraryFolder">When true, each immediate subdirectory of <paramref name="folderPath"/> is scanned and the callback
    /// fires once per subdirectory; when false, the folder itself is scanned and the callback fires once</param>
    /// <param name="folderAction">A callback async Task to be called once all files for each folder path are found</param>
    public async Task ProcessFiles(string folderPath, bool isLibraryFolder, Func<IEnumerable<string>, string, Task> folderAction)
    {
        if (isLibraryFolder)
        {
            var directories = _directoryService.GetDirectories(folderPath).ToList();
            foreach (var directory in directories)
            {
                // For a scan, this is doing everything in the directory loop before the folder Action is called...which leads to no progress indication
                await folderAction(_directoryService.ScanFiles(directory), directory);
            }
        }
        else
        {
            await folderAction(_directoryService.ScanFiles(folderPath), folderPath);
        }
    }

    /// <summary>
    /// Builds a <see cref="GlobMatcher"/> from a .kavitaignore-style file, where each line is an exclude glob.
    /// </summary>
    /// <param name="ignoreFile">Full path to the ignore file</param>
    /// <returns>A matcher with one exclude per line, or null when the file is missing or empty</returns>
    private GlobMatcher CreateIgnoreMatcher(string ignoreFile)
    {
        if (!_directoryService.FileSystem.File.Exists(ignoreFile))
        {
            return null;
        }

        // Read file in and add each line to Matcher
        var lines = _directoryService.FileSystem.File.ReadAllLines(ignoreFile);
        if (lines.Length == 0)
        {
            // Non-fatal: an empty ignore file is simply skipped, so log at Warning rather than Error
            _logger.LogWarning("Kavita Ignore file found but empty, ignoring: {IgnoreFile}", ignoreFile);
            return null;
        }

        GlobMatcher matcher = new();
        foreach (var line in lines)
        {
            matcher.AddExclude(line);
        }

        return matcher;
    }
}

View File

@ -1,6 +1,7 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using System.Globalization;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
@ -68,6 +69,7 @@ public enum ScanCancelReason
*/ */
public class ScannerService : IScannerService public class ScannerService : IScannerService
{ {
public const string Name = "ScannerService";
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<ScannerService> _logger; private readonly ILogger<ScannerService> _logger;
private readonly IMetadataService _metadataService; private readonly IMetadataService _metadataService;
@ -277,7 +279,7 @@ public class ScannerService : IScannerService
return ScanCancelReason.FolderMount; return ScanCancelReason.FolderMount;
} }
// If all series Folder paths haven't been modified since last scan, abort // If all series Folder paths haven't been modified since last scan, abort (NOTE: This flow never happens as ScanSeries will always bypass)
if (!bypassFolderChecks) if (!bypassFolderChecks)
{ {
@ -293,7 +295,7 @@ public class ScannerService : IScannerService
series.Name); series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Info, await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"{series.Name} scan has no work to do", MessageFactory.InfoEvent($"{series.Name} scan has no work to do",
"All folders have not been changed since last scan. Scan will be aborted.")); $"All folders have not been changed since last scan ({series.LastFolderScanned.ToString(CultureInfo.CurrentCulture)}). Scan will be aborted."));
return ScanCancelReason.NoChange; return ScanCancelReason.NoChange;
} }
} }
@ -304,7 +306,7 @@ public class ScannerService : IScannerService
series.Name); series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Info, await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.ErrorEvent($"{series.Name} scan has no work to do", MessageFactory.ErrorEvent($"{series.Name} scan has no work to do",
"The folder the series is in is missing. Delete series manually or perform a library scan.")); "The folder the series was in is missing. Delete series manually or perform a library scan."));
return ScanCancelReason.NoCancel; return ScanCancelReason.NoCancel;
} }
} }
@ -316,7 +318,7 @@ public class ScannerService : IScannerService
private static void RemoveParsedInfosNotForSeries(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Series series) private static void RemoveParsedInfosNotForSeries(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Series series)
{ {
var keys = parsedSeries.Keys; var keys = parsedSeries.Keys;
foreach (var key in keys.Where(key => !SeriesHelper.FindSeries(series, key))) // series.Format != key.Format || foreach (var key in keys.Where(key => !SeriesHelper.FindSeries(series, key)))
{ {
parsedSeries.Remove(key); parsedSeries.Remove(key);
} }
@ -420,7 +422,7 @@ public class ScannerService : IScannerService
_logger.LogInformation("[ScannerService] {LibraryName} scan has no work to do. All folders have not been changed since last scan", library.Name); _logger.LogInformation("[ScannerService] {LibraryName} scan has no work to do. All folders have not been changed since last scan", library.Name);
await _eventHub.SendMessageAsync(MessageFactory.Info, await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"{library.Name} scan has no work to do", MessageFactory.InfoEvent($"{library.Name} scan has no work to do",
"All folders have not been changed since last scan. Scan will be aborted.")); $"All folders have not been changed since last scan ({library.Folders.Max(f => f.LastScanned).ToString(CultureInfo.CurrentCulture)}). Scan will be aborted."));
BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForLibrary(library.Id, false)); BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForLibrary(library.Id, false));
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(library.Id, false)); BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(library.Id, false));
@ -485,7 +487,7 @@ public class ScannerService : IScannerService
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended)); await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
_logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime}. Updating database", scanElapsedTime); _logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
var time = DateTime.Now; var time = DateTime.Now;
foreach (var folderPath in library.Folders) foreach (var folderPath in library.Folders)

View File

@ -337,35 +337,42 @@ namespace API.SignalR
/// Represents a file being scanned by Kavita for processing and grouping /// Represents a file being scanned by Kavita for processing and grouping
/// </summary> /// </summary>
/// <remarks>Does not have a progress as it's unknown how many files there are. Instead sends -1 to represent indeterminate</remarks> /// <remarks>Does not have a progress as it's unknown how many files there are. Instead sends -1 to represent indeterminate</remarks>
/// <param name="filename"></param> /// <param name="folderPath"></param>
/// <param name="libraryName"></param> /// <param name="libraryName"></param>
/// <param name="eventType"></param> /// <param name="eventType"></param>
/// <returns></returns> /// <returns></returns>
public static SignalRMessage FileScanProgressEvent(string filename, string libraryName, string eventType) public static SignalRMessage FileScanProgressEvent(string folderPath, string libraryName, string eventType)
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = FileScanProgress, Name = FileScanProgress,
Title = $"Scanning {libraryName}", Title = $"Scanning {libraryName}",
SubTitle = Path.GetFileName(filename), SubTitle = folderPath,
EventType = eventType, EventType = eventType,
Progress = ProgressType.Indeterminate, Progress = ProgressType.Indeterminate,
Body = new Body = new
{ {
Title = $"Scanning {libraryName}", Title = $"Scanning {libraryName}",
Subtitle = filename, Subtitle = folderPath,
Filename = filename, Filename = folderPath,
EventTime = DateTime.Now, EventTime = DateTime.Now,
} }
}; };
} }
/// <summary>
/// This informs the UI with details about what is being processed by the Scanner
/// </summary>
/// <param name="libraryName"></param>
/// <param name="eventType"></param>
/// <param name="seriesName"></param>
/// <returns></returns>
public static SignalRMessage LibraryScanProgressEvent(string libraryName, string eventType, string seriesName = "") public static SignalRMessage LibraryScanProgressEvent(string libraryName, string eventType, string seriesName = "")
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = ScanProgress, Name = ScanProgress,
Title = $"Scanning {libraryName}", Title = $"Processing {seriesName}",
SubTitle = seriesName, SubTitle = seriesName,
EventType = eventType, EventType = eventType,
Progress = ProgressType.Indeterminate, Progress = ProgressType.Indeterminate,

View File

@ -21,12 +21,8 @@ COPY --from=copytask /files/wwwroot /kavita/wwwroot
#Installs program dependencies #Installs program dependencies
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y libicu-dev libssl1.1 libgdiplus curl \ && apt-get install -y libicu-dev libssl1.1 libgdiplus curl \
&& apt-get install -y libvips --no-install-recommends \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
#Removes the libvips.so.42 file to fix the AVX CPU requirement issue
RUN rm /kavita/libvips.so.42
COPY entrypoint.sh /entrypoint.sh COPY entrypoint.sh /entrypoint.sh
EXPOSE 5000 EXPOSE 5000

View File

@ -77,7 +77,7 @@
<!-- TODO: Move this to Plugins tab once we build out some basic tables --> <!-- TODO: Move this to Plugins tab once we build out some basic tables -->
<div class="mb-3"> <div class="mb-3">
<label for="opds" aria-describedby="opds-info" class="form-label">OPDS</label> <label for="opds" aria-describedby="opds-info" class="form-label">OPDS</label>
<p class="accent" id="opds-info">OPDS support will allow all users to use OPDS to read and download content from the server. If OPDS is enabled, a user will not need download permissions to download media while using it.</p> <p class="accent" id="opds-info">OPDS support will allow all users to use OPDS to read and download content from the server.</p>
<div class="form-check form-switch"> <div class="form-check form-switch">
<input id="opds" type="checkbox" aria-label="OPDS Support" class="form-check-input" formControlName="enableOpds"> <input id="opds" type="checkbox" aria-label="OPDS Support" class="form-check-input" formControlName="enableOpds">
<label for="opds" class="form-check-label">Enable OPDS</label> <label for="opds" class="form-check-label">Enable OPDS</label>
@ -85,7 +85,7 @@
</div> </div>
<div class="mb-3"> <div class="mb-3">
<label for="folder-watching" class="form-label" aria-describedby="folder-watching-info">Folder Watching</label> <label for="folder-watching" class="form-label" aria-describedby="folder-watching-info">Folder Watching</label><app-tag-badge [selectionMode]="TagBadgeCursor.Clickable" class="ms-1" ngbTooltip="This feature may not always work reliably">Experimental</app-tag-badge>
<p class="accent" id="folder-watching-info">Allows Kavita to monitor Library Folders to detect changes and invoke scanning on those changes. This allows content to be updated without manually invoking scans or waiting for nightly scans.</p> <p class="accent" id="folder-watching-info">Allows Kavita to monitor Library Folders to detect changes and invoke scanning on those changes. This allows content to be updated without manually invoking scans or waiting for nightly scans.</p>
<div class="form-check form-switch"> <div class="form-check form-switch">
<input id="folder-watching" type="checkbox" class="form-check-input" formControlName="enableFolderWatching" role="switch"> <input id="folder-watching" type="checkbox" class="form-check-input" formControlName="enableFolderWatching" role="switch">

View File

@ -3,6 +3,7 @@ import { FormGroup, Validators, FormControl } from '@angular/forms';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { ToastrService } from 'ngx-toastr'; import { ToastrService } from 'ngx-toastr';
import { take } from 'rxjs/operators'; import { take } from 'rxjs/operators';
import { TagBadgeCursor } from 'src/app/shared/tag-badge/tag-badge.component';
import { SettingsService } from '../settings.service'; import { SettingsService } from '../settings.service';
import { DirectoryPickerComponent, DirectoryPickerResult } from '../_modals/directory-picker/directory-picker.component'; import { DirectoryPickerComponent, DirectoryPickerResult } from '../_modals/directory-picker/directory-picker.component';
import { ServerSettings } from '../_models/server-settings'; import { ServerSettings } from '../_models/server-settings';
@ -20,6 +21,10 @@ export class ManageSettingsComponent implements OnInit {
taskFrequencies: Array<string> = []; taskFrequencies: Array<string> = [];
logLevels: Array<string> = []; logLevels: Array<string> = [];
get TagBadgeCursor() {
return TagBadgeCursor;
}
constructor(private settingsService: SettingsService, private toastr: ToastrService, constructor(private settingsService: SettingsService, private toastr: ToastrService,
private modalService: NgbModal) { } private modalService: NgbModal) { }
@ -45,6 +50,7 @@ export class ManageSettingsComponent implements OnInit {
this.settingsForm.addControl('enableSwaggerUi', new FormControl(this.serverSettings.enableSwaggerUi, [Validators.required])); this.settingsForm.addControl('enableSwaggerUi', new FormControl(this.serverSettings.enableSwaggerUi, [Validators.required]));
this.settingsForm.addControl('totalBackups', new FormControl(this.serverSettings.totalBackups, [Validators.required, Validators.min(1), Validators.max(30)])); this.settingsForm.addControl('totalBackups', new FormControl(this.serverSettings.totalBackups, [Validators.required, Validators.min(1), Validators.max(30)]));
this.settingsForm.addControl('enableFolderWatching', new FormControl(this.serverSettings.enableFolderWatching, [Validators.required])); this.settingsForm.addControl('enableFolderWatching', new FormControl(this.serverSettings.enableFolderWatching, [Validators.required]));
this.settingsForm.addControl('convertBookmarkToWebP', new FormControl(this.serverSettings.convertBookmarkToWebP, []));
}); });
} }
@ -62,6 +68,7 @@ export class ManageSettingsComponent implements OnInit {
this.settingsForm.get('enableSwaggerUi')?.setValue(this.serverSettings.enableSwaggerUi); this.settingsForm.get('enableSwaggerUi')?.setValue(this.serverSettings.enableSwaggerUi);
this.settingsForm.get('totalBackups')?.setValue(this.serverSettings.totalBackups); this.settingsForm.get('totalBackups')?.setValue(this.serverSettings.totalBackups);
this.settingsForm.get('enableFolderWatching')?.setValue(this.serverSettings.enableFolderWatching); this.settingsForm.get('enableFolderWatching')?.setValue(this.serverSettings.enableFolderWatching);
this.settingsForm.get('convertBookmarkToWebP')?.setValue(this.serverSettings.convertBookmarkToWebP);
this.settingsForm.markAsPristine(); this.settingsForm.markAsPristine();
} }

View File

@ -141,7 +141,7 @@ export class ManageUsersComponent implements OnInit, OnDestroy {
setup(member: Member) { setup(member: Member) {
this.accountService.getInviteUrl(member.id, false).subscribe(url => { this.accountService.getInviteUrl(member.id, false).subscribe(url => {
console.log('Url: ', url); console.log('Invite Url: ', url);
if (url) { if (url) {
this.router.navigateByUrl(url); this.router.navigateByUrl(url);
} }

View File

@ -68,7 +68,6 @@ export class EventsWidgetComponent implements OnInit, OnDestroy {
ngOnInit(): void { ngOnInit(): void {
this.messageHub.messages$.pipe(takeUntil(this.onDestroy)).subscribe(event => { this.messageHub.messages$.pipe(takeUntil(this.onDestroy)).subscribe(event => {
if (event.event === EVENTS.NotificationProgress) { if (event.event === EVENTS.NotificationProgress) {
console.log('[Event Widget]: Event came in ', event.payload);
this.processNotificationProgressEvent(event); this.processNotificationProgressEvent(event);
} else if (event.event === EVENTS.Error) { } else if (event.event === EVENTS.Error) {
const values = this.errorSource.getValue(); const values = this.errorSource.getValue();

View File

@ -140,6 +140,12 @@ export class UtilityService {
} }
deepEqual(object1: any, object2: any) { deepEqual(object1: any, object2: any) {
if ((object1 === null || object1 === undefined) && (object2 !== null || object2 !== undefined)) return false;
if ((object2 === null || object2 === undefined) && (object1 !== null || object1 !== undefined)) return false;
if (object1 === null && object2 === null) return true;
if (object1 === undefined && object2 === undefined) return true;
const keys1 = Object.keys(object1); const keys1 = Object.keys(object1);
const keys2 = Object.keys(object2); const keys2 = Object.keys(object2);
if (keys1.length !== keys2.length) { if (keys1.length !== keys2.length) {

View File

@ -1,4 +1,4 @@
import { ChangeDetectionStrategy, ChangeDetectorRef, Component, Input, OnInit } from '@angular/core'; import { ChangeDetectionStrategy, Component, Input } from '@angular/core';
/** /**
* What type of cursor to apply to the tag badge * What type of cursor to apply to the tag badge