diff --git a/API.Tests/Repository/SeriesRepositoryTests.cs b/API.Tests/Repository/SeriesRepositoryTests.cs new file mode 100644 index 000000000..16f365d88 --- /dev/null +++ b/API.Tests/Repository/SeriesRepositoryTests.cs @@ -0,0 +1,156 @@ +using System.Collections.Generic; +using System.Data.Common; +using System.IO.Abstractions.TestingHelpers; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.Entities; +using API.Entities.Enums; +using API.Helpers; +using API.Services; +using AutoMapper; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Repository; + +public class SeriesRepositoryTests +{ + private readonly IUnitOfWork _unitOfWork; + + private readonly DbConnection _connection; + private readonly DataContext _context; + + private const string CacheDirectory = "C:/kavita/config/cache/"; + private const string CoverImageDirectory = "C:/kavita/config/covers/"; + private const string BackupDirectory = "C:/kavita/config/backups/"; + private const string DataDirectory = "C:/data/"; + + public SeriesRepositoryTests() + { + var contextOptions = new DbContextOptionsBuilder().UseSqlite(CreateInMemoryDatabase()).Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + var config = new MapperConfiguration(cfg => cfg.AddProfile()); + var mapper = config.CreateMapper(); + _unitOfWork = new UnitOfWork(_context, mapper, null); + } + + #region Setup + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, + new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + _context.ServerSetting.Update(setting); + + var lib = new Library() + { + Name = "Manga", Folders = new List() {new FolderPath() {Path = "C:/data/"}} + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + lib + } + }); + + return await _context.SaveChangesAsync() > 0; + } + + private async Task ResetDb() + { + _context.Series.RemoveRange(_context.Series.ToList()); + _context.AppUserRating.RemoveRange(_context.AppUserRating.ToList()); + _context.Genre.RemoveRange(_context.Genre.ToList()); + _context.CollectionTag.RemoveRange(_context.CollectionTag.ToList()); + _context.Person.RemoveRange(_context.Person.ToList()); + + await _context.SaveChangesAsync(); + } + + private static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory(DataDirectory); + + return fileSystem; + } + + #endregion + + private async Task SetupSeriesData() + { + var 
library = new Library() + { + Name = "Manga", + Type = LibraryType.Manga, + Folders = new List() + { + new FolderPath() {Path = "C:/data/manga/"} + } + }; + + library.Series = new List() + { + DbFactory.Series("The Idaten Deities Know Only Peace", "Heion Sedai no Idaten-tachi"), + }; + + _unitOfWork.LibraryRepository.Add(library); + await _unitOfWork.CommitAsync(); + } + + + [InlineData("Heion Sedai no Idaten-tachi", "", "The Idaten Deities Know Only Peace")] // Matching on localized name in DB + public async Task GetFullSeriesByAnyName_Should(string seriesName, string localizedName, string? expected) + { + var firstSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1); + var series = + await _unitOfWork.SeriesRepository.GetFullSeriesByAnyName(seriesName, localizedName, + 1); + if (expected == null) + { + Assert.Null(series); + } + else + { + Assert.NotNull(series); + Assert.Equal(expected, series.Name); + } + } + +} diff --git a/API.Tests/Services/ParseScannedFilesTests.cs b/API.Tests/Services/ParseScannedFilesTests.cs index 2bcbb1271..b63205b7a 100644 --- a/API.Tests/Services/ParseScannedFilesTests.cs +++ b/API.Tests/Services/ParseScannedFilesTests.cs @@ -156,96 +156,6 @@ public class ParseScannedFilesTests #endregion - #region GetInfosByName - - [Fact] - public void GetInfosByName_ShouldReturnGivenMatchingSeriesName() - { - var fileSystem = new MockFileSystem(); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var psf = new ParseScannedFiles(Substitute.For>(), ds, - new MockReadingItemService(new DefaultParser(ds)), Substitute.For()); - - var infos = new List() - { - ParserInfoFactory.CreateParsedInfo("Accel World", "1", "0", "Accel World v1.cbz", false), - ParserInfoFactory.CreateParsedInfo("Accel World", "2", "0", "Accel World v2.cbz", false) - }; - var parsedSeries = new Dictionary> - { - { - new ParsedSeries() - { - Format = MangaFormat.Archive, - Name = "Accel World", - NormalizedName = API.Parser.Parser.Normalize("Accel World") - }, - infos - }, - { - new ParsedSeries() - { - Format = MangaFormat.Pdf, - Name = "Accel World", - NormalizedName = API.Parser.Parser.Normalize("Accel World") - }, - new List() - } - }; - - var series = DbFactory.Series("Accel World"); - series.Format = MangaFormat.Pdf; - - Assert.Empty(ParseScannedFiles.GetInfosByName(parsedSeries, series)); - - series.Format = MangaFormat.Archive; - Assert.Equal(2, ParseScannedFiles.GetInfosByName(parsedSeries, series).Count()); - - } - - [Fact] - public void GetInfosByName_ShouldReturnGivenMatchingNormalizedSeriesName() - { - var fileSystem = new MockFileSystem(); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var psf = new ParseScannedFiles(Substitute.For>(), ds, - new MockReadingItemService(new DefaultParser(ds)), Substitute.For()); - - var infos = new List() - { - ParserInfoFactory.CreateParsedInfo("Accel World", "1", "0", "Accel World v1.cbz", false), - ParserInfoFactory.CreateParsedInfo("Accel World", "2", "0", "Accel World v2.cbz", false) - }; - var parsedSeries = new Dictionary> - { - { - new ParsedSeries() - { - Format = MangaFormat.Archive, - Name = "Accel World", - NormalizedName = API.Parser.Parser.Normalize("Accel World") - }, - infos - }, - { - new ParsedSeries() - { - Format = MangaFormat.Pdf, - Name = "Accel World", - NormalizedName = API.Parser.Parser.Normalize("Accel World") - }, - new List() - } - }; - - var series = DbFactory.Series("accel world"); - series.Format = MangaFormat.Archive; - Assert.Equal(2, ParseScannedFiles.GetInfosByName(parsedSeries, 
series).Count()); - - } - - #endregion - #region MergeName // NOTE: I don't think I can test MergeName as it relies on Tracking Files, which is more complicated than I need diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs index 7b99763a2..321d0d06f 100644 --- a/API/Controllers/LibraryController.cs +++ b/API/Controllers/LibraryController.cs @@ -168,17 +168,17 @@ namespace API.Controllers [Authorize(Policy = "RequireAdminRole")] [HttpPost("scan")] - public ActionResult Scan(int libraryId) + public ActionResult Scan(int libraryId, bool force = false) { - _taskScheduler.ScanLibrary(libraryId); + _taskScheduler.ScanLibrary(libraryId, force); return Ok(); } [Authorize(Policy = "RequireAdminRole")] [HttpPost("refresh-metadata")] - public ActionResult RefreshMetadata(int libraryId) + public ActionResult RefreshMetadata(int libraryId, bool force = true) { - _taskScheduler.RefreshMetadata(libraryId); + _taskScheduler.RefreshMetadata(libraryId, force); return Ok(); } diff --git a/API/DTOs/SeriesDto.cs b/API/DTOs/SeriesDto.cs index 2904bf57c..bbf65e9fb 100644 --- a/API/DTOs/SeriesDto.cs +++ b/API/DTOs/SeriesDto.cs @@ -58,5 +58,9 @@ namespace API.DTOs /// The highest level folder for this Series /// public string FolderPath { get; set; } + /// + /// The last time the folder for this series was scanned + /// + public DateTime LastFolderScanned { get; set; } } } diff --git a/API/Data/DbFactory.cs b/API/Data/DbFactory.cs index ad97958da..58cd834ef 100644 --- a/API/Data/DbFactory.cs +++ b/API/Data/DbFactory.cs @@ -24,6 +24,26 @@ namespace API.Data OriginalName = name, LocalizedName = name, NormalizedName = Parser.Parser.Normalize(name), + NormalizedLocalizedName = Parser.Parser.Normalize(name), + SortName = name, + Volumes = new List(), + Metadata = SeriesMetadata(Array.Empty()) + }; + } + + public static Series Series(string name, string localizedName) + { + if (string.IsNullOrEmpty(localizedName)) + { + localizedName = name; + } + return new Series + { + Name = name, + OriginalName = name, + LocalizedName = localizedName, + NormalizedName = Parser.Parser.Normalize(name), + NormalizedLocalizedName = Parser.Parser.Normalize(localizedName), SortName = name, Volumes = new List(), Metadata = SeriesMetadata(Array.Empty()) diff --git a/API/Data/Repositories/SeriesRepository.cs b/API/Data/Repositories/SeriesRepository.cs index 528d46902..3324d8713 100644 --- a/API/Data/Repositories/SeriesRepository.cs +++ b/API/Data/Repositories/SeriesRepository.cs @@ -1220,15 +1220,19 @@ public class SeriesRepository : ISeriesRepository /// public Task GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId) { - var localizedSeries = Parser.Parser.Normalize(seriesName); + var normalizedSeries = Parser.Parser.Normalize(seriesName); var normalizedLocalized = Parser.Parser.Normalize(localizedName); - return _context.Series - .Where(s => s.NormalizedName.Equals(localizedSeries) - || s.NormalizedName.Equals(normalizedLocalized) - || s.NormalizedLocalizedName.Equals(localizedSeries) - || s.NormalizedLocalizedName.Equals(normalizedLocalized)) + var query = _context.Series .Where(s => s.LibraryId == libraryId) - .Include(s => s.Metadata) + .Where(s => s.NormalizedName.Equals(normalizedSeries) + || (s.NormalizedLocalizedName.Equals(normalizedSeries) && s.NormalizedLocalizedName != string.Empty)); + if (!string.IsNullOrEmpty(normalizedLocalized)) + { + query = query.Where(s => + s.NormalizedName.Equals(normalizedLocalized) || 
s.NormalizedLocalizedName.Equals(normalizedLocalized)); + } + + return query.Include(s => s.Metadata) .ThenInclude(m => m.People) .Include(s => s.Metadata) .ThenInclude(m => m.Genres) diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 5de343ea4..3c064dc11 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -492,10 +492,10 @@ namespace API.Services { var stopLookingForDirectories = false; var dirs = new Dictionary(); - foreach (var folder in libraryFolders) + foreach (var folder in libraryFolders.Select(Parser.Parser.NormalizePath)) { if (stopLookingForDirectories) break; - foreach (var file in filePaths) + foreach (var file in filePaths.Select(Parser.Parser.NormalizePath)) { if (!file.Contains(folder)) continue; diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index d419a0fa8..df7f20152 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -19,7 +19,7 @@ public interface ITaskScheduler Task ScheduleTasks(); Task ScheduleStatsTasks(); void ScheduleUpdaterTasks(); - void ScanLibrary(int libraryId); + void ScanLibrary(int libraryId, bool force = false); void CleanupChapters(int[] chapterIds); void RefreshMetadata(int libraryId, bool forceUpdate = true); void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false); @@ -174,9 +174,12 @@ public class TaskScheduler : ITaskScheduler _scannerService.ScanLibraries(); } - public void ScanLibrary(int libraryId) + public void ScanLibrary(int libraryId, bool force = false) { - if (HasAlreadyEnqueuedTask("ScannerService","ScanLibrary", new object[] {libraryId}, ScanQueue)) + var alreadyEnqueued = + HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, true}, ScanQueue) || + HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, false}, ScanQueue); + if (alreadyEnqueued) { _logger.LogInformation("A duplicate request to scan library for library occured. Skipping"); return; @@ -184,12 +187,12 @@ public class TaskScheduler : ITaskScheduler if (RunningAnyTasksByMethod(new List() {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue)) { _logger.LogInformation("A Scan is already running, rescheduling ScanLibrary in 3 hours"); - BackgroundJob.Schedule(() => ScanLibrary(libraryId), TimeSpan.FromHours(3)); + BackgroundJob.Schedule(() => ScanLibrary(libraryId, force), TimeSpan.FromHours(3)); return; } _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); - BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId)); + BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force)); // When we do a scan, force cache to re-unpack in case page numbers change BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory()); } @@ -201,7 +204,11 @@ public class TaskScheduler : ITaskScheduler public void RefreshMetadata(int libraryId, bool forceUpdate = true) { - if (HasAlreadyEnqueuedTask("MetadataService","GenerateCoversForLibrary", new object[] {libraryId, forceUpdate})) + var alreadyEnqueued = HasAlreadyEnqueuedTask("MetadataService", "GenerateCoversForLibrary", + new object[] {libraryId, true}) || + HasAlreadyEnqueuedTask("MetadataService", "GenerateCoversForLibrary", + new object[] {libraryId, false}); + if (alreadyEnqueued) { _logger.LogInformation("A duplicate request to refresh metadata for library occured. 
Skipping"); return; @@ -232,7 +239,7 @@ public class TaskScheduler : ITaskScheduler } if (RunningAnyTasksByMethod(new List() {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue)) { - _logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 mins"); + _logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 minutes"); BackgroundJob.Schedule(() => ScanSeries(libraryId, seriesId, forceUpdate), TimeSpan.FromMinutes(10)); return; } @@ -276,7 +283,7 @@ public class TaskScheduler : ITaskScheduler /// object[] of arguments in the order they are passed to enqueued job /// Queue to check against. Defaults to "default" /// - public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue) + private static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue) { var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue); return enqueuedJobs.Any(j => j.Value.InEnqueuedState && diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs index 5b46f212c..d993c2c9e 100644 --- a/API/Services/Tasks/Scanner/ParseScannedFiles.cs +++ b/API/Services/Tasks/Scanner/ParseScannedFiles.cs @@ -3,10 +3,8 @@ using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; -using API.Entities; using API.Entities.Enums; using API.Extensions; -using API.Helpers; using API.Parser; using API.SignalR; using Microsoft.Extensions.Logging; @@ -68,26 +66,6 @@ namespace API.Services.Tasks.Scanner _eventHub = eventHub; } - /// - /// Gets the list of all parserInfos given a Series (Will match on Name, LocalizedName, OriginalName). If the series does not exist within, return empty list. - /// - /// - /// - /// - public static IList GetInfosByName(Dictionary> parsedSeries, Series series) - { - var allKeys = parsedSeries.Keys.Where(ps => - SeriesHelper.FindSeries(series, ps)); - - var infos = new List(); - foreach (var key in allKeys) - { - infos.AddRange(parsedSeries[key]); - } - - return infos; - } - /// /// This will Scan all files in a folder path. For each folder within the folderPath, FolderAction will be invoked for all files contained @@ -192,7 +170,7 @@ namespace API.Services.Tasks.Scanner /// /// /// Series Name to group this info into - public string MergeName(ConcurrentDictionary> scannedSeries, ParserInfo info) + private string MergeName(ConcurrentDictionary> scannedSeries, ParserInfo info) { var normalizedSeries = Parser.Parser.Normalize(info.Series); var normalizedLocalSeries = Parser.Parser.Normalize(info.LocalizedSeries); @@ -230,7 +208,7 @@ namespace API.Services.Tasks.Scanner /// - /// This is a new version which will process series by folder groups. + /// This will process series by folder groups. 
/// /// /// @@ -263,8 +241,16 @@ namespace API.Services.Tasks.Scanner } _logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder); await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(folderPath, libraryName, ProgressEventType.Updated)); + if (files.Count == 0) + { + _logger.LogInformation("[ScannerService] {Folder} is empty", folder); + return; + } var scannedSeries = new ConcurrentDictionary>(); - var infos = files.Select(file => _readingItemService.ParseFile(file, folderPath, libraryType)).Where(info => info != null).ToList(); + var infos = files + .Select(file => _readingItemService.ParseFile(file, folderPath, libraryType)) + .Where(info => info != null) + .ToList(); MergeLocalizedSeriesWithSeries(infos); @@ -320,17 +306,36 @@ namespace API.Services.Tasks.Scanner /// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration" /// /// A collection of ParserInfos - private static void MergeLocalizedSeriesWithSeries(IReadOnlyCollection infos) + private void MergeLocalizedSeriesWithSeries(IReadOnlyCollection infos) { var hasLocalizedSeries = infos.Any(i => !string.IsNullOrEmpty(i.LocalizedSeries)); if (!hasLocalizedSeries) return; - var localizedSeries = infos.Select(i => i.LocalizedSeries).Distinct() + var localizedSeries = infos + .Where(i => !i.IsSpecial) + .Select(i => i.LocalizedSeries) + .Distinct() .FirstOrDefault(i => !string.IsNullOrEmpty(i)); if (string.IsNullOrEmpty(localizedSeries)) return; - var nonLocalizedSeries = infos.Select(i => i.Series).Distinct() - .FirstOrDefault(series => !series.Equals(localizedSeries)); + // NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves. + string nonLocalizedSeries; + var nonLocalizedSeriesFound = infos.Where(i => !i.IsSpecial).Select(i => i.Series).Distinct().ToList(); + if (nonLocalizedSeriesFound.Count == 1) + { + nonLocalizedSeries = nonLocalizedSeriesFound.First(); + } + else + { + // There can be a case where there are multiple series in a folder that causes merging. + if (nonLocalizedSeriesFound.Count > 2) + { + _logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder: {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound)); + } + nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries)); + } + + if (string.IsNullOrEmpty(nonLocalizedSeries)) return; var normalizedNonLocalizedSeries = Parser.Parser.Normalize(nonLocalizedSeries); foreach (var infoNeedingMapping in infos.Where(i => diff --git a/API/Services/Tasks/Scanner/ProcessSeries.cs b/API/Services/Tasks/Scanner/ProcessSeries.cs index 29b9cab1d..bebfae4ea 100644 --- a/API/Services/Tasks/Scanner/ProcessSeries.cs +++ b/API/Services/Tasks/Scanner/ProcessSeries.cs @@ -88,7 +88,7 @@ public class ProcessSeries : IProcessSeries // Check if there is a Series var firstInfo = parsedInfos.First(); - Series series = null; + Series series; try { series = @@ -97,29 +97,29 @@ public class ProcessSeries : IProcessSeries } catch (Exception ex) { - _logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName}. This indicates you have duplicate series with same name or localized name in the library. 
Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries); + _logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName} for library {LibraryId}. This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries, library.Id); + await _eventHub.SendMessageAsync(MessageFactory.Error, + MessageFactory.ErrorEvent($"There was an exception finding existing series for {firstInfo.Series} with Localized name of {firstInfo.LocalizedSeries} for library {library.Id}", + "This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan.")); return; } if (series == null) { seriesAdded = true; - series = DbFactory.Series(firstInfo.Series); - series.LocalizedName = firstInfo.LocalizedSeries; + series = DbFactory.Series(firstInfo.Series, firstInfo.LocalizedSeries); } if (series.LibraryId == 0) series.LibraryId = library.Id; try { - _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName); UpdateVolumes(series, parsedInfos); series.Pages = series.Volumes.Sum(v => v.Pages); series.NormalizedName = Parser.Parser.Normalize(series.Name); - series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName); series.OriginalName ??= parsedInfos[0].Series; if (series.Format == MangaFormat.Unknown) { @@ -144,13 +144,17 @@ public class ProcessSeries : IProcessSeries if (!series.LocalizedNameLocked && !string.IsNullOrEmpty(localizedSeries)) { series.LocalizedName = localizedSeries; + series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName); } // Update series FolderPath here (TODO: Move this into it's own private method) var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(l => l.Path), parsedInfos.Select(f => f.FullFilePath).ToList()); if (seriesDirs.Keys.Count == 0) { - _logger.LogCritical("Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are in a folder"); + _logger.LogCritical("Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are under a single folder from library"); + await _eventHub.SendMessageAsync(MessageFactory.Info, + MessageFactory.InfoEvent($"{series.Name} has files spread outside a single series folder", + "This has negative performance effects. 
Please ensure all series are under a single folder from library")); } else { diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 332270fef..54953cbdf 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -29,7 +29,7 @@ public interface IScannerService [Queue(TaskScheduler.ScanQueue)] [DisableConcurrentExecution(60 * 60 * 60)] [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] - Task ScanLibrary(int libraryId); + Task ScanLibrary(int libraryId, bool forceUpdate = false); [Queue(TaskScheduler.ScanQueue)] [DisableConcurrentExecution(60 * 60 * 60)] @@ -62,6 +62,10 @@ public enum ScanCancelReason /// There has been no change to the filesystem since last scan /// NoChange = 2, + /// + /// The underlying folder is missing + /// + FolderMissing = 3 } /** @@ -117,10 +121,15 @@ public class ScannerService : IScannerService var library = libraries.FirstOrDefault(l => l.Folders.Select(Parser.Parser.NormalizePath).Contains(libraryFolder)); if (library != null) { - BackgroundJob.Enqueue(() => ScanLibrary(library.Id)); + BackgroundJob.Enqueue(() => ScanLibrary(library.Id, false)); } } + /// + /// + /// + /// + /// Not Used. Scan series will always force [Queue(TaskScheduler.ScanQueue)] public async Task ScanSeries(int seriesId, bool bypassFolderOptimizationChecks = true) { @@ -130,12 +139,7 @@ public class ScannerService : IScannerService var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId}); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders); var libraryPaths = library.Folders.Select(f => f.Path).ToList(); - if (await ShouldScanSeries(seriesId, library, libraryPaths, series, bypassFolderOptimizationChecks) != ScanCancelReason.NoCancel) return; - - - var parsedSeries = new Dictionary>(); - var seenSeries = new List(); - var processTasks = new List(); + if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel) return; var folderPath = series.FolderPath; if (string.IsNullOrEmpty(folderPath) || !_directoryService.Exists(folderPath)) @@ -150,22 +154,32 @@ public class ScannerService : IScannerService } folderPath = seriesDirs.Keys.FirstOrDefault(); + + // We should check if folderPath is a library folder path and if so, return early and tell user to correct their setup. + if (libraryPaths.Contains(folderPath)) + { + _logger.LogCritical("[ScannerSeries] {SeriesName} scan aborted. Files for series are not in a nested folder under library path. Correct this and rescan", series.Name); + await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Files for series are not in a nested folder under library path. 
Correct this and rescan.")); + return; + } } if (string.IsNullOrEmpty(folderPath)) { - _logger.LogCritical("Scan Series could not find a single, valid folder root for files"); + _logger.LogCritical("[ScannerSeries] Scan Series could not find a single, valid folder root for files"); await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Scan Series could not find a single, valid folder root for files")); return; } + var parsedSeries = new Dictionary>(); + var processTasks = new List(); + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name)); await _processSeries.Prime(); void TrackFiles(Tuple> parsedInfo) { - var skippedScan = parsedInfo.Item1; var parsedFiles = parsedInfo.Item2; if (parsedFiles.Count == 0) return; @@ -176,44 +190,21 @@ public class ScannerService : IScannerService Format = parsedFiles.First().Format }; - if (skippedScan) + if (!foundParsedSeries.NormalizedName.Equals(series.NormalizedName)) { - seenSeries.AddRange(parsedFiles.Select(pf => new ParsedSeries() - { - Name = pf.Series, - NormalizedName = Parser.Parser.Normalize(pf.Series), - Format = pf.Format - })); return; } - seenSeries.Add(foundParsedSeries); processTasks.Add(_processSeries.ProcessSeriesAsync(parsedFiles, library)); parsedSeries.Add(foundParsedSeries, parsedFiles); } _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name); - var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, bypassFolderOptimizationChecks); + var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, true); _logger.LogInformation("ScanFiles for {Series} took {Time}", series.Name, scanElapsedTime); await Task.WhenAll(processTasks); - // At this point, we've already inserted the series into the DB OR we haven't and seenSeries has our series - // We now need to do any leftover work, like removing - // We need to handle if parsedSeries is empty but seenSeries has our series - if (seenSeries.Any(s => s.NormalizedName.Equals(series.NormalizedName)) && parsedSeries.Keys.Count == 0) - { - // Nothing has changed - _logger.LogInformation("[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan", series.Name); - await _eventHub.SendMessageAsync(MessageFactory.Info, - MessageFactory.InfoEvent($"{series.Name} scan has no work to do", - "All folders have not been changed since last scan. Scan will be aborted.")); - - _processSeries.EnqueuePostSeriesProcessTasks(series.LibraryId, seriesId, false); - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name)); - return; - } - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name)); // Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder @@ -222,8 +213,8 @@ public class ScannerService : IScannerService // If nothing was found, first validate any of the files still exist. 
If they don't then we have a deletion and can skip the rest of the logic flow if (parsedSeries.Count == 0) { - var anyFilesExist = - (await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id)).Any(m => File.Exists(m.FilePath)); + var seriesFiles = (await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id)); + var anyFilesExist = seriesFiles.Where(f => f.FilePath.Contains(series.FolderPath)).Any(m => File.Exists(m.FilePath)); if (!anyFilesExist) { @@ -287,21 +278,34 @@ public class ScannerService : IScannerService } // If all series Folder paths haven't been modified since last scan, abort - // NOTE: On windows, the parent folder will not update LastWriteTime if a subfolder was updated with files. Need to do a bit of light I/O. if (!bypassFolderChecks) { var allFolders = seriesFolderPaths.SelectMany(path => _directoryService.GetDirectories(path)).ToList(); allFolders.AddRange(seriesFolderPaths); - if (allFolders.All(folder => _directoryService.GetLastWriteTime(folder) <= series.LastFolderScanned)) + try { - _logger.LogInformation( - "[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan", + if (allFolders.All(folder => _directoryService.GetLastWriteTime(folder) <= series.LastFolderScanned)) + { + _logger.LogInformation( + "[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan", + series.Name); + await _eventHub.SendMessageAsync(MessageFactory.Info, + MessageFactory.InfoEvent($"{series.Name} scan has no work to do", + "All folders have not been changed since last scan. Scan will be aborted.")); + return ScanCancelReason.NoChange; + } + } + catch (IOException ex) + { + // If there is an exception it means that the folder doesn't exist. So we should delete the series + _logger.LogError(ex, "[ScannerService] Scan series for {SeriesName} found the folder path no longer exists", series.Name); await _eventHub.SendMessageAsync(MessageFactory.Info, - MessageFactory.InfoEvent($"{series.Name} scan has no work to do", "All folders have not been changed since last scan. Scan will be aborted.")); - return ScanCancelReason.NoChange; + MessageFactory.ErrorEvent($"{series.Name} scan has no work to do", + "The folder the series is in is missing. 
Delete series manually or perform a library scan.")); + return ScanCancelReason.NoCancel; } } @@ -393,7 +397,7 @@ public class ScannerService : IScannerService [Queue(TaskScheduler.ScanQueue)] [DisableConcurrentExecution(60 * 60 * 60)] [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] - public async Task ScanLibrary(int libraryId) + public async Task ScanLibrary(int libraryId, bool forceUpdate = false) { var sw = Stopwatch.StartNew(); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders); @@ -405,7 +409,7 @@ public class ScannerService : IScannerService var wasLibraryUpdatedSinceLastScan = (library.LastModified.Truncate(TimeSpan.TicksPerMinute) > library.LastScanned.Truncate(TimeSpan.TicksPerMinute)) && library.LastScanned != DateTime.MinValue; - if (!wasLibraryUpdatedSinceLastScan) + if (!forceUpdate && !wasLibraryUpdatedSinceLastScan) { var haveFoldersChangedSinceLastScan = library.Folders .All(f => _directoryService.GetLastWriteTime(f.Path).Truncate(TimeSpan.TicksPerMinute) > f.LastScanned.Truncate(TimeSpan.TicksPerMinute)); diff --git a/UI/Web/src/app/_models/series.ts b/UI/Web/src/app/_models/series.ts index ae52f902a..9c3c9bd7e 100644 --- a/UI/Web/src/app/_models/series.ts +++ b/UI/Web/src/app/_models/series.ts @@ -48,6 +48,10 @@ export interface Series { * DateTime representing last time a chapter was added to the Series */ lastChapterAdded: string; + /** + * DateTime representing last time the series folder was scanned + */ + lastFolderScanned: string; /** * Number of words in the series */ diff --git a/UI/Web/src/app/_services/action-factory.service.ts b/UI/Web/src/app/_services/action-factory.service.ts index 9223c57ac..6b38dbaa4 100644 --- a/UI/Web/src/app/_services/action-factory.service.ts +++ b/UI/Web/src/app/_services/action-factory.service.ts @@ -18,9 +18,9 @@ export enum Action { */ MarkAsUnread = 1, /** - * Invoke a Scan Library + * Invoke a Scan on Series/Library */ - ScanLibrary = 2, + Scan = 2, /** * Delete the entity */ @@ -129,7 +129,7 @@ export class ActionFactoryService { }); this.seriesActions.push({ - action: Action.ScanLibrary, + action: Action.Scan, title: 'Scan Series', callback: this.dummyCallback, requiresAdmin: true @@ -171,7 +171,7 @@ export class ActionFactoryService { }); this.libraryActions.push({ - action: Action.ScanLibrary, + action: Action.Scan, title: 'Scan Library', callback: this.dummyCallback, requiresAdmin: true diff --git a/UI/Web/src/app/_services/action.service.ts b/UI/Web/src/app/_services/action.service.ts index d863887a4..ba905174c 100644 --- a/UI/Web/src/app/_services/action.service.ts +++ b/UI/Web/src/app/_services/action.service.ts @@ -52,11 +52,15 @@ export class ActionService implements OnDestroy { * @param callback Optional callback to perform actions after API completes * @returns */ - scanLibrary(library: Partial, callback?: LibraryActionCallback) { + async scanLibrary(library: Partial, callback?: LibraryActionCallback) { if (!library.hasOwnProperty('id') || library.id === undefined) { return; } - this.libraryService.scan(library?.id).pipe(take(1)).subscribe((res: any) => { + + // Prompt user if we should do a force or not + const force = false; // await this.promptIfForce(); + + this.libraryService.scan(library.id, force).pipe(take(1)).subscribe((res: any) => { this.toastr.info('Scan queued for ' + library.name); if (callback) { callback(library); @@ -83,7 +87,9 @@ export class ActionService implements OnDestroy { return; } - 
this.libraryService.refreshMetadata(library?.id).pipe(take(1)).subscribe((res: any) => { + const forceUpdate = true; //await this.promptIfForce(); + + this.libraryService.refreshMetadata(library?.id, forceUpdate).pipe(take(1)).subscribe((res: any) => { this.toastr.info('Scan queued for ' + library.name); if (callback) { callback(library); } }); @@ -152,7 +158,7 @@ * @param series Series, must have libraryId and name populated * @param callback Optional callback to perform actions after API completes */ - scanSeries(series: Series, callback?: SeriesActionCallback) { + async scanSeries(series: Series, callback?: SeriesActionCallback) { this.seriesService.scan(series.libraryId, series.id).pipe(take(1)).subscribe((res: any) => { this.toastr.info('Scan queued for ' + series.name); if (callback) { @@ -545,4 +551,16 @@ } }); } + + private async promptIfForce(extraContent: string = '') { + // Prompt user if we should do a force or not + const config = this.confirmService.defaultConfirm; + config.header = 'Force Scan'; + config.buttons = [ + {text: 'Yes', type: 'secondary'}, + {text: 'No', type: 'primary'}, + ]; + const msg = 'Do you want to force this scan? This will ignore optimizations that reduce processing and I/O. ' + extraContent; + return !await this.confirmService.confirm(msg, config); // Negated because the primary ('No') button maps to the false state + } } diff --git a/UI/Web/src/app/_services/library.service.ts b/UI/Web/src/app/_services/library.service.ts index ce03c2666..5aac12cfd 100644 --- a/UI/Web/src/app/_services/library.service.ts +++ b/UI/Web/src/app/_services/library.service.ts @@ -76,16 +76,16 @@ export class LibraryService { return this.httpClient.post(this.baseUrl + 'library/grant-access', {username, selectedLibraries}); } - scan(libraryId: number) { - return this.httpClient.post(this.baseUrl + 'library/scan?libraryId=' + libraryId, {}); + scan(libraryId: number, force = false) { + return this.httpClient.post(this.baseUrl + 'library/scan?libraryId=' + libraryId + '&force=' + force, {}); } analyze(libraryId: number) { return this.httpClient.post(this.baseUrl + 'library/analyze?libraryId=' + libraryId, {}); } - refreshMetadata(libraryId: number) { - return this.httpClient.post(this.baseUrl + 'library/refresh-metadata?libraryId=' + libraryId, {}); + refreshMetadata(libraryId: number, forceUpdate = false) { + return this.httpClient.post(this.baseUrl + 'library/refresh-metadata?libraryId=' + libraryId + '&force=' + forceUpdate, {}); } create(model: {name: string, type: number, folders: string[]}) { diff --git a/UI/Web/src/app/_services/series.service.ts b/UI/Web/src/app/_services/series.service.ts index 2c7cbe71c..cc9c4ef60 100644 --- a/UI/Web/src/app/_services/series.service.ts +++ b/UI/Web/src/app/_services/series.service.ts @@ -153,8 +153,8 @@ export class SeriesService { return this.httpClient.post(this.baseUrl + 'series/refresh-metadata', {libraryId: series.libraryId, seriesId: series.id}); } - scan(libraryId: number, seriesId: number) { - return this.httpClient.post(this.baseUrl + 'series/scan', {libraryId: libraryId, seriesId: seriesId}); + scan(libraryId: number, seriesId: number, force = false) { + return this.httpClient.post(this.baseUrl + 'series/scan', {libraryId: libraryId, seriesId: seriesId, forceUpdate: force}); } analyzeFiles(libraryId: number, seriesId: number) { diff --git a/UI/Web/src/app/cards/_modals/edit-series-modal/edit-series-modal.component.html 
b/UI/Web/src/app/cards/_modals/edit-series-modal/edit-series-modal.component.html index 7194667f9..cb60bfb40 100644 --- a/UI/Web/src/app/cards/_modals/edit-series-modal/edit-series-modal.component.html +++ b/UI/Web/src/app/cards/_modals/edit-series-modal/edit-series-modal.component.html @@ -344,9 +344,10 @@
         Format: {{series.format | mangaFormat}}
-        Created: {{series.created | date:'shortDate'}}
+        Created: {{series.created | date:'shortDate'}}
         Last Read: {{series.latestReadDate | date:'shortDate' | defaultDate}}
-        Last Added To: {{series.lastChapterAdded | date:'shortDate' | defaultDate}}
+        Last Added To: {{series.lastChapterAdded | date:'short' | defaultDate}}
+        Last Scanned: {{series.lastFolderScanned | date:'short' | defaultDate}}
         Folder Path: {{series.folderPath | defaultValue}}
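Reviewer note on the UI plumbing above: `LibraryService.scan()` and `refreshMetadata()` now forward a `force` flag to the API as a query-string parameter, matching the new `force` parameter on `LibraryController.Scan` and `RefreshMetadata`. A minimal, hypothetical usage sketch of the updated service follows; the component wrapper, import paths, and toastr wiring are illustrative assumptions and not part of this diff:

```typescript
// Hypothetical caller of the updated LibraryService -- illustrative only.
// Only scan(libraryId, force) and its query string come from this diff;
// the import paths and component shell are assumptions.
import { Component } from '@angular/core';
import { take } from 'rxjs/operators';
import { ToastrService } from 'ngx-toastr';
import { LibraryService } from 'src/app/_services/library.service';

@Component({ selector: 'app-force-scan-demo', template: '' })
export class ForceScanDemoComponent {
  constructor(private libraryService: LibraryService, private toastr: ToastrService) {}

  scan(libraryId: number, force: boolean) {
    // force=true issues POST /api/library/scan?libraryId=<id>&force=true, which asks
    // ScannerService.ScanLibrary to skip the "no changes since last scan" short-circuit.
    this.libraryService.scan(libraryId, force).pipe(take(1)).subscribe(() => {
      this.toastr.info('Scan queued');
    });
  }
}
```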
diff --git a/UI/Web/src/app/cards/card-item/card-actionables/card-actionables.component.html b/UI/Web/src/app/cards/card-item/card-actionables/card-actionables.component.html index 504366f5d..cd07213e9 100644 --- a/UI/Web/src/app/cards/card-item/card-actionables/card-actionables.component.html +++ b/UI/Web/src/app/cards/card-item/card-actionables/card-actionables.component.html @@ -7,5 +7,5 @@
- + \ No newline at end of file diff --git a/UI/Web/src/app/cards/series-card/series-card.component.ts b/UI/Web/src/app/cards/series-card/series-card.component.ts index d7b1c98bd..40767c661 100644 --- a/UI/Web/src/app/cards/series-card/series-card.component.ts +++ b/UI/Web/src/app/cards/series-card/series-card.component.ts @@ -82,7 +82,7 @@ export class SeriesCardComponent implements OnInit, OnChanges, OnDestroy { case(Action.MarkAsUnread): this.markAsUnread(series); break; - case(Action.ScanLibrary): + case(Action.Scan): this.scanLibrary(series); break; case(Action.RefreshMetadata): diff --git a/UI/Web/src/app/library-detail/library-detail.component.ts b/UI/Web/src/app/library-detail/library-detail.component.ts index af2f1d996..eb62bacae 100644 --- a/UI/Web/src/app/library-detail/library-detail.component.ts +++ b/UI/Web/src/app/library-detail/library-detail.component.ts @@ -203,7 +203,7 @@ export class LibraryDetailComponent implements OnInit, OnDestroy { lib = {id: this.libraryId, name: this.libraryName}; } switch (action) { - case(Action.ScanLibrary): + case(Action.Scan): this.actionService.scanLibrary(lib); break; case(Action.RefreshMetadata): diff --git a/UI/Web/src/app/nav/events-widget/events-widget.component.ts b/UI/Web/src/app/nav/events-widget/events-widget.component.ts index a86d0dc93..2f7bfef62 100644 --- a/UI/Web/src/app/nav/events-widget/events-widget.component.ts +++ b/UI/Web/src/app/nav/events-widget/events-widget.component.ts @@ -152,11 +152,15 @@ export class EventsWidgetComponent implements OnInit, OnDestroy { async seeMore(event: ErrorEvent | InfoEvent) { const config = new ConfirmConfig(); - config.buttons = [ - {text: 'Ok', type: 'secondary'}, - ]; if (event.name === EVENTS.Error) { - config.buttons = [{text: 'Dismiss', type: 'primary'}, ...config.buttons]; + config.buttons = [ + {text: 'Ok', type: 'secondary'}, + {text: 'Dismiss', type: 'primary'} + ]; + } else { + config.buttons = [ + {text: 'Ok', type: 'primary'}, + ]; } config.header = event.title; config.content = event.subTitle; diff --git a/UI/Web/src/app/series-detail/series-detail.component.ts b/UI/Web/src/app/series-detail/series-detail.component.ts index 5c0c81232..bec4cb6b4 100644 --- a/UI/Web/src/app/series-detail/series-detail.component.ts +++ b/UI/Web/src/app/series-detail/series-detail.component.ts @@ -345,7 +345,7 @@ export class SeriesDetailComponent implements OnInit, OnDestroy, AfterContentChe this.loadSeries(series.id); }); break; - case(Action.ScanLibrary): + case(Action.Scan): this.actionService.scanSeries(series, () => { this.actionInProgress = false; this.changeDetectionRef.markForCheck(); diff --git a/UI/Web/src/app/shared/confirm-dialog/_models/confirm-button.ts b/UI/Web/src/app/shared/confirm-dialog/_models/confirm-button.ts index a54ace910..12352ad58 100644 --- a/UI/Web/src/app/shared/confirm-dialog/_models/confirm-button.ts +++ b/UI/Web/src/app/shared/confirm-dialog/_models/confirm-button.ts @@ -3,5 +3,5 @@ export interface ConfirmButton { /** * Type for css class. 
ie) primary, secondary */ - type: string; + type: 'secondary' | 'primary'; } \ No newline at end of file diff --git a/UI/Web/src/app/shared/confirm-dialog/_models/confirm-config.ts b/UI/Web/src/app/shared/confirm-dialog/_models/confirm-config.ts index fe8d989af..9f0d2db8e 100644 --- a/UI/Web/src/app/shared/confirm-dialog/_models/confirm-config.ts +++ b/UI/Web/src/app/shared/confirm-dialog/_models/confirm-config.ts @@ -5,4 +5,8 @@ export class ConfirmConfig { header: string = 'Confirm'; content: string = ''; buttons: Array = []; + /** + * If the close button shouldn't be rendered + */ + disableEscape: boolean = false; } diff --git a/UI/Web/src/app/shared/confirm-dialog/confirm-dialog.component.html b/UI/Web/src/app/shared/confirm-dialog/confirm-dialog.component.html index 30d38d2f9..2f4754513 100644 --- a/UI/Web/src/app/shared/confirm-dialog/confirm-dialog.component.html +++ b/UI/Web/src/app/shared/confirm-dialog/confirm-dialog.component.html @@ -2,9 +2,7 @@ @@ -12,5 +10,4 @@
- diff --git a/UI/Web/src/app/shared/confirm.service.ts b/UI/Web/src/app/shared/confirm.service.ts index f1cbbb881..48b7dbc2a 100644 --- a/UI/Web/src/app/shared/confirm.service.ts +++ b/UI/Web/src/app/shared/confirm.service.ts @@ -34,6 +34,9 @@ export class ConfirmService { config = this.defaultConfirm; config.content = content; } + if (content !== undefined && content !== '' && config!.content === '') { + config!.content = content; + } const modalRef = this.modalService.open(ConfirmDialogComponent); modalRef.componentInstance.config = config; diff --git a/UI/Web/src/app/sidenav/side-nav/side-nav.component.ts b/UI/Web/src/app/sidenav/side-nav/side-nav.component.ts index 1403e007e..0635ec55a 100644 --- a/UI/Web/src/app/sidenav/side-nav/side-nav.component.ts +++ b/UI/Web/src/app/sidenav/side-nav/side-nav.component.ts @@ -78,7 +78,7 @@ export class SideNavComponent implements OnInit, OnDestroy { handleAction(action: Action, library: Library) { switch (action) { - case(Action.ScanLibrary): + case(Action.Scan): this.actionService.scanLibrary(library); break; case(Action.RefreshMetadata):
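Reviewer note: `ActionService.promptIfForce()` is introduced in this diff but not yet used; `scanLibrary()` hard-codes `force = false` and the metadata refresh path hard-codes `forceUpdate = true`, with the prompt call commented out. Below is a hedged sketch of how the prompt could be wired into `scanLibrary()` once enabled. It assumes `ConfirmService.confirm()` resolves to true when the primary button is clicked (as the negation inside `promptIfForce` implies) and is a possible revision of the method shown earlier, not part of this diff:

```typescript
// Illustrative sketch only -- a possible future revision of ActionService.scanLibrary.
// It assumes promptIfForce() and LibraryService.scan(libraryId, force) as introduced above.
async scanLibrary(library: Partial<Library>, callback?: LibraryActionCallback) {
  if (!library.hasOwnProperty('id') || library.id === undefined) {
    return;
  }

  // Ask the user whether to bypass the "no changes since last scan" optimization.
  const force = await this.promptIfForce();

  this.libraryService.scan(library.id, force).pipe(take(1)).subscribe(() => {
    this.toastr.info('Scan queued for ' + library.name);
    if (callback) {
      callback(library);
    }
  });
}
```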