diff --git a/API.Tests/API.Tests.csproj b/API.Tests/API.Tests.csproj index 73a19fd5d..e01bab216 100644 --- a/API.Tests/API.Tests.csproj +++ b/API.Tests/API.Tests.csproj @@ -30,4 +30,8 @@ + + + + diff --git a/API.Tests/Extensions/Test Data/modified on run.txt b/API.Tests/Extensions/Test Data/modified on run.txt deleted file mode 100644 index d6a609edc..000000000 --- a/API.Tests/Extensions/Test Data/modified on run.txt +++ /dev/null @@ -1,3 +0,0 @@ -This file should be modified by the unit test08/20/2021 10:26:03 -08/20/2021 10:26:29 -08/22/2021 12:39:58 diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parser/ComicParserTests.cs index a18ea21c9..c0de4755a 100644 --- a/API.Tests/Parser/ComicParserTests.cs +++ b/API.Tests/Parser/ComicParserTests.cs @@ -23,27 +23,34 @@ namespace API.Tests.Parser [InlineData("Amazing Man Comics chapter 25", "Amazing Man Comics")] [InlineData("Amazing Man Comics issue #25", "Amazing Man Comics")] [InlineData("Teen Titans v1 038 (1972) (c2c).cbr", "Teen Titans")] + [InlineData("Batman Beyond 02 (of 6) (1999)", "Batman Beyond")] + [InlineData("Batman Beyond - Return of the Joker (2001)", "Batman Beyond - Return of the Joker")] + [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "Invincible")] + [InlineData("Batman Wayne Family Adventures - Ep. 001 - Moving In", "Batman Wayne Family Adventures")] + [InlineData("Saga 001 (2012) (Digital) (Empire-Zone).cbr", "Saga")] public void ParseComicSeriesTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename)); } [Theory] - [InlineData("01 Spider-Man & Wolverine 01.cbr", "1")] - [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "4")] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] - [InlineData("Batman & Catwoman - Trail of the Gun 01", "1")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")] [InlineData("Batman & Daredevil - King of New York", "0")] - [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")] [InlineData("Batman & Robin the Teen Wonder #0", "0")] [InlineData("Batman & Wildcat (1 of 3)", "0")] - [InlineData("Batman And Superman World's Finest #01", "1")] - [InlineData("Babe 01", "1")] - [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] + [InlineData("Batman And Superman World's Finest #01", "0")] + [InlineData("Babe 01", "0")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")] [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] - [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "2")] + [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "0")] [InlineData("Superman v1 024 (09-10 1943)", "1")] [InlineData("Amazing Man Comics chapter 25", "0")] + [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")] + [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")] public void ParseComicVolumeTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename)); @@ -66,6 +73,9 @@ namespace API.Tests.Parser [InlineData("Superman v1 024 (09-10 1943)", "24")] [InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")] [InlineData("Amazing Man Comics chapter 25", "25")] + [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "33.5")] + [InlineData("Batman Wayne Family Adventures - Ep. 014 - Moving In", "14")] + [InlineData("Saga 001 (2012) (Digital) (Empire-Zone)", "1")] public void ParseComicChapterTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename)); diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parser/MangaParserTests.cs index 720dffd71..85e73add9 100644 --- a/API.Tests/Parser/MangaParserTests.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -159,7 +159,13 @@ namespace API.Tests.Parser [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "Hentai Ouji to Warawanai Neko.")] [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "The 100 Girlfriends Who Really, Really, Really, Really, Really Love You")] [InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo")] + [InlineData("The Duke of Death and His Black Maid - Ch. 177 - The Ball (3).cbz", "The Duke of Death and His Black Maid")] [InlineData("A Compendium of Ghosts - 031 - The Third Story_ Part 12 (Digital) (Cobalt001)", "A Compendium of Ghosts")] + [InlineData("The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake", "The Duke of Death and His Black Maid")] + [InlineData("Vol. 04 Ch. 054.5", "")] + [InlineData("Great_Teacher_Onizuka_v16[TheSpectrum]", "Great Teacher Onizuka")] + [InlineData("[Renzokusei]_Kimi_wa_Midara_na_Boku_no_Joou_Ch5_Final_Chapter", "Kimi wa Midara na Boku no Joou")] + [InlineData("Battle Royale, v01 (2000) [TokyoPop] [Manga-Sketchbook]", "Battle Royale")] public void ParseSeriesTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); @@ -412,6 +418,22 @@ namespace API.Tests.Parser FullFilePath = filepath, IsSpecial = false }); + filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "", + Chapters = "0", Filename = "Vol. 00 Ch. 000.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Toukyou Akazukin", Volumes = "1", Edition = "", + Chapters = "1", Filename = "Vol. 01 Ch. 
001.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + // If an image is cover exclusively, ignore it filepath = @"E:\Manga\Seraph of the End\cover.png"; expected.Add(filepath, null); diff --git a/API.Tests/Services/DirectoryServiceTests.cs b/API.Tests/Services/DirectoryServiceTests.cs index 4dcb77dec..db756ebab 100644 --- a/API.Tests/Services/DirectoryServiceTests.cs +++ b/API.Tests/Services/DirectoryServiceTests.cs @@ -89,6 +89,15 @@ namespace API.Tests.Services } + [Theory] + [InlineData(new string[] {"C:/Manga/"}, new string[] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")] + public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] folders, string expectedDirectory) + { + var actual = DirectoryService.FindHighestDirectoriesFromFiles(rootDirectories, folders); + var expected = new Dictionary {{expectedDirectory, ""}}; + Assert.Equal(expected, actual); + } + [Theory] [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake", "Omake,Specials,Love Hina")] @@ -102,6 +111,7 @@ namespace API.Tests.Services [InlineData(@"C:/", @"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] [InlineData(@"C:\\", @"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] [InlineData(@"C://mount/gdrive/Library/Test Library/Comics", @"C://mount/gdrive/Library/Test Library/Comics/Dragon Age/Test", "Test,Dragon Age")] + [InlineData(@"M:\", @"M:\Toukyou Akazukin\Vol. 01 Ch. 005.cbz", @"Toukyou Akazukin")] public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray) { var expected = expectedArray.Split(","); diff --git a/API.Tests/Services/MetadataServiceTests.cs b/API.Tests/Services/MetadataServiceTests.cs index 796201538..4c447885f 100644 --- a/API.Tests/Services/MetadataServiceTests.cs +++ b/API.Tests/Services/MetadataServiceTests.cs @@ -4,6 +4,8 @@ using API.Entities; using API.Interfaces; using API.Interfaces.Services; using API.Services; +using API.SignalR; +using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; @@ -19,10 +21,11 @@ namespace API.Tests.Services private readonly IBookService _bookService = Substitute.For(); private readonly IArchiveService _archiveService = Substitute.For(); private readonly ILogger _logger = Substitute.For>(); + private readonly IHubContext _messageHub = Substitute.For>(); public MetadataServiceTests() { - _metadataService = new MetadataService(_unitOfWork, _logger, _archiveService, _bookService, _imageService); + _metadataService = new MetadataService(_unitOfWork, _logger, _archiveService, _bookService, _imageService, _messageHub); } [Fact] @@ -108,5 +111,16 @@ namespace API.Tests.Services LastModified = new FileInfo(Path.Join(_testDirectory, "file in folder.zip")).LastWriteTime }, false, false)); } + + [Fact] + + public void ShouldUpdateCoverImage_OnSecondRun_HasCoverImage_NoForceUpdate_NoLock() + { + Assert.False(MetadataService.ShouldUpdateCoverImage(new byte[] {1}, new MangaFile() + { + FilePath = Path.Join(_testDirectory, "file in folder.zip"), + LastModified = DateTime.Now + }, false, false)); + } } } diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs index 2c7a999f0..93b254c8e 100644 --- a/API.Tests/Services/ScannerServiceTests.cs +++ b/API.Tests/Services/ScannerServiceTests.cs @@ -14,8 +14,10 @@ using API.Parser; using API.Services; using 
API.Services.Tasks; using API.Services.Tasks.Scanner; +using API.SignalR; using API.Tests.Helpers; using AutoMapper; +using Microsoft.AspNetCore.SignalR; using Microsoft.Data.Sqlite; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Infrastructure; @@ -34,6 +36,7 @@ namespace API.Tests.Services private readonly IImageService _imageService = Substitute.For(); private readonly ILogger _metadataLogger = Substitute.For>(); private readonly ICacheService _cacheService = Substitute.For(); + private readonly IHubContext _messageHub = Substitute.For>(); private readonly DbConnection _connection; private readonly DataContext _context; @@ -52,8 +55,8 @@ namespace API.Tests.Services IUnitOfWork unitOfWork = new UnitOfWork(_context, Substitute.For(), null); - IMetadataService metadataService = Substitute.For(unitOfWork, _metadataLogger, _archiveService, _bookService, _imageService); - _scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService, _cacheService); + IMetadataService metadataService = Substitute.For(unitOfWork, _metadataLogger, _archiveService, _bookService, _imageService, _messageHub); + _scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService, _cacheService, _messageHub); } private async Task SeedDb() @@ -111,6 +114,7 @@ namespace API.Tests.Services Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos)); } + // TODO: Figure out how to do this with ParseScannedFiles // [Theory] // [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")] diff --git a/API/Comparators/NaturalSortComparer.cs b/API/Comparators/NaturalSortComparer.cs index e558f94ae..9bf79db81 100644 --- a/API/Comparators/NaturalSortComparer.cs +++ b/API/Comparators/NaturalSortComparer.cs @@ -23,7 +23,7 @@ namespace API.Comparators { if (x == y) return 0; - // BUG: Operations that change non-concurrent collections must have exclusive access. A concurrent update was performed on this collection and corrupted its state. The collection's state is no longer correct. + // Should be fixed: Operations that change non-concurrent collections must have exclusive access. A concurrent update was performed on this collection and corrupted its state. The collection's state is no longer correct. if (!_table.TryGetValue(x ?? Empty, out var x1)) { x1 = Regex.Split(x ?? Empty, "([0-9]+)"); @@ -33,6 +33,7 @@ namespace API.Comparators if (!_table.TryGetValue(y ?? Empty, out var y1)) { y1 = Regex.Split(y ?? Empty, "([0-9]+)"); + // Should be fixed: EXCEPTION: An item with the same key has already been added. Key: M:\Girls of the Wild's\Girls of the Wild's - Ep. 083 (Season 1) [LINE Webtoon].cbz _table.Add(y ?? 
Empty, y1); } diff --git a/API/Controllers/PluginController.cs b/API/Controllers/PluginController.cs new file mode 100644 index 000000000..b176c0628 --- /dev/null +++ b/API/Controllers/PluginController.cs @@ -0,0 +1,45 @@ +using System.Threading.Tasks; +using API.DTOs; +using API.Interfaces; +using API.Interfaces.Services; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; + +namespace API.Controllers +{ + public class PluginController : BaseApiController + { + private readonly IUnitOfWork _unitOfWork; + private readonly ITokenService _tokenService; + private readonly ILogger _logger; + + public PluginController(IUnitOfWork unitOfWork, ITokenService tokenService, ILogger logger) + { + _unitOfWork = unitOfWork; + _tokenService = tokenService; + _logger = logger; + } + + /// + /// Authenticate with the Server given an apiKey. This will log you in by returning the user object and the JWT token. + /// + /// + /// Name of the Plugin + /// + [HttpPost("authenticate")] + public async Task> Authenticate(string apiKey, string pluginName) + { + // NOTE: In order to log information about plugins, we need some Plugin Description information for each request + // Should log into access table so we can tell the user + var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + _logger.LogInformation("Plugin {PluginName} has authenticated with {UserName} ({UserId})'s API Key", pluginName, user.UserName, userId); + return new UserDto + { + Username = user.UserName, + Token = await _tokenService.CreateToken(user), + ApiKey = user.ApiKey, + }; + } + } +} diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index cf759fcb3..ec6386032 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -3,7 +3,6 @@ using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; -using API.Comparators; using API.Data.Repositories; using API.DTOs; using API.DTOs.Reader; @@ -21,17 +20,15 @@ namespace API.Controllers /// public class ReaderController : BaseApiController { - private readonly IDirectoryService _directoryService; private readonly ICacheService _cacheService; private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; private readonly IReaderService _readerService; /// - public ReaderController(IDirectoryService directoryService, ICacheService cacheService, + public ReaderController(ICacheService cacheService, IUnitOfWork unitOfWork, ILogger logger, IReaderService readerService) { - _directoryService = directoryService; _cacheService = cacheService; _unitOfWork = unitOfWork; _logger = logger; @@ -55,14 +52,9 @@ namespace API.Controllers { var (path, _) = await _cacheService.GetCachedPagePath(chapter, page); if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}"); - - var content = await _directoryService.ReadFileAsync(path); var format = Path.GetExtension(path).Replace(".", ""); - // Calculates SHA1 Hash for byte[] - Response.AddCacheHeader(content); - - return File(content, "image/" + format); + return PhysicalFile(path, "image/" + format); } catch (Exception) { diff --git a/API/Controllers/UploadController.cs b/API/Controllers/UploadController.cs index 05274d2de..5830b2225 100644 --- a/API/Controllers/UploadController.cs +++ b/API/Controllers/UploadController.cs @@ -190,7 +190,7 @@ namespace API.Controllers if 
(_unitOfWork.HasChanges()) { await _unitOfWork.CommitAsync(); - _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id); + _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id, true); return Ok(); } diff --git a/API/Data/Repositories/SeriesRepository.cs b/API/Data/Repositories/SeriesRepository.cs index 7fa26fcc2..3d8e8c661 100644 --- a/API/Data/Repositories/SeriesRepository.cs +++ b/API/Data/Repositories/SeriesRepository.cs @@ -18,7 +18,6 @@ namespace API.Data.Repositories { private readonly DataContext _context; private readonly IMapper _mapper; - private readonly NaturalSortComparer _naturalSortComparer = new (); public SeriesRepository(DataContext context, IMapper mapper) { _context = context; @@ -118,11 +117,12 @@ namespace API.Data.Repositories return volumes; } - private void SortSpecialChapters(IEnumerable volumes) + private static void SortSpecialChapters(IEnumerable volumes) { + var sorter = new NaturalSortComparer(); foreach (var v in volumes.Where(vDto => vDto.Number == 0)) { - v.Chapters = v.Chapters.OrderBy(x => x.Range, _naturalSortComparer).ToList(); + v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList(); } } diff --git a/API/Entities/AppUserProgress.cs b/API/Entities/AppUserProgress.cs index cb3c1b33c..08fffa540 100644 --- a/API/Entities/AppUserProgress.cs +++ b/API/Entities/AppUserProgress.cs @@ -9,7 +9,7 @@ namespace API.Entities /// Represents the progress a single user has on a given Chapter. /// //[Index(nameof(SeriesId), nameof(VolumeId), nameof(ChapterId), nameof(AppUserId), IsUnique = true)] - public class AppUserProgress : IEntityDate, IHasConcurrencyToken + public class AppUserProgress : IEntityDate { /// /// Id of Entity @@ -55,16 +55,5 @@ namespace API.Entities /// Last date this was updated /// public DateTime LastModified { get; set; } - - /// - [ConcurrencyCheck] - public uint RowVersion { get; private set; } - - - /// - public void OnSavingChanges() - { - RowVersion++; - } } } diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs index 2376ec721..72c620ce9 100644 --- a/API/Entities/MangaFile.cs +++ b/API/Entities/MangaFile.cs @@ -30,9 +30,13 @@ namespace API.Entities public int ChapterId { get; set; } // Methods + /// + /// If the File on disk's last modified time is after what is stored in MangaFile + /// + /// public bool HasFileBeenModified() { - return !File.GetLastWriteTime(FilePath).Equals(LastModified); + return File.GetLastWriteTime(FilePath) > LastModified; } } } diff --git a/API/Interfaces/ITaskScheduler.cs b/API/Interfaces/ITaskScheduler.cs index ead76e36a..08a450ac2 100644 --- a/API/Interfaces/ITaskScheduler.cs +++ b/API/Interfaces/ITaskScheduler.cs @@ -14,7 +14,7 @@ namespace API.Interfaces void CleanupChapters(int[] chapterIds); void RefreshMetadata(int libraryId, bool forceUpdate = true); void CleanupTemp(); - void RefreshSeriesMetadata(int libraryId, int seriesId); + void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false); void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false); void CancelStatsTasks(); void RunStatCollection(); diff --git a/API/Interfaces/Services/IMetadataService.cs b/API/Interfaces/Services/IMetadataService.cs index 70b10b861..f0595cf26 100644 --- a/API/Interfaces/Services/IMetadataService.cs +++ b/API/Interfaces/Services/IMetadataService.cs @@ -1,4 +1,5 @@ -using API.Entities; +using System.Threading.Tasks; +using API.Entities; namespace API.Interfaces.Services { @@ -11,14 +12,14 @@ namespace API.Interfaces.Services /// void 
RefreshMetadata(int libraryId, bool forceUpdate = false); - public void UpdateMetadata(Chapter chapter, bool forceUpdate); - public void UpdateMetadata(Volume volume, bool forceUpdate); - public void UpdateMetadata(Series series, bool forceUpdate); + public bool UpdateMetadata(Chapter chapter, bool forceUpdate); + public bool UpdateMetadata(Volume volume, bool forceUpdate); + public bool UpdateMetadata(Series series, bool forceUpdate); /// /// Performs a forced refresh of metatdata just for a series and it's nested entities /// /// /// - void RefreshMetadataForSeries(int libraryId, int seriesId); + Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false); } -} \ No newline at end of file +} diff --git a/API/Interfaces/Services/IScannerService.cs b/API/Interfaces/Services/IScannerService.cs index d235adfb5..b67290bfc 100644 --- a/API/Interfaces/Services/IScannerService.cs +++ b/API/Interfaces/Services/IScannerService.cs @@ -12,8 +12,8 @@ namespace API.Interfaces.Services /// /// Library to scan against /// Force overwriting for cover images - void ScanLibrary(int libraryId, bool forceUpdate); - void ScanLibraries(); + Task ScanLibrary(int libraryId, bool forceUpdate); + Task ScanLibraries(); Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token); } } diff --git a/API/Interfaces/Services/ReaderService.cs b/API/Interfaces/Services/ReaderService.cs index e5be064c5..99b7157d2 100644 --- a/API/Interfaces/Services/ReaderService.cs +++ b/API/Interfaces/Services/ReaderService.cs @@ -1,6 +1,7 @@  using System; using System.Collections.Generic; +using System.Data; using System.Linq; using System.Threading.Tasks; using API.Comparators; @@ -17,7 +18,6 @@ namespace API.Interfaces.Services private readonly ILogger _logger; private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); - private readonly NaturalSortComparer _naturalSortComparer = new NaturalSortComparer(); public ReaderService(IUnitOfWork unitOfWork, ILogger logger) { @@ -44,7 +44,8 @@ namespace API.Interfaces.Services if (userProgress == null) { // Create a user object - var userWithProgress = await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.Progress); + var userWithProgress = + await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.Progress); userWithProgress.Progresses ??= new List(); userWithProgress.Progresses.Add(new AppUserProgress { @@ -74,7 +75,6 @@ namespace API.Interfaces.Services } catch (Exception exception) { - // When opening a fresh chapter, this seems to fail (sometimes) _logger.LogError(exception, "Could not save progress"); await _unitOfWork.RollbackAsync(); } @@ -118,7 +118,7 @@ namespace API.Interfaces.Services if (currentVolume.Number == 0) { // Handle specials by sorting on their Filename aka Range - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, _naturalSortComparer), currentChapter.Number); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()), currentChapter.Number); if (chapterId > 0) return chapterId; } @@ -169,7 +169,7 @@ namespace API.Interfaces.Services if (currentVolume.Number == 0) { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, _naturalSortComparer).Reverse(), currentChapter.Number); + var chapterId = 
GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()).Reverse(), currentChapter.Number); if (chapterId > 0) return chapterId; } diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index aa354ce7c..a6cecc931 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -102,11 +102,17 @@ namespace API.Parser @"^(?.*)( |_)Vol\.?(\d+|tbd)", RegexOptions.IgnoreCase | RegexOptions.Compiled, RegexTimeout), + // Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex), + // The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake + new Regex( + @"(?.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled, + RegexTimeout), // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip new Regex( - @"(?.*)(\b|_)v(?\d+-?\d*)(\s|_|-)", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), + @"(?.*)(\b|_)v(?\d+-?\d*)(\s|_|-)", + RegexOptions.IgnoreCase | RegexOptions.Compiled, + RegexTimeout), // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto] new Regex( @"(?.*)( - )(?:v|vo|c)\d", @@ -117,11 +123,6 @@ namespace API.Parser @"(?.*)(?:, Chapter )(?\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled, RegexTimeout), - // Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex) - new Regex( - @"(?.*)(\s|_|-)(?:Volume(\s|_|-)+\d+)(\s|_|-)+(?:Chapter)(\s|_|-)(?\d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), // Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz new Regex( @"(?.*)(\s|_|-)(?!Vol)(\s|_|-)(?:Chapter)(\s|_|-)(?\d+)", @@ -149,7 +150,7 @@ namespace API.Parser RegexTimeout), // Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz new Regex( - @"(?.*)(\b|_|-|\s)(?:(chapter(\b|_|-|\s))|sp)\d", + @"^(?(?!Vol).+?)(?:(ch(apter|\.)(\b|_|-|\s))|sp)\d", RegexOptions.IgnoreCase | RegexOptions.Compiled, RegexTimeout), // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) @@ -294,9 +295,14 @@ namespace API.Parser @"^(?.*)(?: |_)i(ssue) #\d+", RegexOptions.IgnoreCase | RegexOptions.Compiled, RegexTimeout), + // Batman Wayne Family Adventures - Ep. 
001 - Moving In + new Regex( + @"^(?.+?)(\s|_|-)?(?:Ep\.?)(\s|_|-)+\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled, + RegexTimeout), // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( - @"^(?.*)(?: \d+)", + @"^(?.+?)(?: \d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled, RegexTimeout), // Batman & Robin the Teen Wonder #0 @@ -323,41 +329,44 @@ namespace API.Parser private static readonly Regex[] ComicVolumeRegex = new[] { - // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) - new Regex( - @"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), - // 01 Spider-Man & Wolverine 01.cbr - new Regex( - @"^(?\d+) (?:- )?(?.*) (\d+)?", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), - // Batman & Wildcat (1 of 3) - new Regex( - @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), + // // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) + // new Regex( + // @"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)", + // RegexOptions.IgnoreCase | RegexOptions.Compiled, + // RegexTimeout), + // // 01 Spider-Man & Wolverine 01.cbr + // new Regex( + // @"^(?\d+) (?:- )?(?.*) (\d+)?", + // RegexOptions.IgnoreCase | RegexOptions.Compiled, + // RegexTimeout), + // // Batman & Wildcat (1 of 3) + // new Regex( + // @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", + // RegexOptions.IgnoreCase | RegexOptions.Compiled, + // RegexTimeout), // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( @"^(?.*)(?: |_)v(?\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled, RegexTimeout), // Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005) - new Regex( - @"^(?.*)(?\d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), + // BUG: Negative lookbehind has to be fixed width + // NOTE: The case this is built for does not make much sense. + // new Regex( + // @"^(?.+?)(?\d+)", + // RegexOptions.IgnoreCase | RegexOptions.Compiled, + // RegexTimeout), + // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) - new Regex( - @"^(?.*)(?\d+))", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), - // Batman & Robin the Teen Wonder #0 - new Regex( - @"^(?.*)(?: |_)#(?\d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - RegexTimeout), + // new Regex( + // @"^(?.+?)(?\d+))", + // RegexOptions.IgnoreCase | RegexOptions.Compiled, + // RegexTimeout), + // // Batman & Robin the Teen Wonder #0 + // new Regex( + // @"^(?.*)(?: |_)#(?\d+)", + // RegexOptions.IgnoreCase | RegexOptions.Compiled, + // RegexTimeout), }; private static readonly Regex[] ComicChapterRegex = new[] @@ -387,6 +396,11 @@ namespace API.Parser @"^(?.*)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-", RegexOptions.IgnoreCase | RegexOptions.Compiled, RegexTimeout), + // Saga 001 (2012) (Digital) (Empire-Zone) + new Regex( + @"(?.+?)(?: |_)(c? 
?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}", + RegexOptions.IgnoreCase | RegexOptions.Compiled, + RegexTimeout), // Amazing Man Comics chapter 25 new Regex( @"^(?!Vol)(?.*)( |_)c(hapter)( |_)(?\d*)", @@ -930,6 +944,9 @@ namespace API.Parser /// /// Translates _ -> spaces, trims front and back of string, removes release groups + /// + /// Hippos_the_Great [Digital], -> Hippos the Great + /// /// /// /// @@ -942,7 +959,7 @@ namespace API.Parser title = RemoveSpecialTags(title); title = title.Replace("_", " ").Trim(); - if (title.EndsWith("-")) + if (title.EndsWith("-") || title.EndsWith(",")) { title = title.Substring(0, title.Length - 1); } diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 6d545ea21..4a1ef7dd9 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -305,6 +305,44 @@ namespace API.Services } + /// + /// Finds the highest directories from a set of MangaFiles + /// + /// List of top level folders which files belong to + /// List of file paths that belong to libraryFolders + /// + public static Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, IList filePaths) + { + var stopLookingForDirectories = false; + var dirs = new Dictionary(); + foreach (var folder in libraryFolders) + { + if (stopLookingForDirectories) break; + foreach (var file in filePaths) + { + if (!file.Contains(folder)) continue; + + var parts = GetFoldersTillRoot(folder, file).ToList(); + if (parts.Count == 0) + { + // Break from all loops, we done, just scan folder.Path (library root) + dirs.Add(folder, string.Empty); + stopLookingForDirectories = true; + break; + } + + var fullPath = Path.Join(folder, parts.Last()); + if (!dirs.ContainsKey(fullPath)) + { + dirs.Add(fullPath, string.Empty); + } + } + } + + return dirs; + } + + /// /// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed /// up processing. 
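
The relocated `FindHighestDirectoriesFromFiles` helper now takes plain folder and file paths instead of `Library`/`MangaFile` entities, which is what lets the new `DirectoryServiceTests` case exercise it directly. A minimal usage sketch mirroring that test's data (paths are illustrative, and the generic parameters, stripped in this rendering, are assumed to be `Dictionary<string, string>`):

```csharp
using System;
using System.Collections.Generic;
using API.Services;

// Library roots as plain strings and files as plain paths -- no Library/MangaFile entities needed.
string[] libraryFolders = { "C:/Manga/" };
var filePaths = new List<string> { "C:/Manga/Love Hina/Vol. 01.cbz" };

// Returns the highest-level directories under the library roots that contain the given files.
// For this input the expected result is a single entry: "C:/Manga/Love Hina" -> "".
Dictionary<string, string> dirs =
    DirectoryService.FindHighestDirectoriesFromFiles(libraryFolders, filePaths);

foreach (var (dir, _) in dirs)
{
    Console.WriteLine(dir); // "C:/Manga/Love Hina"
}
```
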
diff --git a/API/Services/ImageService.cs b/API/Services/ImageService.cs index cc4d92742..11aa4716d 100644 --- a/API/Services/ImageService.cs +++ b/API/Services/ImageService.cs @@ -14,13 +14,11 @@ namespace API.Services { private readonly ILogger _logger; private readonly IDirectoryService _directoryService; - private readonly NaturalSortComparer _naturalSortComparer; public ImageService(ILogger logger, IDirectoryService directoryService) { _logger = logger; _directoryService = directoryService; - _naturalSortComparer = new NaturalSortComparer(); } /// @@ -38,7 +36,7 @@ namespace API.Services } var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions) - .OrderBy(f => f, _naturalSortComparer).FirstOrDefault(); + .OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault(); return firstImage; } diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index aa175c021..68c706bc3 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -9,6 +9,8 @@ using API.Entities.Enums; using API.Extensions; using API.Interfaces; using API.Interfaces.Services; +using API.SignalR; +using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Logging; namespace API.Services @@ -20,6 +22,7 @@ namespace API.Services private readonly IArchiveService _archiveService; private readonly IBookService _bookService; private readonly IImageService _imageService; + private readonly IHubContext _messageHub; private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); /// /// Width of the Thumbnail generation @@ -27,13 +30,14 @@ namespace API.Services public static readonly int ThumbnailWidth = 320; // 153w x 230h public MetadataService(IUnitOfWork unitOfWork, ILogger logger, - IArchiveService archiveService, IBookService bookService, IImageService imageService) + IArchiveService archiveService, IBookService bookService, IImageService imageService, IHubContext messageHub) { _unitOfWork = unitOfWork; _logger = logger; _archiveService = archiveService; _bookService = bookService; _imageService = imageService; + _messageHub = messageHub; } /// @@ -81,14 +85,17 @@ namespace API.Services /// /// /// Force updating cover image even if underlying file has not been modified or chapter already has a cover image - public void UpdateMetadata(Chapter chapter, bool forceUpdate) + public bool UpdateMetadata(Chapter chapter, bool forceUpdate) { var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); if (ShouldUpdateCoverImage(chapter.CoverImage, firstFile, forceUpdate, chapter.CoverImageLocked)) { chapter.CoverImage = GetCoverImage(firstFile); + return true; } + + return false; } /// @@ -96,17 +103,18 @@ namespace API.Services /// /// /// Force updating cover image even if underlying file has not been modified or chapter already has a cover image - public void UpdateMetadata(Volume volume, bool forceUpdate) + public bool UpdateMetadata(Volume volume, bool forceUpdate) { + // We need to check if Volume coverImage matches first chapters if forceUpdate is false if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate - , false)) return; + , false)) return false; volume.Chapters ??= new List(); var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault(); - - if (firstChapter == null) return; + if (firstChapter == null) return false; volume.CoverImage = 
firstChapter.CoverImage; + return true; } /// @@ -114,9 +122,10 @@ namespace API.Services /// /// /// Force updating cover image even if underlying file has not been modified or chapter already has a cover image - public void UpdateMetadata(Series series, bool forceUpdate) + public bool UpdateMetadata(Series series, bool forceUpdate) { - if (series == null) return; + var madeUpdate = false; + if (series == null) return false; if (ShouldUpdateCoverImage(series.CoverImage, null, forceUpdate, series.CoverImageLocked)) { series.Volumes ??= new List(); @@ -129,39 +138,46 @@ namespace API.Services { coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting) .FirstOrDefault(c => !c.IsSpecial)?.CoverImage; + madeUpdate = true; } if (!HasCoverImage(coverImage)) { coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting) .FirstOrDefault()?.CoverImage; + madeUpdate = true; } } series.CoverImage = firstCover?.CoverImage ?? coverImage; } - UpdateSeriesSummary(series, forceUpdate); + return UpdateSeriesSummary(series, forceUpdate) || madeUpdate ; } - private void UpdateSeriesSummary(Series series, bool forceUpdate) + private bool UpdateSeriesSummary(Series series, bool forceUpdate) { - if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return; + if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return false; var isBook = series.Library.Type == LibraryType.Book; var firstVolume = series.Volumes.FirstWithChapters(isBook); var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles(); var firstFile = firstChapter?.Files.FirstOrDefault(); - if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return; - if (Parser.Parser.IsPdf(firstFile.FilePath)) return; + if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return false; + if (Parser.Parser.IsPdf(firstFile.FilePath)) return false; - var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath); - if (string.IsNullOrEmpty(series.Summary)) + if (series.Format is MangaFormat.Archive or MangaFormat.Epub) { - series.Summary = summary; + var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath); + if (!string.IsNullOrEmpty(series.Summary)) + { + series.Summary = summary; + firstFile.LastModified = DateTime.Now; + return true; + } } - - firstFile.LastModified = DateTime.Now; + firstFile.LastModified = DateTime.Now; // NOTE: Should I put this here as well since it might not have actually been parsed? 
+ return false; } @@ -180,17 +196,19 @@ namespace API.Services _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name); foreach (var series in library.Series) { + var volumeUpdated = false; foreach (var volume in series.Volumes) { + var chapterUpdated = false; foreach (var chapter in volume.Chapters) { - UpdateMetadata(chapter, forceUpdate); + chapterUpdated = UpdateMetadata(chapter, forceUpdate); } - UpdateMetadata(volume, forceUpdate); + volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate); } - UpdateMetadata(series, forceUpdate); + UpdateMetadata(series, volumeUpdated || forceUpdate); _unitOfWork.SeriesRepository.Update(series); } @@ -207,7 +225,7 @@ namespace API.Services /// /// /// - public void RefreshMetadataForSeries(int libraryId, int seriesId) + public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false) { var sw = Stopwatch.StartNew(); var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); @@ -219,23 +237,26 @@ namespace API.Services return; } _logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name); + var volumeUpdated = false; foreach (var volume in series.Volumes) { + var chapterUpdated = false; foreach (var chapter in volume.Chapters) { - UpdateMetadata(chapter, true); + chapterUpdated = UpdateMetadata(chapter, forceUpdate); } - UpdateMetadata(volume, true); + volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate); } - UpdateMetadata(series, true); + UpdateMetadata(series, volumeUpdated || forceUpdate); _unitOfWork.SeriesRepository.Update(series); - if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) + if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync()) { _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); + await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.RefreshMetadataEvent(libraryId, seriesId)); } } } diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index aebaf665c..fe6931b92 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -141,10 +141,10 @@ namespace API.Services BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(tempDirectory)); } - public void RefreshSeriesMetadata(int libraryId, int seriesId) + public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false) { _logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId); - BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId)); + BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate)); } public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false) diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index d83c7054d..58f0c4491 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -14,7 +14,9 @@ using API.Interfaces; using API.Interfaces.Services; using API.Parser; using API.Services.Tasks.Scanner; +using API.SignalR; using Hangfire; +using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Logging; namespace API.Services.Tasks @@ -27,10 +29,11 @@ namespace API.Services.Tasks private readonly IMetadataService _metadataService; private readonly IBookService _bookService; 
private readonly ICacheService _cacheService; + private readonly IHubContext _messageHub; private readonly NaturalSortComparer _naturalSort = new (); public ScannerService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService, - IMetadataService metadataService, IBookService bookService, ICacheService cacheService) + IMetadataService metadataService, IBookService bookService, ICacheService cacheService, IHubContext messageHub) { _unitOfWork = unitOfWork; _logger = logger; @@ -38,6 +41,7 @@ namespace API.Services.Tasks _metadataService = metadataService; _bookService = bookService; _cacheService = cacheService; + _messageHub = messageHub; } [DisableConcurrentExecution(timeoutInSeconds: 360)] @@ -47,7 +51,7 @@ namespace API.Services.Tasks var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId, seriesId); - var dirs = FindHighestDirectoriesFromFiles(library, files); + var dirs = DirectoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(f => f.Path), files.Select(f => f.FilePath).ToList()); var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new []{ seriesId }); _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name); @@ -63,6 +67,37 @@ namespace API.Services.Tasks parsedSeries.Remove(key); } + if (parsedSeries.Count == 0) + { + // We need to do an additional check for an edge case: If the scan ran and the files do not match the existing Series name, then it is very likely, + // the files have crap naming and if we don't correct, the series will get deleted due to the parser not being able to fallback onto folder parsing as the root + // is the series folder. 
+ var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName)); + if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder)) + { + dirs = new Dictionary(); + var path = Path.GetPathRoot(existingFolder); + if (!string.IsNullOrEmpty(path)) + { + dirs[path] = string.Empty; + } + } + _logger.LogDebug("{SeriesName} has bad naming convention, forcing rescan at a higher directory.", series.OriginalName); + scanner = new ParseScannedFiles(_bookService, _logger); + parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2); + totalFiles += totalFiles2; + scanElapsedTime += scanElapsedTime2; + + // If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue + // Hence we clear out anything but what we selected for + firstSeries = library.Series.FirstOrDefault(); + keys = parsedSeries.Keys; + foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format)) + { + parsedSeries.Remove(key); + } + } + var sw = new Stopwatch(); UpdateLibrary(library, parsedSeries); @@ -74,8 +109,10 @@ namespace API.Services.Tasks totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name); CleanupDbEntities(); - BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId)); + BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate)); BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds)); + // Tell UI that this series is done + await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId), cancellationToken: token); } else { @@ -83,54 +120,18 @@ namespace API.Services.Tasks "There was a critical error that resulted in a failed scan. 
Please check logs and rescan"); await _unitOfWork.RollbackAsync(); } - } - /// - /// Finds the highest directories from a set of MangaFiles - /// - /// - /// - /// - private static Dictionary FindHighestDirectoriesFromFiles(Library library, IList files) - { - var stopLookingForDirectories = false; - var dirs = new Dictionary(); - foreach (var folder in library.Folders) - { - if (stopLookingForDirectories) break; - foreach (var file in files) - { - if (!file.FilePath.Contains(folder.Path)) continue; - - var parts = DirectoryService.GetFoldersTillRoot(folder.Path, file.FilePath).ToList(); - if (parts.Count == 0) - { - // Break from all loops, we done, just scan folder.Path (library root) - dirs.Add(folder.Path, string.Empty); - stopLookingForDirectories = true; - break; - } - - var fullPath = Path.Join(folder.Path, parts.Last()); - if (!dirs.ContainsKey(fullPath)) - { - dirs.Add(fullPath, string.Empty); - } - } - } - - return dirs; } [DisableConcurrentExecution(timeoutInSeconds: 360)] [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] - public void ScanLibraries() + public async Task ScanLibraries() { var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList(); foreach (var lib in libraries) { - ScanLibrary(lib.Id, false); + await ScanLibrary(lib.Id, false); } } @@ -145,7 +146,7 @@ namespace API.Services.Tasks /// [DisableConcurrentExecution(360)] [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] - public void ScanLibrary(int libraryId, bool forceUpdate) + public async Task ScanLibrary(int libraryId, bool forceUpdate) { Library library; try @@ -188,6 +189,7 @@ namespace API.Services.Tasks CleanupAbandonedChapters(); BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); + await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibrary, MessageFactory.ScanLibraryEvent(libraryId, "complete")); } /// diff --git a/API/Services/Tasks/VersionUpdaterService.cs b/API/Services/Tasks/VersionUpdaterService.cs index a949a870e..fbd3d4f10 100644 --- a/API/Services/Tasks/VersionUpdaterService.cs +++ b/API/Services/Tasks/VersionUpdaterService.cs @@ -140,11 +140,7 @@ namespace API.Services.Tasks connections.AddRange(await _tracker.GetConnectionsForUser(admin)); } - await _messageHub.Clients.Users(admins).SendAsync("UpdateAvailable", new SignalRMessage - { - Name = "UpdateAvailable", - Body = update - }); + await _messageHub.Clients.Users(admins).SendAsync(SignalREvents.UpdateVersion, MessageFactory.UpdateVersionEvent(update)); } diff --git a/API/SignalR/MessageFactory.cs b/API/SignalR/MessageFactory.cs new file mode 100644 index 000000000..ad6eed5c9 --- /dev/null +++ b/API/SignalR/MessageFactory.cs @@ -0,0 +1,56 @@ +using System.Threading; +using API.DTOs.Update; + +namespace API.SignalR +{ + public static class MessageFactory + { + public static SignalRMessage ScanSeriesEvent(int seriesId) + { + return new SignalRMessage() + { + Name = SignalREvents.ScanSeries, + Body = new + { + SeriesId = seriesId + } + }; + } + + public static SignalRMessage ScanLibraryEvent(int libraryId, string stage) + { + return new SignalRMessage() + { + Name = SignalREvents.ScanLibrary, + Body = new + { + LibraryId = libraryId, + Stage = stage + } + }; + } + + public static SignalRMessage RefreshMetadataEvent(int libraryId, int seriesId) + { + return new SignalRMessage() + { + Name = SignalREvents.RefreshMetadata, + Body = new + { + SeriesId = seriesId, + LibraryId = libraryId + } + }; + } + + 
public static SignalRMessage UpdateVersionEvent(UpdateNotificationDto update) + { + return new SignalRMessage + { + Name = SignalREvents.UpdateVersion, + Body = update + }; + } + + } +} diff --git a/API/SignalR/SignalREvents.cs b/API/SignalR/SignalREvents.cs new file mode 100644 index 000000000..fcd077146 --- /dev/null +++ b/API/SignalR/SignalREvents.cs @@ -0,0 +1,11 @@ +namespace API.SignalR +{ + public static class SignalREvents + { + public const string UpdateVersion = "UpdateVersion"; + public const string ScanSeries = "ScanSeries"; + public const string RefreshMetadata = "RefreshMetadata"; + public const string ScanLibrary = "ScanLibrary"; + + } +} diff --git a/UI/Web/src/app/_guards/admin.guard.ts b/UI/Web/src/app/_guards/admin.guard.ts index e9483530e..b88c8d51c 100644 --- a/UI/Web/src/app/_guards/admin.guard.ts +++ b/UI/Web/src/app/_guards/admin.guard.ts @@ -19,7 +19,7 @@ export class AdminGuard implements CanActivate { if (this.accountService.hasAdminRole(user)) { return true; } - + this.toastr.error('You are not authorized to view this page.'); return false; }) diff --git a/UI/Web/src/app/_guards/auth.guard.ts b/UI/Web/src/app/_guards/auth.guard.ts index f477290fc..924f03bf2 100644 --- a/UI/Web/src/app/_guards/auth.guard.ts +++ b/UI/Web/src/app/_guards/auth.guard.ts @@ -19,7 +19,9 @@ export class AuthGuard implements CanActivate { if (user) { return true; } - this.toastr.error('You are not authorized to view this page.'); + if (this.toastr.toasts.filter(toast => toast.message === 'Unauthorized' || toast.message === 'You are not authorized to view this page.').length === 0) { + this.toastr.error('You are not authorized to view this page.'); + } localStorage.setItem(this.urlKey, window.location.pathname); this.router.navigateByUrl('/libraries'); return false; diff --git a/UI/Web/src/app/_models/events/scan-library-event.ts b/UI/Web/src/app/_models/events/scan-library-event.ts new file mode 100644 index 000000000..b0c663502 --- /dev/null +++ b/UI/Web/src/app/_models/events/scan-library-event.ts @@ -0,0 +1,4 @@ +export interface ScanLibraryEvent { + libraryId: number; + stage: 'complete'; +} \ No newline at end of file diff --git a/UI/Web/src/app/_models/events/scan-series-event.ts b/UI/Web/src/app/_models/events/scan-series-event.ts new file mode 100644 index 000000000..45f7a07bc --- /dev/null +++ b/UI/Web/src/app/_models/events/scan-series-event.ts @@ -0,0 +1,3 @@ +export interface ScanSeriesEvent { + seriesId: number; +} \ No newline at end of file diff --git a/UI/Web/src/app/_services/action-factory.service.ts b/UI/Web/src/app/_services/action-factory.service.ts index a66c74a59..001b17ad3 100644 --- a/UI/Web/src/app/_services/action-factory.service.ts +++ b/UI/Web/src/app/_services/action-factory.service.ts @@ -259,6 +259,12 @@ export class ActionFactoryService { callback: this.dummyCallback, requiresAdmin: false }, + { + action: Action.IncognitoRead, + title: 'Read in Incognito', + callback: this.dummyCallback, + requiresAdmin: false + }, { action: Action.AddToReadingList, title: 'Add to Reading List', diff --git a/UI/Web/src/app/_services/message-hub.service.ts b/UI/Web/src/app/_services/message-hub.service.ts index 33c39f18b..f5d193f6a 100644 --- a/UI/Web/src/app/_services/message-hub.service.ts +++ b/UI/Web/src/app/_services/message-hub.service.ts @@ -1,13 +1,18 @@ -import { Injectable } from '@angular/core'; +import { EventEmitter, Injectable } from '@angular/core'; import { HubConnection, HubConnectionBuilder } from '@microsoft/signalr'; import { NgbModal, NgbModalRef } 
from '@ng-bootstrap/ng-bootstrap'; import { User } from '@sentry/angular'; import { BehaviorSubject, ReplaySubject } from 'rxjs'; import { environment } from 'src/environments/environment'; import { UpdateNotificationModalComponent } from '../shared/update-notification/update-notification-modal.component'; +import { ScanLibraryEvent } from '../_models/events/scan-library-event'; +import { ScanSeriesEvent } from '../_models/events/scan-series-event'; export enum EVENTS { - UpdateAvailable = 'UpdateAvailable' + UpdateAvailable = 'UpdateAvailable', + ScanSeries = 'ScanSeries', + ScanLibrary = 'ScanLibrary', + RefreshMetadata = 'RefreshMetadata', } export interface Message { @@ -26,6 +31,9 @@ export class MessageHubService { private messagesSource = new ReplaySubject>(1); public messages$ = this.messagesSource.asObservable(); + public scanSeries: EventEmitter = new EventEmitter(); + public scanLibrary: EventEmitter = new EventEmitter(); + constructor(private modalService: NgbModal) { } createHubConnection(user: User) { @@ -44,6 +52,25 @@ export class MessageHubService { //console.log('[Hub] Body: ', body); }); + this.hubConnection.on(EVENTS.ScanSeries, resp => { + this.messagesSource.next({ + event: EVENTS.ScanSeries, + payload: resp.body + }); + this.scanSeries.emit(resp.body); + }); + + this.hubConnection.on(EVENTS.ScanLibrary, resp => { + this.messagesSource.next({ + event: EVENTS.ScanLibrary, + payload: resp.body + }); + this.scanLibrary.emit(resp.body); + // if ((resp.body as ScanLibraryEvent).stage === 'complete') { + // this.toastr. + // } + }); + this.hubConnection.on(EVENTS.UpdateAvailable, resp => { this.messagesSource.next({ event: EVENTS.UpdateAvailable, diff --git a/UI/Web/src/app/manga-reader/_models/reader-enums.ts b/UI/Web/src/app/manga-reader/_models/reader-enums.ts index 0117ed5d2..9738edf9f 100644 --- a/UI/Web/src/app/manga-reader/_models/reader-enums.ts +++ b/UI/Web/src/app/manga-reader/_models/reader-enums.ts @@ -2,21 +2,21 @@ export enum FITTING_OPTION { HEIGHT = 'full-height', WIDTH = 'full-width', ORIGINAL = 'original' - } +} export enum SPLIT_PAGE_PART { NO_SPLIT = 'none', LEFT_PART = 'left', RIGHT_PART = 'right' - } +} export enum PAGING_DIRECTION { FORWARD = 1, BACKWARDS = -1, - } +} export enum COLOR_FILTER { NONE = '', SEPIA = 'filter-sepia', DARK = 'filter-dark' - } +} diff --git a/UI/Web/src/app/manga-reader/infinite-scroller/infinite-scroller.component.html b/UI/Web/src/app/manga-reader/infinite-scroller/infinite-scroller.component.html index 51285ac77..c0d5ff542 100644 --- a/UI/Web/src/app/manga-reader/infinite-scroller/infinite-scroller.component.html +++ b/UI/Web/src/app/manga-reader/infinite-scroller/infinite-scroller.component.html @@ -1,15 +1,15 @@ -
+
Captures Scroll Events: {{!this.isScrolling && this.allImagesLoaded}} Is Scrolling: {{isScrollingForwards() ? 'Forwards' : 'Backwards'}} {{this.isScrolling}} All Images Loaded: {{this.allImagesLoaded}} Prefetched {{minPageLoaded}}-{{maxPageLoaded}} - Current Page:{{pageNum}} - Width: {{webtoonImageWidth}} Pages: {{pageNum}} / {{totalPages}} At Top: {{atTop}} At Bottom: {{atBottom}} - + Total Height: {{getTotalHeight()}} + Total Scroll: {{getTotalScroll()}} + Scroll Top: {{getScrollTop()}}
- image + image
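
Taken together, the server-side half of this change follows one pattern: services take an injected `IHubContext`, and broadcast events whose names come from `SignalREvents` and whose payloads come from `MessageFactory`; the Angular `MessageHubService` then re-emits them as typed events (`scanSeries`, `scanLibrary`). A minimal sketch of that broadcast path, assuming the hub class is `API.SignalR.MessageHub` (its definition is not part of this diff) and using a hypothetical `ScanNotifier` wrapper purely for illustration:

```csharp
using System.Threading.Tasks;
using API.SignalR;
using Microsoft.AspNetCore.SignalR;

// Hypothetical wrapper shown only to isolate the broadcast pattern used by
// ScannerService/MetadataService in this PR.
public class ScanNotifier
{
    private readonly IHubContext<MessageHub> _messageHub;

    public ScanNotifier(IHubContext<MessageHub> messageHub)
    {
        _messageHub = messageHub;
    }

    // Mirrors what ScannerService.ScanSeries does once a series scan finishes:
    // the event name and payload both come from the new SignalREvents/MessageFactory helpers.
    public Task NotifySeriesScanned(int seriesId)
    {
        return _messageHub.Clients.All.SendAsync(
            SignalREvents.ScanSeries,
            MessageFactory.ScanSeriesEvent(seriesId));
    }
}
```

On the client, `MessageHubService` listens for the same event name and emits the body through its `scanSeries` `EventEmitter`, so components can react without parsing raw hub messages.
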