From b3ec8e87561f69afb9ee85dbb8885f183ea70001 Mon Sep 17 00:00:00 2001
From: Joseph Milazzo
Date: Tue, 6 Apr 2021 08:59:44 -0500
Subject: [PATCH] Bugfixes! (#157)

* More cases for parsing regex
* Fixed a bug where chapter cover images weren't being updated due to a missed not (!).
* Removed a piece of code that was needed for upgrading, since all beta users agreed to wipe the db.
* Fixed InProgress to properly respect order and show more recent activity first. The issue was with IEntityDate LastModified not updating in DataContext.
* Updated dependencies to latest stable.
* LastModified on Volumes wasn't updating; validated that it does update when data is changed.
* Fixed #152 - Sorting issue when finding cover image.
* Fixed #151 - Sort files during scan.
* Fixed #161 - Remove files that don't exist from chapters during scan.
* Fixed #155 - Ignore images that start with !, expand cover detection by checking for the word "cover" as well as "folder", and some code cleanup to make the code more concise.
* Fixed #153 - Ensure that we persist series name changes and don't override them on scanning.
* Fixed a broken unit test
---
 .../Comparers/NaturalSortComparerTest.cs | 36 +++++++++++++++++
 API.Tests/ParserTest.cs                  | 22 ++++++++++
 API/Comparators/NaturalSortComparer.cs   | 16 +-------
 API/Extensions/SeriesExtensions.cs       | 27 +++++++++++++
 API/Parser/Parser.cs                     | 20 +++++++++-
 API/Services/ArchiveService.cs           | 40 ++++++++++++-------
 API/Services/Tasks/ScannerService.cs     | 33 ++++++++++-----
 7 files changed, 154 insertions(+), 40 deletions(-)
 create mode 100644 API/Extensions/SeriesExtensions.cs

diff --git a/API.Tests/Comparers/NaturalSortComparerTest.cs b/API.Tests/Comparers/NaturalSortComparerTest.cs
index 1e4c63daf..60cd287ef 100644
--- a/API.Tests/Comparers/NaturalSortComparerTest.cs
+++ b/API.Tests/Comparers/NaturalSortComparerTest.cs
@@ -1,4 +1,5 @@
 using System;
+using System.Linq;
 using API.Comparators;
 using Xunit;
 
@@ -15,6 +16,10 @@ namespace API.Tests.Comparers
             new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",},
             new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"}
         )]
+        [InlineData(
+            new[] {"[SCX-Scans]_Vandread_v02_Act02.zip", "[SCX-Scans]_Vandread_v02_Act01.zip",},
+            new[] {"[SCX-Scans]_Vandread_v02_Act01.zip", "[SCX-Scans]_Vandread_v02_Act02.zip",}
+        )]
         public void TestNaturalSortComparer(string[] input, string[] expected)
         {
             NaturalSortComparer nc = new NaturalSortComparer();
@@ -27,5 +32,36 @@ namespace API.Tests.Comparers
                 i++;
             }
         }
+
+
+        [Theory]
+        [InlineData(
+            new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"},
+            new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"}
+        )]
+        [InlineData(
+            new[] {"x2.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"},
+            new[] {"x2.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"}
+        )]
+        [InlineData(
+            new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",},
+            new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"}
+        )]
+        [InlineData(
+            new[] {"[SCX-Scans]_Vandread_v02_Act02.zip", "[SCX-Scans]_Vandread_v02_Act01.zip","[SCX-Scans]_Vandread_v02_Act07.zip",},
+            new[] {"[SCX-Scans]_Vandread_v02_Act01.zip", "[SCX-Scans]_Vandread_v02_Act02.zip","[SCX-Scans]_Vandread_v02_Act07.zip",}
+        )]
+        public void TestNaturalSortComparerLinq(string[] input, string[] expected)
+        {
+            NaturalSortComparer nc = new NaturalSortComparer();
+            var output = input.OrderBy(c => c, nc);
+
+            var i = 0;
+            foreach (var s in output)
+            {
+                Assert.Equal(s, expected[i]);
+                i++;
+            }
+        }
     }
 }
\ No newline at end of file
diff --git a/API.Tests/ParserTest.cs b/API.Tests/ParserTest.cs
index d5c1addec..a166d653b 100644
--- a/API.Tests/ParserTest.cs
+++ b/API.Tests/ParserTest.cs
@@ -338,6 +338,7 @@ namespace API.Tests
         [InlineData("test.jpeg", true)]
         [InlineData("test.png", true)]
         [InlineData(".test.jpg", false)]
+        [InlineData("!test.jpg", false)]
        public void IsImageTest(string filename, bool expected)
        {
            Assert.Equal(expected, IsImage(filename));
@@ -358,6 +359,27 @@ namespace API.Tests
            Assert.Equal(expectedSeries, actual.Series);
        }
+
+        [Theory]
+        [InlineData("Love Hina - Special.jpg", false)]
+        [InlineData("folder.jpg", true)]
+        [InlineData("DearS_v01_cover.jpg", true)]
+        [InlineData("DearS_v01_covers.jpg", false)]
+        [InlineData("!cover.jpg", true)]
+        public void IsCoverImageTest(string inputPath, bool expected)
+        {
+            Assert.Equal(expected, IsCoverImage(inputPath));
+        }
+
+        [Theory]
+        [InlineData("__MACOSX/Love Hina - Special.jpg", true)]
+        [InlineData("TEST/Love Hina - Special.jpg", false)]
+        [InlineData("__macosx/Love Hina/", false)]
+        [InlineData("MACOSX/Love Hina/", false)]
+        public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
+        {
+            Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
+        }
 
        [Fact]
diff --git a/API/Comparators/NaturalSortComparer.cs b/API/Comparators/NaturalSortComparer.cs
index 8ba4a9ec9..961ab40ff 100644
--- a/API/Comparators/NaturalSortComparer.cs
+++ b/API/Comparators/NaturalSortComparer.cs
@@ -8,23 +8,13 @@ namespace API.Comparators
    public class NaturalSortComparer : IComparer<string>, IDisposable
    {
        private readonly bool _isAscending;
+        private Dictionary<string, string[]> _table = new();
 
        public NaturalSortComparer(bool inAscendingOrder = true)
        {
            _isAscending = inAscendingOrder;
        }
 
-        #region IComparer<string> Members
-
-        public int Compare(string x, string y)
-        {
-            throw new NotImplementedException();
-        }
-
-        #endregion
-
-        #region IComparer<string> Members
-
        int IComparer<string>.Compare(string x, string y)
        {
            if (x == y)
@@ -81,10 +71,6 @@ namespace API.Comparators
            return x.CompareTo(y);
        }
 
-        #endregion
-
-        private Dictionary<string, string[]> _table = new Dictionary<string, string[]>();
-
        public void Dispose()
        {
            SuppressFinalize(this);
diff --git a/API/Extensions/SeriesExtensions.cs b/API/Extensions/SeriesExtensions.cs
new file mode 100644
index 000000000..f2a896987
--- /dev/null
+++ b/API/Extensions/SeriesExtensions.cs
@@ -0,0 +1,27 @@
+using System.Collections.Generic;
+using API.Entities;
+
+namespace API.Extensions
+{
+    public static class SeriesExtensions
+    {
+        /// <summary>
+        /// Checks against all the name variables of the Series if it matches anything in the list.
+        /// </summary>
+        /// <param name="series"></param>
+        /// <param name="list"></param>
+        /// <returns></returns>
+        public static bool NameInList(this Series series, IEnumerable<string> list)
+        {
+            foreach (var name in list)
+            {
+                if (name == series.Name || name == series.LocalizedName || name == series.OriginalName || name == series.NormalizedName)
+                {
+                    return true;
+                }
+            }
+
+            return false;
+        }
+    }
+}
\ No newline at end of file
diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs
index 5cc6b1df3..af6124309 100644
--- a/API/Parser/Parser.cs
+++ b/API/Parser/Parser.cs
@@ -15,6 +15,7 @@ namespace API.Parser
        private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
        private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
        private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
+        private static readonly Regex FolderRegex = new Regex(@"(?
+        /// <summary>
+        /// Tests whether the file is a cover image: an image whose name contains "cover" or is named "folder"
+        /// </summary>
+        /// <param name="name"></param>
+        /// <returns></returns>
+        public static bool IsCoverImage(string name)
+        {
+            return IsImage(name, true) && (FolderRegex.IsMatch(name));
+        }
+
+        public static bool HasBlacklistedFolderInPath(string path)
+        {
+            return path.Contains("__MACOSX");
+        }
+
    }
 }
\ No newline at end of file
diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs
index b598f3e8a..fba12280b 100644
--- a/API/Services/ArchiveService.cs
+++ b/API/Services/ArchiveService.cs
@@ -6,6 +6,7 @@ using System.IO.Compression;
 using System.Linq;
 using System.Xml.Serialization;
 using API.Archive;
+using API.Comparators;
 using API.Extensions;
 using API.Interfaces.Services;
 using API.Services.Tasks;
@@ -24,11 +25,13 @@ namespace API.Services
    {
        private readonly ILogger<ArchiveService> _logger;
        private const int ThumbnailWidth = 320; // 153w x 230h
-        private static readonly RecyclableMemoryStreamManager _streamManager = new RecyclableMemoryStreamManager();
+        private static readonly RecyclableMemoryStreamManager _streamManager = new();
+        private readonly NaturalSortComparer _comparer;
 
        public ArchiveService(ILogger<ArchiveService> logger)
        {
            _logger = logger;
+            _comparer = new NaturalSortComparer();
        }
 
        /// <summary>
@@ -58,7 +61,7 @@ namespace API.Services
                }
            }
        }
-        
+
        public int GetNumberOfPagesFromArchive(string archivePath)
        {
            if (!IsValidArchive(archivePath))
@@ -76,14 +79,14 @@ namespace API.Services
                {
                    _logger.LogDebug("Using default compression handling");
                    using ZipArchive archive = ZipFile.OpenRead(archivePath);
-                    return archive.Entries.Count(e => !e.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(e.FullName));
+                    return archive.Entries.Count(e => !Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Parser.Parser.IsImage(e.FullName));
                }
                case ArchiveLibrary.SharpCompress:
                {
                    _logger.LogDebug("Using SharpCompress compression handling");
                    using var archive = ArchiveFactory.Open(archivePath);
                    return archive.Entries.Count(entry => !entry.IsDirectory &&
-                                                          !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                          !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
                                                          && Parser.Parser.IsImage(entry.Key));
                }
                case ArchiveLibrary.NotSupported:
@@ -121,8 +124,14 @@ namespace API.Services
                {
                    _logger.LogDebug("Using default compression handling");
                    using var archive = ZipFile.OpenRead(archivePath);
-                    var folder = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
-                    var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && !x.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
+                    // NOTE: We can probably reduce our iteration by performing 1 filter on MACOSX, then doing our folder and image checks.
+                    var folder = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
+                                                                      && Parser.Parser.IsImage(x.FullName)
+                                                                      && Parser.Parser.IsCoverImage(x.FullName));
+                    var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName)
+                                                             && !Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
+                                                             && Parser.Parser.IsImage(x.FullName))
+                        .OrderBy(x => x.FullName, _comparer).ToList();
                    var entry = folder ?? entries[0];
                    return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
@@ -131,9 +140,12 @@ namespace API.Services
                {
                    _logger.LogDebug("Using SharpCompress compression handling");
                    using var archive = ArchiveFactory.Open(archivePath);
-                    return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory
-                                                                         && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
-                                                                         && Parser.Parser.IsImage(entry.Key)), createThumbnail);
+                    var entries = archive.Entries
+                        .Where(entry => !entry.IsDirectory
+                                        && !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
+                                        && Parser.Parser.IsImage(entry.Key))
+                        .OrderBy(x => x.Key, _comparer);
+                    return FindCoverImage(entries, createThumbnail);
                }
                case ArchiveLibrary.NotSupported:
                    _logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath);
@@ -199,7 +211,7 @@ namespace API.Services
            // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName
            return archive.Entries.Count > 0 &&
                   !Path.HasExtension(archive.Entries.ElementAt(0).FullName) ||
-                   archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !e.FullName.Contains("__MACOSX"));
+                   archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Parser.Parser.HasBlacklistedFolderInPath(e.FullName));
        }
 
        private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg")
@@ -266,7 +278,7 @@ namespace API.Services
        {
            foreach (var entry in entries)
            {
-                if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key))
+                if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && !Parser.Parser.HasBlacklistedFolderInPath(entry.Key) && Parser.Parser.IsXml(entry.Key))
                {
                    using var ms = _streamManager.GetStream();
                    entry.WriteTo(ms);
@@ -300,7 +312,7 @@ namespace API.Services
                {
                    _logger.LogDebug("Using default compression handling");
                    using var archive = ZipFile.OpenRead(archivePath);
-                    var entry = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
+                    var entry = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName) && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
                    if (entry != null)
                    {
                        using var stream = entry.Open();
@@ -314,7 +326,7 @@ namespace API.Services
                    _logger.LogDebug("Using SharpCompress compression handling");
                    using var archive = ArchiveFactory.Open(archivePath);
                    info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
-                                                                           && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                           && !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
                                                                           && Parser.Parser.IsXml(entry.Key)));
                    break;
                }
@@ -400,7 +412,7 @@ namespace API.Services
                    _logger.LogDebug("Using SharpCompress compression handling");
                    using var archive = ArchiveFactory.Open(archivePath);
                    ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
-                                                                          && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                          && !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
                                                                          && Parser.Parser.IsImage(entry.Key)), extractPath);
                    break;
                }
diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs
index 68b4fbb2c..17797b875 100644
--- a/API/Services/Tasks/ScannerService.cs
+++ b/API/Services/Tasks/ScannerService.cs
@@ -5,6 +5,7 @@ using System.Diagnostics;
 using System.IO;
 using System.Linq;
 using System.Threading.Tasks;
+using API.Comparators;
 using API.Entities;
 using API.Entities.Enums;
 using API.Extensions;
@@ -23,6 +24,7 @@ namespace API.Services.Tasks
        private readonly IArchiveService _archiveService;
        private readonly IMetadataService _metadataService;
        private ConcurrentDictionary<string, List<ParserInfo>> _scannedSeries;
+        private readonly NaturalSortComparer _naturalSort;
 
        public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
           IMetadataService metadataService)
@@ -31,6 +33,7 @@ namespace API.Services.Tasks
            _logger = logger;
            _archiveService = archiveService;
            _metadataService = metadataService;
+            _naturalSort = new NaturalSortComparer(true);
        }
 
 
@@ -159,10 +162,13 @@ namespace API.Services.Tasks
            // First, remove any series that are not in parsedSeries list
            var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
 
-            var missingSeries = library.Series.Where(existingSeries =>
-                !foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name)
-                || (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName))
-                || !parsedSeries.ContainsKey(existingSeries.OriginalName));
+            // var missingSeries = library.Series.Where(existingSeries =>
+            //     !foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name)
+            //     || (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName))
+            //     || !parsedSeries.ContainsKey(existingSeries.OriginalName));
+
+            var missingSeries = library.Series.Where(existingSeries => !existingSeries.NameInList(foundSeries)
+                                                                       || !existingSeries.NameInList(parsedSeries.Keys));
            var removeCount = 0;
            foreach (var existingSeries in missingSeries)
            {
@@ -198,7 +204,7 @@ namespace API.Services.Tasks
            Parallel.ForEach(librarySeries, (series) =>
            {
                _logger.LogInformation("Processing series {SeriesName}", series.Name);
-                UpdateVolumes(series, parsedSeries[series.Name].ToArray());
+                UpdateVolumes(series, parsedSeries[series.OriginalName].ToArray());
                series.Pages = series.Volumes.Sum(v => v.Pages);
            });
 
@@ -247,16 +253,15 @@ namespace API.Services.Tasks
        private void UpdateChapters(Volume volume, ParserInfo[] parsedInfos)
        {
            var startingChapters = volume.Chapters.Count;
-            
-            
+
            // Add new chapters
            foreach (var info in parsedInfos)
            {
                var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
                // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
-                // also are treated like specials
+                // also are treated like specials for UI grouping.
                _logger.LogDebug("Adding new chapters, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment);
-                // If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
+                // NOTE: If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
                // vs Historys strongest), this code will break and the duplicate will be skipped.
                Chapter chapter = null;
                try
@@ -312,6 +317,7 @@ namespace API.Services.Tasks
 
 
 
 
+            // Remove chapters that aren't in parsedInfos or have no files linked
            var existingChapters = volume.Chapters.ToList();
            foreach (var existingChapter in existingChapters)
@@ -324,8 +330,17 @@ namespace API.Services.Tasks
                {
                    volume.Chapters.Remove(existingChapter);
                }
+                else
+                {
+                    // Ensure we remove any files that no longer exist AND order the remaining files
+                    existingChapter.Files = existingChapter.Files
+                        .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
+                        .OrderBy(f => f.FilePath, _naturalSort).ToList();
+                }
            }
+
+            _logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}", startingChapters, volume.Chapters.Count);
        }
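
Note (not part of the patch): the fixes for #151 and #152 both come down to ordering entries with the NaturalSortComparer added above instead of the default lexicographic OrderBy. A minimal standalone sketch of the difference, assuming only the NaturalSortComparer class from this repo; the demo class and sample file names are illustrative, not from this commit:

    using System;
    using System.Linq;
    using API.Comparators; // NaturalSortComparer from the diff above

    public static class NaturalSortDemo
    {
        public static void Main()
        {
            // Hypothetical page file names inside an archive
            var files = new[] { "x10.jpg", "x2.jpg", "x1.jpg", "x11.jpg" };

            // Default string ordering is lexicographic: x1, x10, x11, x2,
            // so "x10.jpg" sorted ahead of "x2.jpg" when the first entry was picked as the cover.
            var lexicographic = files.OrderBy(f => f);

            // NaturalSortComparer compares the numeric runs as numbers: x1, x2, x10, x11,
            // which is the ordering GetCoverImage and UpdateChapters now apply.
            var natural = files.OrderBy(f => f, new NaturalSortComparer());

            Console.WriteLine(string.Join(", ", lexicographic));
            Console.WriteLine(string.Join(", ", natural));
        }
    }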