diff --git a/API.Tests/Extensions/SeriesExtensionsTests.cs b/API.Tests/Extensions/SeriesExtensionsTests.cs new file mode 100644 index 000000000..687ca9ca0 --- /dev/null +++ b/API.Tests/Extensions/SeriesExtensionsTests.cs @@ -0,0 +1,26 @@ +using API.Entities; +using API.Extensions; +using Xunit; + +namespace API.Tests.Extensions +{ + public class SeriesExtensionsTests + { + [Theory] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, false)] + public void NameInListTest(string[] seriesInput, string[] list, bool expected) + { + var series = new Series() + { + Name = seriesInput[0], + LocalizedName = seriesInput[1], + OriginalName = seriesInput[2], + NormalizedName = Parser.Parser.Normalize(seriesInput[0]) + }; + + Assert.Equal(expected, series.NameInList(list)); + } + } +} \ No newline at end of file diff --git a/API.Tests/ParserTest.cs b/API.Tests/ParserTest.cs index 3699c743d..145bca0fa 100644 --- a/API.Tests/ParserTest.cs +++ b/API.Tests/ParserTest.cs @@ -198,6 +198,7 @@ namespace API.Tests [InlineData("1", "001")] [InlineData("10", "010")] [InlineData("100", "100")] + [InlineData("4-8", "004-008")] public void PadZerosTest(string input, string expected) { Assert.Equal(expected, PadZeros(input)); @@ -266,6 +267,7 @@ namespace API.Tests [Theory] [InlineData("Darker Than Black", "darkerthanblack")] [InlineData("Darker Than Black - Something", "darkerthanblacksomething")] + [InlineData("Darker Than_Black", "darkerthanblack")] [InlineData("", "")] public void NormalizeTest(string input, string expected) { @@ -384,6 +386,14 @@ namespace API.Tests Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath)); } + [Theory] + [InlineData("image.png", 
MangaFormat.Image)] + [InlineData("image.cbz", MangaFormat.Archive)] + [InlineData("image.txt", MangaFormat.Unknown)] + public void ParseFormatTest(string inputFile, MangaFormat expected) + { + Assert.Equal(expected, ParseFormat(inputFile)); + } [Fact] public void ParseInfoTest() diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs index c052a8880..bc1ea6de0 100644 --- a/API.Tests/Services/ScannerServiceTests.cs +++ b/API.Tests/Services/ScannerServiceTests.cs @@ -1,12 +1,17 @@ using System; +using System.Collections.Concurrent; using System.Collections.Generic; +using System.Linq; using API.Entities; +using API.Extensions; using API.Interfaces; using API.Interfaces.Services; +using API.Parser; using API.Services; using API.Services.Tasks; using Microsoft.Extensions.Logging; using NSubstitute; +using NSubstitute.Extensions; using Xunit; using Xunit.Abstractions; @@ -28,33 +33,126 @@ namespace API.Tests.Services _testOutputHelper = testOutputHelper; _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService); _metadataService= Substitute.For<MetadataService>(_unitOfWork, _metadataLogger, _archiveService); - _libraryMock = new Library() - { - Id = 1, - Name = "Manga", - Folders = new List<FolderPath>() - { - new FolderPath() - { - Id = 1, - LastScanned = DateTime.Now, - LibraryId = 1, - Path = "E:/Manga" - } - }, - LastModified = DateTime.Now, - Series = new List<Series>() - { - new Series() - { - Id = 0, - Name = "Darker Than Black" - } - } - }; + // _libraryMock = new Library() + // { + // Id = 1, + // Name = "Manga", + // Folders = new List<FolderPath>() + // { + // new FolderPath() + // { + // Id = 1, + // LastScanned = DateTime.Now, + // LibraryId = 1, + // Path = "E:/Manga" + // } + // }, + // LastModified = DateTime.Now, + // Series = new List<Series>() + // { + // new Series() + // { + // Id = 0, + // Name = "Darker Than Black" + // } + // } + // }; } + [Fact] + public void FindSeriesNotOnDisk_Should_RemoveNothing_Test() + { + var 
scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService); + var infos = new Dictionary<string, List<ParserInfo>>(); + + AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"}); + AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1"}); + AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10"}); + + var existingSeries = new List<Series>(); + existingSeries.Add(new Series() + { + Name = "Cage of Eden", + LocalizedName = "Cage of Eden", + OriginalName = "Cage of Eden", + NormalizedName = Parser.Parser.Normalize("Cage of Eden") + }); + existingSeries.Add(new Series() + { + Name = "Darker Than Black", + LocalizedName = "Darker Than Black", + OriginalName = "Darker Than Black", + NormalizedName = Parser.Parser.Normalize("Darker Than Black") + }); + var expectedSeries = new List<Series>(); + + + + Assert.Empty(scannerService.FindSeriesNotOnDisk(existingSeries, infos)); + } + + [Theory] + [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")] + [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")] + [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker Than Black!")] + [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")] + public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected) + { + var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService); + + var collectedSeries = new ConcurrentDictionary<string, List<ParserInfo>>(); + foreach (var seriesName in existingSeriesNames) + { + AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName}); + } + + var actualName = scannerService.MergeName(collectedSeries, new ParserInfo() + { + Series = parsedInfoName + }); + + Assert.Equal(expected, actualName); + } + + private void AddToParsedInfo(IDictionary<string, List<ParserInfo>> collectedSeries, ParserInfo info) + { + if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>)) + { + 
((ConcurrentDictionary<string, List<ParserInfo>>) collectedSeries).AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) => + { + oldValue ??= new List<ParserInfo>(); + if (!oldValue.Contains(info)) + { + oldValue.Add(info); + } + + return oldValue; + }); + } + else + { + if (!collectedSeries.ContainsKey(info.Series)) + { + collectedSeries.Add(info.Series, new List<ParserInfo>() {info}); + } + else + { + var list = collectedSeries[info.Series]; + if (!list.Contains(info)) + { + list.Add(info); + } + + collectedSeries[info.Series] = list; + } + + } + + } + + + // [Fact] // public void ExistingOrDefault_Should_BeFromLibrary() // { diff --git a/API/Extensions/SeriesExtensions.cs b/API/Extensions/SeriesExtensions.cs index f2a896987..5680c52d2 100644 --- a/API/Extensions/SeriesExtensions.cs +++ b/API/Extensions/SeriesExtensions.cs @@ -15,7 +15,7 @@ namespace API.Extensions { foreach (var name in list) { - if (name == series.Name || name == series.LocalizedName || name == series.OriginalName || name == series.NormalizedName) + if (Parser.Parser.Normalize(name) == series.NormalizedName || name == series.Name || name == series.LocalizedName || name == series.OriginalName) { return true; } diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index 4e5521dfc..cb00043dd 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -409,7 +409,7 @@ namespace API.Parser return ret.Series == string.Empty ? 
null : ret; } - private static MangaFormat ParseFormat(string filePath) + public static MangaFormat ParseFormat(string filePath) { if (IsArchive(filePath)) return MangaFormat.Archive; if (IsImage(filePath)) return MangaFormat.Image; @@ -742,7 +742,7 @@ namespace API.Parser public static string Normalize(string name) { - return name.ToLower().Replace("-", "").Replace(" ", "").Replace(":", ""); + return name.ToLower().Replace("-", "").Replace(" ", "").Replace(":", "").Replace("_", ""); } /// <summary> diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 17797b875..16a7679e7 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -155,27 +155,16 @@ namespace API.Services.Tasks BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); } + private void UpdateLibrary(Library library, Dictionary<string, List<ParserInfo>> parsedSeries) { if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries)); // First, remove any series that are not in parsedSeries list - var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList(); - // var missingSeries = library.Series.Where(existingSeries => - // !foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name) - // || (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName)) - // || !parsedSeries.ContainsKey(existingSeries.OriginalName)); - - var missingSeries = library.Series.Where(existingSeries => !existingSeries.NameInList(foundSeries) - || !existingSeries.NameInList(parsedSeries.Keys)); - var removeCount = 0; - foreach (var existingSeries in missingSeries) - { - library.Series?.Remove(existingSeries); - removeCount += 1; - } - _logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount); + var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries); + var removeCount = 
RemoveMissingSeries(library.Series, missingSeries); + _logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk", removeCount); // Add new series that have parsedInfos foreach (var (key, _) in parsedSeries) @@ -207,9 +196,29 @@ UpdateVolumes(series, parsedSeries[series.OriginalName].ToArray()); series.Pages = series.Volumes.Sum(v => v.Pages); }); - + } - foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now; + public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<string, List<ParserInfo>> parsedSeries) + { + var foundSeries = parsedSeries.Select(s => s.Key).ToList(); + var missingSeries = existingSeries.Where(existingSeries => !existingSeries.NameInList(foundSeries) + || !existingSeries.NameInList(parsedSeries.Keys)); + return missingSeries; + } + + public int RemoveMissingSeries(ICollection<Series> existingSeries, IEnumerable<Series> missingSeries) + { + + var removeCount = 0; + //library.Series = library.Series.Except(missingSeries).ToList(); + if (existingSeries == null || existingSeries.Count == 0) return 0; + foreach (var existing in missingSeries) + { + existingSeries.Remove(existing); + removeCount += 1; + } + + return removeCount; } private void UpdateVolumes(Series series, ParserInfo[] parsedInfos) @@ -266,6 +275,7 @@ Chapter chapter = null; try { + // TODO: Extract to FindExistingChapter() chapter = specialTreatment ? 
volume.Chapters.SingleOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath) @@ -317,7 +327,7 @@ - + // TODO: Extract to // Remove chapters that aren't in parsedInfos or have no files linked var existingChapters = volume.Chapters.ToList(); foreach (var existingChapter in existingChapters) @@ -354,15 +364,7 @@ if (info.Series == string.Empty) return; // Check if normalized info.Series already exists and if so, update info to use that name instead - var normalizedSeries = Parser.Parser.Normalize(info.Series); - _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries); - var existingName = _scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries) .Key; - if (!string.IsNullOrEmpty(existingName) && info.Series != existingName) - { - _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName); - info.Series = existingName; - } + info.Series = MergeName(_scannedSeries, info); _scannedSeries.AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) => { @@ -376,6 +378,21 @@ }); } + public string MergeName(ConcurrentDictionary<string, List<ParserInfo>> collectedSeries, ParserInfo info) + { + var normalizedSeries = Parser.Parser.Normalize(info.Series); + _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries); + var existingName = collectedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries) + .Key; + if (!string.IsNullOrEmpty(existingName) && info.Series != existingName) + { + _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName); + return existingName; + } + + return info.Series; + } + /// <summary> /// Processes files found during a library scan. /// Populates a collection of <see cref="ParserInfo"/> for DB updates later.