diff --git a/API.Tests/ParserTest.cs b/API.Tests/ParserTest.cs
index 0a3b588b0..d594531e1 100644
--- a/API.Tests/ParserTest.cs
+++ b/API.Tests/ParserTest.cs
@@ -29,6 +29,7 @@ namespace API.Tests
         [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1")]
         [InlineData("Future Diary v02 (2009) (Digital) (Viz).cbz", "2")]
         [InlineData("Mujaki no Rakuen Vol12 ch76", "12")]
+        [InlineData("Ichinensei_ni_Nacchattara_v02_ch11_[Taruby]_v1.3.zip", "2")]
         public void ParseVolumeTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseVolume(filename));
@@ -66,6 +67,10 @@ namespace API.Tests
         [InlineData("Mujaki no Rakuen Vol12 ch76", "Mujaki no Rakuen")]
         [InlineData("Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans]", "Knights of Sidonia")]
         [InlineData("Vol 1.cbz", "")]
+        [InlineData("Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", "Ichinensei ni Nacchattara")]
+        [InlineData("Chrno_Crusade_Dragon_Age_All_Stars[AS].zip", "")]
+        [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip", "Ichiban Ushiro no Daimaou")]
+        [InlineData("[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", "Epigraph of the Closed Curve")]
         public void ParseSeriesTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseSeries(filename));
@@ -92,6 +97,7 @@ namespace API.Tests
         [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1-6")]
         [InlineData("Mujaki no Rakuen Vol12 ch76", "76")]
         [InlineData("Beelzebub_01_[Noodles].zip", "1")]
+        [InlineData("[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", "6")]
         public void ParseChaptersTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseChapter(filename));
@@ -176,13 +182,37 @@ namespace API.Tests
                 FullFilePath = filepath
             });
 
+            filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
+            expected.Add(filepath, new ParserInfo
+            {
+                Series = "Ichinensei ni Nacchattara", Volumes = "1",
+                Chapters = "1", Filename = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", Format = MangaFormat.Archive,
+                FullFilePath = filepath
+            });
+
             filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
             expected.Add(filepath, new ParserInfo
             {
                 Series = "Tenjo Tenge", Volumes = "1", Edition = "Full Contact Edition",
                 Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
                 FullFilePath = filepath
-            });
+            });
+
+            // filepath = @"E:\Manga\Steins Gate - Epigraph of the Closed Curve\[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip";
+            // expected.Add(filepath, new ParserInfo
+            // {
+            //     Series = "Steins Gate - Epigraph of the Closed Curve", Volumes = "0", Edition = "",
+            //     Chapters = "6", Filename = "[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", Format = MangaFormat.Archive,
+            //     FullFilePath = filepath
+            // });
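Reviewer note: the new ParseVolumeTest case above is the interesting one, because the filename carries both a volume marker (v02) and a scanlation release version (v1.3). Below is a minimal standalone sketch of why the reordered volume regex picks the right token; the pattern is copied verbatim from the MangaVolumeRegex change further down, while the class and method names are illustrative only:

using System;
using System.Text.RegularExpressions;

public static class VolumeRegexDemo
{
    // Pattern taken from the new first entry of MangaVolumeRegex. The trailing
    // ( |_) is what rejects release suffixes like "_v1.3": the "." cannot
    // continue \d+-?\d+, so the engine settles on "_v02_" instead.
    private static readonly Regex Volume = new Regex(
        @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
        RegexOptions.IgnoreCase | RegexOptions.Compiled);

    public static void Main()
    {
        var match = Volume.Match("Ichinensei_ni_Nacchattara_v02_ch11_[Taruby]_v1.3.zip");
        // Prints "02"; the surrounding ParseVolume is what trims it to the "2"
        // the test expects.
        Console.WriteLine(match.Groups["Volume"].Value);
    }
}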
diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs
index 67990db2e..ed193a4d0 100644
--- a/API/Controllers/LibraryController.cs
+++ b/API/Controllers/LibraryController.cs
@@ -144,6 +144,14 @@ namespace API.Controllers
         [Authorize(Policy = "RequireAdminRole")]
         [HttpPost("scan")]
         public ActionResult Scan(int libraryId)
+        {
+            _taskScheduler.ScanLibrary(libraryId, false);
+            return Ok();
+        }
+
+        [Authorize(Policy = "RequireAdminRole")]
+        [HttpPost("refresh-metadata")]
+        public ActionResult RefreshMetadata(int libraryId)
         {
             _taskScheduler.ScanLibrary(libraryId, true);
             return Ok();
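Usage note on the split above: Scan now queues a normal scan (forceUpdate: false), while the new refresh-metadata endpoint reuses the same ScanLibrary task with forceUpdate: true. A hedged client-side sketch follows; the "api/library" route prefix and the query-string binding of libraryId are assumptions based on ASP.NET Core conventions, since neither appears in this diff:

using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

public static class LibraryScanClient
{
    // Posts to api/library/scan or api/library/refresh-metadata. Both
    // endpoints require an admin JWT per [Authorize(Policy = "RequireAdminRole")].
    public static async Task TriggerAsync(HttpClient client, string jwt, int libraryId, bool refreshMetadata)
    {
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", jwt);
        var action = refreshMetadata ? "refresh-metadata" : "scan";
        using var response = await client.PostAsync($"api/library/{action}?libraryId={libraryId}", null);
        response.EnsureSuccessStatusCode();
    }
}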
diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs
index 902fb81a5..b859176ca 100644
--- a/API/Parser/Parser.cs
+++ b/API/Parser/Parser.cs
@@ -13,6 +13,10 @@ namespace API.Parser
         //?: is a non-capturing group in C#, else anything in () will be a group
         private static readonly Regex[] MangaVolumeRegex = new[]
         {
+            // Dance in the Vampire Bund v16-17
+            new Regex(
+                @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
             new Regex(
                 @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
@@ -25,10 +29,7 @@ namespace API.Parser
             new Regex(
                 @"(volume )(?<Volume>0?[1-9]+)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
-            // Dance in the Vampire Bund v16-17
-            new Regex(
-                @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)",
-                RegexOptions.IgnoreCase | RegexOptions.Compiled),
+            // Tower Of God S01 014 (CBT) (digital).cbz
             new Regex(
                 @"(?<Series>.*)(\b|_|)(S(?<Volume>\d+))",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
         };
@@ -38,9 +39,12 @@ namespace API.Parser
         private static readonly Regex[] MangaSeriesRegex = new[]
         {
+            // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip
+            new Regex(
+                @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)( |_)",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto]
             new Regex(
-
                 @"(?<Series>.*)( - )(?:v|vo|c)\d",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
             new Regex(
@@ -55,6 +59,11 @@ namespace API.Parser
             new Regex(
                 @"(?<Series>.*)(\bc\d+\b)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
+            //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
+            // due to duplicate version identifiers in file.
+            new Regex(
+                @"(?<Series>.*)(v|s)\d+(-\d+)?(_| )",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             //[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
             new Regex(
                 @"(?<Series>.*)(v|s)\d+(-\d+)?",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
@@ -72,9 +81,9 @@ namespace API.Parser
                 @"(?<Series>.*)\(\d",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
-            // Black Bullet (This is very loose, keep towards bottom)
+            // Black Bullet (This is very loose, keep towards bottom) (?<Series>.*)(_)(v|vo|c|volume)
             new Regex(
-                @"(?<Series>.*)(_)(v|vo|c|volume)",
+                @"(?<Series>.*)(_)(v|vo|c|volume)( |_)\d+",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar
             new Regex(
@@ -82,7 +91,7 @@ namespace API.Parser
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last)
             new Regex(
-                @"(?<Series>.*)( |_)(c)",
+                @"(?<Series>.*)( |_)(c)\d+",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
         };
@@ -134,9 +143,14 @@ namespace API.Parser
         private static readonly Regex[] CleanupRegex =
         {
+            // (), {}, []
             new Regex(
                 @"(?<Cleanup>(\{\}|\[\]|\(\)))",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
+            // (Complete)
+            new Regex(
+                @"(?<Cleanup>(\{Complete\}|\[Complete\]|\(Complete\)))",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
         };
@@ -165,6 +179,9 @@ namespace API.Parser
             {
                 ret.Series = ParseSeries(directoryName);
                 if (ret.Series == string.Empty) ret.Series = CleanTitle(directoryName);
+            } else if (directoryName != null && directoryName.Contains(ret.Series))
+            {
+                ret.Series = directoryName; // TODO: Validate if this works better overall for grouping.
             }
 
             var edition = ParseEdition(fileName);
@@ -201,8 +218,7 @@ namespace API.Parser
                     }
                 }
             }
-
-            Console.WriteLine("Unable to parse Edition of {0}", filePath);
+
             return string.Empty;
         }
@@ -219,8 +235,7 @@ namespace API.Parser
                     }
                 }
             }
-
-            Console.WriteLine("Unable to parse Series of {0}", filename);
+
             return string.Empty;
         }
@@ -242,8 +257,7 @@ namespace API.Parser
                 }
             }
-
-            Console.WriteLine("Unable to parse Volume of {0}", filename);
+
             return "0";
         }
diff --git a/API/Parser/ParserInfo.cs b/API/Parser/ParserInfo.cs
index 2ab08eed0..f61c4bc74 100644
--- a/API/Parser/ParserInfo.cs
+++ b/API/Parser/ParserInfo.cs
@@ -25,5 +25,9 @@ namespace API.Parser
         /// This can potentially store things like "Omnibus, Color, Full Contact Edition, Extra, Final, etc"
         /// </summary>
         public string Edition { get; set; } = "";
+        /// <summary>
+        /// Whether this file is a side story or special that links back to a master series.
+        /// </summary>
+        public bool IsSpecial { get; set; } = false;
     }
 }
\ No newline at end of file
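Since the new IsSpecial flag is only ever set by the (currently disabled) Match() path in ScannerService below, here is one illustrative way a consumer could use it when bucketing parsed files into volumes. The "Specials" bucket name is taken from the comment inside Match(); the grouping itself is a sketch of the stated intent, not something this diff implements:

using System.Collections.Generic;
using System.Linq;
using API.Parser;

public static class SpecialGrouping
{
    // Groups parsed files by volume, routing side stories into a shared
    // "Specials" bucket instead of their (usually meaningless) volume "0".
    public static Dictionary<string, List<ParserInfo>> ByVolume(IEnumerable<ParserInfo> infos)
    {
        return infos
            .GroupBy(i => i.IsSpecial ? "Specials" : i.Volumes)
            .ToDictionary(g => g.Key, g => g.ToList());
    }
}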
diff --git a/API/Services/ScannerService.cs b/API/Services/ScannerService.cs
index 4016553ac..87dcc2859 100644
--- a/API/Services/ScannerService.cs
+++ b/API/Services/ScannerService.cs
@@ -129,16 +129,19 @@ namespace API.Services
             return allSeries;
         }
 
-        private static void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
+        private void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
         {
+            var count = 0;
             foreach (var existingSeries in allSeries)
             {
                 if (!series.ContainsKey(existingSeries.Name) || !series.ContainsKey(existingSeries.OriginalName))
                 {
                     // Delete the series; there is no file backing it any longer.
                     library.Series?.Remove(existingSeries);
+                    count++;
                 }
             }
+            _logger.LogInformation($"Removed {count} series that are no longer on disk");
         }
@@ -174,7 +177,67 @@
                 _scannedSeries.TryAdd(info.Series, newBag);
             }
         }
-
+
+        private void Match(ConcurrentDictionary<string, ConcurrentBag<ParserInfo>> scannedSeries, string filePath)
+        {
+            var info = Parser.Parser.Parse(filePath);
+            // I want to cross-correlate with other series. So if I have
+            // Darker than Black and Darker than Black - Side Stories,
+            // we end up with Darker than Black having a volume of "Specials" that the Side Stories belong in.
+
+            if (info == null)
+            {
+                _logger.LogInformation($"Could not parse series from {filePath}");
+                return;
+            }
+
+            // NOTE: This became pointless due to changes in how we parse.
+            var existingKey = scannedSeries.Keys.SingleOrDefault(k => info.Series.ToLower().Contains(k.ToLower()));
+            if (existingKey != null && existingKey.ToLower() == info.Series.ToLower())
+            {
+                // Perform an add to existing infos
+                _logger.LogDebug($"Adding {info.Series} to existing {existingKey}");
+                AddToScannedSeries(existingKey, info);
+            }
+            else if (existingKey != null)
+            {
+                _logger.LogDebug($"Found that {info.Series} might be a special for {existingKey}. Adding as special.");
+                info.IsSpecial = true;
+                AddToScannedSeries(existingKey, info);
+            }
+            else
+            {
+                _logger.LogDebug($"Adding {info.Series} as new entry.");
+                AddToScannedSeries(info.Series, info);
+            }
+        }
+
+        private void AddToScannedSeries(string key, ParserInfo info)
+        {
+            ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
+            if (_scannedSeries.TryGetValue(key, out var tempBag))
+            {
+                var existingInfos = tempBag.ToArray();
+                foreach (var existingInfo in existingInfos)
+                {
+                    newBag.Add(existingInfo);
+                }
+            }
+            else
+            {
+                tempBag = new ConcurrentBag<ParserInfo>();
+            }
+
+            newBag.Add(info);
+
+            if (!_scannedSeries.TryUpdate(info.Series, newBag, tempBag))
+            {
+                _scannedSeries.TryAdd(info.Series, newBag);
+            }
+        }
+
         /// <summary>
         /// Processes files found during a library scan.
         /// Populates a collection of <see cref="ParserInfo"/> for DB updates later.
         /// </summary>
@@ -183,13 +246,16 @@ namespace API.Services
         private void ProcessFile(string path)
         {
             var info = Parser.Parser.Parse(path);
+
             if (info == null)
             {
                 _logger.LogInformation($"Could not parse series from {path}");
                 return;
             }
-
+
             TrackSeries(info);
+
+            //Match(_scannedSeries, path);
         }
 
         private Series UpdateSeries(Series series, ParserInfo[] infos, bool forceUpdate)
         {
@@ -199,7 +265,12 @@ namespace API.Services
             series.Pages = volumes.Sum(v => v.Pages);
             if (series.CoverImage == null || forceUpdate)
             {
-                series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0)?.CoverImage;
+                var firstCover = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
+                if (firstCover == null && volumes.Any())
+                {
+                    firstCover = volumes.FirstOrDefault(x => x.Number == 0);
+                }
+                series.CoverImage = firstCover?.CoverImage;
             }
             if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
             {
@@ -273,6 +344,7 @@ namespace API.Services
                 }
                 else
                 {
+                    // Create New Volume
                     existingVolume = volumes.SingleOrDefault(v => v.Name == info.Volumes);
                     if (existingVolume != null)
                     {
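Finally, the cover fallback in UpdateSeries deserves a quick restatement: prefer the lowest-numbered real volume's cover, and only fall back to volume 0 (the chapter-only bucket) when no numbered volume exists. Below is a self-contained sketch of the same logic; VolumeStub is a stand-in type with just the two members the code touches, and byte[] for the cover is an assumption, not taken from this diff:

using System.Collections.Generic;
using System.Linq;

public record VolumeStub(int Number, byte[] CoverImage);

public static class CoverPicker
{
    // Mirrors the new UpdateSeries logic: first non-zero volume by number,
    // else volume 0 if that is all the series has.
    public static byte[] PickCover(IReadOnlyList<VolumeStub> volumes)
    {
        var firstCover = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
        if (firstCover == null && volumes.Any())
        {
            firstCover = volumes.FirstOrDefault(x => x.Number == 0);
        }
        return firstCover?.CoverImage;
    }
}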