More regex tweaking and use cases for real library.
commit 7cd0b80ac2
parent 8498d25aa7
@@ -29,6 +29,7 @@ namespace API.Tests
         [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1")]
         [InlineData("Future Diary v02 (2009) (Digital) (Viz).cbz", "2")]
         [InlineData("Mujaki no Rakuen Vol12 ch76", "12")]
+        [InlineData("Ichinensei_ni_Nacchattara_v02_ch11_[Taruby]_v1.3.zip", "2")]
         public void ParseVolumeTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseVolume(filename));
@@ -66,6 +67,10 @@ namespace API.Tests
         [InlineData("Mujaki no Rakuen Vol12 ch76", "Mujaki no Rakuen")]
         [InlineData("Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans]", "Knights of Sidonia")]
         [InlineData("Vol 1.cbz", "")]
+        [InlineData("Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", "Ichinensei ni Nacchattara")]
+        [InlineData("Chrno_Crusade_Dragon_Age_All_Stars[AS].zip", "")]
+        [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip", "Ichiban Ushiro no Daimaou")]
+        [InlineData("[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", "Epigraph of the Closed Curve")]
         public void ParseSeriesTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseSeries(filename));
@@ -92,6 +97,7 @@ namespace API.Tests
         [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1-6")]
         [InlineData("Mujaki no Rakuen Vol12 ch76", "76")]
         [InlineData("Beelzebub_01_[Noodles].zip", "1")]
+        [InlineData("[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", "6")]
         public void ParseChaptersTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseChapter(filename));
@@ -176,13 +182,37 @@ namespace API.Tests
                 FullFilePath = filepath
             });
+
+            filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
+            expected.Add(filepath, new ParserInfo
+            {
+                Series = "Ichinensei ni Nacchattara", Volumes = "1",
+                Chapters = "1", Filename = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", Format = MangaFormat.Archive,
+                FullFilePath = filepath
+            });
+
+            // filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
+            // expected.Add(filepath, new ParserInfo
+            // {
+            // Series = "Ichinensei ni Nacchattara", Volumes = "1",
+            // Chapters = "1", Filename = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", Format = MangaFormat.Archive,
+            // FullFilePath = filepath
+            // });
 
             filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
             expected.Add(filepath, new ParserInfo
             {
                 Series = "Tenjo Tenge", Volumes = "1", Edition = "Full Contact Edition",
                 Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
                 FullFilePath = filepath
             });
+
+            // filepath = @"E:\Manga\Steins Gate - Epigraph of the Closed Curve\[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip";
+            // expected.Add(filepath, new ParserInfo
+            // {
+            // Series = "Steins Gate - Epigraph of the Closed Curve", Volumes = "0", Edition = "",
+            // Chapters = "6", Filename = "[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", Format = MangaFormat.Archive,
+            // FullFilePath = filepath
+            // });
 
 
 
@@ -144,6 +144,14 @@ namespace API.Controllers
         [Authorize(Policy = "RequireAdminRole")]
         [HttpPost("scan")]
         public ActionResult Scan(int libraryId)
+        {
+            _taskScheduler.ScanLibrary(libraryId, false);
+            return Ok();
+        }
+
+        [Authorize(Policy = "RequireAdminRole")]
+        [HttpPost("refresh-metadata")]
+        public ActionResult RefreshMetadata(int libraryId)
         {
             _taskScheduler.ScanLibrary(libraryId, true);
             return Ok();
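The hunk above splits the old scan action in two: `Scan` now queues a scan without forcing a metadata refresh (`false`), while the new `RefreshMetadata` endpoint reuses the old body and forces the refresh (`true`). Below is a minimal client-side sketch of calling the new endpoint; the `api/library` route prefix and the bearer-token handling are assumptions not shown in this diff, while the `refresh-metadata` action name, the `libraryId` parameter, and the admin-role requirement come from the hunk.

```csharp
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

public static class LibraryClientExample
{
    // Hypothetical client call: the "api/library" prefix and JWT handling are assumptions;
    // only the "refresh-metadata" action and the libraryId query parameter come from the diff.
    public static async Task TriggerRefreshAsync(HttpClient client, string adminJwt, int libraryId)
    {
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", adminJwt);

        // POST api/library/refresh-metadata?libraryId=1 -> 200 OK once the refresh task is queued.
        var response = await client.PostAsync($"api/library/refresh-metadata?libraryId={libraryId}", null);
        response.EnsureSuccessStatusCode();
    }
}
```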
@@ -13,6 +13,10 @@ namespace API.Parser
         //?: is a non-capturing group in C#, else anything in () will be a group
         private static readonly Regex[] MangaVolumeRegex = new[]
         {
+            // Dance in the Vampire Bund v16-17
+            new Regex(
+                @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
             new Regex(
                 @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)",
@@ -25,10 +29,7 @@ namespace API.Parser
             new Regex(
                 @"(volume )(?<Volume>0?[1-9]+)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
-            // Dance in the Vampire Bund v16-17
-            new Regex(
-                @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)",
-                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Tower Of God S01 014 (CBT) (digital).cbz
             new Regex(
                 @"(?<Series>.*)(\b|_|)(S(?<Volume>\d+))",
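Taken together, the two hunks above move the volume pattern that requires a trailing space or underscore to the top of `MangaVolumeRegex` and drop the unanchored duplicate. That anchor is what lets a file carrying a scanlation release tag keep its real volume. A rough sketch against the new test filename follows; the final `TrimStart('0')` stands in for whatever normalization `ParseVolume` applies to the captured group (the test added above expects `"2"`).

```csharp
using System;
using System.Text.RegularExpressions;

class VolumeRegexDemo
{
    static void Main()
    {
        // Anchored pattern added in this commit: v<digits> must be followed by a space or underscore.
        var anchored = new Regex(@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled);

        var match = anchored.Match("Ichinensei_ni_Nacchattara_v02_ch11_[Taruby]_v1.3.zip");

        // "v02_" matches; the release tag "v1.3" does not, because '.' is neither a space nor an underscore.
        Console.WriteLine(match.Groups["Volume"].Value);                // 02
        Console.WriteLine(match.Groups["Volume"].Value.TrimStart('0')); // 2 (assumed normalization in ParseVolume)
    }
}
```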
@@ -38,9 +39,12 @@ namespace API.Parser
 
         private static readonly Regex[] MangaSeriesRegex = new[]
         {
+            // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip
+            new Regex(
+                @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)( |_)",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto]
             new Regex(
-
                 @"(?<Series>.*)( - )(?:v|vo|c)\d",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
@@ -55,6 +59,11 @@ namespace API.Parser
             new Regex(
                 @"(?<Series>.*)(\bc\d+\b)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
+            //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
+            // due to duplicate version identifiers in file.
+            new Regex(
+                @"(?<Series>.*)(v|s)\d+(-\d+)?(_| )",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             //[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
             new Regex(
                 @"(?<Series>.*)(v|s)\d+(-\d+)?",
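The comment on this new `MangaSeriesRegex` entry is the heart of the commit: release names such as `Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip` carry a second, release-version `v1.1` token, and a greedy `(?<Series>.*)` backtracks to the last place the rest of the pattern fits. Requiring a trailing `(_| )` stops the match at `v01_`, which is why this entry has to sit above the looser `(?<Series>.*)(v|s)\d+(-\d+)?` rule. A sketch of the two patterns in isolation (how `ParseSeries`/`CleanTitle` then turn the captured group into `"Ichinensei ni Nacchattara"` is assumed from the test above, not shown in this hunk):

```csharp
using System;
using System.Text.RegularExpressions;

class SeriesRegexDemo
{
    static void Main()
    {
        const string file = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";

        // New, anchored entry: the version token must be followed by '_' or ' '.
        var anchored = new Regex(@"(?<Series>.*)(v|s)\d+(-\d+)?(_| )", RegexOptions.IgnoreCase);
        // Existing, looser entry further down the array.
        var loose    = new Regex(@"(?<Series>.*)(v|s)\d+(-\d+)?", RegexOptions.IgnoreCase);

        // Greedy .* backtracks to the LAST spot where the rest of the pattern fits:
        Console.WriteLine(anchored.Match(file).Groups["Series"].Value);
        // -> "Ichinensei_ni_Nacchattara_"  (stops at v01_, since "v1.1" is not followed by '_' or ' ')
        Console.WriteLine(loose.Match(file).Groups["Series"].Value);
        // -> "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_"  (runs all the way to the release tag v1.1)
    }
}
```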
@@ -72,9 +81,9 @@ namespace API.Parser
                 @"(?<Series>.*)\(\d",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
 
-            // Black Bullet (This is very loose, keep towards bottom)
+            // Black Bullet (This is very loose, keep towards bottom) (?<Series>.*)(_)(v|vo|c|volume)
             new Regex(
-                @"(?<Series>.*)(_)(v|vo|c|volume)",
+                @"(?<Series>.*)(_)(v|vo|c|volume)( |_)\d+",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar
             new Regex(
@@ -82,7 +91,7 @@ namespace API.Parser
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last)
             new Regex(
-                @"(?<Series>.*)( |_)(c)",
+                @"(?<Series>.*)( |_)(c)\d+",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
         };
 
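Both catch-all series rules are tightened here to demand digits after the volume/chapter token. Under the old case-insensitive `(?<Series>.*)( |_)(c)` rule, a file with no chapter number at all, such as the new `Chrno_Crusade_Dragon_Age_All_Stars[AS].zip` test case, would latch onto the `_C` of `_Crusade` and truncate the series to `Chrno`; with `\d+` appended the rule simply fails and the parser falls through, matching the empty-series expectation in the test above. In isolation:

```csharp
using System;
using System.Text.RegularExpressions;

class LooseRegexDemo
{
    static void Main()
    {
        const string file = "Chrno_Crusade_Dragon_Age_All_Stars[AS].zip";

        var oldRule = new Regex(@"(?<Series>.*)( |_)(c)", RegexOptions.IgnoreCase);
        var newRule = new Regex(@"(?<Series>.*)( |_)(c)\d+", RegexOptions.IgnoreCase);

        // The old rule latches onto the "_C" of "_Crusade" and mangles the title.
        Console.WriteLine(oldRule.Match(file).Groups["Series"].Value); // Chrno
        // The new rule needs a chapter number after the 'c', so it simply does not match.
        Console.WriteLine(newRule.Match(file).Success);                // False
    }
}
```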
@@ -134,9 +143,14 @@ namespace API.Parser
 
         private static readonly Regex[] CleanupRegex =
         {
+            // (), {}, []
             new Regex(
                 @"(?<Cleanup>(\{\}|\[\]|\(\)))",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
+            // (Complete)
+            new Regex(
+                @"(?<Cleanup>(\{Complete\}|\[Complete\]|\(Complete\)))",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
         };
 
 
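`CleanupRegex` now strips a literal `(Complete)` / `[Complete]` / `{Complete}` marker in addition to empty bracket pairs. A sketch of the intended effect, assuming the cleanup step applies each pattern with `Regex.Replace` and trims the result (the actual `CleanTitle` plumbing is outside this hunk, and the sample title is made up for illustration):

```csharp
using System;
using System.Text.RegularExpressions;

class CleanupDemo
{
    static void Main()
    {
        Regex[] cleanupRegex =
        {
            new Regex(@"(?<Cleanup>(\{\}|\[\]|\(\)))", RegexOptions.IgnoreCase),
            new Regex(@"(?<Cleanup>(\{Complete\}|\[Complete\]|\(Complete\)))", RegexOptions.IgnoreCase),
        };

        // Hypothetical title left over after the series/volume tokens were stripped.
        var title = "Black Bullet (Complete) []";
        foreach (var regex in cleanupRegex)
        {
            title = regex.Replace(title, string.Empty);
        }

        Console.WriteLine(title.Trim()); // Black Bullet
    }
}
```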
@@ -165,6 +179,9 @@ namespace API.Parser
             {
                 ret.Series = ParseSeries(directoryName);
                 if (ret.Series == string.Empty) ret.Series = CleanTitle(directoryName);
+            } else if (directoryName != null && directoryName.Contains(ret.Series))
+            {
+                ret.Series = directoryName; // TODO: Validate if this works better overall for grouping.
             }
 
             var edition = ParseEdition(fileName);
@@ -201,8 +218,7 @@ namespace API.Parser
                     }
                 }
             }
 
-            Console.WriteLine("Unable to parse Edition of {0}", filePath);
             return string.Empty;
         }
 
@@ -219,8 +235,7 @@ namespace API.Parser
                     }
                 }
             }
 
-            Console.WriteLine("Unable to parse Series of {0}", filename);
             return string.Empty;
         }
 
@@ -242,8 +257,7 @@ namespace API.Parser
 
                 }
             }
 
-            Console.WriteLine("Unable to parse Volume of {0}", filename);
             return "0";
         }
 
@@ -25,5 +25,9 @@ namespace API.Parser
         /// This can potentially story things like "Omnibus, Color, Full Contact Edition, Extra, Final, etc"
         /// </summary>
         public string Edition { get; set; } = "";
+        /// <summary>
+        /// If this file is some sort of side story that links back to some master series.
+        /// </summary>
+        public bool IsSpecial { get; set; } = false;
     }
 }
@@ -129,16 +129,19 @@ namespace API.Services
             return allSeries;
         }
 
-        private static void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, ConcurrentBag<ParserInfo>> series, Library library)
+        private void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, ConcurrentBag<ParserInfo>> series, Library library)
         {
+            var count = 0;
             foreach (var existingSeries in allSeries)
             {
                 if (!series.ContainsKey(existingSeries.Name) || !series.ContainsKey(existingSeries.OriginalName))
                 {
                     // Delete series, there is no file to backup any longer.
                     library.Series?.Remove(existingSeries);
+                    count++;
                 }
             }
+            _logger.LogInformation($"Removed {count} series that are no longer on disk");
         }
 
 
@@ -174,7 +177,67 @@ namespace API.Services
                 _scannedSeries.TryAdd(info.Series, newBag);
             }
         }
 
+        private void Match(ConcurrentDictionary<string, ConcurrentBag<ParserInfo>> scannedSeries, string filePath)
+        {
+            var info = Parser.Parser.Parse(filePath);
+            // I want to cross corelate with other series. So if I have
+            // Darker than Black and Darker than Black - Side Stories,
+            // we end up with Darker than Black with a Volume of "Specials" and Side - Stories belongs in there.
+
+            if (info == null)
+            {
+                _logger.LogInformation($"Could not parse series from {filePath}");
+                return;
+            }
+
+            // NOTE: This was pointless due to changes in how we Parse
+            var existingKey = scannedSeries.Keys.SingleOrDefault(k => info.Series.ToLower().Contains(k.ToLower()));
+            if (existingKey != null && existingKey.ToLower() == info.Series.ToLower())
+            {
+                // Perform an add to existing infos
+                _logger.LogDebug($"Adding {info.Series} to existing {existingKey}");
+                AddToScannedSeries(existingKey, info);
+
+            }
+            else if (existingKey != null)
+            {
+                _logger.LogDebug($"Found that {info.Series} might be a special for {existingKey}. Adding as special.");
+                info.IsSpecial = true;
+                AddToScannedSeries(existingKey, info);
+            }
+            else
+            {
+                _logger.LogDebug($"Adding {info.Series} as new entry.");
+                AddToScannedSeries(info.Series, info);
+            }
+
+        }
+
+        private void AddToScannedSeries(string key, ParserInfo info)
+        {
+            ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
+            if (_scannedSeries.TryGetValue(key, out var tempBag))
+            {
+                var existingInfos = tempBag.ToArray();
+                foreach (var existingInfo in existingInfos)
+                {
+                    newBag.Add(existingInfo);
+                }
+            }
+            else
+            {
+                tempBag = new ConcurrentBag<ParserInfo>();
+            }
+
+            newBag.Add(info);
+
+            if (!_scannedSeries.TryUpdate(info.Series, newBag, tempBag))
+            {
+                _scannedSeries.TryAdd(info.Series, newBag);
+            }
+        }
+
         /// <summary>
         /// Processes files found during a library scan.
         /// Populates a collection of <see cref="ParserInfo"/> for DB updates later.
@@ -183,13 +246,16 @@ namespace API.Services
         private void ProcessFile(string path)
         {
             var info = Parser.Parser.Parse(path);
+
             if (info == null)
             {
                 _logger.LogInformation($"Could not parse series from {path}");
                 return;
             }
 
             TrackSeries(info);
+
+            //Match(_scannedSeries, path);
         }
 
         private Series UpdateSeries(Series series, ParserInfo[] infos, bool forceUpdate)
@@ -199,7 +265,12 @@ namespace API.Services
             series.Pages = volumes.Sum(v => v.Pages);
             if (series.CoverImage == null || forceUpdate)
             {
-                series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0)?.CoverImage;
+                var firstCover = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
+                if (firstCover == null && volumes.Any())
+                {
+                    firstCover = volumes.FirstOrDefault(x => x.Number == 0);
+                }
+                series.CoverImage = firstCover?.CoverImage;
             }
             if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
             {
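The cover selection above now falls back to the volume numbered 0 (specials) when a series has no regular volumes, instead of leaving `series.CoverImage` null. A sketch of just that selection logic, with a hypothetical `(Number, CoverImage)` record standing in for the real `Volume` entity:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

class CoverFallbackDemo
{
    // Stand-in for the real Volume entity; only the two members this hunk touches.
    record Vol(int Number, string CoverImage);

    static string PickCover(IList<Vol> volumes)
    {
        // Prefer the lowest-numbered "real" volume, then fall back to volume 0 if that is all there is.
        var firstCover = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
        if (firstCover == null && volumes.Any())
        {
            firstCover = volumes.FirstOrDefault(x => x.Number == 0);
        }
        return firstCover?.CoverImage;
    }

    static void Main()
    {
        // A series with real volumes: the lowest non-zero volume wins.
        Console.WriteLine(PickCover(new[] { new Vol(0, "specials.png"), new Vol(2, "v02.png"), new Vol(1, "v01.png") }));
        // -> v01.png

        // A series that only has a "volume 0" (specials): previously this produced a null cover.
        Console.WriteLine(PickCover(new[] { new Vol(0, "specials.png") }));
        // -> specials.png
    }
}
```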
@@ -273,6 +344,7 @@ namespace API.Services
             }
             else
             {
+                // Create New Volume
                 existingVolume = volumes.SingleOrDefault(v => v.Name == info.Volumes);
                 if (existingVolume != null)
                 {