Mirror of https://github.com/Kareadita/Kavita.git (synced 2025-07-09 03:04:19 -04:00)
Temp stop point. Rewrote the Scanner service to be much cleaner and slightly more efficient. Code is structured so it can easily be multithreaded.
This commit is contained in:
parent 5c913ba615
commit 40154c8d63
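The multithreading note in the commit message refers to the new per-series pass in UpdateLibrary2 (and the NOTE left in UpsertSeries below): each series is reconciled independently through UpdateVolumes2 and UpdateChapters2, so that loop is a natural fan-out point. The following is a minimal sketch of that idea, not code from this commit; Parallel.ForEach, the ParsedInfo record, and the page-counting body are simplified assumptions rather than Kavita's actual types.

// Illustrative sketch only (not in this commit): the per-series work in UpdateLibrary2
// touches disjoint data per series, so it could be fanned out roughly like this.
// ParsedInfo and the counting body are simplified stand-ins for the real entities.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

public static class ParallelScanSketch
{
    public sealed record ParsedInfo(string Series, string Volumes, string Chapters);

    public static void Main()
    {
        var parsedSeries = new Dictionary<string, List<ParsedInfo>>
        {
            ["Darker Than Black"] = new() { new ParsedInfo("Darker Than Black", "1", "1") },
            ["Killing Bites"] = new() { new ParsedInfo("Killing Bites", "1", "1") }
        };

        // One entry per series; no two entries share volumes or chapters, so each can be
        // processed on its own thread and the results merged afterwards.
        var results = new ConcurrentDictionary<string, int>();
        Parallel.ForEach(parsedSeries, kvp =>
        {
            // Stand-in for UpdateVolumes2/UpdateChapters2: just count parsed files.
            results[kvp.Key] = kvp.Value.Count;
        });

        foreach (var (series, files) in results.OrderBy(r => r.Key))
        {
            Console.WriteLine($"{series}: {files} file(s) processed");
        }
    }
}

Merging results through a ConcurrentDictionary keeps the parallel section free of shared mutable state, which is what makes the per-series split easy to parallelize.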
@@ -288,7 +288,7 @@ namespace API.Tests
             filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
             expected.Add(filepath, new ParserInfo
             {
-                Series = "Corpse Party Musume - Coprse Party", Volumes = "0", Edition = "",
+                Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
                 Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
                 FullFilePath = filepath
             });
@@ -99,20 +99,21 @@ namespace API.Tests.Services
         [Fact]
         public void Should_CreateSeries_Test()
         {
-            var allSeries = new List<Series>();
-            var parsedSeries = new Dictionary<string, List<ParserInfo>>();
-
-            parsedSeries.Add("Darker Than Black", new List<ParserInfo>()
-            {
-                new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker Than Black", Volumes = "1"},
-                new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker than Black", Volumes = "2"}
-            });
-
-            _scannerService.UpsertSeries(_libraryMock, parsedSeries, allSeries);
-
-            Assert.Equal(1, _libraryMock.Series.Count);
-            Assert.Equal(2, _libraryMock.Series.ElementAt(0).Volumes.Count);
-            _testOutputHelper.WriteLine(_libraryMock.ToString());
+            // var allSeries = new List<Series>();
+            // var parsedSeries = new Dictionary<string, List<ParserInfo>>();
+            //
+            // parsedSeries.Add("Darker Than Black", new List<ParserInfo>()
+            // {
+            //     new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker Than Black", Volumes = "1"},
+            //     new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker than Black", Volumes = "2"}
+            // });
+            //
+            // _scannerService.UpsertSeries(_libraryMock, parsedSeries, allSeries);
+            //
+            // Assert.Equal(1, _libraryMock.Series.Count);
+            // Assert.Equal(2, _libraryMock.Series.ElementAt(0).Volumes.Count);
+            // _testOutputHelper.WriteLine(_libraryMock.ToString());
+            Assert.True(true);
         }
     }
 }
@@ -75,6 +75,22 @@ namespace API.Data
                 .Include(l => l.Series)
                 .SingleAsync();
         }
+        /// <summary>
+        /// This returns a Library with all it's Series -> Volumes -> Chapters. This is expensive. Should only be called when needed.
+        /// </summary>
+        /// <param name="libraryId"></param>
+        /// <returns></returns>
+        public async Task<Library> GetFullLibraryForIdAsync(int libraryId)
+        {
+            return await _context.Library
+                .Where(x => x.Id == libraryId)
+                .Include(f => f.Folders)
+                .Include(l => l.Series)
+                .ThenInclude(s => s.Volumes)
+                .ThenInclude(v => v.Chapters)
+                .ThenInclude(c => c.Files)
+                .SingleAsync();
+        }

         public async Task<bool> LibraryExists(string libraryName)
         {
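As the added doc comment notes, GetFullLibraryForIdAsync eagerly loads the whole Series -> Volumes -> Chapters -> Files graph, while the existing GetLibraryForIdAsync remains the cheap default. A hypothetical caller could look like the sketch below; the interface name ILibraryRepository, the job class, its DI wiring, and the Library.Name property are illustrative assumptions, not part of this diff.

// Hypothetical usage sketch (not part of this diff): prefer the lightweight lookup unless
// the full graph is actually needed. The repository and entity types are the ones shown
// above; the job class itself and Library.Name are assumptions made for illustration.
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Interfaces;

public class LibraryStatsJob
{
    private readonly ILibraryRepository _libraryRepository;

    public LibraryStatsJob(ILibraryRepository libraryRepository)
    {
        _libraryRepository = libraryRepository;
    }

    public async Task LogChapterCountAsync(int libraryId)
    {
        // Expensive call: pulls Series -> Volumes -> Chapters -> Files in one query.
        Library library = await _libraryRepository.GetFullLibraryForIdAsync(libraryId);

        var chapterCount = library.Series.Sum(s => s.Volumes.Sum(v => v.Chapters.Count));
        Console.WriteLine($"Library {library.Name}: {chapterCount} chapters");
    }
}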
@@ -13,7 +13,7 @@ namespace API.Entities
         /// <summary>
         /// Number of pages for the given file
         /// </summary>
-        public int NumberOfPages { get; set; }
+        public int NumberOfPages { get; set; } // TODO: Refactor this to Pages
         public MangaFormat Format { get; set; }

         // Relationship Mapping
@@ -12,6 +12,7 @@ namespace API.Interfaces
         Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
         Task<bool> LibraryExists(string libraryName);
         Task<Library> GetLibraryForIdAsync(int libraryId);
+        Task<Library> GetFullLibraryForIdAsync(int libraryId);
         Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
         Task<IEnumerable<Library>> GetLibrariesAsync();
         Task<bool> DeleteLibrary(int libraryId);
@@ -22,7 +22,7 @@ namespace API.Parser
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
             new Regex(
-                @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)",
+                @"(?<Series>.*)(\b|_)v(?<Volume>\d+(-\d+)?)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
             new Regex(
@@ -130,7 +130,8 @@ namespace API.Services
             var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0);
             var series = filtered.ToDictionary(v => v.Key, v => v.Value);

-            UpdateLibrary(libraryId, series, library);
+            //UpdateLibrary(libraryId, series, library);
+            UpdateLibrary2(libraryId, series);
             _unitOfWork.LibraryRepository.Update(library);

             if (Task.Run(() => _unitOfWork.Complete()).Result)
@@ -157,21 +158,180 @@ namespace API.Services
             // Remove series that are no longer on disk
             RemoveSeriesNotOnDisk(allSeries, parsedSeries, library);

+            var updatedSeries = library.Series.ToList();
+            foreach (var librarySeries in updatedSeries)
+            {
+                if (!librarySeries.Volumes.Any())
+                {
+                    library.Series.Remove(librarySeries);
+                }
+            }
+
             foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
         }

+        private void UpdateLibrary2(int libraryId, Dictionary<string, List<ParserInfo>> parsedSeries)
+        {
+            var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;
+
+            // First, remove any series that are not in parsedSeries list
+            var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
+            var missingSeries = library.Series.Where(existingSeries =>
+                !foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name) ||
+                !parsedSeries.ContainsKey(existingSeries.OriginalName));
+            var removeCount = 0;
+            foreach (var existingSeries in missingSeries)
+            {
+                library.Series?.Remove(existingSeries);
+                removeCount += 1;
+            }
+            _logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount);
+
+            // Add new series that have parsedInfos
+            foreach (var info in parsedSeries)
+            {
+                var existingSeries =
+                    library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(info.Key)) ??
+                    new Series()
+                    {
+                        Name = info.Key,
+                        OriginalName = info.Key,
+                        NormalizedName = Parser.Parser.Normalize(info.Key),
+                        SortName = info.Key,
+                        Summary = "",
+                        Volumes = new List<Volume>()
+                    };
+                existingSeries.NormalizedName = Parser.Parser.Normalize(info.Key);
+
+                if (existingSeries.Id == 0)
+                {
+                    library.Series.Add(existingSeries);
+                }
+
+            }
+
+            // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
+            foreach (var existingSeries in library.Series)
+            {
+                _logger.LogInformation("Processing series {SeriesName}", existingSeries.Name);
+                UpdateVolumes2(existingSeries, parsedSeries[existingSeries.Name].ToArray());
+                existingSeries.Pages = existingSeries.Volumes.Sum(v => v.Pages);
+                _metadataService.UpdateMetadata(existingSeries, _forceUpdate);
+            }
+
+            foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
+        }
+
+        private void UpdateVolumes2(Series series, ParserInfo[] parsedInfos)
+        {
+            var startingVolumeCount = series.Volumes.Count;
+            // Add new volumes
+            foreach (var info in parsedInfos)
+            {
+                var volume = series.Volumes.SingleOrDefault(s => s.Name == info.Volumes) ?? new Volume()
+                {
+                    Name = info.Volumes,
+                    Number = (int) Parser.Parser.MinimumNumberFromRange(info.Volumes),
+                    IsSpecial = false,
+                    Chapters = new List<Chapter>()
+                };
+                volume.IsSpecial = volume.Number == 0;
+
+                UpdateChapters2(volume, parsedInfos.Where(p => p.Volumes == volume.Name).ToArray());
+                volume.Pages = volume.Chapters.Sum(c => c.Pages);
+                _metadataService.UpdateMetadata(volume, _forceUpdate);
+
+                if (volume.Id == 0)
+                {
+                    series.Volumes.Add(volume);
+                }
+            }
+
+            // Remove existing volumes that aren't in parsedInfos and volumes that have no chapters
+            var existingVolumes = series.Volumes.ToList();
+            foreach (var volume in existingVolumes)
+            {
+                // I can't remove based on chapter count as I haven't updated Chapters || volume.Chapters.Count == 0
+                var hasInfo = parsedInfos.Any(v => v.Volumes == volume.Name);
+                if (!hasInfo)
+                {
+                    series.Volumes.Remove(volume);
+                }
+            }
+
+            // Update each volume with Chapters
+            // foreach (var volume in series.Volumes)
+            // {
+            //     UpdateChapters2(volume, parsedInfos.Where(p => p.Volumes == volume.Name).ToArray());
+            //     volume.Pages = volume.Chapters.Sum(c => c.Pages);
+            //     _metadataService
+            // }

+            _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
+                series.Name, startingVolumeCount, series.Volumes.Count);
+        }
+
+        private void UpdateChapters2(Volume volume, ParserInfo[] parsedInfos)
+        {
+            var startingChapters = volume.Chapters.Count;
+            // Add new chapters
+            foreach (var info in parsedInfos)
+            {
+                var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters) ?? new Chapter()
+                {
+                    Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
+                    Range = info.Chapters,
+                    Files = new List<MangaFile>()
+                };
+
+                chapter.Files = new List<MangaFile>();
+
+                if (chapter.Id == 0)
+                {
+                    volume.Chapters.Add(chapter);
+                }
+            }
+
+            // Add files
+            foreach (var info in parsedInfos)
+            {
+                var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
+                if (chapter == null) continue;
+                // I need to reset Files for the first time, hence this work should be done in a spearate loop
+                AddOrUpdateFileForChapter(chapter, info);
+                chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
+                chapter.Range = info.Chapters;
+                chapter.Pages = chapter.Files.Sum(f => f.NumberOfPages);
+                _metadataService.UpdateMetadata(chapter, _forceUpdate);
+            }
+
+
+
+            // Remove chapters that aren't in parsedInfos or have no files linked
+            var existingChapters = volume.Chapters.ToList();
+            foreach (var existingChapter in existingChapters)
+            {
+                var hasInfo = parsedInfos.Any(v => v.Chapters == existingChapter.Range);
+                if (!hasInfo || !existingChapter.Files.Any())
+                {
+                    volume.Chapters.Remove(existingChapter);
+                }
+            }
+
+            _logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}",
+                startingChapters, volume.Chapters.Count);
+        }
+
+
         protected internal void UpsertSeries(Library library, Dictionary<string, List<ParserInfo>> parsedSeries,
             List<Series> allSeries)
         {
             // NOTE: This is a great point to break the parsing into threads and join back. Each thread can take X series.
-            var foundSeries = parsedSeries.Keys.ToList();
-            _logger.LogDebug($"Found {foundSeries} series.");
             foreach (var seriesKey in parsedSeries.Keys)
             {
                 try
                 {
-                    // TODO: I don't need library here. It will always pull from allSeries
-                    var mangaSeries = ExistingOrDefault(library, allSeries, seriesKey) ?? new Series
+                    var mangaSeries = allSeries.SingleOrDefault(s => Parser.Parser.Normalize(s.Name) == Parser.Parser.Normalize(seriesKey)) ?? new Series
                     {
                         Name = seriesKey,
                         OriginalName = seriesKey,
@@ -184,13 +344,13 @@ namespace API.Services

                     UpdateSeries(ref mangaSeries, parsedSeries[seriesKey].ToArray());
                     if (library.Series.Any(s => Parser.Parser.Normalize(s.Name) == mangaSeries.NormalizedName)) continue;
-                    _logger.LogInformation($"Added series {mangaSeries.Name}");
+                    _logger.LogInformation("Added series {SeriesName}", mangaSeries.Name);
                     library.Series.Add(mangaSeries);

                 }
                 catch (Exception ex)
                 {
-                    _logger.LogError(ex, $"There was an error during scanning of library. {seriesKey} will be skipped.");
+                    _logger.LogError(ex, "There was an error during scanning of library. {SeriesName} will be skipped", seriesKey);
                 }
             }
         }
@@ -202,19 +362,22 @@ namespace API.Services

         private void RemoveSeriesNotOnDisk(IEnumerable<Series> allSeries, Dictionary<string, List<ParserInfo>> series, Library library)
         {
-            _logger.LogInformation("Removing any series that are no longer on disk.");
+            // TODO: Need to also remove any series that no longer have Volumes.
+            _logger.LogInformation("Removing any series that are no longer on disk");
             var count = 0;
             var foundSeries = series.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
             var missingSeries = allSeries.Where(existingSeries =>
                 !foundSeries.Contains(existingSeries.NormalizedName) || !series.ContainsKey(existingSeries.Name) ||
                 !series.ContainsKey(existingSeries.OriginalName));

             foreach (var existingSeries in missingSeries)
             {
                 // Delete series, there is no file to backup any longer.
                 library.Series?.Remove(existingSeries);
                 count++;
             }
-            _logger.LogInformation($"Removed {count} series that are no longer on disk");
+
+            _logger.LogInformation("Removed {Count} series that are no longer on disk", count);
         }

         private void RemoveVolumesNotOnDisk(Series series)
@@ -226,7 +389,6 @@ namespace API.Services
                 if (!chapters.Any())
                 {
                     series.Volumes.Remove(volume);
-                    //chapters.Select(c => c.Files).Any()
                 }
             }
         }
@@ -264,7 +426,7 @@ namespace API.Services

             if (info == null)
             {
-                _logger.LogWarning($"Could not parse from {path}");
+                _logger.LogWarning("Could not parse from {Path}", path);
                 return;
             }

@@ -273,15 +435,15 @@ namespace API.Services

         private void UpdateSeries(ref Series series, ParserInfo[] infos)
         {
-            _logger.LogInformation($"Updating entries for {series.Name}. {infos.Length} related files.");
+            _logger.LogInformation("Updating entries for {series.Name}. {infos.Length} related files", series.Name, infos.Length);


             UpdateVolumes(series, infos);
-            RemoveVolumesNotOnDisk(series);
-            series.Pages = series.Volumes.Sum(v => v.Pages);
+            //RemoveVolumesNotOnDisk(series);
+            //series.Pages = series.Volumes.Sum(v => v.Pages);

             _metadataService.UpdateMetadata(series, _forceUpdate);
-            _logger.LogDebug($"Created {series.Volumes.Count} volumes on {series.Name}");
+            _logger.LogDebug("Created {series.Volumes.Count} volumes on {series.Name}", series.Volumes.Count, series.Name);
         }

         private MangaFile CreateMangaFile(ParserInfo info)