Mirror of https://github.com/Kareadita/Kavita.git, synced 2025-06-02 05:04:14 -04:00
* Added Last Folder Scanned time to series info modal. Tweaked the info event detail modal to have a primary and thus be auto-dismissable.
* Added an error event when multiple series are found in processing a series.
* Fixed a bug where a series could get stuck with other series due to a bad select query. Started adding the force flag hook for the UI and designing the confirm. Confirm service now also has the ability to hide the close button. Updated error events and logging in the loop to be more informative.
* Fixed a bug where confirm service wasn't showing the proper body content.
* Hooked up force scan series.
* Refresh metadata now has force update.
* Fixed up the messaging with the prompt on scan and hooked it up properly in the scan library to avoid the check of whether the whole library even needs to be scanned. Fixed a bug where NormalizedLocalizedName wasn't being calculated on new entities. Started adding unit tests for this problematic repo method.
* Fixed a bug where we updated NormalizedLocalizedName before we set it.
* Send an info to the UI when series are spread between multiple library-level folders.
* Added some logger output when there are no files found in a folder. Return early if there are no files found, so we can avoid some small loops of code.
* Fixed an issue where multiple series in a folder with localized series would cause unintended grouping. This is not supported, and hence we will warn the user and allow the bad grouping.
* Added a case where scan series fails due to the folder being removed. We will now log an error.
* Normalize paths when finding the highest directory till root.
* Fixed an issue with Scan Series where changing a series' folder to a different path, while the original series folder still existed with another series in it, would cause the series to not be deleted.
* Fixed some bugs around specials causing a series merge issue on scan series.
* Removed a bug marker.
* Cleaned up some of the scan loop and removed a test I don't need.
* Remove any prompts for force flow, it doesn't work well. Leave the API as is though.
* Fixed up a check for duplicate ScanLibrary calls.
799 lines · 32 KiB · C#
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Parser;
using API.Services.Tasks.Metadata;
using API.SignalR;
using Hangfire;
using Microsoft.Extensions.Logging;

namespace API.Services.Tasks.Scanner;

public interface IProcessSeries
{
    /// <summary>
    /// Do not allow this Prime to be invoked by multiple threads. It will break the DB.
    /// </summary>
    /// <returns></returns>
    Task Prime();
    Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library);
    void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false);
}

/// <summary>
/// All code needed to Update a Series from a Scan action
/// </summary>
public class ProcessSeries : IProcessSeries
{
    private readonly IUnitOfWork _unitOfWork;
    private readonly ILogger<ProcessSeries> _logger;
    private readonly IEventHub _eventHub;
    private readonly IDirectoryService _directoryService;
    private readonly ICacheHelper _cacheHelper;
    private readonly IReadingItemService _readingItemService;
    private readonly IFileService _fileService;
    private readonly IMetadataService _metadataService;
    private readonly IWordCountAnalyzerService _wordCountAnalyzerService;

    private IList<Genre> _genres;
    private IList<Person> _people;
    private IList<Tag> _tags;

    public ProcessSeries(IUnitOfWork unitOfWork, ILogger<ProcessSeries> logger, IEventHub eventHub,
        IDirectoryService directoryService, ICacheHelper cacheHelper, IReadingItemService readingItemService,
        IFileService fileService, IMetadataService metadataService, IWordCountAnalyzerService wordCountAnalyzerService)
    {
        _unitOfWork = unitOfWork;
        _logger = logger;
        _eventHub = eventHub;
        _directoryService = directoryService;
        _cacheHelper = cacheHelper;
        _readingItemService = readingItemService;
        _fileService = fileService;
        _metadataService = metadataService;
        _wordCountAnalyzerService = wordCountAnalyzerService;
    }

    /// <summary>
    /// Invoke this before processing any series, just once to prime all the needed data during a scan
    /// </summary>
    public async Task Prime()
    {
        _genres = await _unitOfWork.GenreRepository.GetAllGenresAsync();
        _people = await _unitOfWork.PersonRepository.GetAllPeople();
        _tags = await _unitOfWork.TagRepository.GetAllTagsAsync();
    }

    public async Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library)
    {
        if (!parsedInfos.Any()) return;

        var seriesAdded = false;
        var scanWatch = Stopwatch.StartNew();
        var seriesName = parsedInfos.First().Series;
        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Updated, seriesName));
        _logger.LogInformation("[ScannerService] Beginning series update on {SeriesName}", seriesName);

        // Check if there is a Series
        var firstInfo = parsedInfos.First();
        Series series;
        try
        {
            series =
                await _unitOfWork.SeriesRepository.GetFullSeriesByAnyName(firstInfo.Series, firstInfo.LocalizedSeries,
                    library.Id);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName} for library {LibraryId}. This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries, library.Id);
            await _eventHub.SendMessageAsync(MessageFactory.Error,
                MessageFactory.ErrorEvent($"There was an exception finding existing series for {firstInfo.Series} with Localized name of {firstInfo.LocalizedSeries} for library {library.Id}",
                    "This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan."));
            return;
        }

        if (series == null)
        {
            seriesAdded = true;
            series = DbFactory.Series(firstInfo.Series, firstInfo.LocalizedSeries);
        }

        if (series.LibraryId == 0) series.LibraryId = library.Id;

        try
        {
            _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);

            UpdateVolumes(series, parsedInfos);
            series.Pages = series.Volumes.Sum(v => v.Pages);

            series.NormalizedName = Parser.Parser.Normalize(series.Name);
            series.OriginalName ??= parsedInfos[0].Series;
            if (series.Format == MangaFormat.Unknown)
            {
                series.Format = parsedInfos[0].Format;
            }

            if (string.IsNullOrEmpty(series.SortName))
            {
                series.SortName = series.Name;
            }
            if (!series.SortNameLocked)
            {
                series.SortName = series.Name;
                if (!string.IsNullOrEmpty(parsedInfos[0].SeriesSort))
                {
                    series.SortName = parsedInfos[0].SeriesSort;
                }
            }

            // parsedInfos[0] is not necessarily the first volume or chapter, so find the first info that has a localized series set
            var localizedSeries = parsedInfos.Select(p => p.LocalizedSeries).FirstOrDefault(p => !string.IsNullOrEmpty(p));
            if (!series.LocalizedNameLocked && !string.IsNullOrEmpty(localizedSeries))
            {
                series.LocalizedName = localizedSeries;
                series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName);
            }

            // Update series FolderPath here (TODO: Move this into its own private method)
            var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(l => l.Path), parsedInfos.Select(f => f.FullFilePath).ToList());
            if (seriesDirs.Keys.Count == 0)
            {
                _logger.LogCritical("Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are under a single folder from library");
                await _eventHub.SendMessageAsync(MessageFactory.Info,
                    MessageFactory.InfoEvent($"{series.Name} has files spread outside a single series folder",
                        "This has negative performance effects. Please ensure all series are under a single folder from library"));
            }
            else
            {
                // Don't save FolderPath if it's a library Folder
                if (!library.Folders.Select(f => f.Path).Contains(seriesDirs.Keys.First()))
                {
                    series.FolderPath = Parser.Parser.NormalizePath(seriesDirs.Keys.First());
                }
            }

            series.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
            UpdateSeriesMetadata(series, library.Type);

            series.LastFolderScanned = DateTime.Now;
            _unitOfWork.SeriesRepository.Attach(series);

            try
            {
                await _unitOfWork.CommitAsync();
            }
            catch (Exception ex)
            {
                await _unitOfWork.RollbackAsync();
                _logger.LogCritical(ex, "[ScannerService] There was an issue writing to the DB for series {@SeriesName}", series);

                await _eventHub.SendMessageAsync(MessageFactory.Error,
                    MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series}",
                        string.Empty));
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[ScannerService] There was an exception updating series for {SeriesName}", series.Name);
        }

        if (seriesAdded)
        {
            await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
                MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId));
        }

        _logger.LogInformation("[ScannerService] Finished series update on {SeriesName} in {Milliseconds} ms", seriesName, scanWatch.ElapsedMilliseconds);
        EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id, false);
    }

    public void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false)
    {
        BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForSeries(libraryId, seriesId, forceUpdate));
        BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(libraryId, seriesId, forceUpdate));
    }

    private static void UpdateSeriesMetadata(Series series, LibraryType libraryType)
    {
        var isBook = libraryType == LibraryType.Book;
        var firstChapter = SeriesService.GetFirstChapterForMetadata(series, isBook);

        var firstFile = firstChapter?.Files.FirstOrDefault();
        if (firstFile == null) return;
        if (Parser.Parser.IsPdf(firstFile.FilePath)) return;

        var chapters = series.Volumes.SelectMany(volume => volume.Chapters).ToList();

        // Update Metadata based on Chapter metadata
        series.Metadata.ReleaseYear = chapters.Min(c => c.ReleaseDate.Year);

        if (series.Metadata.ReleaseYear < 1000)
        {
            // Not a valid year, default to 0
            series.Metadata.ReleaseYear = 0;
        }

        // Set the AgeRating as highest in all the comicInfos
        if (!series.Metadata.AgeRatingLocked) series.Metadata.AgeRating = chapters.Max(chapter => chapter.AgeRating);

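        // Descriptive note (added for clarity): TotalCount is fed from ComicInfo's Count field via
        // UpdateChapterFromComicInfo below, while MaxCount is the highest per-chapter Count, which is
        // derived from ComicInfo's Number/Volume fields.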
        series.Metadata.TotalCount = chapters.Max(chapter => chapter.TotalCount);
        series.Metadata.MaxCount = chapters.Max(chapter => chapter.Count);
        // To not have to rely completely on ComicInfo, try to parse out if the series is complete by checking parsed filenames as well.
        if (series.Metadata.MaxCount != series.Metadata.TotalCount)
        {
            var maxVolume = series.Volumes.Max(v => (int) Parser.Parser.MaxNumberFromRange(v.Name));
            var maxChapter = chapters.Max(c => (int) Parser.Parser.MaxNumberFromRange(c.Range));
            if (maxVolume == series.Metadata.TotalCount) series.Metadata.MaxCount = maxVolume;
            else if (maxChapter == series.Metadata.TotalCount) series.Metadata.MaxCount = maxChapter;
        }

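        // Descriptive note (added for clarity): the block below derives publication status from the counts above.
        // A series is marked Completed once MaxCount reaches TotalCount, Ended when both counts are known but
        // MaxCount has not caught up, and stays OnGoing otherwise.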
        if (!series.Metadata.PublicationStatusLocked)
        {
            series.Metadata.PublicationStatus = PublicationStatus.OnGoing;
            if (series.Metadata.MaxCount >= series.Metadata.TotalCount && series.Metadata.TotalCount > 0)
            {
                series.Metadata.PublicationStatus = PublicationStatus.Completed;
            } else if (series.Metadata.TotalCount > 0 && series.Metadata.MaxCount > 0)
            {
                series.Metadata.PublicationStatus = PublicationStatus.Ended;
            }
        }

        if (!string.IsNullOrEmpty(firstChapter.Summary) && !series.Metadata.SummaryLocked)
        {
            series.Metadata.Summary = firstChapter.Summary;
        }

        if (!string.IsNullOrEmpty(firstChapter.Language) && !series.Metadata.LanguageLocked)
        {
            series.Metadata.Language = firstChapter.Language;
        }

        // Handle People
        foreach (var chapter in chapters)
        {
            if (!series.Metadata.WriterLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Writer))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.CoverArtistLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.CoverArtist))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.PublisherLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Publisher))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.CharacterLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Character))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.ColoristLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Colorist))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.EditorLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Editor))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.InkerLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Inker))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.LettererLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Letterer))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.PencillerLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Penciller))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.TranslatorLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Translator))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.TagsLocked)
            {
                foreach (var tag in chapter.Tags)
                {
                    TagHelper.AddTagIfNotExists(series.Metadata.Tags, tag);
                }
            }

            if (!series.Metadata.GenresLocked)
            {
                foreach (var genre in chapter.Genres)
                {
                    GenreHelper.AddGenreIfNotExists(series.Metadata.Genres, genre);
                }
            }
        }

        // NOTE: The issue here is that people is just from chapter, but series metadata might already have some people on it
        // I might be able to filter out people that are in locked fields?
        var people = chapters.SelectMany(c => c.People).ToList();
        PersonHelper.KeepOnlySamePeopleBetweenLists(series.Metadata.People,
            people, person =>
            {
                switch (person.Role)
                {
                    case PersonRole.Writer:
                        if (!series.Metadata.WriterLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Penciller:
                        if (!series.Metadata.PencillerLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Inker:
                        if (!series.Metadata.InkerLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Colorist:
                        if (!series.Metadata.ColoristLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Letterer:
                        if (!series.Metadata.LettererLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.CoverArtist:
                        if (!series.Metadata.CoverArtistLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Editor:
                        if (!series.Metadata.EditorLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Publisher:
                        if (!series.Metadata.PublisherLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Character:
                        if (!series.Metadata.CharacterLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Translator:
                        if (!series.Metadata.TranslatorLocked) series.Metadata.People.Remove(person);
                        break;
                    default:
                        series.Metadata.People.Remove(person);
                        break;
                }
            });
    }

    private void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos)
    {
        var startingVolumeCount = series.Volumes.Count;
        // Add new volumes and update chapters per volume
        var distinctVolumes = parsedInfos.DistinctVolumes();
        _logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
        foreach (var volumeNumber in distinctVolumes)
        {
            var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
            if (volume == null)
            {
                volume = DbFactory.Volume(volumeNumber);
                volume.SeriesId = series.Id;
                series.Volumes.Add(volume);
                _unitOfWork.VolumeRepository.Add(volume);
            }

            volume.Name = volumeNumber;

            _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
            var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
            UpdateChapters(series, volume, infos);
            volume.Pages = volume.Chapters.Sum(c => c.Pages);

            // Update all the metadata on the Chapters
            foreach (var chapter in volume.Chapters)
            {
                var firstFile = chapter.Files.MinBy(x => x.Chapter);
                if (firstFile == null || _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, firstFile)) continue;
                try
                {
                    var firstChapterInfo = infos.SingleOrDefault(i => i.FullFilePath.Equals(firstFile.FilePath));
                    UpdateChapterFromComicInfo(chapter, firstChapterInfo?.ComicInfo);
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "There was some issue when updating chapter's metadata");
                }
            }
        }

        // Remove existing volumes that aren't in parsedInfos
        var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList();
        if (series.Volumes.Count != nonDeletedVolumes.Count)
        {
            _logger.LogDebug("[ScannerService] Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
                (series.Volumes.Count - nonDeletedVolumes.Count), series.Name);
            var deletedVolumes = series.Volumes.Except(nonDeletedVolumes);
            foreach (var volume in deletedVolumes)
            {
                var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? "";
                if (!string.IsNullOrEmpty(file) && _directoryService.FileSystem.File.Exists(file))
                {
                    _logger.LogError(
                        "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}",
                        file);
                }

                _logger.LogDebug("[ScannerService] Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file);
            }

            series.Volumes = nonDeletedVolumes;
        }

        _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
            series.Name, startingVolumeCount, series.Volumes.Count);
    }

    private void UpdateChapters(Series series, Volume volume, IList<ParserInfo> parsedInfos)
    {
        // Add new chapters
        foreach (var info in parsedInfos)
        {
            // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
            // also are treated like specials for UI grouping.
            Chapter chapter;
            try
            {
                chapter = volume.Chapters.GetChapterByRange(info);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "{FileName} mapped as '{Series} - Vol {Volume} Ch {Chapter}' is a duplicate, skipping", info.FullFilePath, info.Series, info.Volumes, info.Chapters);
                continue;
            }

            if (chapter == null)
            {
                _logger.LogDebug(
                    "[ScannerService] Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters);
                chapter = DbFactory.Chapter(info);
                volume.Chapters.Add(chapter);
                series.LastChapterAdded = DateTime.Now;
            }
            else
            {
                chapter.UpdateFrom(info);
            }

            if (chapter == null) continue;
            // Add files
            var specialTreatment = info.IsSpecialInfo();
            AddOrUpdateFileForChapter(chapter, info);
            chapter.Number = Parser.Parser.MinNumberFromRange(info.Chapters) + string.Empty;
            chapter.Range = specialTreatment ? info.Filename : info.Chapters;
        }

        // Remove chapters that aren't in parsedInfos or have no files linked
        var existingChapters = volume.Chapters.ToList();
        foreach (var existingChapter in existingChapters)
        {
            if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter))
            {
                _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
                volume.Chapters.Remove(existingChapter);
            }
            else
            {
                // Ensure we remove any files that no longer exist AND order
                existingChapter.Files = existingChapter.Files
                    .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
                    .OrderByNatural(f => f.FilePath).ToList();
                existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);
            }
        }
    }

    private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
    {
        chapter.Files ??= new List<MangaFile>();
        var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
        if (existingFile != null)
        {
            existingFile.Format = info.Format;
            if (!_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return;
            existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
            // We skip updating DB here with last modified time so that metadata refresh can do it
        }
        else
        {
            var file = DbFactory.MangaFile(info.FullFilePath, info.Format, _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format));
            if (file == null) return;

            chapter.Files.Add(file);
        }
    }

    #nullable enable
    private void UpdateChapterFromComicInfo(Chapter chapter, ComicInfo? info)
    {
        var firstFile = chapter.Files.MinBy(x => x.Chapter);
        if (firstFile == null ||
            _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, firstFile)) return;

        var comicInfo = info;
        if (info == null)
        {
            comicInfo = _readingItemService.GetComicInfo(firstFile.FilePath);
        }

        if (comicInfo == null) return;
        _logger.LogDebug("[ScannerService] Read ComicInfo for {File}", firstFile.FilePath);

        chapter.AgeRating = ComicInfo.ConvertAgeRatingToEnum(comicInfo.AgeRating);

        if (!string.IsNullOrEmpty(comicInfo.Title))
        {
            chapter.TitleName = comicInfo.Title.Trim();
        }

        if (!string.IsNullOrEmpty(comicInfo.Summary))
        {
            chapter.Summary = comicInfo.Summary;
        }

        if (!string.IsNullOrEmpty(comicInfo.LanguageISO))
        {
            chapter.Language = comicInfo.LanguageISO;
        }

        if (comicInfo.Count > 0)
        {
            chapter.TotalCount = comicInfo.Count;
        }

        // This needs to check against both Number and Volume to calculate Count
        if (!string.IsNullOrEmpty(comicInfo.Number) && float.Parse(comicInfo.Number) > 0)
        {
            chapter.Count = (int) Math.Floor(float.Parse(comicInfo.Number));
        }
        if (!string.IsNullOrEmpty(comicInfo.Volume) && float.Parse(comicInfo.Volume) > 0)
        {
            chapter.Count = Math.Max(chapter.Count, (int) Math.Floor(float.Parse(comicInfo.Volume)));
        }

        void AddPerson(Person person)
        {
            PersonHelper.AddPersonIfNotExists(chapter.People, person);
        }

        void AddGenre(Genre genre)
        {
            //chapter.Genres.Add(genre);
            GenreHelper.AddGenreIfNotExists(chapter.Genres, genre);
        }

        void AddTag(Tag tag, bool added)
        {
            //chapter.Tags.Add(tag);
            TagHelper.AddTagIfNotExists(chapter.Tags, tag);
        }

        if (comicInfo.Year > 0)
        {
            var day = Math.Max(comicInfo.Day, 1);
            var month = Math.Max(comicInfo.Month, 1);
            chapter.ReleaseDate = DateTime.Parse($"{month}/{day}/{comicInfo.Year}");
        }

        var people = GetTagValues(comicInfo.Colorist);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Colorist);
        UpdatePeople(people, PersonRole.Colorist,
            AddPerson);

        people = GetTagValues(comicInfo.Characters);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Character);
        UpdatePeople(people, PersonRole.Character,
            AddPerson);

        people = GetTagValues(comicInfo.Translator);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Translator);
        UpdatePeople(people, PersonRole.Translator,
            AddPerson);

        people = GetTagValues(comicInfo.Writer);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Writer);
        UpdatePeople(people, PersonRole.Writer,
            AddPerson);

        people = GetTagValues(comicInfo.Editor);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Editor);
        UpdatePeople(people, PersonRole.Editor,
            AddPerson);

        people = GetTagValues(comicInfo.Inker);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Inker);
        UpdatePeople(people, PersonRole.Inker,
            AddPerson);

        people = GetTagValues(comicInfo.Letterer);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Letterer);
        UpdatePeople(people, PersonRole.Letterer,
            AddPerson);

        people = GetTagValues(comicInfo.Penciller);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Penciller);
        UpdatePeople(people, PersonRole.Penciller,
            AddPerson);

        people = GetTagValues(comicInfo.CoverArtist);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.CoverArtist);
        UpdatePeople(people, PersonRole.CoverArtist,
            AddPerson);

        people = GetTagValues(comicInfo.Publisher);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Publisher);
        UpdatePeople(people, PersonRole.Publisher,
            AddPerson);

        var genres = GetTagValues(comicInfo.Genre);
        GenreHelper.KeepOnlySameGenreBetweenLists(chapter.Genres, genres.Select(g => DbFactory.Genre(g, false)).ToList());
        UpdateGenre(genres, false,
            AddGenre);

        var tags = GetTagValues(comicInfo.Tags);
        TagHelper.KeepOnlySameTagBetweenLists(chapter.Tags, tags.Select(t => DbFactory.Tag(t, false)).ToList());
        UpdateTag(tags, false,
            AddTag);
    }

    private static IList<string> GetTagValues(string comicInfoTagSeparatedByComma)
    {
        if (!string.IsNullOrEmpty(comicInfoTagSeparatedByComma))
        {
            return comicInfoTagSeparatedByComma.Split(",").Select(s => s.Trim()).ToList();
        }
        return ImmutableList<string>.Empty;
    }
    #nullable disable

    /// <summary>
    /// Given a list of all existing people, this will check the new names and roles and if one doesn't exist in allPeople, will create and
    /// add an entry. For each person in names, the callback will be executed.
    /// </summary>
    /// <remarks>This does not remove people if an empty list is passed into names</remarks>
    /// <remarks>This is used to add new people to a list without worrying about duplicating rows in the DB</remarks>
    /// <param name="names"></param>
    /// <param name="role"></param>
    /// <param name="action"></param>
    private void UpdatePeople(IEnumerable<string> names, PersonRole role, Action<Person> action)
    {
        var allPeopleTypeRole = _people.Where(p => p.Role == role).ToList();

        foreach (var name in names)
        {
            var normalizedName = Parser.Parser.Normalize(name);
            var person = allPeopleTypeRole.FirstOrDefault(p =>
                p.NormalizedName.Equals(normalizedName));
            if (person == null)
            {
                person = DbFactory.Person(name, role);
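                // Note (added for clarity): _people is scan-wide shared state primed in Prime(); the lock below
                // guards it against concurrent additions, since series processing may run on multiple threads during a scan.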
                lock (_people)
                {
                    _people.Add(person);
                }
            }

            action(person);
        }
    }

    /// <summary>
    /// Given a list of genre names, ensures each exists in the primed genre list (creating it if needed) and invokes the callback for each resolved genre.
    /// </summary>
    /// <param name="names"></param>
    /// <param name="isExternal"></param>
    /// <param name="action"></param>
    private void UpdateGenre(IEnumerable<string> names, bool isExternal, Action<Genre> action)
    {
        foreach (var name in names)
        {
            if (string.IsNullOrEmpty(name.Trim())) continue;

            var normalizedName = Parser.Parser.Normalize(name);
            var genre = _genres.FirstOrDefault(p =>
                p.NormalizedTitle.Equals(normalizedName) && p.ExternalTag == isExternal);
            if (genre == null)
            {
                genre = DbFactory.Genre(name, false);
                lock (_genres)
                {
                    _genres.Add(genre);
                }
            }

            action(genre);
        }
    }

    /// <summary>
    /// Given a list of tag names, ensures each exists in the primed tag list (creating it if needed) and invokes the callback for each resolved tag.
    /// </summary>
    /// <param name="names"></param>
    /// <param name="isExternal"></param>
    /// <param name="action">Callback for every item. Will give said item back and a bool if item was added</param>
    private void UpdateTag(IEnumerable<string> names, bool isExternal, Action<Tag, bool> action)
    {
        foreach (var name in names)
        {
            if (string.IsNullOrEmpty(name.Trim())) continue;

            var added = false;
            var normalizedName = Parser.Parser.Normalize(name);

            var tag = _tags.FirstOrDefault(p =>
                p.NormalizedTitle.Equals(normalizedName) && p.ExternalTag == isExternal);
            if (tag == null)
            {
                added = true;
                tag = DbFactory.Tag(name, false);
                lock (_tags)
                {
                    _tags.Add(tag);
                }
            }

            action(tag, added);
        }
    }
}