using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Parser;
using API.Services.Tasks.Metadata;
using API.SignalR;
using Hangfire;
using Kavita.Common;
using Microsoft.Extensions.Logging;

namespace API.Services.Tasks.Scanner;

public interface IProcessSeries
{
    /// <summary>
    /// Do not allow this Prime to be invoked by multiple threads. It will break the DB.
    /// </summary>
    /// <returns></returns>
    Task Prime();
    Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library, bool forceUpdate = false);
    void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false);
}
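
// Illustrative only (added commentary, not part of the original source): a sketch of how a
// caller such as the scanner is assumed to drive this interface. Prime() runs once per scan to
// load the shared Genre/Person/Tag caches, then ProcessSeriesAsync() runs per series;
// EnqueuePostSeriesProcessTasks() is invoked internally at the end of ProcessSeriesAsync to
// defer follow-up work.
//
//     IProcessSeries processSeries = ...; // resolved via DI in the real service
//     await processSeries.Prime();
//     foreach (var (library, parsedInfos) in parsedSeriesGroups) // hypothetical grouping
//     {
//         await processSeries.ProcessSeriesAsync(parsedInfos, library, forceUpdate: false);
//     }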

/// <summary>
/// All code needed to Update a Series from a Scan action
/// </summary>
public class ProcessSeries : IProcessSeries
{
    private readonly IUnitOfWork _unitOfWork;
    private readonly ILogger<ProcessSeries> _logger;
    private readonly IEventHub _eventHub;
    private readonly IDirectoryService _directoryService;
    private readonly ICacheHelper _cacheHelper;
    private readonly IReadingItemService _readingItemService;
    private readonly IFileService _fileService;
    private readonly IMetadataService _metadataService;
    private readonly IWordCountAnalyzerService _wordCountAnalyzerService;

    private IList<Genre> _genres;
    private IList<Person> _people;
    private IList<Tag> _tags;

    public ProcessSeries(IUnitOfWork unitOfWork, ILogger<ProcessSeries> logger, IEventHub eventHub,
        IDirectoryService directoryService, ICacheHelper cacheHelper, IReadingItemService readingItemService,
        IFileService fileService, IMetadataService metadataService, IWordCountAnalyzerService wordCountAnalyzerService)
    {
        _unitOfWork = unitOfWork;
        _logger = logger;
        _eventHub = eventHub;
        _directoryService = directoryService;
        _cacheHelper = cacheHelper;
        _readingItemService = readingItemService;
        _fileService = fileService;
        _metadataService = metadataService;
        _wordCountAnalyzerService = wordCountAnalyzerService;
    }

    /// <summary>
    /// Invoke this before processing any series, just once to prime all the needed data during a scan
    /// </summary>
    public async Task Prime()
    {
        _genres = await _unitOfWork.GenreRepository.GetAllGenresAsync();
        _people = await _unitOfWork.PersonRepository.GetAllPeople();
        _tags = await _unitOfWork.TagRepository.GetAllTagsAsync();
    }
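
    // Note (added commentary, not in the original source): the lists primed above are shared for
    // the lifetime of a scan. UpdatePeople/UpdateGenre/UpdateTag below read them and add newly
    // created entities under a lock, which is why Prime() must run exactly once, before any
    // series is processed.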

    public async Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library, bool forceUpdate = false)
    {
        if (!parsedInfos.Any()) return;

        var seriesAdded = false;
        var scanWatch = Stopwatch.StartNew();
        var seriesName = parsedInfos.First().Series;
        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Updated, seriesName));
        _logger.LogInformation("[ScannerService] Beginning series update on {SeriesName}", seriesName);

        // Check if there is a Series
        var firstInfo = parsedInfos.First();
        Series series;
        try
        {
            series =
                await _unitOfWork.SeriesRepository.GetFullSeriesByAnyName(firstInfo.Series, firstInfo.LocalizedSeries,
                    library.Id, firstInfo.Format);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName} for library {LibraryId}. This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries, library.Id);
            await _eventHub.SendMessageAsync(MessageFactory.Error,
                MessageFactory.ErrorEvent($"There was an exception finding existing series for {firstInfo.Series} with Localized name of {firstInfo.LocalizedSeries} for library {library.Id}",
                    "This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan."));
            return;
        }

        if (series == null)
        {
            seriesAdded = true;
            series = DbFactory.Series(firstInfo.Series, firstInfo.LocalizedSeries);
            _unitOfWork.SeriesRepository.Add(series);
        }

        if (series.LibraryId == 0) series.LibraryId = library.Id;

        try
        {
            _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);

            // parsedInfos[0] is not necessarily the first volume or chapter; prefer the first info with ComicInfo, since series sort is derived from it
            var firstParsedInfo = parsedInfos.FirstOrDefault(p => p.ComicInfo != null, firstInfo);

            UpdateVolumes(series, parsedInfos, forceUpdate);
            series.Pages = series.Volumes.Sum(v => v.Pages);

            series.NormalizedName = Parser.Parser.Normalize(series.Name);
            series.OriginalName ??= firstParsedInfo.Series;
            if (series.Format == MangaFormat.Unknown)
            {
                series.Format = firstParsedInfo.Format;
            }

            if (string.IsNullOrEmpty(series.SortName))
            {
                series.SortName = series.Name;
            }
            if (!series.SortNameLocked)
            {
                series.SortName = series.Name;
                if (!string.IsNullOrEmpty(firstParsedInfo.SeriesSort))
                {
                    series.SortName = firstParsedInfo.SeriesSort;
                }
            }

            // parsedInfos[0] is not necessarily the first volume or chapter; take the first entry that carries a localized series name
            var localizedSeries = parsedInfos.Select(p => p.LocalizedSeries).FirstOrDefault(p => !string.IsNullOrEmpty(p));
            if (!series.LocalizedNameLocked && !string.IsNullOrEmpty(localizedSeries))
            {
                series.LocalizedName = localizedSeries;
                series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName);
            }

            UpdateSeriesMetadata(series, library.Type);

            // Update series FolderPath here
            await UpdateSeriesFolderPath(parsedInfos, library, series);

            series.LastFolderScanned = DateTime.Now;

            if (_unitOfWork.HasChanges())
            {
                try
                {
                    await _unitOfWork.CommitAsync();
                }
                catch (Exception ex)
                {
                    await _unitOfWork.RollbackAsync();
                    _logger.LogCritical(ex,
                        "[ScannerService] There was an issue writing to the database for series {@SeriesName}",
                        series.Name);

                    await _eventHub.SendMessageAsync(MessageFactory.Error,
                        MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series}",
                            ex.Message));
                    return;
                }

                if (seriesAdded)
                {
                    await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
                        MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId), false);
                }

                _logger.LogInformation("[ScannerService] Finished series update on {SeriesName} in {Milliseconds} ms", seriesName, scanWatch.ElapsedMilliseconds);
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[ScannerService] There was an exception updating series for {SeriesName}", series.Name);
        }

        await _metadataService.GenerateCoversForSeries(series, false);
        EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id);
    }

    private async Task UpdateSeriesFolderPath(IEnumerable<ParserInfo> parsedInfos, Library library, Series series)
    {
        var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(l => l.Path),
            parsedInfos.Select(f => f.FullFilePath).ToList());
        if (seriesDirs.Keys.Count == 0)
        {
            _logger.LogCritical(
                "Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are under a single folder from library");
            await _eventHub.SendMessageAsync(MessageFactory.Info,
                MessageFactory.InfoEvent($"{series.Name} has files spread outside a single series folder",
                    "This has negative performance effects. Please ensure all series are under a single folder from library"));
        }
        else
        {
            // Don't save FolderPath if it's a library Folder
            if (!library.Folders.Select(f => f.Path).Contains(seriesDirs.Keys.First()))
            {
                series.FolderPath = Parser.Parser.NormalizePath(seriesDirs.Keys.First());
                _logger.LogDebug("Updating {Series} FolderPath to {FolderPath}", series.Name, series.FolderPath);
            }
        }
    }

    public void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false)
    {
        BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(libraryId, seriesId, forceUpdate));
    }
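
    // Note (added commentary, not in the original source): word count and reading-time analysis
    // is handed off to Hangfire as a fire-and-forget background job here, so the scan itself is
    // not blocked by that heavier work.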

    private static void UpdateSeriesMetadata(Series series, LibraryType libraryType)
    {
        series.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
        var isBook = libraryType == LibraryType.Book;
        var firstChapter = SeriesService.GetFirstChapterForMetadata(series, isBook);

        var firstFile = firstChapter?.Files.FirstOrDefault();
        if (firstFile == null) return;
        if (Parser.Parser.IsPdf(firstFile.FilePath)) return;

        var chapters = series.Volumes.SelectMany(volume => volume.Chapters).ToList();

        // Update Metadata based on Chapter metadata
        if (!series.Metadata.ReleaseYearLocked)
        {
            series.Metadata.ReleaseYear = chapters.MinimumReleaseYear();
        }

        // Set the AgeRating as highest in all the comicInfos
        if (!series.Metadata.AgeRatingLocked) series.Metadata.AgeRating = chapters.Max(chapter => chapter.AgeRating);

        series.Metadata.TotalCount = chapters.Max(chapter => chapter.TotalCount);
        series.Metadata.MaxCount = chapters.Max(chapter => chapter.Count);
        // To not have to rely completely on ComicInfo, try to parse out if the series is complete by checking parsed filenames as well.
        if (series.Metadata.MaxCount != series.Metadata.TotalCount)
        {
            var maxVolume = series.Volumes.Max(v => (int) Parser.Parser.MaxNumberFromRange(v.Name));
            var maxChapter = chapters.Max(c => (int) Parser.Parser.MaxNumberFromRange(c.Range));
            if (maxVolume == series.Metadata.TotalCount) series.Metadata.MaxCount = maxVolume;
            else if (maxChapter == series.Metadata.TotalCount) series.Metadata.MaxCount = maxChapter;
        }

        if (!series.Metadata.PublicationStatusLocked)
        {
            series.Metadata.PublicationStatus = PublicationStatus.OnGoing;
            if (series.Metadata.MaxCount >= series.Metadata.TotalCount && series.Metadata.TotalCount > 0)
            {
                series.Metadata.PublicationStatus = PublicationStatus.Completed;
            } else if (series.Metadata.TotalCount > 0 && series.Metadata.MaxCount > 0)
            {
                series.Metadata.PublicationStatus = PublicationStatus.Ended;
            }
        }

        if (!string.IsNullOrEmpty(firstChapter.Summary) && !series.Metadata.SummaryLocked)
        {
            series.Metadata.Summary = firstChapter.Summary;
        }

        if (!string.IsNullOrEmpty(firstChapter.Language) && !series.Metadata.LanguageLocked)
        {
            series.Metadata.Language = firstChapter.Language;
        }

        // Handle People
        foreach (var chapter in chapters)
        {
            if (!series.Metadata.WriterLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Writer))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.CoverArtistLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.CoverArtist))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.PublisherLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Publisher))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.CharacterLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Character))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.ColoristLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Colorist))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.EditorLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Editor))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.InkerLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Inker))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.LettererLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Letterer))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.PencillerLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Penciller))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.TranslatorLocked)
            {
                foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Translator))
                {
                    PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
                }
            }

            if (!series.Metadata.TagsLocked)
            {
                foreach (var tag in chapter.Tags)
                {
                    TagHelper.AddTagIfNotExists(series.Metadata.Tags, tag);
                }
            }

            if (!series.Metadata.GenresLocked)
            {
                foreach (var genre in chapter.Genres)
                {
                    GenreHelper.AddGenreIfNotExists(series.Metadata.Genres, genre);
                }
            }
        }

        var genres = chapters.SelectMany(c => c.Genres).ToList();
        GenreHelper.KeepOnlySameGenreBetweenLists(series.Metadata.Genres.ToList(), genres, genre =>
        {
            if (series.Metadata.GenresLocked) return;
            series.Metadata.Genres.Remove(genre);
        });

        // NOTE: The issue here is that people is just from chapter, but series metadata might already have some people on it
        // I might be able to filter out people that are in locked fields?
        var people = chapters.SelectMany(c => c.People).ToList();
        PersonHelper.KeepOnlySamePeopleBetweenLists(series.Metadata.People.ToList(),
            people, person =>
            {
                switch (person.Role)
                {
                    case PersonRole.Writer:
                        if (!series.Metadata.WriterLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Penciller:
                        if (!series.Metadata.PencillerLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Inker:
                        if (!series.Metadata.InkerLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Colorist:
                        if (!series.Metadata.ColoristLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Letterer:
                        if (!series.Metadata.LettererLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.CoverArtist:
                        if (!series.Metadata.CoverArtistLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Editor:
                        if (!series.Metadata.EditorLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Publisher:
                        if (!series.Metadata.PublisherLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Character:
                        if (!series.Metadata.CharacterLocked) series.Metadata.People.Remove(person);
                        break;
                    case PersonRole.Translator:
                        if (!series.Metadata.TranslatorLocked) series.Metadata.People.Remove(person);
                        break;
                    default:
                        series.Metadata.People.Remove(person);
                        break;
                }
            });
    }
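
    // Worked example (added commentary, not in the original source): if ComicInfo reports
    // Count = 12 on the chapters (TotalCount = 12) and the highest parsed Number/Volume is also
    // 12 (MaxCount = 12), PublicationStatus becomes Completed; with MaxCount = 7 it becomes
    // Ended; if TotalCount is 0 (no Count in ComicInfo), the series stays OnGoing.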

    private void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
    {
        var startingVolumeCount = series.Volumes.Count;
        // Add new volumes and update chapters per volume
        var distinctVolumes = parsedInfos.DistinctVolumes();
        _logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
        foreach (var volumeNumber in distinctVolumes)
        {
            _logger.LogDebug("[ScannerService] Looking up volume for {VolumeNumber}", volumeNumber);
            Volume volume;
            try
            {
                volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
            }
            catch (Exception ex)
            {
                if (ex.Message.Equals("Sequence contains more than one matching element"))
                {
                    _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
                    throw new KavitaException(
                        $"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan");
                }
                throw;
            }
            if (volume == null)
            {
                volume = DbFactory.Volume(volumeNumber);
                volume.SeriesId = series.Id;
                series.Volumes.Add(volume);
            }

            volume.Name = volumeNumber;

            _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
            var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
            UpdateChapters(series, volume, infos, forceUpdate);
            volume.Pages = volume.Chapters.Sum(c => c.Pages);

            // Update all the metadata on the Chapters
            foreach (var chapter in volume.Chapters)
            {
                var firstFile = chapter.Files.MinBy(x => x.Chapter);
                if (firstFile == null || _cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, firstFile)) continue;
                try
                {
                    var firstChapterInfo = infos.SingleOrDefault(i => i.FullFilePath.Equals(firstFile.FilePath));
                    UpdateChapterFromComicInfo(chapter, firstChapterInfo?.ComicInfo);
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "There was some issue when updating chapter's metadata");
                }
            }
        }

        // Remove existing volumes that aren't in parsedInfos
        var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList();
        if (series.Volumes.Count != nonDeletedVolumes.Count)
        {
            _logger.LogDebug("[ScannerService] Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
                (series.Volumes.Count - nonDeletedVolumes.Count), series.Name);
            var deletedVolumes = series.Volumes.Except(nonDeletedVolumes);
            foreach (var volume in deletedVolumes)
            {
                var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? string.Empty;
                if (!string.IsNullOrEmpty(file) && _directoryService.FileSystem.File.Exists(file))
                {
                    _logger.LogInformation(
                        "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}",
                        file);
                }

                _logger.LogDebug("[ScannerService] Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file);
            }

            series.Volumes = nonDeletedVolumes;
        }

        _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from count of {StartingVolumeCount} to {VolumeCount}",
            series.Name, startingVolumeCount, series.Volumes.Count);
    }

    private void UpdateChapters(Series series, Volume volume, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
    {
        // Add new chapters
        foreach (var info in parsedInfos)
        {
            // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
            // also are treated like specials for UI grouping.
            Chapter chapter;
            try
            {
                chapter = volume.Chapters.GetChapterByRange(info);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "{FileName} mapped as '{Series} - Vol {Volume} Ch {Chapter}' is a duplicate, skipping", info.FullFilePath, info.Series, info.Volumes, info.Chapters);
                continue;
            }

            if (chapter == null)
            {
                _logger.LogDebug(
                    "[ScannerService] Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters);
                chapter = DbFactory.Chapter(info);
                volume.Chapters.Add(chapter);
                series.LastChapterAdded = DateTime.Now;
            }
            else
            {
                chapter.UpdateFrom(info);
            }

            if (chapter == null) continue;
            // Add files
            var specialTreatment = info.IsSpecialInfo();
            AddOrUpdateFileForChapter(chapter, info, forceUpdate);
            chapter.Number = Parser.Parser.MinNumberFromRange(info.Chapters) + string.Empty;
            chapter.Range = specialTreatment ? info.Filename : info.Chapters;
        }

        // Remove chapters that aren't in parsedInfos or have no files linked
        var existingChapters = volume.Chapters.ToList();
        foreach (var existingChapter in existingChapters)
        {
            if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter))
            {
                _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
                volume.Chapters.Remove(existingChapter);
            }
            else
            {
                // Ensure we remove any files that no longer exist AND order
                existingChapter.Files = existingChapter.Files
                    .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
                    .OrderByNatural(f => f.FilePath).ToList();
                existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);
            }
        }
    }

    private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info, bool forceUpdate = false)
    {
        chapter.Files ??= new List<MangaFile>();
        var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
        var fileInfo = _directoryService.FileSystem.FileInfo.FromFileName(info.FullFilePath);
        if (existingFile != null)
        {
            existingFile.Format = info.Format;
            if (!forceUpdate && !_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return;
            existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
            existingFile.Extension = fileInfo.Extension.ToLowerInvariant();
            existingFile.Bytes = fileInfo.Length;
            // We skip updating DB here with last modified time so that metadata refresh can do it
        }
        else
        {
            var file = DbFactory.MangaFile(info.FullFilePath, info.Format, _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format));
            if (file == null) return;
            file.Extension = fileInfo.Extension.ToLowerInvariant();
            file.Bytes = fileInfo.Length;
            chapter.Files.Add(file);
        }
    }
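
    // Example scenario (added commentary, not in the original source): on a rescan where an
    // archive has not been modified since the last scan and its page count is already recorded,
    // the existing MangaFile entry above is left untouched; passing forceUpdate = true bypasses
    // that short-circuit and re-reads pages, extension, and size.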

#nullable enable
    private void UpdateChapterFromComicInfo(Chapter chapter, ComicInfo? info)
    {
        var firstFile = chapter.Files.MinBy(x => x.Chapter);
        if (firstFile == null ||
            _cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, firstFile)) return;

        var comicInfo = info;
        if (info == null)
        {
            comicInfo = _readingItemService.GetComicInfo(firstFile.FilePath);
        }

        if (comicInfo == null) return;
        _logger.LogDebug("[ScannerService] Read ComicInfo for {File}", firstFile.FilePath);

        chapter.AgeRating = ComicInfo.ConvertAgeRatingToEnum(comicInfo.AgeRating);

        if (!string.IsNullOrEmpty(comicInfo.Title))
        {
            chapter.TitleName = comicInfo.Title.Trim();
        }

        if (!string.IsNullOrEmpty(comicInfo.Summary))
        {
            chapter.Summary = comicInfo.Summary;
        }

        if (!string.IsNullOrEmpty(comicInfo.LanguageISO))
        {
            chapter.Language = comicInfo.LanguageISO;
        }

        if (comicInfo.Count > 0)
        {
            chapter.TotalCount = comicInfo.Count;
        }

        // This needs to check against both Number and Volume to calculate Count
        chapter.Count = comicInfo.CalculatedCount();

        void AddPerson(Person person)
        {
            PersonHelper.AddPersonIfNotExists(chapter.People, person);
        }

        void AddGenre(Genre genre)
        {
            GenreHelper.AddGenreIfNotExists(chapter.Genres, genre);
        }

        void AddTag(Tag tag, bool added)
        {
            TagHelper.AddTagIfNotExists(chapter.Tags, tag);
        }

        if (comicInfo.Year > 0)
        {
            var day = Math.Max(comicInfo.Day, 1);
            var month = Math.Max(comicInfo.Month, 1);
            chapter.ReleaseDate = new DateTime(comicInfo.Year, month, day);
        }

        var people = GetTagValues(comicInfo.Colorist);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Colorist);
        UpdatePeople(people, PersonRole.Colorist, AddPerson);

        people = GetTagValues(comicInfo.Characters);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Character);
        UpdatePeople(people, PersonRole.Character, AddPerson);

        people = GetTagValues(comicInfo.Translator);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Translator);
        UpdatePeople(people, PersonRole.Translator, AddPerson);

        people = GetTagValues(comicInfo.Writer);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Writer);
        UpdatePeople(people, PersonRole.Writer, AddPerson);

        people = GetTagValues(comicInfo.Editor);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Editor);
        UpdatePeople(people, PersonRole.Editor, AddPerson);

        people = GetTagValues(comicInfo.Inker);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Inker);
        UpdatePeople(people, PersonRole.Inker, AddPerson);

        people = GetTagValues(comicInfo.Letterer);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Letterer);
        UpdatePeople(people, PersonRole.Letterer, AddPerson);

        people = GetTagValues(comicInfo.Penciller);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Penciller);
        UpdatePeople(people, PersonRole.Penciller, AddPerson);

        people = GetTagValues(comicInfo.CoverArtist);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.CoverArtist);
        UpdatePeople(people, PersonRole.CoverArtist, AddPerson);

        people = GetTagValues(comicInfo.Publisher);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Publisher);
        UpdatePeople(people, PersonRole.Publisher, AddPerson);

        var genres = GetTagValues(comicInfo.Genre);
        GenreHelper.KeepOnlySameGenreBetweenLists(chapter.Genres, genres.Select(g => DbFactory.Genre(g, false)).ToList());
        UpdateGenre(genres, false, AddGenre);

        var tags = GetTagValues(comicInfo.Tags);
        TagHelper.KeepOnlySameTagBetweenLists(chapter.Tags, tags.Select(t => DbFactory.Tag(t, false)).ToList());
        UpdateTag(tags, false, AddTag);
    }
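
    // Illustrative mapping (added commentary, not in the original source): a ComicInfo.xml with
    //   <Title>My Chapter</Title> <Writer>Jane Doe</Writer> <Genre>Action, Drama</Genre>
    //   <AgeRating>Teen</AgeRating> <Year>2020</Year>
    // would set TitleName to "My Chapter", add Jane Doe as a Writer, add the Action and Drama
    // genres, set AgeRating to Teen, and (with no Month/Day) set ReleaseDate to 2020-01-01.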

    private static IList<string> GetTagValues(string comicInfoTagSeparatedByComma)
    {
        if (!string.IsNullOrEmpty(comicInfoTagSeparatedByComma))
        {
            return comicInfoTagSeparatedByComma.Split(",").Select(s => s.Trim()).ToList();
        }
        return ImmutableList<string>.Empty;
    }
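
    // Example (added commentary, not in the original source): GetTagValues("Action, Sci-Fi , Drama")
    // returns ["Action", "Sci-Fi", "Drama"]; a null or empty string yields an empty list.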
#nullable disable

    /// <summary>
    /// Given the list of all existing people, this checks the new names for the given role and, if a name doesn't exist in the
    /// cached people list, creates and adds an entry. The callback is executed for every name.
    /// </summary>
    /// <remarks>This does not remove people if an empty list is passed into names</remarks>
    /// <remarks>This is used to add new people to a list without worrying about duplicating rows in the DB</remarks>
    /// <param name="names"></param>
    /// <param name="role"></param>
    /// <param name="action"></param>
    private void UpdatePeople(IEnumerable<string> names, PersonRole role, Action<Person> action)
    {
        var allPeopleTypeRole = _people.Where(p => p.Role == role).ToList();

        foreach (var name in names)
        {
            var normalizedName = Parser.Parser.Normalize(name);
            var person = allPeopleTypeRole.FirstOrDefault(p =>
                p.NormalizedName.Equals(normalizedName));
            if (person == null)
            {
                person = DbFactory.Person(name, role);
                lock (_people)
                {
                    _people.Add(person);
                }
            }

            action(person);
        }
    }

    /// <summary>
    /// Creates and caches any genres from names that don't yet exist, then executes the callback for each genre.
    /// </summary>
    /// <param name="names"></param>
    /// <param name="isExternal"></param>
    /// <param name="action"></param>
    private void UpdateGenre(IEnumerable<string> names, bool isExternal, Action<Genre> action)
    {
        foreach (var name in names)
        {
            if (string.IsNullOrEmpty(name.Trim())) continue;

            var normalizedName = Parser.Parser.Normalize(name);
            var genre = _genres.FirstOrDefault(p =>
                p.NormalizedTitle.Equals(normalizedName) && p.ExternalTag == isExternal);
            if (genre == null)
            {
                genre = DbFactory.Genre(name, false);
                lock (_genres)
                {
                    _genres.Add(genre);
                }
            }

            action(genre);
        }
    }

    /// <summary>
    /// Creates and caches any tags from names that don't yet exist, then executes the callback for each tag.
    /// </summary>
    /// <param name="names"></param>
    /// <param name="isExternal"></param>
    /// <param name="action">Callback for every item. Will give said item back and a bool if item was added</param>
    private void UpdateTag(IEnumerable<string> names, bool isExternal, Action<Tag, bool> action)
    {
        foreach (var name in names)
        {
            if (string.IsNullOrEmpty(name.Trim())) continue;

            var added = false;
            var normalizedName = Parser.Parser.Normalize(name);

            var tag = _tags.FirstOrDefault(p =>
                p.NormalizedTitle.Equals(normalizedName) && p.ExternalTag == isExternal);
            if (tag == null)
            {
                added = true;
                tag = DbFactory.Tag(name, false);
                lock (_tags)
                {
                    _tags.Add(tag);
                }
            }

            action(tag, added);
        }
    }
}