Kavita/API/Services/Tasks/Metadata/WordCountAnalyzerService.cs
Joseph Milazzo 3ab3a10ae7
New PDF Reader (#1324)
* Refactored all the code that opens the reader to use a unified function. Added a new library and set up a basic pdf reader route.

* Progress saving is implemented. Targeting ES6 now.

* Customized the toolbar to remove things we don't want, made the download button download with correct filename. Adjusted zoom setting to work well on first load regardless of device.

* Stream the pdf file to the UI rather than handling the download ourselves.

* Started implementing a custom toolbar.

* Fixed up the jump bar calculations

* Fixed filtering being broken

* Pushing up for Robbie to cleanup the toolbar layout

* Added an additional button. Working on logic while Robbie takes styling

* Tried to fix the code for Robbie

* Tweaks for fonts

* Added a button for book mode, but it doesn't seem to work after the renderer is built

* Removed book mode

* Removed the old image caching code for pdfs as it's not needed with new reader

* Removed the interfaces to extract images from pdf.

* Fixed original pagination area not scaling correctly

* Integrated series remove events to library detail

* Cleaned up the getter naming convention

* Cleaned up some of the manga reader code to reduce clutter and improve re-use

* Implemented Japanese parser support for volumes and chapters.

* Fixed a bug where resetting scroll in manga reader wasn't working

* Fixed a bug where word count grew on each scan.

* Removed unused variable

* Ensure we calculate word count on files with their own cache timestamp

* Adjusted size of reel headers

* Put some code in for moving the original image with the keyboard, but it's not in use.

* Cleaned up the css for the pdf reader

* Cleaned up the code

* Tweaked the list item so we now show a scrollbar when fully read
2022-06-15 14:43:32 -07:00

using System;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
using API.SignalR;
using Hangfire;
using HtmlAgilityPack;
using Microsoft.Extensions.Logging;
using VersOne.Epub;

namespace API.Services.Tasks.Metadata;

public interface IWordCountAnalyzerService
{
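    /// <summary>
    /// Recalculates word counts and hours-to-read estimates for every series in the given library
    /// </summary>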
    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60 * 60)]
    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    Task ScanLibrary(int libraryId, bool forceUpdate = false);
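
    /// <summary>
    /// Recalculates word counts and hours-to-read estimates for a single series
    /// </summary>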
    Task ScanSeries(int libraryId, int seriesId, bool forceUpdate = true);
}

/// <summary>
/// This service is a metadata task that calculates word counts and time-to-read estimates
/// </summary>
public class WordCountAnalyzerService : IWordCountAnalyzerService
{
    private readonly ILogger<WordCountAnalyzerService> _logger;
    private readonly IUnitOfWork _unitOfWork;
    private readonly IEventHub _eventHub;
    private readonly ICacheHelper _cacheHelper;
    private readonly IReaderService _readerService;

    public WordCountAnalyzerService(ILogger<WordCountAnalyzerService> logger, IUnitOfWork unitOfWork, IEventHub eventHub,
        ICacheHelper cacheHelper, IReaderService readerService)
    {
        _logger = logger;
        _unitOfWork = unitOfWork;
        _eventHub = eventHub;
        _cacheHelper = cacheHelper;
        _readerService = readerService;
    }

    public async Task ScanLibrary(int libraryId, bool forceUpdate = false)
    {
        var sw = Stopwatch.StartNew();
        var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);

        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.WordCountAnalyzerProgressEvent(libraryId, 0F, ProgressEventType.Started, string.Empty));

        var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
        var stopwatch = Stopwatch.StartNew();
        _logger.LogInformation("[WordCountAnalyzerService] Refreshing Library {LibraryName}. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize);

        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.WordCountAnalyzerProgressEvent(library.Id, 0F, ProgressEventType.Started, $"Starting {library.Name}"));

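        // Series are processed one chunk (page) at a time rather than loading the whole library at once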
        for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
        {
            if (chunkInfo.TotalChunks == 0) continue;
            stopwatch.Restart();

            _logger.LogInformation("[WordCountAnalyzerService] Processing chunk {ChunkNumber} / {TotalChunks} with size {ChunkSize}. Series ({SeriesStart} - {SeriesEnd})",
                chunk, chunkInfo.TotalChunks, chunkInfo.ChunkSize, chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize);

            var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
                new UserParams()
                {
                    PageNumber = chunk,
                    PageSize = chunkInfo.ChunkSize
                });
            _logger.LogDebug("[WordCountAnalyzerService] Fetched {SeriesCount} series for refresh", nonLibrarySeries.Count);

            var seriesIndex = 0;
            foreach (var series in nonLibrarySeries)
            {
                var index = chunk * seriesIndex;
                var progress = Math.Max(0F, Math.Min(1F, index * 1F / chunkInfo.TotalSize));
                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                    MessageFactory.WordCountAnalyzerProgressEvent(library.Id, progress, ProgressEventType.Updated, series.Name));

                try
                {
                    await ProcessSeries(series, forceUpdate, false);
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "[WordCountAnalyzerService] There was an exception during metadata refresh for {SeriesName}", series.Name);
                }
                seriesIndex++;
            }

            if (_unitOfWork.HasChanges())
            {
                await _unitOfWork.CommitAsync();
            }

            _logger.LogInformation(
                "[WordCountAnalyzerService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
                chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
        }

        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.WordCountAnalyzerProgressEvent(library.Id, 1F, ProgressEventType.Ended, "Complete"));

        _logger.LogInformation("[WordCountAnalyzerService] Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds);
    }

    public async Task ScanSeries(int libraryId, int seriesId, bool forceUpdate = true)
    {
        var sw = Stopwatch.StartNew();
        var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
        if (series == null)
        {
            _logger.LogError("[WordCountAnalyzerService] Series {SeriesId} was not found on Library {LibraryId}", seriesId, libraryId);
            return;
        }

        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.WordCountAnalyzerProgressEvent(libraryId, 0F, ProgressEventType.Started, series.Name));

        await ProcessSeries(series, forceUpdate);

        if (_unitOfWork.HasChanges())
        {
            await _unitOfWork.CommitAsync();
        }

        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.WordCountAnalyzerProgressEvent(libraryId, 1F, ProgressEventType.Ended, series.Name));

        _logger.LogInformation("[WordCountAnalyzerService] Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
    }

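    /// <summary>
    /// Recalculates the word count for each chapter, rolls the totals up to the volume and series,
    /// and refreshes the min/max/avg hours-to-read estimates at every level
    /// </summary>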
    private async Task ProcessSeries(Series series, bool forceUpdate = false, bool useFileName = true)
    {
        var isEpub = series.Format == MangaFormat.Epub;
        series.WordCount = 0;
        foreach (var volume in series.Volumes)
        {
            volume.WordCount = 0;
            foreach (var chapter in volume.Chapters)
            {
                // Only recalculate when the file has changed since the last analysis (or a force update was requested)
                var firstFile = chapter.Files.FirstOrDefault();
                if (firstFile == null) return;
                if (!_cacheHelper.HasFileChangedSinceLastScan(firstFile.LastFileAnalysis, forceUpdate,
                        firstFile))
                    continue;

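                // Word counting only applies to epub-format series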
                if (series.Format == MangaFormat.Epub)
                {
                    long sum = 0;
                    var fileCounter = 1;
                    foreach (var file in chapter.Files)
                    {
                        var filePath = file.FilePath;
                        var pageCounter = 1;
                        try
                        {
                            using var book = await EpubReader.OpenBookAsync(filePath, BookService.BookReaderOptions);

                            var totalPages = book.Content.Html.Values;
                            foreach (var bookPage in totalPages)
                            {
                                var progress = Math.Max(0F,
                                    Math.Min(1F, (fileCounter * pageCounter) * 1F / (chapter.Files.Count * totalPages.Count)));

                                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                                    MessageFactory.WordCountAnalyzerProgressEvent(series.LibraryId, progress,
                                        ProgressEventType.Updated, useFileName ? filePath : series.Name));

                                sum += await GetWordCountFromHtml(bookPage);
                                pageCounter++;
                            }

                            fileCounter++;
                        }
                        catch (Exception ex)
                        {
                            _logger.LogError(ex, "There was an error reading an epub file for word count, series skipped");
                            await _eventHub.SendMessageAsync(MessageFactory.Error,
                                MessageFactory.ErrorEvent("There was an issue counting words on an epub",
                                    $"{series.Name} - {file}"));
                            return;
                        }

                        file.LastFileAnalysis = DateTime.Now;
                        _unitOfWork.MangaFileRepository.Update(file);
                    }

                    chapter.WordCount = sum;
                    series.WordCount += sum;
                    volume.WordCount += sum;
                }

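                // Convert the accumulated word count into min/avg/max hours-to-read estimates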
                var est = _readerService.GetTimeEstimate(chapter.WordCount, chapter.Pages, isEpub);
                chapter.MinHoursToRead = est.MinHours;
                chapter.MaxHoursToRead = est.MaxHours;
                chapter.AvgHoursToRead = est.AvgHours;
                _unitOfWork.ChapterRepository.Update(chapter);
            }

            var volumeEst = _readerService.GetTimeEstimate(volume.WordCount, volume.Pages, isEpub);
            volume.MinHoursToRead = volumeEst.MinHours;
            volume.MaxHoursToRead = volumeEst.MaxHours;
            volume.AvgHoursToRead = volumeEst.AvgHours;
            _unitOfWork.VolumeRepository.Update(volume);
        }

        var seriesEstimate = _readerService.GetTimeEstimate(series.WordCount, series.Pages, isEpub);
        series.MinHoursToRead = seriesEstimate.MinHours;
        series.MaxHoursToRead = seriesEstimate.MaxHours;
        series.AvgHoursToRead = seriesEstimate.AvgHours;
        _unitOfWork.SeriesRepository.Update(series);
    }

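    /// <summary>
    /// Counts the words in a single epub HTML page by summing whitespace-separated tokens that start with a letter,
    /// skipping any text inside script tags
    /// </summary>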
    private static async Task<int> GetWordCountFromHtml(EpubContentFileRef bookFile)
    {
        var doc = new HtmlDocument();
        doc.LoadHtml(await bookFile.ReadContentAsTextAsync());

        var textNodes = doc.DocumentNode.SelectNodes("//body//text()[not(parent::script)]");
        if (textNodes == null) return 0;

        return textNodes
            .Select(node => node.InnerText.Split(' ', StringSplitOptions.RemoveEmptyEntries)
                .Where(s => char.IsLetter(s[0])))
            .Select(words => words.Count())
            .Where(wordCount => wordCount > 0)
            .Sum();
    }
}