Book Feedback and small bugs (#183)

* Remove automatic retry for scanLibraries — if something fails, it won't magically succeed on retry. Catch exceptions when opening books for parsing and swallow them so the file is skipped.

* Delete extra attempts

* Switched to using FirstOrDefault for finding existing series. This will help avoid pointless crashes.

* Updated message when duplicate series are found (not sure how this happens)

* Fixed a negation for deleting volumes where files still exist.

* Implemented the ability to automatically scale the manga reader based on screen size.
This commit is contained in:
Joseph Milazzo 2021-04-29 17:52:34 -05:00 committed by GitHub
parent a01613f80f
commit 35a47f5d88
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 65 additions and 23 deletions

View File

@ -89,7 +89,7 @@ namespace API.Controllers
var user = _mapper.Map<AppUser>(registerDto); var user = _mapper.Map<AppUser>(registerDto);
user.UserPreferences ??= new AppUserPreferences(); user.UserPreferences ??= new AppUserPreferences();
var result = await _userManager.CreateAsync(user, registerDto.Password); var result = await _userManager.CreateAsync(user, registerDto.Password);
if (!result.Succeeded) return BadRequest(result.Errors); if (!result.Succeeded) return BadRequest(result.Errors);

View File

@ -32,6 +32,8 @@ namespace API.Controllers
await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams); await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams);
// Apply progress/rating information (I can't work out how to do this in initial query) // Apply progress/rating information (I can't work out how to do this in initial query)
if (series == null) return BadRequest("Could not get series for library");
await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series); await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series);
Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages);

View File

@ -23,5 +23,6 @@
public string UserReview { get; set; } public string UserReview { get; set; }
public int LibraryId { get; set; } public int LibraryId { get; set; }
public string LibraryName { get; set; }
} }
} }

View File

@ -307,7 +307,7 @@ namespace API.Data
} }
/// <summary> /// <summary>
/// Returns Series that the user /// Returns Series that the user has some partial progress on
/// </summary> /// </summary>
/// <param name="userId"></param> /// <param name="userId"></param>
/// <param name="libraryId"></param> /// <param name="libraryId"></param>
@ -327,8 +327,8 @@ namespace API.Data
&& s.PagesRead > 0 && s.PagesRead > 0
&& s.PagesRead < s.Series.Pages && s.PagesRead < s.Series.Pages
&& (libraryId <= 0 || s.Series.LibraryId == libraryId)) && (libraryId <= 0 || s.Series.LibraryId == libraryId))
.OrderByDescending(s => s.LastModified)
.Take(limit) .Take(limit)
.OrderByDescending(s => s.LastModified)
.Select(s => s.Series) .Select(s => s.Series)
.ProjectTo<SeriesDto>(_mapper.ConfigurationProvider) .ProjectTo<SeriesDto>(_mapper.ConfigurationProvider)
.AsNoTracking() .AsNoTracking()

View File

@ -4,6 +4,7 @@
{ {
FitToHeight = 0, FitToHeight = 0,
FitToWidth = 1, FitToWidth = 1,
Original = 2 Original = 2,
Automatic = 3
} }
} }

View File

@ -1,5 +1,6 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Parser;
using VersOne.Epub; using VersOne.Epub;
namespace API.Interfaces namespace API.Interfaces
@ -17,5 +18,6 @@ namespace API.Interfaces
/// <returns></returns> /// <returns></returns>
Task<string> ScopeStyles(string stylesheetHtml, string apiBase); Task<string> ScopeStyles(string stylesheetHtml, string apiBase);
string GetSummaryInfo(string filePath); string GetSummaryInfo(string filePath);
ParserInfo ParseInfo(string filePath);
} }
} }

View File

@ -187,22 +187,37 @@ namespace API.Services
return dict; return dict;
} }
public static ParserInfo ParseInfo(string filePath) /// <summary>
/// Parses out Title from book. Chapters and Volumes will always be "0". If there is any exception reading book (malformed books)
/// then null is returned.
/// </summary>
/// <param name="filePath"></param>
/// <returns></returns>
public ParserInfo ParseInfo(string filePath)
{ {
var epubBook = EpubReader.OpenBook(filePath); try
return new ParserInfo()
{ {
Chapters = "0", var epubBook = EpubReader.OpenBook(filePath);
Edition = "",
Format = MangaFormat.Book, return new ParserInfo()
Filename = Path.GetFileName(filePath), {
Title = epubBook.Title, Chapters = "0",
FullFilePath = filePath, Edition = "",
IsSpecial = false, Format = MangaFormat.Book,
Series = epubBook.Title, Filename = Path.GetFileName(filePath),
Volumes = "0" Title = epubBook.Title,
}; FullFilePath = filePath,
IsSpecial = false,
Series = epubBook.Title,
Volumes = "0"
};
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when opening epub book: {FileName}", filePath);
}
return null;
} }
public byte[] GetCoverImage(string fileFilePath, bool createThumbnail = true) public byte[] GetCoverImage(string fileFilePath, bool createThumbnail = true)

View File

@ -42,12 +42,12 @@ namespace API.Services.Tasks
[DisableConcurrentExecution(timeoutInSeconds: 360)] [DisableConcurrentExecution(timeoutInSeconds: 360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibraries() public void ScanLibraries()
{ {
var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList(); var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
foreach (var lib in libraries) foreach (var lib in libraries)
{ {
// BUG?: I think we need to keep _scannedSeries within the ScanLibrary instance since this is multithreaded.
ScanLibrary(lib.Id, false); ScanLibrary(lib.Id, false);
} }
} }
@ -68,6 +68,7 @@ namespace API.Services.Tasks
} }
[DisableConcurrentExecution(360)] [DisableConcurrentExecution(360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibrary(int libraryId, bool forceUpdate) public void ScanLibrary(int libraryId, bool forceUpdate)
{ {
var sw = Stopwatch.StartNew(); var sw = Stopwatch.StartNew();
@ -203,7 +204,27 @@ namespace API.Services.Tasks
foreach (var (key, infos) in parsedSeries) foreach (var (key, infos) in parsedSeries)
{ {
// Key is normalized already // Key is normalized already
var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key); Series existingSeries = null;
try
{
existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key);
}
catch (Exception e)
{
_logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it", key);
var duplicateSeries = library.Series.Where(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key).ToList();
//var firstSeries = duplicateSeries.First();
//duplicateSeries.
foreach (var series in duplicateSeries)
{
_logger.LogCritical("{Key} maps with {Series}", key, series.OriginalName);
}
// Merge them together?
//_unitOfWork.AppUserProgressRepository.MapSeriesProgressFromTo(firstSeries.Id, );
continue;
}
if (existingSeries == null) if (existingSeries == null)
{ {
existingSeries = DbFactory.Series(infos[0].Series); existingSeries = DbFactory.Series(infos[0].Series);
@ -292,7 +313,7 @@ namespace API.Services.Tasks
foreach (var volume in deletedVolumes) foreach (var volume in deletedVolumes)
{ {
var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files"; var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files";
if (!new FileInfo(file).Exists) if (new FileInfo(file).Exists)
{ {
_logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file); _logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file);
} }
@ -435,7 +456,7 @@ namespace API.Services.Tasks
if (type == LibraryType.Book && Parser.Parser.IsEpub(path)) if (type == LibraryType.Book && Parser.Parser.IsEpub(path))
{ {
info = BookService.ParseInfo(path); info = _bookService.ParseInfo(path);
} }
else else
{ {
@ -451,7 +472,7 @@ namespace API.Services.Tasks
if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0") if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0")
{ {
info = Parser.Parser.Parse(path, rootPath, type); info = Parser.Parser.Parse(path, rootPath, type);
var info2 = BookService.ParseInfo(path); var info2 = _bookService.ParseInfo(path);
info.Merge(info2); info.Merge(info2);
} }