Scan Bugfixes (#177)

* Added way more logging for debugging issue #163.

* Fixed #175

* Removed comments that aren't needed

* Fixed an enumeration issue caused by removing items from a collection while enumerating it (see the sketch below)
Joseph Milazzo 2021-04-16 12:21:16 -05:00 committed by GitHub
parent 09a953be8c
commit 2b99c8abfa
3 changed files with 47 additions and 38 deletions
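
For context on the enumeration fix: the old RemoveMissingSeries walked a lazily evaluated LINQ query over existingSeries while calling existingSeries.Remove(...) inside the loop, and mutating a List<T> while an enumerator over it is live throws InvalidOperationException in .NET. A minimal standalone reproduction of the bug and the Except-based rewrite this commit switches to (plain strings stand in for the real Series entities):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

public static class EnumerationRepro
{
    public static void Main()
    {
        var existing = new List<string> { "A", "B", "C" };
        // Lazily evaluated query over 'existing' (nothing runs until enumeration)
        var missing = existing.Where(s => s != "B");

        // BROKEN: removing from 'existing' while the query enumerates it
        // invalidates the enumerator and throws on the next MoveNext.
        try
        {
            foreach (var s in missing) existing.Remove(s);
        }
        catch (InvalidOperationException e)
        {
            Console.WriteLine(e.Message); // "Collection was modified; ..."
        }

        // FIXED: materialize the query first, then rebuild the collection with
        // Except and derive the removal count from the size difference.
        existing = new List<string> { "A", "B", "C" };
        var removeCount = existing.Count;
        var missingList = existing.Where(s => s != "B").ToList();
        existing = existing.Except(missingList).ToList();
        removeCount -= existing.Count;
        Console.WriteLine($"Removed {removeCount} entries"); // Removed 2 entries
    }
}
```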


@@ -17,7 +17,7 @@ namespace API.Data
         /// <summary>
         /// This will remove any entries that have chapterIds that no longer exists. This will execute the save as well.
         /// </summary>
-        public async Task<bool> CleanupAbandonedChapters()
+        public async Task<int> CleanupAbandonedChapters()
         {
             var chapterIds = _context.Chapter.Select(c => c.Id);
@@ -26,7 +26,7 @@ namespace API.Data
                 .ToListAsync();
             _context.RemoveRange(rowsToRemove);
-            return (await _context.SaveChangesAsync()) > 0;
+            return await _context.SaveChangesAsync() > 0 ? rowsToRemove.Count : 0;
         }
     }
 }
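
The repository change above leans on the fact that EF Core's SaveChangesAsync returns the number of state entries written, which is why the removal count can be gated on a successful save. A rough sketch of the same delete-and-count pattern; the entity and context names here are illustrative, not Kavita's real types:

```csharp
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;

// Minimal stand-ins for the real Chapter/AppUserProgress entities.
public class Chapter { public int Id { get; set; } }
public class Progress { public int Id { get; set; } public int ChapterId { get; set; } }

public class AppDbContext : DbContext
{
    public AppDbContext(DbContextOptions<AppDbContext> options) : base(options) { }
    public DbSet<Chapter> Chapters { get; set; }
    public DbSet<Progress> ProgressRows { get; set; }
}

public static class CleanupSketch
{
    // Delete progress rows whose ChapterId no longer matches any Chapter and
    // report how many rows were removed, mirroring CleanupAbandonedChapters.
    public static async Task<int> CleanupAbandoned(AppDbContext context)
    {
        var chapterIds = context.Chapters.Select(c => c.Id);
        var rowsToRemove = await context.ProgressRows
            .Where(p => !chapterIds.Contains(p.ChapterId))
            .ToListAsync();

        context.RemoveRange(rowsToRemove);
        // SaveChangesAsync returns the number of entries written to the DB.
        return await context.SaveChangesAsync() > 0 ? rowsToRemove.Count : 0;
    }
}
```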


@@ -4,6 +4,6 @@ namespace API.Interfaces
 {
     public interface IAppUserProgressRepository
     {
-        Task<bool> CleanupAbandonedChapters();
+        Task<int> CleanupAbandonedChapters();
     }
 }


@@ -37,8 +37,7 @@ namespace API.Services.Tasks
         }

         [DisableConcurrentExecution(timeoutInSeconds: 360)]
-        //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
         public void ScanLibraries()
         {
             var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
@@ -64,7 +63,6 @@ namespace API.Services.Tasks
         }

         [DisableConcurrentExecution(360)]
-        //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
         public void ScanLibrary(int libraryId, bool forceUpdate)
         {
             var sw = Stopwatch.StartNew();
@@ -144,15 +142,8 @@ namespace API.Services.Tasks
             // Cleanup any user progress that doesn't exist
             var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result;
-            if (cleanedUp)
-            {
-                _logger.LogInformation("Removed all abandoned progress rows");
-            }
-            else
-            {
-                _logger.LogWarning("There are abandoned user progress entities in the DB. In Progress activity stream will be skewed");
-            }
+            _logger.LogInformation("Removed {Count} abandoned progress rows", cleanedUp);

             BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
         }
@@ -167,16 +158,17 @@ namespace API.Services.Tasks
             _logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk", removeCount);

             // Add new series that have parsedInfos
-            foreach (var (key, _) in parsedSeries)
+            foreach (var (key, infos) in parsedSeries)
             {
                 var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key));
                 if (existingSeries == null)
                 {
+                    var name = infos.Count > 0 ? infos[0].Series : key;
                     existingSeries = new Series()
                     {
-                        Name = key,
-                        OriginalName = key,
-                        LocalizedName = key,
+                        Name = name,
+                        OriginalName = name,
+                        LocalizedName = name,
                         NormalizedName = Parser.Parser.Normalize(key),
                         SortName = key,
                         Summary = "",
@@ -193,7 +185,7 @@ namespace API.Services.Tasks
             Parallel.ForEach(librarySeries, (series) =>
             {
                 _logger.LogInformation("Processing series {SeriesName}", series.Name);
-                UpdateVolumes(series, parsedSeries[series.OriginalName].ToArray());
+                UpdateVolumes(series, parsedSeries[Parser.Parser.Normalize(series.OriginalName)].ToArray());
                 series.Pages = series.Volumes.Sum(v => v.Pages);
             });
         }
@@ -201,22 +193,24 @@ namespace API.Services.Tasks
         public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<string, List<ParserInfo>> parsedSeries)
         {
             var foundSeries = parsedSeries.Select(s => s.Key).ToList();
-            var missingSeries = existingSeries.Where(existingSeries => !existingSeries.NameInList(foundSeries)
-                                                                       || !existingSeries.NameInList(parsedSeries.Keys));
+            var missingSeries = existingSeries.Where(es => !es.NameInList(foundSeries)
+                                                           || !es.NameInList(parsedSeries.Keys));
             return missingSeries;
         }

         public int RemoveMissingSeries(ICollection<Series> existingSeries, IEnumerable<Series> missingSeries)
         {
-            var removeCount = 0;
-            //library.Series = library.Series.Except(missingSeries).ToList();
-            if (existingSeries == null || existingSeries.Count == 0) return 0;
-            foreach (var existing in missingSeries)
-            {
-                existingSeries.Remove(existing);
-                removeCount += 1;
-            }
+            var removeCount = existingSeries.Count;
+            var missingList = missingSeries.ToList();
+            existingSeries = existingSeries.Except(missingList).ToList();
+            // if (existingSeries == null || existingSeries.Count == 0) return 0;
+            // foreach (var existing in missingSeries)
+            // {
+            //     existingSeries.Remove(existing);
+            //     removeCount += 1;
+            // }
+            removeCount -= existingSeries.Count;
             return removeCount;
         }
@@ -245,15 +239,31 @@ namespace API.Services.Tasks
                 }

                 // NOTE: I don't think we need this as chapters now handle specials
-                volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial);
+                //volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial);

                 _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
                 UpdateChapters(volume, infos);
                 volume.Pages = volume.Chapters.Sum(c => c.Pages);
             }

+            // BUG: This is causing volumes to be removed when they shouldn't
             // Remove existing volumes that aren't in parsedInfos and volumes that have no chapters
+            var existingVolumeLength = series.Volumes.Count;
+            // var existingVols = series.Volumes;
+            // foreach (var v in existingVols)
+            // {
+            //     // NOTE: I think checking if Chapter count is 0 is enough, we don't need parsedInfos
+            //     if (parsedInfos.All(p => p.Volumes != v.Name)) // || v.Chapters.Count == 0 (this wont work yet because we don't take care of chapters correctly vs parsedInfos)
+            //     {
+            //         _logger.LogDebug("Removed {Series} - {Volume} as there were no chapters", series.Name, v.Name);
+            //         series.Volumes.Remove(v);
+            //     }
+            // }
             series.Volumes = series.Volumes.Where(v => parsedInfos.Any(p => p.Volumes == v.Name)).ToList();
+            if (existingVolumeLength != series.Volumes.Count)
+            {
+                _logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name", (existingVolumeLength - series.Volumes.Count), series.Name);
+            }

             _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
                 series.Name, startingVolumeCount, series.Volumes.Count);
@@ -269,7 +279,6 @@ namespace API.Services.Tasks
                 var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
                 // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
                 // also are treated like specials for UI grouping.
-                _logger.LogDebug("Adding new chapters, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment);
                 // NOTE: If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
                 // vs Historys strongest), this code will break and the duplicate will be skipped.
                 Chapter chapter = null;
@@ -291,6 +300,7 @@ namespace API.Services.Tasks
                 if (chapter == null)
                 {
+                    _logger.LogDebug("Adding new chapter, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment);
                     chapter = new Chapter()
                     {
                         Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty,
@@ -320,14 +330,12 @@ namespace API.Services.Tasks
                 }

                 if (chapter == null) continue;
                 AddOrUpdateFileForChapter(chapter, info);
-                chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
+                chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty;
                 chapter.Range = specialTreatment ? info.Filename : info.Chapters;
                 chapter.Pages = chapter.Files.Sum(f => f.Pages);
             }

-            // TODO: Extract to
             // Remove chapters that aren't in parsedInfos or have no files linked
             var existingChapters = volume.Chapters.ToList();
             foreach (var existingChapter in existingChapters)
@@ -336,8 +344,9 @@ namespace API.Services.Tasks
                 var hasInfo = specialTreatment ? parsedInfos.Any(v => v.Filename == existingChapter.Range)
                                                : parsedInfos.Any(v => v.Chapters == existingChapter.Range);
-                if (!hasInfo || !existingChapter.Files.Any())
+                if (!hasInfo || existingChapter.Files.Count == 0)
                 {
+                    _logger.LogDebug("Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
                     volume.Chapters.Remove(existingChapter);
                 }
                 else
@@ -365,8 +374,8 @@ namespace API.Services.Tasks
             // Check if normalized info.Series already exists and if so, update info to use that name instead
             info.Series = MergeName(_scannedSeries, info);

-            _scannedSeries.AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) =>
+            _scannedSeries.AddOrUpdate(Parser.Parser.Normalize(info.Series), new List<ParserInfo>() {info}, (_, oldValue) =>
             {
                 oldValue ??= new List<ParserInfo>();
                 if (!oldValue.Contains(info))
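
One theme ties the scanner changes together: _scannedSeries and parsedSeries are now keyed by the normalized series name rather than the raw one, so variants such as "History's strongest" and "Historys strongest" (the case called out in the NOTE above) collapse into a single bucket, and every lookup has to normalize its key the same way, as UpdateVolumes now does with series.OriginalName. A small sketch of the idea; this Normalize is an assumption about the behavior (strip non-alphanumerics, lowercase), the real implementation lives in Parser.Parser:

```csharp
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Text.RegularExpressions;

public static class NormalizedKeyDemo
{
    // Assumed normalization: keep only letters and digits, lowercase the rest.
    // Kavita's actual Parser.Parser.Normalize may differ in detail.
    public static string Normalize(string name) =>
        Regex.Replace(name, "[^a-zA-Z0-9]", string.Empty).ToLowerInvariant();

    public static void Main()
    {
        var scannedSeries = new ConcurrentDictionary<string, List<string>>();

        // Both titles normalize to "historysstrongest", so they merge into one
        // entry instead of creating a duplicate series.
        foreach (var title in new[] { "History's strongest", "Historys strongest" })
        {
            scannedSeries.AddOrUpdate(Normalize(title),
                _ => new List<string> { title },
                (_, existing) => { existing.Add(title); return existing; });
        }

        Console.WriteLine(scannedSeries.Count); // 1
    }
}
```

The flip side, visible in the Series-creation hunk, is that a normalized key is no longer a usable display name, which is why the new code pulls the name from infos[0].Series when creating a Series.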