From 2b99c8abfa7944f4073e56116962452021907ef6 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 16 Apr 2021 12:21:16 -0500 Subject: [PATCH] Scan Bugfixes (#177) * Added way more logging for debugging issue #163. Fixed #175 * Removed some comment that isn't needed * Fixed an enumeration issue due to removing while enumerating --- API/Data/AppUserProgressRepository.cs | 4 +- API/Interfaces/IAppUserProgressRepository.cs | 2 +- API/Services/Tasks/ScannerService.cs | 79 +++++++++++--------- 3 files changed, 47 insertions(+), 38 deletions(-) diff --git a/API/Data/AppUserProgressRepository.cs b/API/Data/AppUserProgressRepository.cs index c39430e61..a65ab2c48 100644 --- a/API/Data/AppUserProgressRepository.cs +++ b/API/Data/AppUserProgressRepository.cs @@ -17,7 +17,7 @@ namespace API.Data /// /// This will remove any entries that have chapterIds that no longer exists. This will execute the save as well. /// - public async Task CleanupAbandonedChapters() + public async Task CleanupAbandonedChapters() { var chapterIds = _context.Chapter.Select(c => c.Id); @@ -26,7 +26,7 @@ namespace API.Data .ToListAsync(); _context.RemoveRange(rowsToRemove); - return (await _context.SaveChangesAsync()) > 0; + return await _context.SaveChangesAsync() > 0 ? 
rowsToRemove.Count : 0; } } } \ No newline at end of file diff --git a/API/Interfaces/IAppUserProgressRepository.cs b/API/Interfaces/IAppUserProgressRepository.cs index 62a252661..a268ac5f5 100644 --- a/API/Interfaces/IAppUserProgressRepository.cs +++ b/API/Interfaces/IAppUserProgressRepository.cs @@ -4,6 +4,6 @@ namespace API.Interfaces { public interface IAppUserProgressRepository { - Task CleanupAbandonedChapters(); + Task CleanupAbandonedChapters(); } } \ No newline at end of file diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 16a7679e7..bc812bf00 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -37,8 +37,7 @@ namespace API.Services.Tasks } - [DisableConcurrentExecution(timeoutInSeconds: 360)] - //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] + [DisableConcurrentExecution(timeoutInSeconds: 360)] public void ScanLibraries() { var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList(); @@ -64,7 +63,6 @@ namespace API.Services.Tasks } [DisableConcurrentExecution(360)] - //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public void ScanLibrary(int libraryId, bool forceUpdate) { var sw = Stopwatch.StartNew(); @@ -144,15 +142,8 @@ namespace API.Services.Tasks // Cleanup any user progress that doesn't exist var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result; - if (cleanedUp) - { - _logger.LogInformation("Removed all abandoned progress rows"); - } - else - { - _logger.LogWarning("There are abandoned user progress entities in the DB. 
In Progress activity stream will be skewed"); - } - + _logger.LogInformation("Removed {Count} abandoned progress rows", cleanedUp); + BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); } @@ -167,16 +158,17 @@ namespace API.Services.Tasks _logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk", removeCount); // Add new series that have parsedInfos - foreach (var (key, _) in parsedSeries) + foreach (var (key, infos) in parsedSeries) { var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key)); if (existingSeries == null) { + var name = infos.Count > 0 ? infos[0].Series : key; existingSeries = new Series() { - Name = key, - OriginalName = key, - LocalizedName = key, + Name = name, + OriginalName = name, + LocalizedName = name, NormalizedName = Parser.Parser.Normalize(key), SortName = key, Summary = "", @@ -193,7 +185,7 @@ namespace API.Services.Tasks Parallel.ForEach(librarySeries, (series) => { _logger.LogInformation("Processing series {SeriesName}", series.Name); - UpdateVolumes(series, parsedSeries[series.OriginalName].ToArray()); + UpdateVolumes(series, parsedSeries[Parser.Parser.Normalize(series.OriginalName)].ToArray()); series.Pages = series.Volumes.Sum(v => v.Pages); }); } @@ -201,22 +193,24 @@ namespace API.Services.Tasks public IEnumerable FindSeriesNotOnDisk(ICollection existingSeries, Dictionary> parsedSeries) { var foundSeries = parsedSeries.Select(s => s.Key).ToList(); - var missingSeries = existingSeries.Where(existingSeries => !existingSeries.NameInList(foundSeries) - || !existingSeries.NameInList(parsedSeries.Keys)); + var missingSeries = existingSeries.Where(es => !es.NameInList(foundSeries) + || !es.NameInList(parsedSeries.Keys)); return missingSeries; } public int RemoveMissingSeries(ICollection existingSeries, IEnumerable missingSeries) { - var removeCount = 0; - //library.Series = library.Series.Except(missingSeries).ToList(); - if 
(existingSeries == null || existingSeries.Count == 0) return 0; - foreach (var existing in missingSeries) - { - existingSeries.Remove(existing); - removeCount += 1; - } + var removeCount = existingSeries.Count; + var missingList = missingSeries.ToList(); + existingSeries = existingSeries.Except(missingList).ToList(); + // if (existingSeries == null || existingSeries.Count == 0) return 0; + // foreach (var existing in missingSeries) + // { + // existingSeries.Remove(existing); + // removeCount += 1; + // } + removeCount -= existingSeries.Count; return removeCount; } @@ -245,15 +239,31 @@ namespace API.Services.Tasks } // NOTE: I don't think we need this as chapters now handle specials - volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial); + //volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial); _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); UpdateChapters(volume, infos); volume.Pages = volume.Chapters.Sum(c => c.Pages); } + // BUG: This is causing volumes to be removed when they shouldn't // Remove existing volumes that aren't in parsedInfos and volumes that have no chapters + var existingVolumeLength = series.Volumes.Count; + // var existingVols = series.Volumes; + // foreach (var v in existingVols) + // { + // // NOTE: I think checking if Chapter count is 0 is enough, we don't need parsedInfos + // if (parsedInfos.All(p => p.Volumes != v.Name)) // || v.Chapters.Count == 0 (this wont work yet because we don't take care of chapters correctly vs parsedInfos) + // { + // _logger.LogDebug("Removed {Series} - {Volume} as there were no chapters", series.Name, v.Name); + // series.Volumes.Remove(v); + // } + // } series.Volumes = series.Volumes.Where(v => parsedInfos.Any(p => p.Volumes == v.Name)).ToList(); + if (existingVolumeLength != series.Volumes.Count) + { + _logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not 
mapping with volume name", (existingVolumeLength - series.Volumes.Count), series.Name); + } _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}", series.Name, startingVolumeCount, series.Volumes.Count); @@ -269,7 +279,6 @@ namespace API.Services.Tasks var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0")); // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0 // also are treated like specials for UI grouping. - _logger.LogDebug("Adding new chapters, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment); // NOTE: If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest // vs Historys strongest), this code will break and the duplicate will be skipped. Chapter chapter = null; @@ -291,6 +300,7 @@ namespace API.Services.Tasks if (chapter == null) { + _logger.LogDebug("Adding new chapter, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment); chapter = new Chapter() { Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty, @@ -320,14 +330,12 @@ namespace API.Services.Tasks } if (chapter == null) continue; AddOrUpdateFileForChapter(chapter, info); - chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + ""; + chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty; chapter.Range = specialTreatment ? 
info.Filename : info.Chapters; chapter.Pages = chapter.Files.Sum(f => f.Pages); } - - // TODO: Extract to // Remove chapters that aren't in parsedInfos or have no files linked var existingChapters = volume.Chapters.ToList(); foreach (var existingChapter in existingChapters) @@ -336,8 +344,9 @@ namespace API.Services.Tasks var hasInfo = specialTreatment ? parsedInfos.Any(v => v.Filename == existingChapter.Range) : parsedInfos.Any(v => v.Chapters == existingChapter.Range); - if (!hasInfo || !existingChapter.Files.Any()) + if (!hasInfo || existingChapter.Files.Count == 0) { + _logger.LogDebug("Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series); volume.Chapters.Remove(existingChapter); } else @@ -365,8 +374,8 @@ namespace API.Services.Tasks // Check if normalized info.Series already exists and if so, update info to use that name instead info.Series = MergeName(_scannedSeries, info); - - _scannedSeries.AddOrUpdate(info.Series, new List() {info}, (_, oldValue) => + + _scannedSeries.AddOrUpdate(Parser.Parser.Normalize(info.Series), new List() {info}, (_, oldValue) => { oldValue ??= new List(); if (!oldValue.Contains(info))