More Scan Loop Bugfixes (#1471)

* Updated scan time for watcher to 30 seconds for non-dev. Moved ScanFolder off the Scan queue as it doesn't need to be there. Updated loggers

* Fixed the jumpbar not rendering when it should

* Tweaked the messaging for CoverGen

* When we return early due to nothing being done on library and series scan, make sure we kick off other tasks that need to occur.

* Fixed a foreign key constraint issue on Volumes that occurred when we were adding volumes to a newly created series.

* Fixed a case where, when picking a normalized series name, capitalization differences prevented series from stacking when they should have.

* Reduced the logging output on dev and prod settings.

* Fixed a bug in the code that finds the highest directory from a file, where we were not checking against a normalized path.

* Cleaned up some code

* Fixed broken unit tests
This commit is contained in:
Joseph Milazzo 2022-08-24 11:27:32 -05:00 committed by GitHub
parent fc0121e7a8
commit 1e535a8184
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 77 additions and 62 deletions

1
.gitignore vendored
View File

@ -485,7 +485,6 @@ Thumbs.db
ssl/
# App specific
appsettings.json
/API/kavita.db
/API/kavita.db-shm
/API/kavita.db-wal

View File

@ -678,6 +678,7 @@ namespace API.Tests.Services
[InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"}, new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"}, "C:/Manga/Dir 1/Love Hina")]
[InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"}, new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"}, "")]
[InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga//Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")]
[InlineData(new [] {@"C:\mount\drive\Library\Test Library\Comics\"}, new [] {@"C:\mount\drive\Library\Test Library\Comics\Bruce Lee (1994)\Bruce Lee #001 (1994).cbz"}, @"C:/mount/drive/Library/Test Library/Comics/Bruce Lee (1994)")]
public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
{
var fileSystem = new MockFileSystem();

View File

@ -1,4 +1,5 @@
using System.Threading.Tasks;
using System;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.Entities;
using AutoMapper;
@ -26,7 +27,6 @@ public interface IUnitOfWork
bool Commit();
Task<bool> CommitAsync();
bool HasChanges();
bool Rollback();
Task<bool> RollbackAsync();
}
public class UnitOfWork : IUnitOfWork
@ -93,16 +93,15 @@ public class UnitOfWork : IUnitOfWork
/// <returns></returns>
public async Task<bool> RollbackAsync()
{
await _context.DisposeAsync();
return true;
}
/// <summary>
/// Rollback transaction
/// </summary>
/// <returns></returns>
public bool Rollback()
try
{
_context.Dispose();
await _context.Database.RollbackTransactionAsync();
}
catch (Exception)
{
// Swallow exception (this might be used in places where a transaction isn't setup)
}
return true;
}
}

View File

@ -508,10 +508,10 @@ namespace API.Services
break;
}
var fullPath = Path.Join(folder, parts.Last());
var fullPath = Parser.Parser.NormalizePath(Path.Join(folder, parts.Last()));
if (!dirs.ContainsKey(fullPath))
{
dirs.Add(Parser.Parser.NormalizePath(fullPath), string.Empty);
dirs.Add(fullPath, string.Empty);
}
}
}

View File

@ -161,7 +161,7 @@ public class MetadataService : IMetadataService
/// <param name="forceUpdate"></param>
private async Task ProcessSeriesCoverGen(Series series, bool forceUpdate)
{
_logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
_logger.LogDebug("[MetadataService] Generating cover images for series: {SeriesName}", series.OriginalName);
try
{
var volumeIndex = 0;

View File

@ -78,7 +78,7 @@ public class LibraryWatcher : ILibraryWatcher
_logger = logger;
_scannerService = scannerService;
_queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(10) : TimeSpan.FromMinutes(5);
_queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(10) : TimeSpan.FromSeconds(30);
}
@ -142,18 +142,18 @@ public class LibraryWatcher : ILibraryWatcher
private void OnChanged(object sender, FileSystemEventArgs e)
{
if (e.ChangeType != WatcherChangeTypes.Changed) return;
Console.WriteLine($"Changed: {e.FullPath}, {e.Name}");
_logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name);
ProcessChange(e.FullPath);
}
private void OnCreated(object sender, FileSystemEventArgs e)
{
Console.WriteLine($"Created: {e.FullPath}, {e.Name}");
_logger.LogDebug("[LibraryWatcher] Created: {FullPath}, {Name}", e.FullPath, e.Name);
ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name));
}
private void OnDeleted(object sender, FileSystemEventArgs e) {
Console.WriteLine($"Deleted: {e.FullPath}, {e.Name}");
_logger.LogDebug("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name);
// On deletion, we need another type of check. We need to check if e.Name has an extension or not
// NOTE: File deletion will trigger a folder change event, so this might not be needed
@ -164,9 +164,9 @@ public class LibraryWatcher : ILibraryWatcher
private void OnRenamed(object sender, RenamedEventArgs e)
{
Console.WriteLine($"Renamed:");
Console.WriteLine($" Old: {e.OldFullPath}");
Console.WriteLine($" New: {e.FullPath}");
_logger.LogDebug($"[LibraryWatcher] Renamed:");
_logger.LogDebug(" Old: {OldFullPath}", e.OldFullPath);
_logger.LogDebug(" New: {FullPath}", e.FullPath);
ProcessChange(e.FullPath, _directoryService.FileSystem.Directory.Exists(e.FullPath));
}
@ -179,14 +179,6 @@ public class LibraryWatcher : ILibraryWatcher
{
// We need to check if directory or not
if (!isDirectoryChange && !new Regex(Parser.Parser.SupportedExtensions).IsMatch(new FileInfo(filePath).Extension)) return;
// Don't do anything if a Library or ScanSeries in progress
// if (TaskScheduler.RunningAnyTasksByMethod(new[] {"MetadataService", "ScannerService"}))
// {
// // NOTE: I'm not sure we need this to be honest. Now with the speed of the new loop and the queue, we should just put in queue for processing
// _logger.LogDebug("Suppressing Change due to scan being inprogress");
// return;
// }
var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
if (string.IsNullOrEmpty(parentDirectory)) return;
@ -206,13 +198,11 @@ public class LibraryWatcher : ILibraryWatcher
FolderPath = fullPath,
QueueTime = DateTime.Now
};
if (_scanQueue.Contains(queueItem, _folderScanQueueableComparer))
if (!_scanQueue.Contains(queueItem, _folderScanQueueableComparer))
{
ProcessQueue();
return;
}
_logger.LogDebug("[LibraryWatcher] Queuing job for {Folder}", fullPath);
_scanQueue.Enqueue(queueItem);
}
ProcessQueue();
}
@ -228,7 +218,7 @@ public class LibraryWatcher : ILibraryWatcher
var item = _scanQueue.Peek();
if (item.QueueTime < DateTime.Now.Subtract(_queueWaitTime))
{
_logger.LogDebug("Scheduling ScanSeriesFolder for {Folder}", item.FolderPath);
_logger.LogDebug("[LibraryWatcher] Scheduling ScanSeriesFolder for {Folder}", item.FolderPath);
BackgroundJob.Enqueue(() => _scannerService.ScanFolder(item.FolderPath));
_scanQueue.Dequeue();
i++;

View File

@ -320,7 +320,10 @@ namespace API.Services.Tasks.Scanner
// NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves.
string nonLocalizedSeries;
var nonLocalizedSeriesFound = infos.Where(i => !i.IsSpecial).Select(i => i.Series).Distinct().ToList();
// Normalize this as many of the cases is a capitalization difference
var nonLocalizedSeriesFound = infos
.Where(i => !i.IsSpecial)
.Select(i => i.Series).DistinctBy(Parser.Parser.Normalize).ToList();
if (nonLocalizedSeriesFound.Count == 1)
{
nonLocalizedSeries = nonLocalizedSeriesFound.First();
@ -330,7 +333,7 @@ namespace API.Services.Tasks.Scanner
// There can be a case where there are multiple series in a folder that causes merging.
if (nonLocalizedSeriesFound.Count > 2)
{
_logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder: {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
_logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder or ensure there is only 2 potential series (localized and series): {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
}
nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries));
}

View File

@ -183,7 +183,7 @@ public class ProcessSeries : IProcessSeries
}
_logger.LogInformation("[ScannerService] Finished series update on {SeriesName} in {Milliseconds} ms", seriesName, scanWatch.ElapsedMilliseconds);
EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id, false);
EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id);
}
private async Task UpdateSeriesFolderPath(IEnumerable<ParserInfo> parsedInfos, Library library, Series series)
@ -431,7 +431,6 @@ public class ProcessSeries : IProcessSeries
volume = DbFactory.Volume(volumeNumber);
volume.SeriesId = series.Id;
series.Volumes.Add(volume);
_unitOfWork.VolumeRepository.Add(volume);
}
volume.Name = volumeNumber;

View File

@ -1,10 +1,8 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
@ -41,9 +39,6 @@ public interface IScannerService
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ScanSeries(int seriesId, bool bypassFolderOptimizationChecks = true);
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ScanFolder(string folder);
}
@ -81,11 +76,12 @@ public class ScannerService : IScannerService
private readonly IDirectoryService _directoryService;
private readonly IReadingItemService _readingItemService;
private readonly IProcessSeries _processSeries;
private readonly IWordCountAnalyzerService _wordCountAnalyzerService;
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger,
IMetadataService metadataService, ICacheService cacheService, IEventHub eventHub,
IDirectoryService directoryService, IReadingItemService readingItemService,
IProcessSeries processSeries)
IProcessSeries processSeries, IWordCountAnalyzerService wordCountAnalyzerService)
{
_unitOfWork = unitOfWork;
_logger = logger;
@ -95,9 +91,9 @@ public class ScannerService : IScannerService
_directoryService = directoryService;
_readingItemService = readingItemService;
_processSeries = processSeries;
_wordCountAnalyzerService = wordCountAnalyzerService;
}
[Queue(TaskScheduler.ScanQueue)]
public async Task ScanFolder(string folder)
{
var seriesId = await _unitOfWork.SeriesRepository.GetSeriesIdByFolder(folder);
@ -138,7 +134,12 @@ public class ScannerService : IScannerService
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders);
var libraryPaths = library.Folders.Select(f => f.Path).ToList();
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel) return;
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel)
{
BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForSeries(series.LibraryId, seriesId, false));
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(library.Id, seriesId, false));
return;
}
var folderPath = series.FolderPath;
if (string.IsNullOrEmpty(folderPath) || !_directoryService.Exists(folderPath))
@ -420,6 +421,9 @@ public class ScannerService : IScannerService
await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"{library.Name} scan has no work to do",
"All folders have not been changed since last scan. Scan will be aborted."));
BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForLibrary(library.Id, false));
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(library.Id, false));
return;
}
}

View File

@ -6,10 +6,10 @@
"Logging": {
"LogLevel": {
"Default": "Debug",
"Microsoft": "Information",
"Microsoft": "Error",
"Microsoft.Hosting.Lifetime": "Error",
"Hangfire": "Information",
"Microsoft.AspNetCore.Hosting.Internal.WebHost": "Information"
"Hangfire": "Error",
"Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error"
},
"File": {
"Path": "config//logs/kavita.log",

View File

@ -0,0 +1,22 @@
{
"ConnectionStrings": {
"DefaultConnection": "Data source=config/kavita.db"
},
"TokenKey": "super secret unguessable key",
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft": "Error",
"Microsoft.Hosting.Lifetime": "Error",
"Hangfire": "Error",
"Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error"
},
"File": {
"Path": "config/logs/kavita.log",
"Append": "True",
"FileSizeLimitBytes": 10485760,
"MaxRollingFiles": 1
}
},
"Port": 5000
}

View File

@ -31,7 +31,7 @@ export class JumpbarService {
const jumpBarKeysToRender: Array<JumpKey> = [];
const targetNumberOfKeys = parseInt(Math.floor(currentSize / keySize) + '', 10);
const removeCount = jumpBarKeys.length - targetNumberOfKeys - 3;
if (removeCount <= 0) return jumpBarKeysToRender;
if (removeCount <= 0) return [...jumpBarKeys];
const removalTimes = Math.ceil(removeCount / 2);
const midPoint = Math.floor(jumpBarKeys.length / 2);

View File

@ -13,13 +13,13 @@
</div>
</div>
<app-metadata-filter [filterSettings]="filterSettings" [filterOpen]="filterOpen" (applyFilter)="applyMetadataFilter($event)"></app-metadata-filter>
<div class="viewport-container" [ngClass]="{'empty': items.length === 0}">
<div class="viewport-container" [ngClass]="{'empty': items.length === 0 && !isLoading}">
<div class="content-container">
<div class="card-container mt-2 mb-2">
<p *ngIf="items.length === 0 && !isLoading">
<ng-container [ngTemplateOutlet]="noDataTemplate"></ng-container>
</p>
<virtual-scroller [ngClass]="{'empty': items.length === 0}" #scroll [items]="items" [bufferAmount]="1" [parentScroll]="parentScroll">
<virtual-scroller [ngClass]="{'empty': items.length === 0 && !isLoading}" #scroll [items]="items" [bufferAmount]="1" [parentScroll]="parentScroll">
<div class="grid row g-0" #container>
<div class="card col-auto mt-2 mb-2" *ngFor="let item of scroll.viewPortItems; trackBy:trackByIdentity; index as i" id="jumpbar-index--{{i}}" [attr.jumpbar-index]="i">
<ng-container [ngTemplateOutlet]="itemTemplate" [ngTemplateOutletContext]="{ $implicit: item, idx: scroll.viewPortInfo.startIndexWithBuffer + i }"></ng-container>
@ -29,7 +29,7 @@
</div>
</div>
<ng-container *ngIf="jumpBarKeysToRender.length >= 4 && items.length !== 0 && scroll.viewPortInfo.maxScrollPosition > 0" [ngTemplateOutlet]="jumpBar" [ngTemplateOutletContext]="{ id: 'jumpbar' }"></ng-container>
<ng-container *ngIf="jumpBarKeysToRender.length >= 4 && items.length > 0 && scroll.viewPortInfo.maxScrollPosition > 0" [ngTemplateOutlet]="jumpBar" [ngTemplateOutletContext]="{ id: 'jumpbar' }"></ng-container>
</div>
<ng-template #cardTemplate>
<virtual-scroller #scroll [items]="items" [bufferAmount]="1">

View File

@ -46,8 +46,8 @@ export class CardDetailLayoutComponent implements OnInit, OnDestroy, OnChanges {
@Input() refresh!: EventEmitter<void>;
@Input() jumpBarKeys: Array<JumpKey> = []; // This is aprox 784 pixels wide
jumpBarKeysToRender: Array<JumpKey> = []; // Original
@Input() jumpBarKeys: Array<JumpKey> = []; // This is aprox 784 pixels tall, original keys
jumpBarKeysToRender: Array<JumpKey> = []; // What is rendered on screen
@Output() itemClicked: EventEmitter<any> = new EventEmitter();
@Output() applyFilter: EventEmitter<FilterEvent> = new EventEmitter();
@ -115,7 +115,6 @@ export class CardDetailLayoutComponent implements OnInit, OnDestroy, OnChanges {
this.jumpBarKeysToRender = [...this.jumpBarKeys];
this.resizeJumpBar();
if (!this.hasResumedJumpKey && this.jumpBarKeysToRender.length > 0) {
const resumeKey = this.jumpbarService.getResumeKey(this.router.url);
if (resumeKey === '') return;
@ -156,6 +155,5 @@ export class CardDetailLayoutComponent implements OnInit, OnDestroy, OnChanges {
this.virtualScroller.scrollToIndex(targetIndex, true, 0, 1000);
this.jumpbarService.saveResumeKey(this.router.url, jumpKey.key);
this.changeDetectionRef.markForCheck();
return;
}
}