Scanner Logging (#2997)

This commit is contained in:
Joe Milazzo 2024-06-12 12:14:52 -05:00 committed by GitHub
parent 094e775f0b
commit a6a60c4221
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 106 additions and 6 deletions

View File

@@ -137,10 +137,24 @@ public class ParseScannedFiles
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent(directory, library.Name, ProgressEventType.Updated)); MessageFactory.FileScanProgressEvent(directory, library.Name, ProgressEventType.Updated));
// This is debug code to help understand why some installs aren't working correctly
if (!forceCheck && seriesPaths.TryGetValue(directory, out var series2) && series2.Count > 1 && series2.All(s => !string.IsNullOrEmpty(s.LowestFolderPath)))
{
_logger.LogDebug("[ProcessFiles] Dirty check passed, series list: {@SeriesModified}", series2);
foreach (var s in series2)
{
_logger.LogDebug("[ProcessFiles] Last Scanned: {LastScanned} vs Directory Check: {DirectoryLastScanned}", s.LastScanned, _directoryService
.GetLastWriteTime(s.LowestFolderPath!)
.Truncate(TimeSpan.TicksPerSecond));
}
}
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, directory, forceCheck)) if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, directory, forceCheck))
{ {
if (result.Exists(r => r.Folder == directory)) if (result.Exists(r => r.Folder == directory))
{ {
_logger.LogDebug("[ProcessFiles] Skipping adding {Directory} as it's already added", directory);
continue; continue;
} }
result.Add(CreateScanResult(directory, folderPath, false, ArraySegment<string>.Empty)); result.Add(CreateScanResult(directory, folderPath, false, ArraySegment<string>.Empty));
@@ -152,9 +166,7 @@ public class ParseScannedFiles
_logger.LogDebug("[ProcessFiles] {Directory} is dirty and has multiple series folders, checking if we can avoid a full scan", directory); _logger.LogDebug("[ProcessFiles] {Directory} is dirty and has multiple series folders, checking if we can avoid a full scan", directory);
foreach (var seriesModified in series) foreach (var seriesModified in series)
{ {
var hasFolderChangedSinceLastScan = seriesModified.LastScanned.Truncate(TimeSpan.TicksPerSecond) <
// TODO: We can check directly against seriesModified.LastScanned instead of library scan
var hasFolderChangedSinceLastScan = forceCheck || library.LastScanned.Truncate(TimeSpan.TicksPerSecond) <
_directoryService _directoryService
.GetLastWriteTime(seriesModified.LowestFolderPath!) .GetLastWriteTime(seriesModified.LowestFolderPath!)
.Truncate(TimeSpan.TicksPerSecond); .Truncate(TimeSpan.TicksPerSecond);
@@ -226,7 +238,7 @@ public class ParseScannedFiles
return new ScanResult() return new ScanResult()
{ {
Files = files, Files = files,
Folder = folderPath, Folder = Parser.Parser.NormalizePath(folderPath),
LibraryRoot = libraryRoot, LibraryRoot = libraryRoot,
HasChanged = hasChanged HasChanged = hasChanged
}; };

View File

@@ -18,6 +18,7 @@ using API.Services.Tasks.Scanner.Parser;
using API.SignalR; using API.SignalR;
using Hangfire; using Hangfire;
using Kavita.Common; using Kavita.Common;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Services.Tasks.Scanner; namespace API.Services.Tasks.Scanner;
@@ -189,6 +190,39 @@ public class ProcessSeries : IProcessSeries
{ {
await _unitOfWork.CommitAsync(); await _unitOfWork.CommitAsync();
} }
catch (DbUpdateConcurrencyException ex)
{
foreach (var entry in ex.Entries)
{
if (entry.Entity is Series)
{
var proposedValues = entry.CurrentValues;
var databaseValues = await entry.GetDatabaseValuesAsync();
foreach (var property in proposedValues.Properties)
{
var proposedValue = proposedValues[property];
var databaseValue = databaseValues[property];
// TODO: decide which value should be written to database
_logger.LogDebug("Property conflict, proposed: {Proposed} vs db: {Database}", proposedValue, databaseValue);
// proposedValues[property] = <value to be saved>;
}
// Refresh original values to bypass next concurrency check
entry.OriginalValues.SetValues(databaseValues);
}
}
_logger.LogCritical(ex,
"[ScannerService] There was an issue writing to the database for series {SeriesName}",
series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Error,
MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series.OriginalName}",
ex.Message));
return;
}
catch (Exception ex) catch (Exception ex)
{ {
await _unitOfWork.RollbackAsync(); await _unitOfWork.RollbackAsync();

View File

@@ -9,6 +9,8 @@ using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Helpers.Builders; using API.Helpers.Builders;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks.Scanner; namespace API.Services.Tasks.Scanner;
#nullable enable #nullable enable
@@ -39,6 +41,7 @@ public interface ITagManagerService
public class TagManagerService : ITagManagerService public class TagManagerService : ITagManagerService
{ {
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<TagManagerService> _logger;
private Dictionary<string, Genre> _genres; private Dictionary<string, Genre> _genres;
private Dictionary<string, Tag> _tags; private Dictionary<string, Tag> _tags;
private Dictionary<string, Person> _people; private Dictionary<string, Person> _people;
@@ -49,9 +52,10 @@ public class TagManagerService : ITagManagerService
private readonly SemaphoreSlim _personSemaphore = new SemaphoreSlim(1, 1); private readonly SemaphoreSlim _personSemaphore = new SemaphoreSlim(1, 1);
private readonly SemaphoreSlim _collectionTagSemaphore = new SemaphoreSlim(1, 1); private readonly SemaphoreSlim _collectionTagSemaphore = new SemaphoreSlim(1, 1);
public TagManagerService(IUnitOfWork unitOfWork) public TagManagerService(IUnitOfWork unitOfWork, ILogger<TagManagerService> logger)
{ {
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_logger = logger;
Reset(); Reset();
} }
@@ -132,6 +136,11 @@ public class TagManagerService : ITagManagerService
_tags.Add(result.NormalizedTitle, result); _tags.Add(result.NormalizedTitle, result);
return result; return result;
} }
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an exception when creating a new Tag. Scan again to get this included: {Tag}", tag);
return null;
}
finally finally
{ {
_tagSemaphore.Release(); _tagSemaphore.Release();
@@ -164,6 +173,46 @@ public class TagManagerService : ITagManagerService
_people.Add(key, result); _people.Add(key, result);
return result; return result;
} }
catch (DbUpdateConcurrencyException ex)
{
foreach (var entry in ex.Entries)
{
if (entry.Entity is Person)
{
var proposedValues = entry.CurrentValues;
var databaseValues = await entry.GetDatabaseValuesAsync();
foreach (var property in proposedValues.Properties)
{
var proposedValue = proposedValues[property];
var databaseValue = databaseValues[property];
// TODO: decide which value should be written to database
_logger.LogDebug(ex, "There was an exception when creating a new Person: {PersonName} ({Role})", name, role);
_logger.LogDebug("Property conflict, proposed: {Proposed} vs db: {Database}", proposedValue, databaseValue);
// proposedValues[property] = <value to be saved>;
}
// Refresh original values to bypass next concurrency check
entry.OriginalValues.SetValues(databaseValues);
//return (Person) entry.Entity;
return null;
}
// else
// {
// throw new NotSupportedException(
// "Don't know how to handle concurrency conflicts for "
// + entry.Metadata.Name);
// }
}
return null;
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an exception when creating a new Person. Scan again to get this included: {PersonName} ({Role})", name, role);
return null;
}
finally finally
{ {
_personSemaphore.Release(); _personSemaphore.Release();
@@ -205,6 +254,11 @@ public class TagManagerService : ITagManagerService
await _unitOfWork.CommitAsync(); await _unitOfWork.CommitAsync();
_collectionTags.Add(result.NormalizedTitle, result); _collectionTags.Add(result.NormalizedTitle, result);
} }
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an exception when creating a new Collection. Scan again to get this included: {Tag}", tag);
return Tuple.Create<AppUserCollection?, bool>(null, false);
}
finally finally
{ {
_collectionTagSemaphore.Release(); _collectionTagSemaphore.Release();

View File

@@ -211,7 +211,7 @@ export class LibraryDetailComponent implements OnInit {
const seriesRemoved = event.payload as SeriesRemovedEvent; const seriesRemoved = event.payload as SeriesRemovedEvent;
if (seriesRemoved.libraryId !== this.libraryId) return; if (seriesRemoved.libraryId !== this.libraryId) return;
if (!this.utilityService.deepEqual(this.filter, this.filterActiveCheck)) { if (!this.utilityService.deepEqual(this.filter, this.filterActiveCheck)) {
this.loadPage(); this.loadPage(); // TODO: This can be quite expensive when bulk deleting. We can refactor this to an ReplaySubject to debounce
return; return;
} }