Scanner Logging (#2997)
parent 094e775f0b
commit a6a60c4221
ParseScannedFiles.cs
@@ -137,10 +137,24 @@ public class ParseScannedFiles
                 await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                     MessageFactory.FileScanProgressEvent(directory, library.Name, ProgressEventType.Updated));
+
+                // This is debug code to help understand why some installs aren't working correctly
+                if (!forceCheck && seriesPaths.TryGetValue(directory, out var series2) && series2.Count > 1 && series2.All(s => !string.IsNullOrEmpty(s.LowestFolderPath)))
+                {
+                    _logger.LogDebug("[ProcessFiles] Dirty check passed, series list: {@SeriesModified}", series2);
+                    foreach (var s in series2)
+                    {
+                        _logger.LogDebug("[ProcessFiles] Last Scanned: {LastScanned} vs Directory Check: {DirectoryLastScanned}", s.LastScanned, _directoryService
+                            .GetLastWriteTime(s.LowestFolderPath!)
+                            .Truncate(TimeSpan.TicksPerSecond));
+                    }
+
+                }
+
                 if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, directory, forceCheck))
                 {
                     if (result.Exists(r => r.Folder == directory))
                     {
                         _logger.LogDebug("[ProcessFiles] Skipping adding {Directory} as it's already added", directory);
                         continue;
                     }
                     result.Add(CreateScanResult(directory, folderPath, false, ArraySegment<string>.Empty));
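The {@SeriesModified} placeholder in the new debug line uses the structured-logging destructuring operator: with Serilog, which Kavita's logging pipeline builds on, the @ captures the whole object graph as structured properties rather than just its ToString() output. A minimal, self-contained sketch of the difference; the logger configuration and the anonymous object below are illustrative only, not Kavita's actual setup:

```csharp
// Illustrative sketch only; assumes the Serilog console sink package is referenced.
using System;
using Serilog;

var log = new LoggerConfiguration()
    .MinimumLevel.Debug()
    .WriteTo.Console()
    .CreateLogger();

var series = new { FolderPath = "/manga/Example Series", LastScanned = DateTime.UtcNow };

// "{@Series}" destructures the object into structured properties,
// while "{Series}" would only record its ToString() representation.
log.Debug("Dirty check passed, series list: {@Series}", series);
```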
@@ -152,9 +166,7 @@ public class ParseScannedFiles
                 _logger.LogDebug("[ProcessFiles] {Directory} is dirty and has multiple series folders, checking if we can avoid a full scan", directory);
                 foreach (var seriesModified in series)
                 {
-
-                    // TODO: We can check directly against seriesModified.LastScanned instead of library scan
-                    var hasFolderChangedSinceLastScan = forceCheck || library.LastScanned.Truncate(TimeSpan.TicksPerSecond) <
+                    var hasFolderChangedSinceLastScan = seriesModified.LastScanned.Truncate(TimeSpan.TicksPerSecond) <
                         _directoryService
                             .GetLastWriteTime(seriesModified.LowestFolderPath!)
                             .Truncate(TimeSpan.TicksPerSecond);
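The dirty check now compares the series' own LastScanned timestamp, rather than the library-level one, against the folder's last write time, and both sides are truncated to whole seconds so sub-second tick differences don't flag a folder as changed. A minimal sketch of that comparison; the Truncate extension below is a stand-in for the one in API.Extensions and may not match Kavita's exact implementation:

```csharp
// Stand-in sketch; Kavita's real Truncate extension lives in API.Extensions and may differ.
using System;
using System.IO;

public static class DateTimeTruncation
{
    // Drop ticks finer than the given resolution (e.g. TimeSpan.TicksPerSecond),
    // so timestamps that differ only below that resolution compare as equal.
    public static DateTime Truncate(this DateTime value, long resolution) =>
        new DateTime(value.Ticks - (value.Ticks % resolution), value.Kind);
}

public static class DirtyCheckSketch
{
    // A series folder is "dirty" when its last write time is newer than the
    // last scan, compared at one-second resolution.
    public static bool HasFolderChangedSinceLastScan(DateTime lastScanned, string lowestFolderPath) =>
        lastScanned.Truncate(TimeSpan.TicksPerSecond) <
        Directory.GetLastWriteTime(lowestFolderPath).Truncate(TimeSpan.TicksPerSecond);
}
```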
@@ -226,7 +238,7 @@ public class ParseScannedFiles
             return new ScanResult()
             {
                 Files = files,
-                Folder = folderPath,
+                Folder = Parser.Parser.NormalizePath(folderPath),
                 LibraryRoot = libraryRoot,
                 HasChanged = hasChanged
             };
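Normalizing Folder when the ScanResult is created keeps it in the same form as the directory strings it is later compared against, for example the result.Exists(r => r.Folder == directory) guard shown earlier. The helper below is a hypothetical stand-in for Parser.Parser.NormalizePath, shown only to illustrate the intent; Kavita's real implementation may differ:

```csharp
// Hypothetical stand-in for Parser.Parser.NormalizePath; illustrates intent only.
public static class PathNormalizationSketch
{
    // Unify separators so the same folder always produces the same string,
    // e.g. NormalizePath(@"C:\Books\Manga") == NormalizePath("C:/Books/Manga").
    public static string NormalizePath(string path) =>
        path.Replace('\\', '/');
}
```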
ProcessSeries.cs
@@ -18,6 +18,7 @@ using API.Services.Tasks.Scanner.Parser;
 using API.SignalR;
 using Hangfire;
 using Kavita.Common;
+using Microsoft.EntityFrameworkCore;
 using Microsoft.Extensions.Logging;
 
 namespace API.Services.Tasks.Scanner;
@@ -189,6 +190,39 @@ public class ProcessSeries : IProcessSeries
         {
             await _unitOfWork.CommitAsync();
         }
+        catch (DbUpdateConcurrencyException ex)
+        {
+            foreach (var entry in ex.Entries)
+            {
+                if (entry.Entity is Series)
+                {
+                    var proposedValues = entry.CurrentValues;
+                    var databaseValues = await entry.GetDatabaseValuesAsync();
+
+                    foreach (var property in proposedValues.Properties)
+                    {
+                        var proposedValue = proposedValues[property];
+                        var databaseValue = databaseValues[property];
+
+                        // TODO: decide which value should be written to database
+                        _logger.LogDebug("Property conflict, proposed: {Proposed} vs db: {Database}", proposedValue, databaseValue);
+                        // proposedValues[property] = <value to be saved>;
+                    }
+
+                    // Refresh original values to bypass next concurrency check
+                    entry.OriginalValues.SetValues(databaseValues);
+                }
+            }
+
+
+            _logger.LogCritical(ex,
+                "[ScannerService] There was an issue writing to the database for series {SeriesName}",
+                series.Name);
+            await _eventHub.SendMessageAsync(MessageFactory.Error,
+                MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series.OriginalName}",
+                    ex.Message));
+            return;
+        }
         catch (Exception ex)
         {
             await _unitOfWork.RollbackAsync();
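The new catch block follows the standard EF Core recipe for DbUpdateConcurrencyException: inspect each conflicting entry, log the proposed versus database values, then overwrite OriginalValues with the database values so the same conflict is not re-detected. A condensed, generic sketch of that recipe; the DbContext wrapper and the "database wins" retry baseline below are illustrative, not what ProcessSeries ultimately does, since it logs critically and abandons the series instead of retrying:

```csharp
// Generic sketch of EF Core concurrency-conflict handling; the retry policy is illustrative.
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;

public static class ConcurrencySketch
{
    // Returns true if the save succeeded; on a conflict, refreshes OriginalValues
    // from the database so a caller-driven retry would no longer conflict.
    public static async Task<bool> TrySaveAsync(DbContext context)
    {
        try
        {
            await context.SaveChangesAsync();
            return true;
        }
        catch (DbUpdateConcurrencyException ex)
        {
            foreach (var entry in ex.Entries)
            {
                var databaseValues = await entry.GetDatabaseValuesAsync();
                if (databaseValues == null) continue; // the row was deleted concurrently

                // "Database wins" as the baseline for the next SaveChangesAsync call.
                entry.OriginalValues.SetValues(databaseValues);
            }
            return false;
        }
    }
}
```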
TagManagerService.cs
@@ -9,6 +9,8 @@ using API.Entities;
 using API.Entities.Enums;
 using API.Extensions;
 using API.Helpers.Builders;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
 
 namespace API.Services.Tasks.Scanner;
 #nullable enable
@@ -39,6 +41,7 @@ public interface ITagManagerService
 public class TagManagerService : ITagManagerService
 {
     private readonly IUnitOfWork _unitOfWork;
+    private readonly ILogger<TagManagerService> _logger;
     private Dictionary<string, Genre> _genres;
     private Dictionary<string, Tag> _tags;
     private Dictionary<string, Person> _people;
@@ -49,9 +52,10 @@ public class TagManagerService : ITagManagerService
     private readonly SemaphoreSlim _personSemaphore = new SemaphoreSlim(1, 1);
     private readonly SemaphoreSlim _collectionTagSemaphore = new SemaphoreSlim(1, 1);
 
-    public TagManagerService(IUnitOfWork unitOfWork)
+    public TagManagerService(IUnitOfWork unitOfWork, ILogger<TagManagerService> logger)
     {
         _unitOfWork = unitOfWork;
+        _logger = logger;
         Reset();
 
     }
@@ -132,6 +136,11 @@ public class TagManagerService : ITagManagerService
             _tags.Add(result.NormalizedTitle, result);
             return result;
         }
+        catch (Exception ex)
+        {
+            _logger.LogCritical(ex, "There was an exception when creating a new Tag. Scan again to get this included: {Tag}", tag);
+            return null;
+        }
         finally
         {
             _tagSemaphore.Release();
@@ -164,6 +173,46 @@ public class TagManagerService : ITagManagerService
             _people.Add(key, result);
             return result;
         }
+        catch (DbUpdateConcurrencyException ex)
+        {
+            foreach (var entry in ex.Entries)
+            {
+                if (entry.Entity is Person)
+                {
+                    var proposedValues = entry.CurrentValues;
+                    var databaseValues = await entry.GetDatabaseValuesAsync();
+
+                    foreach (var property in proposedValues.Properties)
+                    {
+                        var proposedValue = proposedValues[property];
+                        var databaseValue = databaseValues[property];
+
+                        // TODO: decide which value should be written to database
+                        _logger.LogDebug(ex, "There was an exception when creating a new Person: {PersonName} ({Role})", name, role);
+                        _logger.LogDebug("Property conflict, proposed: {Proposed} vs db: {Database}", proposedValue, databaseValue);
+                        // proposedValues[property] = <value to be saved>;
+                    }
+
+                    // Refresh original values to bypass next concurrency check
+                    entry.OriginalValues.SetValues(databaseValues);
+                    //return (Person) entry.Entity;
+                    return null;
+                }
+                // else
+                // {
+                //     throw new NotSupportedException(
+                //         "Don't know how to handle concurrency conflicts for "
+                //         + entry.Metadata.Name);
+                // }
+            }
+
+            return null;
+        }
+        catch (Exception ex)
+        {
+            _logger.LogCritical(ex, "There was an exception when creating a new Person. Scan again to get this included: {PersonName} ({Role})", name, role);
+            return null;
+        }
         finally
         {
             _personSemaphore.Release();
@@ -205,6 +254,11 @@ public class TagManagerService : ITagManagerService
             await _unitOfWork.CommitAsync();
             _collectionTags.Add(result.NormalizedTitle, result);
         }
+        catch (Exception ex)
+        {
+            _logger.LogCritical(ex, "There was an exception when creating a new Collection. Scan again to get this included: {Tag}", tag);
+            return Tuple.Create<AppUserCollection?, bool>(null, false);
+        }
         finally
        {
             _collectionTagSemaphore.Release();
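With these changes the Tag, Person, and Collection helpers in TagManagerService share one shape: creation happens under a semaphore, failures are now logged critically and surfaced as a null (or false) sentinel so a later rescan can retry, and the semaphore is always released in finally so one bad entity cannot stall the scan. A generic sketch of that shape; the type and member names below are illustrative, not Kavita's:

```csharp
// Generic sketch of the semaphore-guarded get-or-create pattern; names are illustrative.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;

public class GuardedCache<T> where T : class
{
    private readonly Dictionary<string, T> _cache = new();
    private readonly SemaphoreSlim _semaphore = new(1, 1);
    private readonly ILogger _logger;

    public GuardedCache(ILogger logger) => _logger = logger;

    public async Task<T?> GetOrCreateAsync(string key, Func<Task<T>> createAsync)
    {
        await _semaphore.WaitAsync();
        try
        {
            if (_cache.TryGetValue(key, out var existing)) return existing;

            var created = await createAsync();
            _cache.Add(key, created);
            return created;
        }
        catch (Exception ex)
        {
            // Mirrors the new behaviour: log loudly, return null, let a rescan retry later.
            _logger.LogCritical(ex, "There was an exception when creating {Key}", key);
            return null;
        }
        finally
        {
            // Always release, even when creation throws, so later lookups don't hang.
            _semaphore.Release();
        }
    }
}
```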
library-detail.component.ts
@@ -211,7 +211,7 @@ export class LibraryDetailComponent implements OnInit {
       const seriesRemoved = event.payload as SeriesRemovedEvent;
       if (seriesRemoved.libraryId !== this.libraryId) return;
       if (!this.utilityService.deepEqual(this.filter, this.filterActiveCheck)) {
-        this.loadPage();
+        this.loadPage(); // TODO: This can be quite expensive when bulk deleting. We can refactor this to a ReplaySubject to debounce
         return;
       }
 