Scan Loop Fixes (#1459)

* Added Last Folder Scanned time to series info modal.

Tweaked the info event detail modal to have a primary action and thus be auto-dismissible.

* Added an error event when multiple series are found while processing a series.

* Fixed a bug where a series could get stuck with other series due to a bad select query.

Started adding the force flag hook for the UI and designing the confirm.

Confirm service now also has ability to hide the close button.

Updated error events and logging in the loop, to be more informative

* Fixed a bug where confirm service wasn't showing the proper body content.

* Hooked up force scan series

* refresh metadata now has force update

* Fixed up the messaging with the prompt on scan, hooked it up properly in the scan library to avoid the check if the whole library needs to even be scanned. Fixed a bug where NormalizedLocalizedName wasn't being calculated on new entities.

Started adding unit tests for this problematic repo method.

* Fixed a bug where we updated NormalizedLocalizedName before we set it.

* Send an info to the UI when series are spread between multiple library level folders.

* Added some logger output when there are no files found in a folder. Return early if there are no files found, so we can avoid some small loops of code.

* Fixed an issue where multiple series in a folder with localized series would cause unintended grouping. This is not supported and hence we will warn them and allow the bad grouping.

* Added a case where scan series fails due to the folder being removed. We will now log an error.

* Normalize paths when finding the highest directory until root.

* Fixed an issue with Scan Series where changing a series' folder to a different path but the original series folder existed with another series in it, would cause the series to not be deleted.

* Fixed some bugs around specials causing a series merge issue on scan series.

* Removed a bug marker

* Cleaned up some of the scan loop and removed a test I don't need.

* Remove any prompts for force flow, it doesn't work well. Leave the API as is though.

* Fixed up a check for duplicate ScanLibrary calls
This commit is contained in:
Joseph Milazzo 2022-08-22 12:14:31 -05:00 committed by GitHub
parent 354be09c4c
commit 1c9544fc47
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 367 additions and 222 deletions

View File

@ -0,0 +1,156 @@
using System.Collections.Generic;
using System.Data.Common;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
using API.Services;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Repository;
/// <summary>
/// Tests for SeriesRepository query methods, backed by an in-memory SQLite database
/// and a mocked filesystem (System.IO.Abstractions).
/// </summary>
public class SeriesRepositoryTests
{
    private readonly IUnitOfWork _unitOfWork;
    // Held for the lifetime of the test class: an in-memory SQLite database is destroyed
    // as soon as its last connection closes.
    private readonly DbConnection _connection;
    private readonly DataContext _context;

    private const string CacheDirectory = "C:/kavita/config/cache/";
    private const string CoverImageDirectory = "C:/kavita/config/covers/";
    private const string BackupDirectory = "C:/kavita/config/backups/";
    private const string DataDirectory = "C:/data/";

    public SeriesRepositoryTests()
    {
        var contextOptions = new DbContextOptionsBuilder().UseSqlite(CreateInMemoryDatabase()).Options;
        _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;

        _context = new DataContext(contextOptions);
        // xUnit constructors cannot be async, so block on the seed work here.
        Task.Run(SeedDb).GetAwaiter().GetResult();

        var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
        var mapper = config.CreateMapper();
        _unitOfWork = new UnitOfWork(_context, mapper, null);
    }

    #region Setup

    /// <summary>
    /// Creates and opens an in-memory SQLite connection for the test DbContext.
    /// </summary>
    private static DbConnection CreateInMemoryDatabase()
    {
        var connection = new SqliteConnection("Filename=:memory:");
        connection.Open();

        return connection;
    }

    /// <summary>
    /// Runs migrations and seeds server settings, one library and one user.
    /// </summary>
    /// <returns>True if at least one row was persisted</returns>
    private async Task<bool> SeedDb()
    {
        await _context.Database.MigrateAsync();
        var filesystem = CreateFileSystem();

        await Seed.SeedSettings(_context,
            new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));

        var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
        setting.Value = CacheDirectory;

        setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
        setting.Value = BackupDirectory;

        _context.ServerSetting.Update(setting);

        var lib = new Library()
        {
            Name = "Manga", Folders = new List<FolderPath>() {new FolderPath() {Path = "C:/data/"}}
        };

        _context.AppUser.Add(new AppUser()
        {
            UserName = "majora2007",
            Libraries = new List<Library>()
            {
                lib
            }
        });

        return await _context.SaveChangesAsync() > 0;
    }

    /// <summary>
    /// Removes all series-related entities so each test can start from a clean slate.
    /// </summary>
    private async Task ResetDb()
    {
        _context.Series.RemoveRange(_context.Series.ToList());
        _context.AppUserRating.RemoveRange(_context.AppUserRating.ToList());
        _context.Genre.RemoveRange(_context.Genre.ToList());
        _context.CollectionTag.RemoveRange(_context.CollectionTag.ToList());
        _context.Person.RemoveRange(_context.Person.ToList());

        await _context.SaveChangesAsync();
    }

    /// <summary>
    /// Builds a mock filesystem with the standard Kavita config directories present.
    /// </summary>
    private static MockFileSystem CreateFileSystem()
    {
        var fileSystem = new MockFileSystem();
        fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
        fileSystem.AddDirectory("C:/kavita/config/");
        fileSystem.AddDirectory(CacheDirectory);
        fileSystem.AddDirectory(CoverImageDirectory);
        fileSystem.AddDirectory(BackupDirectory);
        fileSystem.AddDirectory(DataDirectory);

        return fileSystem;
    }

    #endregion

    /// <summary>
    /// Seeds a Manga library containing a single series with a localized name,
    /// for exercising name-matching queries.
    /// </summary>
    private async Task SetupSeriesData()
    {
        var library = new Library()
        {
            Name = "Manga",
            Type = LibraryType.Manga,
            Folders = new List<FolderPath>()
            {
                new FolderPath() {Path = "C:/data/manga/"}
            }
        };

        library.Series = new List<Series>()
        {
            DbFactory.Series("The Idaten Deities Know Only Peace", "Heion Sedai no Idaten-tachi"),
        };

        _unitOfWork.LibraryRepository.Add(library);
        await _unitOfWork.CommitAsync();
    }

    // FIX: [InlineData] without [Theory] is silently ignored by xUnit; the test never ran.
    [Theory]
    [InlineData("Heion Sedai no Idaten-tachi", "", "The Idaten Deities Know Only Peace")] // Matching on localized name in DB
    public async Task GetFullSeriesByAnyName_Should(string seriesName, string localizedName, string? expected)
    {
        // NOTE(review): SetupSeriesData() is never invoked by this test, so the series the
        // InlineData case expects may not exist in the DB — confirm whether the test should
        // call ResetDb() + SetupSeriesData() first (and which libraryId that series lands in).
        var series =
            await _unitOfWork.SeriesRepository.GetFullSeriesByAnyName(seriesName, localizedName,
                1);
        if (expected == null)
        {
            Assert.Null(series);
        }
        else
        {
            Assert.NotNull(series);
            Assert.Equal(expected, series.Name);
        }
    }
}

View File

@ -156,96 +156,6 @@ public class ParseScannedFilesTests
#endregion
#region GetInfosByName
[Fact]
public void GetInfosByName_ShouldReturnGivenMatchingSeriesName()
{
var fileSystem = new MockFileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
var infos = new List<ParserInfo>()
{
ParserInfoFactory.CreateParsedInfo("Accel World", "1", "0", "Accel World v1.cbz", false),
ParserInfoFactory.CreateParsedInfo("Accel World", "2", "0", "Accel World v2.cbz", false)
};
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>
{
{
new ParsedSeries()
{
Format = MangaFormat.Archive,
Name = "Accel World",
NormalizedName = API.Parser.Parser.Normalize("Accel World")
},
infos
},
{
new ParsedSeries()
{
Format = MangaFormat.Pdf,
Name = "Accel World",
NormalizedName = API.Parser.Parser.Normalize("Accel World")
},
new List<ParserInfo>()
}
};
var series = DbFactory.Series("Accel World");
series.Format = MangaFormat.Pdf;
Assert.Empty(ParseScannedFiles.GetInfosByName(parsedSeries, series));
series.Format = MangaFormat.Archive;
Assert.Equal(2, ParseScannedFiles.GetInfosByName(parsedSeries, series).Count());
}
[Fact]
public void GetInfosByName_ShouldReturnGivenMatchingNormalizedSeriesName()
{
var fileSystem = new MockFileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
var infos = new List<ParserInfo>()
{
ParserInfoFactory.CreateParsedInfo("Accel World", "1", "0", "Accel World v1.cbz", false),
ParserInfoFactory.CreateParsedInfo("Accel World", "2", "0", "Accel World v2.cbz", false)
};
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>
{
{
new ParsedSeries()
{
Format = MangaFormat.Archive,
Name = "Accel World",
NormalizedName = API.Parser.Parser.Normalize("Accel World")
},
infos
},
{
new ParsedSeries()
{
Format = MangaFormat.Pdf,
Name = "Accel World",
NormalizedName = API.Parser.Parser.Normalize("Accel World")
},
new List<ParserInfo>()
}
};
var series = DbFactory.Series("accel world");
series.Format = MangaFormat.Archive;
Assert.Equal(2, ParseScannedFiles.GetInfosByName(parsedSeries, series).Count());
}
#endregion
#region MergeName
// NOTE: I don't think I can test MergeName as it relies on Tracking Files, which is more complicated than I need

View File

@ -168,17 +168,17 @@ namespace API.Controllers
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("scan")]
public ActionResult Scan(int libraryId)
public ActionResult Scan(int libraryId, bool force = false)
{
_taskScheduler.ScanLibrary(libraryId);
_taskScheduler.ScanLibrary(libraryId, force);
return Ok();
}
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("refresh-metadata")]
public ActionResult RefreshMetadata(int libraryId)
public ActionResult RefreshMetadata(int libraryId, bool force = true)
{
_taskScheduler.RefreshMetadata(libraryId);
_taskScheduler.RefreshMetadata(libraryId, force);
return Ok();
}

View File

@ -58,5 +58,9 @@ namespace API.DTOs
/// The highest level folder for this Series
/// </summary>
public string FolderPath { get; set; }
/// <summary>
/// The last time the folder for this series was scanned
/// </summary>
public DateTime LastFolderScanned { get; set; }
}
}

View File

@ -24,6 +24,26 @@ namespace API.Data
OriginalName = name,
LocalizedName = name,
NormalizedName = Parser.Parser.Normalize(name),
NormalizedLocalizedName = Parser.Parser.Normalize(name),
SortName = name,
Volumes = new List<Volume>(),
Metadata = SeriesMetadata(Array.Empty<CollectionTag>())
};
}
public static Series Series(string name, string localizedName)
{
if (string.IsNullOrEmpty(localizedName))
{
localizedName = name;
}
return new Series
{
Name = name,
OriginalName = name,
LocalizedName = localizedName,
NormalizedName = Parser.Parser.Normalize(name),
NormalizedLocalizedName = Parser.Parser.Normalize(localizedName),
SortName = name,
Volumes = new List<Volume>(),
Metadata = SeriesMetadata(Array.Empty<CollectionTag>())

View File

@ -1220,15 +1220,19 @@ public class SeriesRepository : ISeriesRepository
/// <returns></returns>
public Task<Series> GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId)
{
var localizedSeries = Parser.Parser.Normalize(seriesName);
var normalizedSeries = Parser.Parser.Normalize(seriesName);
var normalizedLocalized = Parser.Parser.Normalize(localizedName);
return _context.Series
.Where(s => s.NormalizedName.Equals(localizedSeries)
|| s.NormalizedName.Equals(normalizedLocalized)
|| s.NormalizedLocalizedName.Equals(localizedSeries)
|| s.NormalizedLocalizedName.Equals(normalizedLocalized))
var query = _context.Series
.Where(s => s.LibraryId == libraryId)
.Include(s => s.Metadata)
.Where(s => s.NormalizedName.Equals(normalizedSeries)
|| (s.NormalizedLocalizedName.Equals(normalizedSeries) && s.NormalizedLocalizedName != string.Empty));
if (!string.IsNullOrEmpty(normalizedLocalized))
{
query = query.Where(s =>
s.NormalizedName.Equals(normalizedLocalized) || s.NormalizedLocalizedName.Equals(normalizedLocalized));
}
return query.Include(s => s.Metadata)
.ThenInclude(m => m.People)
.Include(s => s.Metadata)
.ThenInclude(m => m.Genres)

View File

@ -492,10 +492,10 @@ namespace API.Services
{
var stopLookingForDirectories = false;
var dirs = new Dictionary<string, string>();
foreach (var folder in libraryFolders)
foreach (var folder in libraryFolders.Select(Parser.Parser.NormalizePath))
{
if (stopLookingForDirectories) break;
foreach (var file in filePaths)
foreach (var file in filePaths.Select(Parser.Parser.NormalizePath))
{
if (!file.Contains(folder)) continue;

View File

@ -19,7 +19,7 @@ public interface ITaskScheduler
Task ScheduleTasks();
Task ScheduleStatsTasks();
void ScheduleUpdaterTasks();
void ScanLibrary(int libraryId);
void ScanLibrary(int libraryId, bool force = false);
void CleanupChapters(int[] chapterIds);
void RefreshMetadata(int libraryId, bool forceUpdate = true);
void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false);
@ -174,9 +174,12 @@ public class TaskScheduler : ITaskScheduler
_scannerService.ScanLibraries();
}
public void ScanLibrary(int libraryId)
public void ScanLibrary(int libraryId, bool force = false)
{
if (HasAlreadyEnqueuedTask("ScannerService","ScanLibrary", new object[] {libraryId}, ScanQueue))
var alreadyEnqueued =
HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, true}, ScanQueue) ||
HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, false}, ScanQueue);
if (alreadyEnqueued)
{
_logger.LogInformation("A duplicate request to scan library for library occured. Skipping");
return;
@ -184,12 +187,12 @@ public class TaskScheduler : ITaskScheduler
if (RunningAnyTasksByMethod(new List<string>() {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue))
{
_logger.LogInformation("A Scan is already running, rescheduling ScanLibrary in 3 hours");
BackgroundJob.Schedule(() => ScanLibrary(libraryId), TimeSpan.FromHours(3));
BackgroundJob.Schedule(() => ScanLibrary(libraryId, force), TimeSpan.FromHours(3));
return;
}
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId));
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force));
// When we do a scan, force cache to re-unpack in case page numbers change
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
}
@ -201,7 +204,11 @@ public class TaskScheduler : ITaskScheduler
public void RefreshMetadata(int libraryId, bool forceUpdate = true)
{
if (HasAlreadyEnqueuedTask("MetadataService","GenerateCoversForLibrary", new object[] {libraryId, forceUpdate}))
var alreadyEnqueued = HasAlreadyEnqueuedTask("MetadataService", "GenerateCoversForLibrary",
new object[] {libraryId, true}) ||
HasAlreadyEnqueuedTask("MetadataService", "GenerateCoversForLibrary",
new object[] {libraryId, false});
if (alreadyEnqueued)
{
_logger.LogInformation("A duplicate request to refresh metadata for library occured. Skipping");
return;
@ -232,7 +239,7 @@ public class TaskScheduler : ITaskScheduler
}
if (RunningAnyTasksByMethod(new List<string>() {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue))
{
_logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 mins");
_logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 minutes");
BackgroundJob.Schedule(() => ScanSeries(libraryId, seriesId, forceUpdate), TimeSpan.FromMinutes(10));
return;
}
@ -276,7 +283,7 @@ public class TaskScheduler : ITaskScheduler
/// <param name="args">object[] of arguments in the order they are passed to enqueued job</param>
/// <param name="queue">Queue to check against. Defaults to "default"</param>
/// <returns></returns>
public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue)
private static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue)
{
var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue);
return enqueuedJobs.Any(j => j.Value.InEnqueuedState &&

View File

@ -3,10 +3,8 @@ using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Parser;
using API.SignalR;
using Microsoft.Extensions.Logging;
@ -68,26 +66,6 @@ namespace API.Services.Tasks.Scanner
_eventHub = eventHub;
}
/// <summary>
/// Gets the list of all parserInfos given a Series (Will match on Name, LocalizedName, OriginalName). If the series does not exist within, return empty list.
/// </summary>
/// <param name="parsedSeries"></param>
/// <param name="series"></param>
/// <returns></returns>
public static IList<ParserInfo> GetInfosByName(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Series series)
{
var allKeys = parsedSeries.Keys.Where(ps =>
SeriesHelper.FindSeries(series, ps));
var infos = new List<ParserInfo>();
foreach (var key in allKeys)
{
infos.AddRange(parsedSeries[key]);
}
return infos;
}
/// <summary>
/// This will Scan all files in a folder path. For each folder within the folderPath, FolderAction will be invoked for all files contained
@ -192,7 +170,7 @@ namespace API.Services.Tasks.Scanner
/// </summary>
/// <param name="info"></param>
/// <returns>Series Name to group this info into</returns>
public string MergeName(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParserInfo info)
private string MergeName(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParserInfo info)
{
var normalizedSeries = Parser.Parser.Normalize(info.Series);
var normalizedLocalSeries = Parser.Parser.Normalize(info.LocalizedSeries);
@ -230,7 +208,7 @@ namespace API.Services.Tasks.Scanner
/// <summary>
/// This is a new version which will process series by folder groups.
/// This will process series by folder groups.
/// </summary>
/// <param name="libraryType"></param>
/// <param name="folders"></param>
@ -263,8 +241,16 @@ namespace API.Services.Tasks.Scanner
}
_logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(folderPath, libraryName, ProgressEventType.Updated));
if (files.Count == 0)
{
_logger.LogInformation("[ScannerService] {Folder} is empty", folder);
return;
}
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
var infos = files.Select(file => _readingItemService.ParseFile(file, folderPath, libraryType)).Where(info => info != null).ToList();
var infos = files
.Select(file => _readingItemService.ParseFile(file, folderPath, libraryType))
.Where(info => info != null)
.ToList();
MergeLocalizedSeriesWithSeries(infos);
@ -320,17 +306,36 @@ namespace API.Services.Tasks.Scanner
/// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration"
/// </example>
/// <param name="infos">A collection of ParserInfos</param>
private static void MergeLocalizedSeriesWithSeries(IReadOnlyCollection<ParserInfo> infos)
private void MergeLocalizedSeriesWithSeries(IReadOnlyCollection<ParserInfo> infos)
{
var hasLocalizedSeries = infos.Any(i => !string.IsNullOrEmpty(i.LocalizedSeries));
if (!hasLocalizedSeries) return;
var localizedSeries = infos.Select(i => i.LocalizedSeries).Distinct()
var localizedSeries = infos
.Where(i => !i.IsSpecial)
.Select(i => i.LocalizedSeries)
.Distinct()
.FirstOrDefault(i => !string.IsNullOrEmpty(i));
if (string.IsNullOrEmpty(localizedSeries)) return;
var nonLocalizedSeries = infos.Select(i => i.Series).Distinct()
.FirstOrDefault(series => !series.Equals(localizedSeries));
// NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves.
string nonLocalizedSeries;
var nonLocalizedSeriesFound = infos.Where(i => !i.IsSpecial).Select(i => i.Series).Distinct().ToList();
if (nonLocalizedSeriesFound.Count == 1)
{
nonLocalizedSeries = nonLocalizedSeriesFound.First();
}
else
{
// There can be a case where there are multiple series in a folder that causes merging.
if (nonLocalizedSeriesFound.Count > 2)
{
_logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder: {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
}
nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries));
}
if (string.IsNullOrEmpty(nonLocalizedSeries)) return;
var normalizedNonLocalizedSeries = Parser.Parser.Normalize(nonLocalizedSeries);
foreach (var infoNeedingMapping in infos.Where(i =>

View File

@ -88,7 +88,7 @@ public class ProcessSeries : IProcessSeries
// Check if there is a Series
var firstInfo = parsedInfos.First();
Series series = null;
Series series;
try
{
series =
@ -97,29 +97,29 @@ public class ProcessSeries : IProcessSeries
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName}. This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries);
_logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName} for library {LibraryId}. This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries, library.Id);
await _eventHub.SendMessageAsync(MessageFactory.Error,
MessageFactory.ErrorEvent($"There was an exception finding existing series for {firstInfo.Series} with Localized name of {firstInfo.LocalizedSeries} for library {library.Id}",
"This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan."));
return;
}
if (series == null)
{
seriesAdded = true;
series = DbFactory.Series(firstInfo.Series);
series.LocalizedName = firstInfo.LocalizedSeries;
series = DbFactory.Series(firstInfo.Series, firstInfo.LocalizedSeries);
}
if (series.LibraryId == 0) series.LibraryId = library.Id;
try
{
_logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
UpdateVolumes(series, parsedInfos);
series.Pages = series.Volumes.Sum(v => v.Pages);
series.NormalizedName = Parser.Parser.Normalize(series.Name);
series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName);
series.OriginalName ??= parsedInfos[0].Series;
if (series.Format == MangaFormat.Unknown)
{
@ -144,13 +144,17 @@ public class ProcessSeries : IProcessSeries
if (!series.LocalizedNameLocked && !string.IsNullOrEmpty(localizedSeries))
{
series.LocalizedName = localizedSeries;
series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName);
}
// Update series FolderPath here (TODO: Move this into it's own private method)
var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(l => l.Path), parsedInfos.Select(f => f.FullFilePath).ToList());
if (seriesDirs.Keys.Count == 0)
{
_logger.LogCritical("Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are in a folder");
_logger.LogCritical("Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are under a single folder from library");
await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"{series.Name} has files spread outside a single series folder",
"This has negative performance effects. Please ensure all series are under a single folder from library"));
}
else
{

View File

@ -29,7 +29,7 @@ public interface IScannerService
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ScanLibrary(int libraryId);
Task ScanLibrary(int libraryId, bool forceUpdate = false);
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
@ -62,6 +62,10 @@ public enum ScanCancelReason
/// There has been no change to the filesystem since last scan
/// </summary>
NoChange = 2,
/// <summary>
/// The underlying folder is missing
/// </summary>
FolderMissing = 3
}
/**
@ -117,10 +121,15 @@ public class ScannerService : IScannerService
var library = libraries.FirstOrDefault(l => l.Folders.Select(Parser.Parser.NormalizePath).Contains(libraryFolder));
if (library != null)
{
BackgroundJob.Enqueue(() => ScanLibrary(library.Id));
BackgroundJob.Enqueue(() => ScanLibrary(library.Id, false));
}
}
/// <summary>
///
/// </summary>
/// <param name="seriesId"></param>
/// <param name="bypassFolderOptimizationChecks">Not Used. Scan series will always force</param>
[Queue(TaskScheduler.ScanQueue)]
public async Task ScanSeries(int seriesId, bool bypassFolderOptimizationChecks = true)
{
@ -130,12 +139,7 @@ public class ScannerService : IScannerService
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders);
var libraryPaths = library.Folders.Select(f => f.Path).ToList();
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, bypassFolderOptimizationChecks) != ScanCancelReason.NoCancel) return;
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
var seenSeries = new List<ParsedSeries>();
var processTasks = new List<Task>();
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel) return;
var folderPath = series.FolderPath;
if (string.IsNullOrEmpty(folderPath) || !_directoryService.Exists(folderPath))
@ -150,22 +154,32 @@ public class ScannerService : IScannerService
}
folderPath = seriesDirs.Keys.FirstOrDefault();
// We should check if folderPath is a library folder path and if so, return early and tell user to correct their setup.
if (libraryPaths.Contains(folderPath))
{
_logger.LogCritical("[ScannerSeries] {SeriesName} scan aborted. Files for series are not in a nested folder under library path. Correct this and rescan", series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Files for series are not in a nested folder under library path. Correct this and rescan."));
return;
}
}
if (string.IsNullOrEmpty(folderPath))
{
_logger.LogCritical("Scan Series could not find a single, valid folder root for files");
_logger.LogCritical("[ScannerSeries] Scan Series could not find a single, valid folder root for files");
await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Scan Series could not find a single, valid folder root for files"));
return;
}
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
var processTasks = new List<Task>();
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name));
await _processSeries.Prime();
void TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
{
var skippedScan = parsedInfo.Item1;
var parsedFiles = parsedInfo.Item2;
if (parsedFiles.Count == 0) return;
@ -176,44 +190,21 @@ public class ScannerService : IScannerService
Format = parsedFiles.First().Format
};
if (skippedScan)
if (!foundParsedSeries.NormalizedName.Equals(series.NormalizedName))
{
seenSeries.AddRange(parsedFiles.Select(pf => new ParsedSeries()
{
Name = pf.Series,
NormalizedName = Parser.Parser.Normalize(pf.Series),
Format = pf.Format
}));
return;
}
seenSeries.Add(foundParsedSeries);
processTasks.Add(_processSeries.ProcessSeriesAsync(parsedFiles, library));
parsedSeries.Add(foundParsedSeries, parsedFiles);
}
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, bypassFolderOptimizationChecks);
var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, true);
_logger.LogInformation("ScanFiles for {Series} took {Time}", series.Name, scanElapsedTime);
await Task.WhenAll(processTasks);
// At this point, we've already inserted the series into the DB OR we haven't and seenSeries has our series
// We now need to do any leftover work, like removing
// We need to handle if parsedSeries is empty but seenSeries has our series
if (seenSeries.Any(s => s.NormalizedName.Equals(series.NormalizedName)) && parsedSeries.Keys.Count == 0)
{
// Nothing has changed
_logger.LogInformation("[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan", series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"{series.Name} scan has no work to do",
"All folders have not been changed since last scan. Scan will be aborted."));
_processSeries.EnqueuePostSeriesProcessTasks(series.LibraryId, seriesId, false);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name));
return;
}
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name));
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
@ -222,8 +213,8 @@ public class ScannerService : IScannerService
// If nothing was found, first validate any of the files still exist. If they don't then we have a deletion and can skip the rest of the logic flow
if (parsedSeries.Count == 0)
{
var anyFilesExist =
(await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id)).Any(m => File.Exists(m.FilePath));
var seriesFiles = (await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id));
var anyFilesExist = seriesFiles.Where(f => f.FilePath.Contains(series.FolderPath)).Any(m => File.Exists(m.FilePath));
if (!anyFilesExist)
{
@ -287,23 +278,36 @@ public class ScannerService : IScannerService
}
// If all series Folder paths haven't been modified since last scan, abort
// NOTE: On windows, the parent folder will not update LastWriteTime if a subfolder was updated with files. Need to do a bit of light I/O.
if (!bypassFolderChecks)
{
var allFolders = seriesFolderPaths.SelectMany(path => _directoryService.GetDirectories(path)).ToList();
allFolders.AddRange(seriesFolderPaths);
try
{
if (allFolders.All(folder => _directoryService.GetLastWriteTime(folder) <= series.LastFolderScanned))
{
_logger.LogInformation(
"[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan",
series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"{series.Name} scan has no work to do", "All folders have not been changed since last scan. Scan will be aborted."));
MessageFactory.InfoEvent($"{series.Name} scan has no work to do",
"All folders have not been changed since last scan. Scan will be aborted."));
return ScanCancelReason.NoChange;
}
}
catch (IOException ex)
{
// If there is an exception it means that the folder doesn't exist. So we should delete the series
_logger.LogError(ex, "[ScannerService] Scan series for {SeriesName} found the folder path no longer exists",
series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.ErrorEvent($"{series.Name} scan has no work to do",
"The folder the series is in is missing. Delete series manually or perform a library scan."));
return ScanCancelReason.NoCancel;
}
}
return ScanCancelReason.NoCancel;
@ -393,7 +397,7 @@ public class ScannerService : IScannerService
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ScanLibrary(int libraryId)
public async Task ScanLibrary(int libraryId, bool forceUpdate = false)
{
var sw = Stopwatch.StartNew();
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
@ -405,7 +409,7 @@ public class ScannerService : IScannerService
var wasLibraryUpdatedSinceLastScan = (library.LastModified.Truncate(TimeSpan.TicksPerMinute) >
library.LastScanned.Truncate(TimeSpan.TicksPerMinute))
&& library.LastScanned != DateTime.MinValue;
if (!wasLibraryUpdatedSinceLastScan)
if (!forceUpdate && !wasLibraryUpdatedSinceLastScan)
{
var haveFoldersChangedSinceLastScan = library.Folders
.All(f => _directoryService.GetLastWriteTime(f.Path).Truncate(TimeSpan.TicksPerMinute) > f.LastScanned.Truncate(TimeSpan.TicksPerMinute));

View File

@ -48,6 +48,10 @@ export interface Series {
* DateTime representing last time a chapter was added to the Series
*/
lastChapterAdded: string;
/**
* DateTime representing last time the series folder was scanned
*/
lastFolderScanned: string;
/**
* Number of words in the series
*/

View File

@ -18,9 +18,9 @@ export enum Action {
*/
MarkAsUnread = 1,
/**
* Invoke a Scan Library
* Invoke a Scan on Series/Library
*/
ScanLibrary = 2,
Scan = 2,
/**
* Delete the entity
*/
@ -129,7 +129,7 @@ export class ActionFactoryService {
});
this.seriesActions.push({
action: Action.ScanLibrary,
action: Action.Scan,
title: 'Scan Series',
callback: this.dummyCallback,
requiresAdmin: true
@ -171,7 +171,7 @@ export class ActionFactoryService {
});
this.libraryActions.push({
action: Action.ScanLibrary,
action: Action.Scan,
title: 'Scan Library',
callback: this.dummyCallback,
requiresAdmin: true

View File

@ -52,11 +52,15 @@ export class ActionService implements OnDestroy {
* @param callback Optional callback to perform actions after API completes
* @returns
*/
scanLibrary(library: Partial<Library>, callback?: LibraryActionCallback) {
async scanLibrary(library: Partial<Library>, callback?: LibraryActionCallback) {
if (!library.hasOwnProperty('id') || library.id === undefined) {
return;
}
this.libraryService.scan(library?.id).pipe(take(1)).subscribe((res: any) => {
// Prompt user if we should do a force or not
const force = false; // await this.promptIfForce();
this.libraryService.scan(library.id, force).pipe(take(1)).subscribe((res: any) => {
this.toastr.info('Scan queued for ' + library.name);
if (callback) {
callback(library);
@ -83,7 +87,9 @@ export class ActionService implements OnDestroy {
return;
}
this.libraryService.refreshMetadata(library?.id).pipe(take(1)).subscribe((res: any) => {
const forceUpdate = true; //await this.promptIfForce();
this.libraryService.refreshMetadata(library?.id, forceUpdate).pipe(take(1)).subscribe((res: any) => {
this.toastr.info('Scan queued for ' + library.name);
if (callback) {
callback(library);
@ -152,7 +158,7 @@ export class ActionService implements OnDestroy {
* @param series Series, must have libraryId and name populated
* @param callback Optional callback to perform actions after API completes
*/
scanSeries(series: Series, callback?: SeriesActionCallback) {
async scanSeries(series: Series, callback?: SeriesActionCallback) {
this.seriesService.scan(series.libraryId, series.id).pipe(take(1)).subscribe((res: any) => {
this.toastr.info('Scan queued for ' + series.name);
if (callback) {
@ -545,4 +551,16 @@ export class ActionService implements OnDestroy {
}
});
}
private async promptIfForce(extraContent: string = '') {
// Prompt user if we should do a force or not
const config = this.confirmService.defaultConfirm;
config.header = 'Force Scan';
config.buttons = [
{text: 'Yes', type: 'secondary'},
{text: 'No', type: 'primary'},
];
const msg = 'Do you want to force this scan? This is will ignore optimizations that reduce processing and I/O. ' + extraContent;
return !await this.confirmService.confirm(msg, config); // Not because primary is the false state
}
}

View File

@ -76,16 +76,16 @@ export class LibraryService {
return this.httpClient.post(this.baseUrl + 'library/grant-access', {username, selectedLibraries});
}
scan(libraryId: number) {
return this.httpClient.post(this.baseUrl + 'library/scan?libraryId=' + libraryId, {});
scan(libraryId: number, force = false) {
return this.httpClient.post(this.baseUrl + 'library/scan?libraryId=' + libraryId + '&force=' + force, {});
}
analyze(libraryId: number) {
return this.httpClient.post(this.baseUrl + 'library/analyze?libraryId=' + libraryId, {});
}
refreshMetadata(libraryId: number) {
return this.httpClient.post(this.baseUrl + 'library/refresh-metadata?libraryId=' + libraryId, {});
refreshMetadata(libraryId: number, forceUpdate = false) {
return this.httpClient.post(this.baseUrl + 'library/refresh-metadata?libraryId=' + libraryId + '&force=' + forceUpdate, {});
}
create(model: {name: string, type: number, folders: string[]}) {

View File

@ -153,8 +153,8 @@ export class SeriesService {
return this.httpClient.post(this.baseUrl + 'series/refresh-metadata', {libraryId: series.libraryId, seriesId: series.id});
}
scan(libraryId: number, seriesId: number) {
return this.httpClient.post(this.baseUrl + 'series/scan', {libraryId: libraryId, seriesId: seriesId});
scan(libraryId: number, seriesId: number, force = false) {
return this.httpClient.post(this.baseUrl + 'series/scan', {libraryId: libraryId, seriesId: seriesId, forceUpdate: force});
}
analyzeFiles(libraryId: number, seriesId: number) {

View File

@ -346,7 +346,8 @@
<div class="row g-0 mb-2">
<div class="col-md-6">Created: {{series.created | date:'shortDate'}}</div>
<div class="col-md-6">Last Read: {{series.latestReadDate | date:'shortDate' | defaultDate}}</div>
<div class="col-md-6">Last Added To: {{series.lastChapterAdded | date:'shortDate' | defaultDate}}</div>
<div class="col-md-6">Last Added To: {{series.lastChapterAdded | date:'short' | defaultDate}}</div>
<div class="col-md-6">Last Scanned: {{series.lastFolderScanned | date:'short' | defaultDate}}</div>
<div class="col-md-6">Folder Path: {{series.folderPath | defaultValue}}</div>
</div>
<div class="row g-0 mb-2" *ngIf="metadata">

View File

@ -7,5 +7,5 @@
<button ngbDropdownItem *ngFor="let action of adminActions" (click)="performAction($event, action)">{{action.title}}</button>
</div>
</div>
<!-- TODO: If we are not on desktop, then let's open a bottom drawer instead-->
<!-- IDEA: If we are not on desktop, then let's open a bottom drawer instead-->
</ng-container>

View File

@ -82,7 +82,7 @@ export class SeriesCardComponent implements OnInit, OnChanges, OnDestroy {
case(Action.MarkAsUnread):
this.markAsUnread(series);
break;
case(Action.ScanLibrary):
case(Action.Scan):
this.scanLibrary(series);
break;
case(Action.RefreshMetadata):

View File

@ -203,7 +203,7 @@ export class LibraryDetailComponent implements OnInit, OnDestroy {
lib = {id: this.libraryId, name: this.libraryName};
}
switch (action) {
case(Action.ScanLibrary):
case(Action.Scan):
this.actionService.scanLibrary(lib);
break;
case(Action.RefreshMetadata):

View File

@ -152,11 +152,15 @@ export class EventsWidgetComponent implements OnInit, OnDestroy {
async seeMore(event: ErrorEvent | InfoEvent) {
const config = new ConfirmConfig();
if (event.name === EVENTS.Error) {
config.buttons = [
{text: 'Ok', type: 'secondary'},
{text: 'Dismiss', type: 'primary'}
];
} else {
config.buttons = [
{text: 'Ok', type: 'primary'},
];
if (event.name === EVENTS.Error) {
config.buttons = [{text: 'Dismiss', type: 'primary'}, ...config.buttons];
}
config.header = event.title;
config.content = event.subTitle;

View File

@ -345,7 +345,7 @@ export class SeriesDetailComponent implements OnInit, OnDestroy, AfterContentChe
this.loadSeries(series.id);
});
break;
case(Action.ScanLibrary):
case(Action.Scan):
this.actionService.scanSeries(series, () => {
this.actionInProgress = false;
this.changeDetectionRef.markForCheck();

View File

@ -3,5 +3,5 @@ export interface ConfirmButton {
/**
* Type for css class. ie) primary, secondary
*/
type: string;
type: 'secondary' | 'primary';
}

View File

@ -5,4 +5,8 @@ export class ConfirmConfig {
header: string = 'Confirm';
content: string = '';
buttons: Array<ConfirmButton> = [];
/**
* If the close button shouldn't be rendered
*/
disableEscape: boolean = false;
}

View File

@ -2,9 +2,7 @@
<div class="modal-header">
<h4 class="modal-title" id="modal-basic-title">{{config.header}}</h4>
<button type="button" class="btn-close" aria-label="Close" (click)="close()">
</button>
<button type="button" class="btn-close" aria-label="Close" (click)="close()" *ngIf="!config.disableEscape"></button>
</div>
<div class="modal-body" style="overflow-x: auto" [innerHtml]="config.content | safeHtml">
</div>
@ -12,5 +10,4 @@
<div *ngFor="let btn of config.buttons">
<button type="button" class="btn btn-{{btn.type}}" (click)="clickButton(btn)">{{btn.text}}</button>
</div>
</div>

View File

@ -34,6 +34,9 @@ export class ConfirmService {
config = this.defaultConfirm;
config.content = content;
}
if (content !== undefined && content !== '' && config!.content === '') {
config!.content = content;
}
const modalRef = this.modalService.open(ConfirmDialogComponent);
modalRef.componentInstance.config = config;

View File

@ -78,7 +78,7 @@ export class SideNavComponent implements OnInit, OnDestroy {
handleAction(action: Action, library: Library) {
switch (action) {
case(Action.ScanLibrary):
case(Action.Scan):
this.actionService.scanLibrary(library);
break;
case(Action.RefreshMetadata):