Mirror of https://github.com/Kareadita/Kavita.git
More Scan Loop Bugfixes (#1471)
* Updated the file watcher's queue wait time to 30 seconds for non-dev environments. Moved ScanFolder off the Scan queue, as it doesn't need to be there. Updated loggers.
* Fixed the jump bar going missing.
* Tweaked the messaging for CoverGen.
* When a library or series scan returns early because there is nothing to do, make sure the other tasks that need to occur are still kicked off.
* Fixed a foreign key constraint issue on Volumes when adding to a new series.
* Fixed a case where series differing only in capitalization would not stack when picking the normalized series name.
* Reduced the logging output in the dev and prod settings.
* Fixed a bug in the code that finds the highest directory for a file, where the check was not done against a normalized path.
* Cleaned up some code.
* Fixed broken unit tests.
Parent: fc0121e7a8
Commit: 1e535a8184
.gitignore (vendored) | 1 -
@@ -485,7 +485,6 @@ Thumbs.db
 ssl/

 # App specific
-appsettings.json
 /API/kavita.db
 /API/kavita.db-shm
 /API/kavita.db-wal
@@ -678,6 +678,7 @@ namespace API.Tests.Services
         [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"}, new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"}, "C:/Manga/Dir 1/Love Hina")]
         [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"}, new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"}, "")]
         [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga//Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")]
+        [InlineData(new [] {@"C:\mount\drive\Library\Test Library\Comics\"}, new [] {@"C:\mount\drive\Library\Test Library\Comics\Bruce Lee (1994)\Bruce Lee #001 (1994).cbz"}, @"C:/mount/drive/Library/Test Library/Comics/Bruce Lee (1994)")]
         public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
         {
             var fileSystem = new MockFileSystem();
@@ -1,4 +1,5 @@
+using System;
 using System.Threading.Tasks;
 using API.Data.Repositories;
 using API.Entities;
 using AutoMapper;
@@ -26,7 +27,6 @@ public interface IUnitOfWork
     bool Commit();
     Task<bool> CommitAsync();
     bool HasChanges();
-    bool Rollback();
     Task<bool> RollbackAsync();
 }
 public class UnitOfWork : IUnitOfWork
@@ -93,16 +93,15 @@ public class UnitOfWork : IUnitOfWork
     /// <returns></returns>
     public async Task<bool> RollbackAsync()
     {
-        await _context.DisposeAsync();
-        return true;
-    }
-    /// <summary>
-    /// Rollback transaction
-    /// </summary>
-    /// <returns></returns>
-    public bool Rollback()
-    {
-        _context.Dispose();
+        try
+        {
+            await _context.Database.RollbackTransactionAsync();
+        }
+        catch (Exception)
+        {
+            // Swallow exception (this might be used in places where a transaction isn't setup)
+        }
+
         return true;
     }
 }
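The new RollbackAsync no longer disposes the DbContext; it only rolls back an active transaction and treats "no transaction in progress" as a no-op. A minimal, self-contained sketch of that pattern (the delegate is a stand-in for `_context.Database.RollbackTransactionAsync()`, not code from this commit):

```csharp
using System;
using System.Threading.Tasks;

static async Task<bool> TryRollbackAsync(Func<Task> rollback)
{
    try
    {
        await rollback(); // stand-in for _context.Database.RollbackTransactionAsync()
    }
    catch (Exception)
    {
        // Swallowed: callers may invoke rollback even when no transaction was opened.
    }
    return true;
}

// No transaction was started, so the delegate throws, but the helper still reports success
// instead of tearing down the shared context like the old DisposeAsync-based version did.
Console.WriteLine(await TryRollbackAsync(() => throw new InvalidOperationException("no transaction")));
```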
@@ -508,10 +508,10 @@ namespace API.Services
                     break;
                 }

-                var fullPath = Path.Join(folder, parts.Last());
+                var fullPath = Parser.Parser.NormalizePath(Path.Join(folder, parts.Last()));
                 if (!dirs.ContainsKey(fullPath))
                 {
-                    dirs.Add(Parser.Parser.NormalizePath(fullPath), string.Empty);
+                    dirs.Add(fullPath, string.Empty);
                 }
             }
         }
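The bug here was that the dictionary membership check used the raw joined path while the insert used the normalized one, so the two never agreed. A small, self-contained illustration (a backslash-to-slash replacement stands in for Parser.Parser.NormalizePath; not code from the repo):

```csharp
using System;
using System.Collections.Generic;

var dirs = new Dictionary<string, string>();
var rawPath = @"C:\Manga\Love Hina";
var normalized = rawPath.Replace('\\', '/'); // stand-in for Parser.Parser.NormalizePath

// Old behaviour: check on the raw key, insert the normalized key.
// The check can never see the key that was actually stored.
if (!dirs.ContainsKey(rawPath)) dirs.Add(normalized, string.Empty);
Console.WriteLine(dirs.ContainsKey(rawPath));    // False
Console.WriteLine(dirs.ContainsKey(normalized)); // True

// New behaviour: normalize once, then check and insert with the same key.
dirs.Clear();
var fullPath = normalized;
if (!dirs.ContainsKey(fullPath)) dirs.Add(fullPath, string.Empty);
Console.WriteLine(dirs.ContainsKey(fullPath));   // True
```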
@@ -161,7 +161,7 @@ public class MetadataService : IMetadataService
     /// <param name="forceUpdate"></param>
     private async Task ProcessSeriesCoverGen(Series series, bool forceUpdate)
     {
-        _logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
+        _logger.LogDebug("[MetadataService] Generating cover images for series: {SeriesName}", series.OriginalName);
         try
         {
             var volumeIndex = 0;
@@ -78,7 +78,7 @@ public class LibraryWatcher : ILibraryWatcher
         _logger = logger;
         _scannerService = scannerService;

-        _queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(10) : TimeSpan.FromMinutes(5);
+        _queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(10) : TimeSpan.FromSeconds(30);

     }

@@ -142,18 +142,18 @@ public class LibraryWatcher : ILibraryWatcher
     private void OnChanged(object sender, FileSystemEventArgs e)
     {
         if (e.ChangeType != WatcherChangeTypes.Changed) return;
-        Console.WriteLine($"Changed: {e.FullPath}, {e.Name}");
+        _logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name);
         ProcessChange(e.FullPath);
     }

     private void OnCreated(object sender, FileSystemEventArgs e)
     {
-        Console.WriteLine($"Created: {e.FullPath}, {e.Name}");
+        _logger.LogDebug("[LibraryWatcher] Created: {FullPath}, {Name}", e.FullPath, e.Name);
         ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name));
     }

     private void OnDeleted(object sender, FileSystemEventArgs e) {
-        Console.WriteLine($"Deleted: {e.FullPath}, {e.Name}");
+        _logger.LogDebug("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name);

         // On deletion, we need another type of check. We need to check if e.Name has an extension or not
         // NOTE: File deletion will trigger a folder change event, so this might not be needed
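The watcher callbacks were writing to the console rather than the injected logger, so their output bypassed the configured log sinks and levels. A self-contained sketch of the difference (NullLogger used as a stand-in for the injected ILogger<LibraryWatcher>; the values are made up):

```csharp
using System;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;

ILogger logger = NullLogger.Instance; // stand-in for the injected ILogger<LibraryWatcher>
var fullPath = "/manga/Love Hina/Vol. 01.cbz";
var name = "Vol. 01.cbz";

// Before: interpolated string straight to stdout, invisible to the log sinks
// and unaffected by the LogLevel settings in appsettings.json.
Console.WriteLine($"Changed: {fullPath}, {name}");

// After: a message template with named placeholders, routed through ILogger at Debug level,
// so the values survive as structured properties.
logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", fullPath, name);
```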
@@ -164,9 +164,9 @@ public class LibraryWatcher : ILibraryWatcher

     private void OnRenamed(object sender, RenamedEventArgs e)
     {
-        Console.WriteLine($"Renamed:");
-        Console.WriteLine($"    Old: {e.OldFullPath}");
-        Console.WriteLine($"    New: {e.FullPath}");
+        _logger.LogDebug($"[LibraryWatcher] Renamed:");
+        _logger.LogDebug("    Old: {OldFullPath}", e.OldFullPath);
+        _logger.LogDebug("    New: {FullPath}", e.FullPath);
         ProcessChange(e.FullPath, _directoryService.FileSystem.Directory.Exists(e.FullPath));
     }

@@ -179,14 +179,6 @@ public class LibraryWatcher : ILibraryWatcher
     {
         // We need to check if directory or not
         if (!isDirectoryChange && !new Regex(Parser.Parser.SupportedExtensions).IsMatch(new FileInfo(filePath).Extension)) return;
-        // Don't do anything if a Library or ScanSeries in progress
-        // if (TaskScheduler.RunningAnyTasksByMethod(new[] {"MetadataService", "ScannerService"}))
-        // {
-        //     // NOTE: I'm not sure we need this to be honest. Now with the speed of the new loop and the queue, we should just put in queue for processing
-        //     _logger.LogDebug("Suppressing Change due to scan being inprogress");
-        //     return;
-        // }
-

         var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
         if (string.IsNullOrEmpty(parentDirectory)) return;
@@ -206,14 +198,12 @@ public class LibraryWatcher : ILibraryWatcher
             FolderPath = fullPath,
             QueueTime = DateTime.Now
         };
-        if (_scanQueue.Contains(queueItem, _folderScanQueueableComparer))
+        if (!_scanQueue.Contains(queueItem, _folderScanQueueableComparer))
         {
-            ProcessQueue();
-            return;
+            _logger.LogDebug("[LibraryWatcher] Queuing job for {Folder}", fullPath);
+            _scanQueue.Enqueue(queueItem);
         }

-        _scanQueue.Enqueue(queueItem);
-
         ProcessQueue();
     }

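The original condition was inverted: it only enqueued a folder when the queue already contained an equivalent entry, and it returned before the queue could be processed in the common case. A self-contained sketch of the corrected pattern (a plain Queue<string> with a case-insensitive comparer standing in for _scanQueue and _folderScanQueueableComparer; not the real service code):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

var scanQueue = new Queue<string>();

void QueueFolderScan(string folder)
{
    // Only enqueue when no equivalent request is already waiting...
    if (!scanQueue.Contains(folder, StringComparer.OrdinalIgnoreCase))
    {
        Console.WriteLine($"Queuing job for {folder}");
        scanQueue.Enqueue(folder);
    }

    // ...but always give the queue a chance to drain, even for duplicates.
    ProcessQueue();
}

void ProcessQueue()
{
    // In the real service this pops items older than the wait window and
    // schedules ScannerService.ScanFolder for each of them.
    Console.WriteLine($"Queue depth: {scanQueue.Count}");
}

QueueFolderScan("/manga/Love Hina");
QueueFolderScan("/MANGA/LOVE HINA"); // duplicate under the comparer: not enqueued, queue still processed
```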
@@ -228,7 +218,7 @@ public class LibraryWatcher : ILibraryWatcher
             var item = _scanQueue.Peek();
             if (item.QueueTime < DateTime.Now.Subtract(_queueWaitTime))
             {
-                _logger.LogDebug("Scheduling ScanSeriesFolder for {Folder}", item.FolderPath);
+                _logger.LogDebug("[LibraryWatcher] Scheduling ScanSeriesFolder for {Folder}", item.FolderPath);
                 BackgroundJob.Enqueue(() => _scannerService.ScanFolder(item.FolderPath));
                 _scanQueue.Dequeue();
                 i++;
@@ -320,7 +320,10 @@ namespace API.Services.Tasks.Scanner

            // NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves.
            string nonLocalizedSeries;
-            var nonLocalizedSeriesFound = infos.Where(i => !i.IsSpecial).Select(i => i.Series).Distinct().ToList();
+            // Normalize this as many of the cases is a capitalization difference
+            var nonLocalizedSeriesFound = infos
+                .Where(i => !i.IsSpecial)
+                .Select(i => i.Series).DistinctBy(Parser.Parser.Normalize).ToList();
            if (nonLocalizedSeriesFound.Count == 1)
            {
                nonLocalizedSeries = nonLocalizedSeriesFound.First();
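The capitalization fix comes from deduplicating on the normalized name rather than the raw string. A self-contained sketch (a lower-case/strip-spaces lambda stands in for Parser.Parser.Normalize; DistinctBy is the .NET 6 LINQ operator):

```csharp
using System;
using System.Linq;

// Stand-in for Parser.Parser.Normalize, which is stricter in the real code.
string Normalize(string s) => s.ToLowerInvariant().Replace(" ", string.Empty);

var seriesNames = new[] { "Love Hina", "love hina", "LOVE HINA" };

// Before: Distinct() keeps all three variants, so the "single non-localized series" branch is skipped.
Console.WriteLine(seriesNames.Distinct().Count());            // 3

// After: DistinctBy on the normalized form collapses capitalization variants into one entry.
Console.WriteLine(seriesNames.DistinctBy(Normalize).Count()); // 1
```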
@@ -330,7 +333,7 @@ namespace API.Services.Tasks.Scanner
                // There can be a case where there are multiple series in a folder that causes merging.
                if (nonLocalizedSeriesFound.Count > 2)
                {
-                    _logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder: {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
+                    _logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder or ensure there is only 2 potential series (localized and series): {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
                }
                nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries));
            }
@@ -183,7 +183,7 @@ public class ProcessSeries : IProcessSeries
        }

        _logger.LogInformation("[ScannerService] Finished series update on {SeriesName} in {Milliseconds} ms", seriesName, scanWatch.ElapsedMilliseconds);
-        EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id, false);
+        EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id);
    }

    private async Task UpdateSeriesFolderPath(IEnumerable<ParserInfo> parsedInfos, Library library, Series series)
@@ -431,7 +431,6 @@ public class ProcessSeries : IProcessSeries
            volume = DbFactory.Volume(volumeNumber);
            volume.SeriesId = series.Id;
            series.Volumes.Add(volume);
-            _unitOfWork.VolumeRepository.Add(volume);
        }

        volume.Name = volumeNumber;
@@ -1,10 +1,8 @@
 using System;
-using System.Collections.Concurrent;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
-using System.Threading;
 using System.Threading.Tasks;
 using API.Data;
 using API.Data.Repositories;
@@ -41,9 +39,6 @@ public interface IScannerService
    [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    Task ScanSeries(int seriesId, bool bypassFolderOptimizationChecks = true);

-    [Queue(TaskScheduler.ScanQueue)]
-    [DisableConcurrentExecution(60 * 60 * 60)]
-    [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    Task ScanFolder(string folder);

}
@@ -81,11 +76,12 @@ public class ScannerService : IScannerService
    private readonly IDirectoryService _directoryService;
    private readonly IReadingItemService _readingItemService;
    private readonly IProcessSeries _processSeries;
+    private readonly IWordCountAnalyzerService _wordCountAnalyzerService;

    public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger,
        IMetadataService metadataService, ICacheService cacheService, IEventHub eventHub,
        IDirectoryService directoryService, IReadingItemService readingItemService,
-        IProcessSeries processSeries)
+        IProcessSeries processSeries, IWordCountAnalyzerService wordCountAnalyzerService)
    {
        _unitOfWork = unitOfWork;
        _logger = logger;
@@ -95,9 +91,9 @@ public class ScannerService : IScannerService
        _directoryService = directoryService;
        _readingItemService = readingItemService;
        _processSeries = processSeries;
+        _wordCountAnalyzerService = wordCountAnalyzerService;
    }

-    [Queue(TaskScheduler.ScanQueue)]
    public async Task ScanFolder(string folder)
    {
        var seriesId = await _unitOfWork.SeriesRepository.GetSeriesIdByFolder(folder);
@@ -138,7 +134,12 @@ public class ScannerService : IScannerService
        var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
        var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders);
        var libraryPaths = library.Folders.Select(f => f.Path).ToList();
-        if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel) return;
+        if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel)
+        {
+            BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForSeries(series.LibraryId, seriesId, false));
+            BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(library.Id, seriesId, false));
+            return;
+        }

        var folderPath = series.FolderPath;
        if (string.IsNullOrEmpty(folderPath) || !_directoryService.Exists(folderPath))
@@ -420,6 +421,9 @@ public class ScannerService : IScannerService
            await _eventHub.SendMessageAsync(MessageFactory.Info,
                MessageFactory.InfoEvent($"{library.Name} scan has no work to do",
                    "All folders have not been changed since last scan. Scan will be aborted."));
+
+            BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForLibrary(library.Id, false));
+            BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(library.Id, false));
            return;
        }
    }
@@ -455,7 +459,7 @@ public class ScannerService : IScannerService
            Format = parsedFiles.First().Format
        };

        // NOTE: Could we check if there are multiple found series (different series) and process each one?

        if (skippedScan)
        {
@@ -6,10 +6,10 @@
  "Logging": {
    "LogLevel": {
      "Default": "Debug",
-      "Microsoft": "Information",
+      "Microsoft": "Error",
      "Microsoft.Hosting.Lifetime": "Error",
-      "Hangfire": "Information",
-      "Microsoft.AspNetCore.Hosting.Internal.WebHost": "Information"
+      "Hangfire": "Error",
+      "Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error"
    },
    "File": {
      "Path": "config//logs/kavita.log",
API/config/appsettings.json (new file) | 22 +
@@ -0,0 +1,22 @@
+{
+  "ConnectionStrings": {
+    "DefaultConnection": "Data source=config/kavita.db"
+  },
+  "TokenKey": "super secret unguessable key",
+  "Logging": {
+    "LogLevel": {
+      "Default": "Information",
+      "Microsoft": "Error",
+      "Microsoft.Hosting.Lifetime": "Error",
+      "Hangfire": "Error",
+      "Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error"
+    },
+    "File": {
+      "Path": "config/logs/kavita.log",
+      "Append": "True",
+      "FileSizeLimitBytes": 10485760,
+      "MaxRollingFiles": 1
+    }
+  },
+  "Port": 5000
+}
@@ -31,7 +31,7 @@ export class JumpbarService {
    const jumpBarKeysToRender: Array<JumpKey> = [];
    const targetNumberOfKeys = parseInt(Math.floor(currentSize / keySize) + '', 10);
    const removeCount = jumpBarKeys.length - targetNumberOfKeys - 3;
-    if (removeCount <= 0) return jumpBarKeysToRender;
+    if (removeCount <= 0) return [...jumpBarKeys];

    const removalTimes = Math.ceil(removeCount / 2);
    const midPoint = Math.floor(jumpBarKeys.length / 2);
@@ -13,13 +13,13 @@
    </div>
</div>
<app-metadata-filter [filterSettings]="filterSettings" [filterOpen]="filterOpen" (applyFilter)="applyMetadataFilter($event)"></app-metadata-filter>
-<div class="viewport-container" [ngClass]="{'empty': items.length === 0}">
+<div class="viewport-container" [ngClass]="{'empty': items.length === 0 && !isLoading}">
    <div class="content-container">
        <div class="card-container mt-2 mb-2">
            <p *ngIf="items.length === 0 && !isLoading">
                <ng-container [ngTemplateOutlet]="noDataTemplate"></ng-container>
            </p>
-            <virtual-scroller [ngClass]="{'empty': items.length === 0}" #scroll [items]="items" [bufferAmount]="1" [parentScroll]="parentScroll">
+            <virtual-scroller [ngClass]="{'empty': items.length === 0 && !isLoading}" #scroll [items]="items" [bufferAmount]="1" [parentScroll]="parentScroll">
                <div class="grid row g-0" #container>
                    <div class="card col-auto mt-2 mb-2" *ngFor="let item of scroll.viewPortItems; trackBy:trackByIdentity; index as i" id="jumpbar-index--{{i}}" [attr.jumpbar-index]="i">
                        <ng-container [ngTemplateOutlet]="itemTemplate" [ngTemplateOutletContext]="{ $implicit: item, idx: scroll.viewPortInfo.startIndexWithBuffer + i }"></ng-container>
@@ -29,7 +29,7 @@
        </div>
    </div>

-    <ng-container *ngIf="jumpBarKeysToRender.length >= 4 && items.length !== 0 && scroll.viewPortInfo.maxScrollPosition > 0" [ngTemplateOutlet]="jumpBar" [ngTemplateOutletContext]="{ id: 'jumpbar' }"></ng-container>
+    <ng-container *ngIf="jumpBarKeysToRender.length >= 4 && items.length > 0 && scroll.viewPortInfo.maxScrollPosition > 0" [ngTemplateOutlet]="jumpBar" [ngTemplateOutletContext]="{ id: 'jumpbar' }"></ng-container>
</div>
<ng-template #cardTemplate>
    <virtual-scroller #scroll [items]="items" [bufferAmount]="1">
@@ -46,8 +46,8 @@ export class CardDetailLayoutComponent implements OnInit, OnDestroy, OnChanges {
  @Input() refresh!: EventEmitter<void>;


-  @Input() jumpBarKeys: Array<JumpKey> = []; // This is aprox 784 pixels wide
-  jumpBarKeysToRender: Array<JumpKey> = []; // Original
+  @Input() jumpBarKeys: Array<JumpKey> = []; // This is aprox 784 pixels tall, original keys
+  jumpBarKeysToRender: Array<JumpKey> = []; // What is rendered on screen

  @Output() itemClicked: EventEmitter<any> = new EventEmitter();
  @Output() applyFilter: EventEmitter<FilterEvent> = new EventEmitter();
@@ -114,8 +114,7 @@ export class CardDetailLayoutComponent implements OnInit, OnDestroy, OnChanges {
  ngOnChanges(): void {
    this.jumpBarKeysToRender = [...this.jumpBarKeys];
    this.resizeJumpBar();
-

    if (!this.hasResumedJumpKey && this.jumpBarKeysToRender.length > 0) {
      const resumeKey = this.jumpbarService.getResumeKey(this.router.url);
      if (resumeKey === '') return;
@@ -156,6 +155,5 @@ export class CardDetailLayoutComponent implements OnInit, OnDestroy, OnChanges {
        this.virtualScroller.scrollToIndex(targetIndex, true, 0, 1000);
        this.jumpbarService.saveResumeKey(this.router.url, jumpKey.key);
        this.changeDetectionRef.markForCheck();
-        return;
      }
    }