Event Widget Update (#1098)

* Took care of some notes in the code

* Fixed an issue where "Extra" could be flagged as special too eagerly when it appears inside a word like "Extraordinary"

* Moved Tag cleanup code into Scanner service. Added a SplitQuery to another heavy API. Refactored Scan loop to remove parallelism and use async instead.
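  A rough sketch of the direction of the scan-loop change. Names like ProcessFile, _eventHub, MessageFactory and ProgressEventType come from the diff below; the sequential helper itself is hypothetical and assumes the surrounding ParseScannedFiles fields:

      // Hypothetical sketch: process files one at a time and await a progress
      // event between files, instead of fanning the work out in parallel.
      private async Task ScanFolderAsync(IEnumerable<string> files, string folderPath,
          LibraryType libraryType, string libraryName)
      {
          foreach (var file in files)
          {
              ProcessFile(file, folderPath, libraryType);
              await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                  MessageFactory.FileScanProgressEvent(file, libraryName, ProgressEventType.Updated));
          }
      }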

* Lots of rework on the codebase to support detailed messages and easier management of message sending. Need to take a break on this work.
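  The IEventHub abstraction introduced in the diff centralizes message sending behind one call. A minimal sketch of the idea (the real implementation may differ; the admin-only routing below is an assumption):

      using System.Threading.Tasks;
      using Microsoft.AspNetCore.SignalR;

      public interface IEventHub
      {
          Task SendMessageAsync(string name, SignalRMessage message, bool onlyAdmins = false);
      }

      public class EventHub : IEventHub
      {
          private readonly IHubContext<MessageHub> _hub;

          public EventHub(IHubContext<MessageHub> hub)
          {
              _hub = hub;
          }

          public async Task SendMessageAsync(string name, SignalRMessage message, bool onlyAdmins = false)
          {
              // Sketch only: broadcast to everyone; a real implementation could
              // route onlyAdmins == true events to an admin group instead.
              await _hub.Clients.All.SendAsync(name, message);
          }
      }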

* Progress is being made, but slowly. Code is broken in this commit.

* Progress is being made, but slowly. Code is broken in this commit.

* Fixed merge issue

* Fixed unit tests

* CoverUpdate is now hooked into new ProgressEvent structure

* Refactored code to remove custom observables and have everything use standard messages$

* Refactored a ton of instances to the NotificationProgressEvent style and much of the UI to respect it. The UI is still a bit buggy, but holistically the work is done.

* Working much better. Sometimes events come in too fast. Currently cover update progress doesn't display in the UI

* Fixed unit tests

* Removed SignalREvent to minimize internal event types. Updated the UI to use progress bars. Finished SiteThemeService.

* Merged metadata refresh progress events and changed library scan events to merge cleaner in the UI

* Changed RefreshMetadataProgress to CoverUpdateProgress to reflect the event better.
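  The renamed event is built by a MessageFactory helper in the diff below. A hedged sketch of what such a factory could look like, reusing the SignalRMessage shape (Name/Title/SubTitle/Body) seen elsewhere in this commit; the Body field names are assumptions:

      public static SignalRMessage CoverUpdateProgressEvent(int libraryId, float progress,
          string eventType, string subtitle = "")
      {
          return new SignalRMessage
          {
              Name = NotificationProgress,   // shared event-name constant used by all progress events
              Title = "Refreshing covers",
              SubTitle = subtitle,
              Body = new
              {
                  LibraryId = libraryId,
                  Progress = progress,       // 0F..1F drives the UI progress bar
                  EventType = eventType      // e.g. ProgressEventType.Started/Updated/Ended
              }
          };
      }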

* Theme Cleanup (#1089)

* Fixed the e-ink theme not applying correctly

* Fixed some seed changes. Changed card checkboxes to use our themed ones

* Fixed recently added carousel not going to recently-added page

* Fixed an issue where "no results found" would show when searching for a library name

* Cleaned up list a bit, typeahead dropdown still needs work

* Added a TODO to streamline series-card component

* Removed ng-lazyload-image module since we don't use it. We use lazysizes

* Darken card on hover

* Fixing accordion focus style

* ux pass updates

- Fixed typeahead width
- Fixed changelog download buttons
- Fixed a select
- Fixed various input box-shadows
- Fixed all anchors to only have underline on hover
- Added navtab hover and active effects

* more ux pass

- Fixed spacing on theme cards
- Fixed some light theme issues
- Exposed text-muted-color for theme card subtitle color

* UX pass fixes

- Changed back to bright green for primary on dark theme
- Changed fa icon to black on e-ink

* Merged changelog component

* Fixed anchor buttons text decoration

* Changed nav tabs to have a background color instead of open active state

* When user is not authenticated, make sure we set default theme (dark)

* Cleanup on carousel

* Updated Users tab to use small buttons with icons to align with Library tab

* Cleaned up brand to not underline, removed default link underline on hover in dropdown and pill tabs

* Fixed collection detail posters not rendering

Co-authored-by: Robbie Davis <robbie@therobbiedavis.com>

* Bump versions by dotnet-bump-version.

* Tweaked some of the emitting code

* Some css, but pretty bad. Robbie please save me

* Removed a todo

* styling update

* Only send filename on FileScanProgress
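  Presumably this trims the payload to the bare file name rather than the full path, e.g. (hypothetical):

      // Hypothetical: report only the file name so the events widget stays compact.
      await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
          MessageFactory.FileScanProgressEvent(Path.GetFileName(filePath), libraryName,
              ProgressEventType.Updated));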

* Some console.log spam cleanup

* Various updates

* Show events widget activity based on activeEvents

* progress bar color updates

* Code cleanup

Co-authored-by: Robbie Davis <robbie@therobbiedavis.com>
Joseph Milazzo 2022-02-18 18:57:37 -08:00 committed by GitHub
parent d24620fd15
commit eddbb7ab18
49 changed files with 1022 additions and 463 deletions


@@ -4,6 +4,7 @@ using API.Entities.Enums;
 using API.Parser;
 using API.Services;
 using API.Services.Tasks.Scanner;
+using API.SignalR;
 using BenchmarkDotNet.Attributes;
 using BenchmarkDotNet.Order;
 using Microsoft.Extensions.Logging;
@@ -28,7 +29,8 @@ namespace API.Benchmark
         _parseScannedFiles = new ParseScannedFiles(
             Substitute.For<ILogger>(),
             directoryService,
-            new ReadingItemService(_archiveService, new BookService(_bookLogger, directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService)), Substitute.For<ImageService>(), directoryService));
+            new ReadingItemService(_archiveService, new BookService(_bookLogger, directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService)), Substitute.For<ImageService>(), directoryService),
+            Substitute.For<IEventHub>());
     }
     // [Benchmark]
@@ -59,8 +61,7 @@ namespace API.Benchmark
             Title = "A Town Where You Live",
             Volumes = "1"
         };
-        _parseScannedFiles.ScanLibrariesForSeries(LibraryType.Manga, new [] {libraryPath},
-            out _, out _);
+        _parseScannedFiles.ScanLibrariesForSeries(LibraryType.Manga, new [] {libraryPath}, "Manga");
         _parseScannedFiles.MergeName(p1);
     }
 }


@@ -26,7 +26,7 @@ public class BackupServiceTests
 {
     private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
     private readonly IUnitOfWork _unitOfWork;
-    private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
+    private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
     private readonly IConfiguration _config;
     private readonly DbConnection _connection;


@@ -26,7 +26,7 @@ public class CleanupServiceTests
 {
     private readonly ILogger<CleanupService> _logger = Substitute.For<ILogger<CleanupService>>();
     private readonly IUnitOfWork _unitOfWork;
-    private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
+    private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
    private readonly DbConnection _connection;
    private readonly DataContext _context;


@@ -11,6 +11,7 @@ using API.Entities.Enums;
 using API.Parser;
 using API.Services;
 using API.Services.Tasks.Scanner;
+using API.SignalR;
 using API.Tests.Helpers;
 using AutoMapper;
 using Microsoft.Data.Sqlite;
@@ -155,7 +156,7 @@ public class ParseScannedFilesTests
         var fileSystem = new MockFileSystem();
         var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
         var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
-            new MockReadingItemService(new DefaultParser(ds)));
+            new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
         var infos = new List<ParserInfo>()
         {
@@ -200,7 +201,7 @@ public class ParseScannedFilesTests
         var fileSystem = new MockFileSystem();
         var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
         var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
-            new MockReadingItemService(new DefaultParser(ds)));
+            new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
         var infos = new List<ParserInfo>()
         {
@@ -240,7 +241,7 @@ public class ParseScannedFilesTests
     #region MergeName
     [Fact]
-    public void MergeName_ShouldMergeMatchingFormatAndName()
+    public async Task MergeName_ShouldMergeMatchingFormatAndName()
     {
         var fileSystem = new MockFileSystem();
         fileSystem.AddDirectory("C:/Data/");
@@ -250,10 +251,10 @@ public class ParseScannedFilesTests
         var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
         var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
-            new MockReadingItemService(new DefaultParser(ds)));
+            new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
-        psf.ScanLibrariesForSeries(LibraryType.Manga, new List<string>() {"C:/Data/"}, out _, out _);
+        await psf.ScanLibrariesForSeries(LibraryType.Manga, new List<string>() {"C:/Data/"}, "libraryName");
         Assert.Equal("Accel World", psf.MergeName(ParserInfoFactory.CreateParsedInfo("Accel World", "1", "0", "Accel World v1.cbz", false)));
         Assert.Equal("Accel World", psf.MergeName(ParserInfoFactory.CreateParsedInfo("accel_world", "1", "0", "Accel World v1.cbz", false)));
@@ -261,7 +262,7 @@ public class ParseScannedFilesTests
     }
     [Fact]
-    public void MergeName_ShouldMerge_MismatchedFormatSameName()
+    public async Task MergeName_ShouldMerge_MismatchedFormatSameName()
     {
         var fileSystem = new MockFileSystem();
         fileSystem.AddDirectory("C:/Data/");
@@ -271,10 +272,10 @@ public class ParseScannedFilesTests
         var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
         var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
-            new MockReadingItemService(new DefaultParser(ds)));
+            new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
-        psf.ScanLibrariesForSeries(LibraryType.Manga, new List<string>() {"C:/Data/"}, out _, out _);
+        await psf.ScanLibrariesForSeries(LibraryType.Manga, new List<string>() {"C:/Data/"}, "libraryName");
         Assert.Equal("Accel World", psf.MergeName(ParserInfoFactory.CreateParsedInfo("Accel World", "1", "0", "Accel World v1.epub", false)));
         Assert.Equal("Accel World", psf.MergeName(ParserInfoFactory.CreateParsedInfo("accel_world", "1", "0", "Accel World v1.epub", false)));
@@ -285,7 +286,7 @@ public class ParseScannedFilesTests
     #region ScanLibrariesForSeries
     [Fact]
-    public void ScanLibrariesForSeries_ShouldFindFiles()
+    public async Task ScanLibrariesForSeries_ShouldFindFiles()
     {
         var fileSystem = new MockFileSystem();
         fileSystem.AddDirectory("C:/Data/");
@@ -296,10 +297,10 @@ public class ParseScannedFilesTests
         var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
         var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
-            new MockReadingItemService(new DefaultParser(ds)));
+            new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
-        var parsedSeries = psf.ScanLibrariesForSeries(LibraryType.Manga, new List<string>() {"C:/Data/"}, out _, out _);
+        var parsedSeries = await psf.ScanLibrariesForSeries(LibraryType.Manga, new List<string>() {"C:/Data/"}, "libraryName");
         Assert.Equal(3, parsedSeries.Values.Count);
         Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));


@@ -26,7 +26,7 @@ namespace API.Tests.Services;
 public class SiteThemeServiceTests
 {
     private readonly ILogger<SiteThemeService> _logger = Substitute.For<ILogger<SiteThemeService>>();
-    private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
+    private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
    private readonly DbConnection _connection;
    private readonly DataContext _context;


@@ -48,6 +48,8 @@
     <PackageReference Include="Hangfire.MemoryStorage.Core" Version="1.4.0" />
     <PackageReference Include="HtmlAgilityPack" Version="1.11.38" />
     <PackageReference Include="MarkdownDeep.NET.Core" Version="1.5.0.4" />
+    <PackageReference Include="MediatR" Version="10.0.1" />
+    <PackageReference Include="MediatR.Extensions.Microsoft.DependencyInjection" Version="10.0.1" />
     <PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="6.0.1" />
     <PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="6.0.1" />
     <PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="6.0.1" />


@@ -19,13 +19,13 @@ namespace API.Controllers
     public class CollectionController : BaseApiController
     {
         private readonly IUnitOfWork _unitOfWork;
-        private readonly IHubContext<MessageHub> _messageHub;
+        private readonly IEventHub _eventHub;
         /// <inheritdoc />
-        public CollectionController(IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub)
+        public CollectionController(IUnitOfWork unitOfWork, IEventHub eventHub)
         {
             _unitOfWork = unitOfWork;
-            _messageHub = messageHub;
+            _eventHub = eventHub;
         }
         /// <summary>
@@ -156,7 +156,8 @@ namespace API.Controllers
             {
                 tag.CoverImageLocked = false;
                 tag.CoverImage = string.Empty;
-                await _messageHub.Clients.All.SendAsync(SignalREvents.CoverUpdate, MessageFactory.CoverUpdateEvent(tag.Id, "collectionTag"));
+                await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate,
+                    MessageFactory.CoverUpdateEvent(tag.Id, "collectionTag"), false);
                 _unitOfWork.CollectionTagRepository.Update(tag);
             }


@@ -27,19 +27,19 @@ namespace API.Controllers
         private readonly IArchiveService _archiveService;
         private readonly IDirectoryService _directoryService;
         private readonly IDownloadService _downloadService;
-        private readonly IHubContext<MessageHub> _messageHub;
+        private readonly IEventHub _eventHub;
         private readonly UserManager<AppUser> _userManager;
         private readonly ILogger<DownloadController> _logger;
         private const string DefaultContentType = "application/octet-stream";
         public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService,
-            IDownloadService downloadService, IHubContext<MessageHub> messageHub, UserManager<AppUser> userManager, ILogger<DownloadController> logger)
+            IDownloadService downloadService, IEventHub eventHub, UserManager<AppUser> userManager, ILogger<DownloadController> logger)
         {
             _unitOfWork = unitOfWork;
             _archiveService = archiveService;
             _directoryService = directoryService;
             _downloadService = downloadService;
-            _messageHub = messageHub;
+            _eventHub = eventHub;
             _userManager = userManager;
             _logger = logger;
         }
@@ -119,30 +119,30 @@ namespace API.Controllers
         {
             try
             {
-                await _messageHub.Clients.All.SendAsync(SignalREvents.DownloadProgress,
+                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                     MessageFactory.DownloadProgressEvent(User.GetUsername(),
-                        Path.GetFileNameWithoutExtension(downloadName), 0F));
+                        Path.GetFileNameWithoutExtension(downloadName), 0F, "started"));
                 if (files.Count == 1)
                 {
-                    await _messageHub.Clients.All.SendAsync(SignalREvents.DownloadProgress,
+                    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                         MessageFactory.DownloadProgressEvent(User.GetUsername(),
-                            Path.GetFileNameWithoutExtension(downloadName), 1F));
+                            Path.GetFileNameWithoutExtension(downloadName), 1F, "ended"));
                     return await GetFirstFileDownload(files);
                 }
                 var (fileBytes, _) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath),
                     tempFolder);
-                await _messageHub.Clients.All.SendAsync(SignalREvents.DownloadProgress,
+                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                     MessageFactory.DownloadProgressEvent(User.GetUsername(),
-                        Path.GetFileNameWithoutExtension(downloadName), 1F));
+                        Path.GetFileNameWithoutExtension(downloadName), 1F, "ended"));
                 return File(fileBytes, DefaultContentType, downloadName);
             }
             catch (Exception ex)
             {
                 _logger.LogError(ex, "There was an exception when trying to download files");
-                await _messageHub.Clients.All.SendAsync(SignalREvents.DownloadProgress,
+                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                     MessageFactory.DownloadProgressEvent(User.GetUsername(),
-                        Path.GetFileNameWithoutExtension(downloadName), 1F));
+                        Path.GetFileNameWithoutExtension(downloadName), 1F, "ended"));
                 throw;
             }
         }
@@ -181,11 +181,11 @@ namespace API.Controllers
                 .Select(b => Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(bookmarkDirectory, $"{b.ChapterId}_{b.FileName}")));
             var filename = $"{series.Name} - Bookmarks.zip";
-            await _messageHub.Clients.All.SendAsync(SignalREvents.DownloadProgress,
+            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                 MessageFactory.DownloadProgressEvent(User.GetUsername(), Path.GetFileNameWithoutExtension(filename), 0F));
             var (fileBytes, _) = await _archiveService.CreateZipForDownload(files,
                 $"download_{user.Id}_{series.Id}_bookmarks");
-            await _messageHub.Clients.All.SendAsync(SignalREvents.DownloadProgress,
+            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                 MessageFactory.DownloadProgressEvent(User.GetUsername(), Path.GetFileNameWithoutExtension(filename), 1F));
             return File(fileBytes, DefaultContentType, filename);
         }


@@ -26,14 +26,15 @@ namespace API.Controllers
         private readonly ILogger<SeriesController> _logger;
         private readonly ITaskScheduler _taskScheduler;
         private readonly IUnitOfWork _unitOfWork;
-        private readonly IHubContext<MessageHub> _messageHub;
-        public SeriesController(ILogger<SeriesController> logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub)
+        private readonly IEventHub _eventHub;
+
+        public SeriesController(ILogger<SeriesController> logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork, IEventHub eventHub)
         {
             _logger = logger;
             _taskScheduler = taskScheduler;
             _unitOfWork = unitOfWork;
-            _messageHub = messageHub;
+            _eventHub = eventHub;
         }
         [HttpPost]
@@ -93,8 +94,9 @@ namespace API.Controllers
             await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
             await _unitOfWork.CommitAsync();
             _taskScheduler.CleanupChapters(chapterIds);
-            await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesRemoved,
-                MessageFactory.SeriesRemovedEvent(seriesId, series.Name, series.LibraryId));
+
+            await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
+                MessageFactory.SeriesRemovedEvent(seriesId, series.Name, series.LibraryId), false);
         }
         return Ok(result);
     }
@@ -378,9 +380,9 @@ namespace API.Controllers
         {
             foreach (var tag in updateSeriesMetadataDto.Tags)
             {
-                await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAddedToCollection,
-                    MessageFactory.SeriesAddedToCollection(tag.Id,
-                        updateSeriesMetadataDto.SeriesMetadata.SeriesId));
+                await _eventHub.SendMessageAsync(MessageFactory.SeriesAddedToCollection,
+                    MessageFactory.SeriesAddedToCollectionEvent(tag.Id,
+                        updateSeriesMetadataDto.SeriesMetadata.SeriesId), false);
             }
             return Ok("Successfully updated");
         }


@@ -0,0 +1,10 @@
+using System.Collections.Generic;
+using MediatR;
+
+namespace API.DTOs;
+
+public class UpdateUserRole : IRequest<bool>
+{
+    public string Username { get; init; }
+    public IList<string> Roles { get; init; }
+}


@@ -3,6 +3,7 @@ using API.Data;
 using API.Helpers;
 using API.Services;
 using API.Services.Tasks;
+using API.SignalR;
 using API.SignalR.Presence;
 using Kavita.Common;
 using Microsoft.AspNetCore.Hosting;
@@ -41,11 +42,13 @@ namespace API.Extensions
             services.AddScoped<IBookmarkService, BookmarkService>();
             services.AddScoped<ISiteThemeService, SiteThemeService>();
             services.AddScoped<IFileSystem, FileSystem>();
             services.AddScoped<IFileService, FileService>();
             services.AddScoped<ICacheHelper, CacheHelper>();
             services.AddScoped<IPresenceTracker, PresenceTracker>();
+            services.AddScoped<IEventHub, EventHub>();
             services.AddSqLite(config, env);
             services.AddLogging(config);


@@ -80,6 +80,7 @@ namespace API.Parser
         /// <summary>
         /// Merges non empty/null properties from info2 into this entity.
         /// </summary>
+        /// <remarks>This does not merge ComicInfo as they should always be the same</remarks>
         /// <param name="info2"></param>
         public void Merge(ParserInfo info2)
         {


@@ -5,8 +5,11 @@ using System.Linq;
 using System.Threading.Tasks;
 using API.Comparators;
 using API.Data;
+using API.Data.Metadata;
 using API.Data.Repositories;
+using API.Data.Scanner;
 using API.Entities;
+using API.Entities.Enums;
 using API.Extensions;
 using API.Helpers;
 using API.SignalR;
@@ -35,18 +38,18 @@ public class MetadataService : IMetadataService
 {
     private readonly IUnitOfWork _unitOfWork;
     private readonly ILogger<MetadataService> _logger;
-    private readonly IHubContext<MessageHub> _messageHub;
+    private readonly IEventHub _eventHub;
     private readonly ICacheHelper _cacheHelper;
     private readonly IReadingItemService _readingItemService;
     private readonly IDirectoryService _directoryService;
     private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst();
     public MetadataService(IUnitOfWork unitOfWork, ILogger<MetadataService> logger,
-        IHubContext<MessageHub> messageHub, ICacheHelper cacheHelper,
+        IEventHub eventHub, ICacheHelper cacheHelper,
         IReadingItemService readingItemService, IDirectoryService directoryService)
     {
         _unitOfWork = unitOfWork;
         _logger = logger;
-        _messageHub = messageHub;
+        _eventHub = eventHub;
         _cacheHelper = cacheHelper;
         _readingItemService = readingItemService;
         _directoryService = directoryService;
@@ -68,8 +71,8 @@ public class MetadataService : IMetadataService
         _logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile.FilePath);
         chapter.CoverImage = _readingItemService.GetCoverImage(firstFile.FilePath, ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId), firstFile.Format);
-        await _messageHub.Clients.All.SendAsync(SignalREvents.CoverUpdate, MessageFactory.CoverUpdateEvent(chapter.Id, "chapter"));
+        await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate,
+            MessageFactory.CoverUpdateEvent(chapter.Id, "chapter"), false);
         return true;
     }
@@ -98,7 +101,7 @@ public class MetadataService : IMetadataService
         if (firstChapter == null) return false;
         volume.CoverImage = firstChapter.CoverImage;
-        await _messageHub.Clients.All.SendAsync(SignalREvents.CoverUpdate, MessageFactory.CoverUpdateEvent(volume.Id, "volume"));
+        await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, MessageFactory.CoverUpdateEvent(volume.Id, "volume"), false);
         return true;
     }
@@ -135,7 +138,7 @@ public class MetadataService : IMetadataService
             }
         }
         series.CoverImage = firstCover?.CoverImage ?? coverImage;
-        await _messageHub.Clients.All.SendAsync(SignalREvents.CoverUpdate, MessageFactory.CoverUpdateEvent(series.Id, "series"));
+        await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, MessageFactory.CoverUpdateEvent(series.Id, "series"), false);
     }
@@ -200,8 +203,8 @@ public class MetadataService : IMetadataService
         var stopwatch = Stopwatch.StartNew();
         var totalTime = 0L;
         _logger.LogInformation("[MetadataService] Refreshing Library {LibraryName}. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize);
-        await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
-            MessageFactory.RefreshMetadataProgressEvent(library.Id, 0F));
+        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+            MessageFactory.CoverUpdateProgressEvent(library.Id, 0F, ProgressEventType.Started, $"Starting {library.Name}"));
         for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
         {
@@ -223,6 +227,12 @@ public class MetadataService : IMetadataService
             var seriesIndex = 0;
             foreach (var series in nonLibrarySeries)
             {
+                var index = chunk * seriesIndex;
+                var progress = Math.Max(0F, Math.Min(1F, index * 1F / chunkInfo.TotalSize));
+                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+                    MessageFactory.CoverUpdateProgressEvent(library.Id, progress, ProgressEventType.Updated, series.Name));
                 try
                 {
                     await ProcessSeriesMetadataUpdate(series, forceUpdate);
@@ -231,11 +241,6 @@ public class MetadataService : IMetadataService
                 {
                     _logger.LogError(ex, "[MetadataService] There was an exception during metadata refresh for {SeriesName}", series.Name);
                 }
-                var index = chunk * seriesIndex;
-                var progress = Math.Max(0F, Math.Min(1F, index * 1F / chunkInfo.TotalSize));
-                await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
-                    MessageFactory.RefreshMetadataProgressEvent(library.Id, progress));
                 seriesIndex++;
             }
@@ -246,8 +251,8 @@ public class MetadataService : IMetadataService
                 chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
         }
-        await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
-            MessageFactory.RefreshMetadataProgressEvent(library.Id, 1F));
+        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+            MessageFactory.CoverUpdateProgressEvent(library.Id, 1F, ProgressEventType.Ended, $"Complete"));
         await RemoveAbandonedMetadataKeys();
@@ -277,8 +282,8 @@ public class MetadataService : IMetadataService
             return;
         }
-        await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
-            MessageFactory.RefreshMetadataProgressEvent(libraryId, 0F));
+        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+            MessageFactory.CoverUpdateProgressEvent(libraryId, 0F, ProgressEventType.Started, series.Name));
         await ProcessSeriesMetadataUpdate(series, forceUpdate);
@@ -288,11 +293,16 @@ public class MetadataService : IMetadataService
             await _unitOfWork.CommitAsync();
         }
-        await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
-            MessageFactory.RefreshMetadataProgressEvent(libraryId, 1F));
+        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+            MessageFactory.CoverUpdateProgressEvent(libraryId, 1F, ProgressEventType.Ended, series.Name));
         await RemoveAbandonedMetadataKeys();
+        if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
+        {
+            await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, MessageFactory.CoverUpdateEvent(series.Id, "series"), false);
+        }
         _logger.LogInformation("[MetadataService] Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
     }
 }


@@ -31,17 +31,17 @@ public class BackupService : IBackupService
     private readonly IUnitOfWork _unitOfWork;
     private readonly ILogger<BackupService> _logger;
     private readonly IDirectoryService _directoryService;
-    private readonly IHubContext<MessageHub> _messageHub;
+    private readonly IEventHub _eventHub;
     private readonly IList<string> _backupFiles;
     public BackupService(ILogger<BackupService> logger, IUnitOfWork unitOfWork,
-        IDirectoryService directoryService, IConfiguration config, IHubContext<MessageHub> messageHub)
+        IDirectoryService directoryService, IConfiguration config, IEventHub eventHub)
     {
         _unitOfWork = unitOfWork;
         _logger = logger;
         _directoryService = directoryService;
-        _messageHub = messageHub;
+        _eventHub = eventHub;
         var maxRollingFiles = config.GetMaxRollingFiles();
         var loggingSection = config.GetLoggingFileName();
@@ -94,7 +94,7 @@ public class BackupService : IBackupService
             return;
         }
-        await SendProgress(0F);
+        await SendProgress(0F, "Started backup");
         var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace("/", "_").Replace(":", "_");
         var zipPath = _directoryService.FileSystem.Path.Join(backupDirectory, $"kavita_backup_{dateString}.zip");
@@ -112,15 +112,15 @@ public class BackupService : IBackupService
         _directoryService.CopyFilesToDirectory(
             _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory);
-        await SendProgress(0.25F);
+        await SendProgress(0.25F, "Copying core files");
         await CopyCoverImagesToBackupDirectory(tempDirectory);
-        await SendProgress(0.5F);
+        await SendProgress(0.5F, "Copying cover images");
         await CopyBookmarksToBackupDirectory(tempDirectory);
-        await SendProgress(0.75F);
+        await SendProgress(0.75F, "Copying bookmarks");
         try
         {
@@ -133,7 +133,7 @@ public class BackupService : IBackupService
         _directoryService.ClearAndDeleteDirectory(tempDirectory);
         _logger.LogInformation("Database backup completed");
-        await SendProgress(1F);
+        await SendProgress(1F, "Completed backup");
     }
     private async Task CopyCoverImagesToBackupDirectory(string tempDirectory)
@@ -189,10 +189,10 @@ public class BackupService : IBackupService
         }
     }
-    private async Task SendProgress(float progress)
+    private async Task SendProgress(float progress, string subtitle)
     {
-        await _messageHub.Clients.All.SendAsync(SignalREvents.BackupDatabaseProgress,
-            MessageFactory.BackupDatabaseProgressEvent(progress));
+        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+            MessageFactory.BackupDatabaseProgressEvent(progress, subtitle));
     }
 }


@@ -28,16 +28,16 @@ namespace API.Services.Tasks
 {
     private readonly ILogger<CleanupService> _logger;
     private readonly IUnitOfWork _unitOfWork;
-    private readonly IHubContext<MessageHub> _messageHub;
+    private readonly IEventHub _eventHub;
     private readonly IDirectoryService _directoryService;
     public CleanupService(ILogger<CleanupService> logger,
-        IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub,
+        IUnitOfWork unitOfWork, IEventHub eventHub,
         IDirectoryService directoryService)
     {
         _logger = logger;
         _unitOfWork = unitOfWork;
-        _messageHub = messageHub;
+        _eventHub = eventHub;
         _directoryService = directoryService;
     }
@@ -49,25 +49,23 @@ namespace API.Services.Tasks
     public async Task Cleanup()
     {
         _logger.LogInformation("Starting Cleanup");
-        await SendProgress(0F);
+        await SendProgress(0F, "Starting cleanup");
         _logger.LogInformation("Cleaning temp directory");
         _directoryService.ClearDirectory(_directoryService.TempDirectory);
-        await SendProgress(0.1F);
+        await SendProgress(0.1F, "Cleaning temp directory");
         CleanupCacheDirectory();
-        await SendProgress(0.25F);
+        await SendProgress(0.25F, "Cleaning old database backups");
         _logger.LogInformation("Cleaning old database backups");
         await CleanupBackups();
-        await SendProgress(0.50F);
+        await SendProgress(0.50F, "Cleaning deleted cover images");
         _logger.LogInformation("Cleaning deleted cover images");
         await DeleteSeriesCoverImages();
-        await SendProgress(0.6F);
+        await SendProgress(0.6F, "Cleaning deleted cover images");
         await DeleteChapterCoverImages();
-        await SendProgress(0.7F);
+        await SendProgress(0.7F, "Cleaning deleted cover images");
         await DeleteTagCoverImages();
-        await SendProgress(0.8F);
-        //_logger.LogInformation("Cleaning old bookmarks");
-        //await CleanupBookmarks();
-        await SendProgress(1F);
+        await SendProgress(0.8F, "Cleaning deleted cover images");
+        await SendProgress(1F, "Cleanup finished");
         _logger.LogInformation("Cleanup finished");
     }
@@ -82,10 +80,10 @@ namespace API.Services.Tasks
         await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
     }
-    private async Task SendProgress(float progress)
+    private async Task SendProgress(float progress, string subtitle)
    {
-        await _messageHub.Clients.All.SendAsync(SignalREvents.CleanupProgress,
-            MessageFactory.CleanupProgressEvent(progress));
+        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+            MessageFactory.CleanupProgressEvent(progress, subtitle));
    }
    /// <summary>


@@ -4,10 +4,14 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
+using System.Threading.Tasks;
+using API.Data.Metadata;
 using API.Entities;
 using API.Entities.Enums;
 using API.Helpers;
 using API.Parser;
+using API.SignalR;
+using Microsoft.AspNetCore.SignalR;
 using Microsoft.Extensions.Logging;
 namespace API.Services.Tasks.Scanner
@@ -26,6 +30,7 @@ namespace API.Services.Tasks.Scanner
         private readonly ILogger _logger;
         private readonly IDirectoryService _directoryService;
         private readonly IReadingItemService _readingItemService;
+        private readonly IEventHub _eventHub;
         private readonly DefaultParser _defaultParser;
         /// <summary>
@@ -36,13 +41,14 @@ namespace API.Services.Tasks.Scanner
         /// <param name="directoryService">Directory Service</param>
         /// <param name="readingItemService">ReadingItemService Service for extracting information on a number of formats</param>
         public ParseScannedFiles(ILogger logger, IDirectoryService directoryService,
-            IReadingItemService readingItemService)
+            IReadingItemService readingItemService, IEventHub eventHub)
         {
             _logger = logger;
             _directoryService = directoryService;
             _readingItemService = readingItemService;
             _scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
             _defaultParser = new DefaultParser(_directoryService);
+            _eventHub = eventHub;
         }
         /// <summary>
@@ -74,8 +80,6 @@ namespace API.Services.Tasks.Scanner
         /// <param name="type">Library type to determine parsing to perform</param>
         private void ProcessFile(string path, string rootPath, LibraryType type)
         {
-            // TODO: Emit event with what is being processed. It can look like Kavita isn't doing anything during file scan
             var info = _readingItemService.Parse(path, rootPath, type);
             if (info == null)
             {
@@ -138,8 +142,6 @@ namespace API.Services.Tasks.Scanner
                 NormalizedName = Parser.Parser.Normalize(info.Series)
             };
             _scannedSeries.AddOrUpdate(existingKey, new List<ParserInfo>() {info}, (_, oldValue) =>
             {
                 oldValue ??= new List<ParserInfo>();
@@ -177,29 +179,28 @@ namespace API.Services.Tasks.Scanner
         /// </summary>
         /// <param name="libraryType">Type of library. Used for selecting the correct file extensions to search for and parsing files</param>
         /// <param name="folders">The folders to scan. By default, this should be library.Folders, however it can be overwritten to restrict folders</param>
-        /// <param name="totalFiles">Total files scanned</param>
-        /// <param name="scanElapsedTime">Time it took to scan and parse files</param>
         /// <returns></returns>
-        public Dictionary<ParsedSeries, List<ParserInfo>> ScanLibrariesForSeries(LibraryType libraryType, IEnumerable<string> folders, out int totalFiles,
-            out long scanElapsedTime)
+        public async Task<Dictionary<ParsedSeries, List<ParserInfo>>> ScanLibrariesForSeries(LibraryType libraryType, IEnumerable<string> folders, string libraryName)
         {
-            var sw = Stopwatch.StartNew();
-            totalFiles = 0;
+            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("", libraryName, ProgressEventType.Started));
             foreach (var folderPath in folders)
             {
                 try
                 {
-                    totalFiles += _directoryService.TraverseTreeParallelForEach(folderPath, (f) =>
+                    async void Action(string f)
                     {
                         try
                         {
                             ProcessFile(f, folderPath, libraryType);
+                            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(f, libraryName, ProgressEventType.Updated));
                         }
                         catch (FileNotFoundException exception)
                         {
                             _logger.LogError(exception, "The file {Filename} could not be found", f);
                         }
-                    }, Parser.Parser.SupportedExtensions, _logger);
+                    }
+                    _directoryService.TraverseTreeParallelForEach(folderPath, Action, Parser.Parser.SupportedExtensions, _logger);
                 }
                 catch (ArgumentException ex)
                 {
@@ -207,9 +208,7 @@ namespace API.Services.Tasks.Scanner
                 }
             }
-            scanElapsedTime = sw.ElapsedMilliseconds;
-            _logger.LogInformation("Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles,
-                scanElapsedTime);
+            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("", libraryName, ProgressEventType.Ended));
             return SeriesWithInfos();
         }


@ -17,7 +17,6 @@ using API.Parser;
using API.Services.Tasks.Scanner; using API.Services.Tasks.Scanner;
using API.SignalR; using API.SignalR;
using Hangfire; using Hangfire;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Services.Tasks; namespace API.Services.Tasks;
@ -39,14 +38,14 @@ public class ScannerService : IScannerService
private readonly ILogger<ScannerService> _logger; private readonly ILogger<ScannerService> _logger;
private readonly IMetadataService _metadataService; private readonly IMetadataService _metadataService;
private readonly ICacheService _cacheService; private readonly ICacheService _cacheService;
private readonly IHubContext<MessageHub> _messageHub; private readonly IEventHub _eventHub;
private readonly IFileService _fileService; private readonly IFileService _fileService;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly IReadingItemService _readingItemService; private readonly IReadingItemService _readingItemService;
private readonly ICacheHelper _cacheHelper; private readonly ICacheHelper _cacheHelper;
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger,
IMetadataService metadataService, ICacheService cacheService, IHubContext<MessageHub> messageHub, IMetadataService metadataService, ICacheService cacheService, IEventHub eventHub,
IFileService fileService, IDirectoryService directoryService, IReadingItemService readingItemService, IFileService fileService, IDirectoryService directoryService, IReadingItemService readingItemService,
ICacheHelper cacheHelper) ICacheHelper cacheHelper)
{ {
@ -54,7 +53,7 @@ public class ScannerService : IScannerService
_logger = logger; _logger = logger;
_metadataService = metadataService; _metadataService = metadataService;
_cacheService = cacheService; _cacheService = cacheService;
_messageHub = messageHub; _eventHub = eventHub;
_fileService = fileService; _fileService = fileService;
_directoryService = directoryService; _directoryService = directoryService;
_readingItemService = readingItemService; _readingItemService = readingItemService;
@ -72,8 +71,8 @@ public class ScannerService : IScannerService
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
var folderPaths = library.Folders.Select(f => f.Path).ToList(); var folderPaths = library.Folders.Select(f => f.Path).ToList();
// Check if any of the folder roots are not available (ie disconnected from network, etc) and fail if any of them are
if (folderPaths.Any(f => !_directoryService.IsDriveMounted(f))) if (!await CheckMounts(library.Folders.Select(f => f.Path).ToList()))
{ {
_logger.LogError("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted"); _logger.LogError("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted");
return; return;
@ -86,8 +85,9 @@ public class ScannerService : IScannerService
var dirs = _directoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList()); var dirs = _directoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList());
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name); _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService); var (totalFiles, scanElapsedTime, parsedSeries) = await ScanFiles(library, dirs.Keys);
var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime);
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder // Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
RemoveParsedInfosNotForSeries(parsedSeries, series); RemoveParsedInfosNotForSeries(parsedSeries, series);
@ -133,11 +133,11 @@ public class ScannerService : IScannerService
} }
} }
var (totalFiles2, scanElapsedTime2, parsedSeries2) = await ScanFiles(library, dirs.Keys);
_logger.LogInformation("{SeriesName} has bad naming convention, forcing rescan at a higher directory", series.OriginalName); _logger.LogInformation("{SeriesName} has bad naming convention, forcing rescan at a higher directory", series.OriginalName);
scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService);
parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
totalFiles += totalFiles2; totalFiles += totalFiles2;
scanElapsedTime += scanElapsedTime2; scanElapsedTime += scanElapsedTime2;
parsedSeries = parsedSeries2;
RemoveParsedInfosNotForSeries(parsedSeries, series); RemoveParsedInfosNotForSeries(parsedSeries, series);
} }
} }
@ -148,9 +148,12 @@ public class ScannerService : IScannerService
// Merge any series together that might have different ParsedSeries but belong to another group of ParsedSeries // Merge any series together that might have different ParsedSeries but belong to another group of ParsedSeries
try try
{ {
UpdateSeries(series, parsedSeries, allPeople, allTags, allGenres, library.Type); await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name));
await UpdateSeries(series, parsedSeries, allPeople, allTags, allGenres, library.Type);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name));
await CommitAndSend(totalFiles, parsedSeries, sw, scanElapsedTime, series); await CommitAndSend(totalFiles, parsedSeries, sw, scanElapsedTime, series);
await RemoveAbandonedMetadataKeys();
} }
catch (Exception ex) catch (Exception ex)
{ {
@ -158,7 +161,8 @@ public class ScannerService : IScannerService
await _unitOfWork.RollbackAsync(); await _unitOfWork.RollbackAsync();
} }
// Tell UI that this series is done // Tell UI that this series is done
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId, series.Name), token); await _eventHub.SendMessageAsync(MessageFactory.ScanSeries,
MessageFactory.ScanSeriesEvent(seriesId, series.Name));
await CleanupDbEntities(); await CleanupDbEntities();
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds)); BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, series.Id, false)); BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, series.Id, false));
@ -186,6 +190,64 @@ public class ScannerService : IScannerService
} }
} }
private async Task<bool> CheckMounts(IList<string> folders)
{
// TODO: IF false, inform UI
// Check if any of the folder roots are not available (ie disconnected from network, etc) and fail if any of them are
if (folders.Any(f => !_directoryService.IsDriveMounted(f)))
{
_logger.LogError("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted");
await _eventHub.SendMessageAsync("library.scan.error", new SignalRMessage()
{
Name = "library.scan.error",
Body =
new {
Message =
"Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted",
Details = ""
},
Title = "Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted",
SubTitle = string.Join(", ", folders.Where(f => !_directoryService.IsDriveMounted(f)))
});
return false;
}
// For Docker instances check if any of the folder roots are not available (ie disconnected volumes, etc) and fail if any of them are
if (folders.Any(f => _directoryService.IsDirectoryEmpty(f)))
{
// TODO: Food for thought, move this to throw an exception and let a middleware inform the UI to keep the code clean. (We can throw a custom exception which
// will always propagate to the UI)
// That way logging and UI informing is all in one place with full context
_logger.LogError("Some of the root folders for the library are empty. " +
"Either your mount has been disconnected or you are trying to delete all series in the library. " +
"Scan will be aborted. " +
"Check that your mount is connected or change the library's root folder and rescan");
// TODO: Use a factory method
await _eventHub.SendMessageAsync(MessageFactory.Error, new SignalRMessage()
{
Name = MessageFactory.Error,
Title = "Some of the root folders for the library are empty.",
SubTitle = "Either your mount has been disconnected or you are trying to delete all series in the library. " +
"Scan will be aborted. " +
"Check that your mount is connected or change the library's root folder and rescan",
Body =
new {
Title =
"Some of the root folders for the library are empty.",
SubTitle = "Either your mount has been disconnected or you are trying to delete all series in the library. " +
"Scan will be aborted. " +
"Check that your mount is connected or change the library's root folder and rescan"
}
}, true);
return false;
}
return true;
}
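
The TODO above floats throwing instead of returning false, and letting a single handler log and notify the UI with full context. A minimal sketch of that direction, assuming a hypothetical KavitaScanException; since scans run as Hangfire jobs rather than HTTP requests, the catch would live at the job entry point (or a Hangfire job filter) rather than ASP.NET middleware:

    // Hypothetical exception type carrying the user-facing title/subtitle (not part of this changeset)
    public class KavitaScanException : Exception
    {
        public string SubTitle { get; }
        public KavitaScanException(string title, string subTitle = "") : base(title)
        {
            SubTitle = subTitle;
        }
    }

    // At the top of ScanLibrary/ScanSeries, or in a shared Hangfire job filter:
    try
    {
        await CheckMounts(library.Folders.Select(f => f.Path).ToList()); // would throw instead of returning false
        // ... rest of the scan ...
    }
    catch (KavitaScanException ex)
    {
        // Logging and UI notification live in one place, with full context
        _logger.LogError(ex, "{Message}", ex.Message);
        await _eventHub.SendMessageAsync(MessageFactory.Error,
            new SignalRMessage { Name = MessageFactory.Error, Title = ex.Message, SubTitle = ex.SubTitle });
    }
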
[DisableConcurrentExecution(timeoutInSeconds: 360)] [DisableConcurrentExecution(timeoutInSeconds: 360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
@ -223,12 +285,11 @@ public class ScannerService : IScannerService
return; return;
} }
-        // Check if any of the folder roots are not available (ie disconnected from network, etc) and fail if any of them are
-        if (library.Folders.Any(f => !_directoryService.IsDriveMounted(f.Path)))
+        if (!await CheckMounts(library.Folders.Select(f => f.Path).ToList()))
         {
             _logger.LogCritical("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted");
-            await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
-                MessageFactory.ScanLibraryProgressEvent(libraryId, 1F));
+            // await _eventHub.SendMessageAsync(SignalREvents.NotificationProgress,
+            //     MessageFactory.ScanLibraryProgressEvent(libraryId, 1F));
             return;
         }
@ -239,17 +300,19 @@ public class ScannerService : IScannerService
                 "Either your mount has been disconnected or you are trying to delete all series in the library. " +
                 "Scan will be aborted. " +
                 "Check that your mount is connected or change the library's root folder and rescan");
-            await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
-                MessageFactory.ScanLibraryProgressEvent(libraryId, 1F));
+            // await _eventHub.SendMessageAsync(SignalREvents.NotificationProgress,
+            //     MessageFactory.ScanLibraryProgressEvent(libraryId, 1F));
             return;
         }

         _logger.LogInformation("[ScannerService] Beginning file scan on {LibraryName}", library.Name);
-        await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
-            MessageFactory.ScanLibraryProgressEvent(libraryId, 0));
+        // await _eventHub.SendMessageAsync(SignalREvents.NotificationProgress,
+        //     MessageFactory.ScanLibraryProgressEvent(libraryId, 0F));

-        var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService);
-        var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime);
+        var (totalFiles, scanElapsedTime, series) = await ScanFiles(library, library.Folders.Select(fp => fp.Path));
+        // var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService);
+        // var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime);
_logger.LogInformation("[ScannerService] Finished file scan. Updating database"); _logger.LogInformation("[ScannerService] Finished file scan. Updating database");
foreach (var folderPath in library.Folders) foreach (var folderPath in library.Folders)
@ -276,11 +339,23 @@ public class ScannerService : IScannerService
await CleanupDbEntities(); await CleanupDbEntities();
-        await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
-            MessageFactory.ScanLibraryProgressEvent(libraryId, 1F));
+        // await _eventHub.SendMessageAsync(SignalREvents.NotificationProgress,
+        //     MessageFactory.ScanLibraryProgressEvent(libraryId, 1F));
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, false)); BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, false));
} }
private async Task<Tuple<int, long, Dictionary<ParsedSeries, List<ParserInfo>>>> ScanFiles(Library library, IEnumerable<string> dirs)
{
var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService, _eventHub);
        var scanWatch = Stopwatch.StartNew(); // start timing; otherwise ElapsedMilliseconds below always reports 0
var parsedSeries = await scanner.ScanLibrariesForSeries(library.Type, dirs, library.Name);
var totalFiles = parsedSeries.Keys.Sum(key => parsedSeries[key].Count);
var scanElapsedTime = scanWatch.ElapsedMilliseconds;
_logger.LogInformation("Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles,
scanElapsedTime);
return new Tuple<int, long, Dictionary<ParsedSeries, List<ParserInfo>>>(totalFiles, scanElapsedTime, parsedSeries);
}
/// <summary> /// <summary>
/// Remove any user progress rows that no longer exist since scan library ran and deleted series/volumes/chapters /// Remove any user progress rows that no longer exist since scan library ran and deleted series/volumes/chapters
/// </summary> /// </summary>
@ -350,10 +425,16 @@ public class ScannerService : IScannerService
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
var librarySeries = cleanedSeries.ToList(); var librarySeries = cleanedSeries.ToList();
-            Parallel.ForEach(librarySeries, (series) =>
+            //var index = 0;
+            foreach (var series in librarySeries)
             {
-                UpdateSeries(series, parsedSeries, allPeople, allTags, allGenres, library.Type);
-            });
+                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name));
+                await UpdateSeries(series, parsedSeries, allPeople, allTags, allGenres, library.Type);
+                // await _eventHub.SendMessageAsync(SignalREvents.NotificationProgress,
+                //     MessageFactory.ScanLibraryProgressEvent(library.Id, (1F * index) / librarySeries.Count));
+                // index += 1;
+            }
try try
{ {
@ -364,10 +445,10 @@ public class ScannerService : IScannerService
_logger.LogCritical(ex, "[ScannerService] There was an issue writing to the DB. Chunk {ChunkNumber} did not save to DB. If debug mode, series to check will be printed", chunk); _logger.LogCritical(ex, "[ScannerService] There was an issue writing to the DB. Chunk {ChunkNumber} did not save to DB. If debug mode, series to check will be printed", chunk);
foreach (var series in nonLibrarySeries) foreach (var series in nonLibrarySeries)
{ {
_logger.LogDebug("[ScannerService] There may be a constraint issue with {SeriesName}", series.OriginalName); _logger.LogCritical("[ScannerService] There may be a constraint issue with {SeriesName}", series.OriginalName);
} }
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryError, await _eventHub.SendMessageAsync(MessageFactory.ScanLibraryError,
MessageFactory.ScanLibraryError(library.Id)); MessageFactory.ScanLibraryErrorEvent(library.Id, library.Name));
continue; continue;
} }
_logger.LogInformation( _logger.LogInformation(
@ -377,17 +458,19 @@ public class ScannerService : IScannerService
// Emit any series removed // Emit any series removed
foreach (var missing in missingSeries) foreach (var missing in missingSeries)
{ {
await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesRemoved, MessageFactory.SeriesRemovedEvent(missing.Id, missing.Name, library.Id)); await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved, MessageFactory.SeriesRemovedEvent(missing.Id, missing.Name, library.Id));
} }
foreach (var series in librarySeries) foreach (var series in librarySeries)
{ {
-                await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(series.Id, series.Name));
+                // TODO: Do I need this? Shouldn't this be NotificationProgress
+                // This is something more like, the series has finished updating in the backend. It may or may not have been modified.
+                await _eventHub.SendMessageAsync(MessageFactory.ScanSeries, MessageFactory.ScanSeriesEvent(series.Id, series.Name));
             }

-            var progress = Math.Max(0, Math.Min(1, ((chunk + 1F) * chunkInfo.ChunkSize) / chunkInfo.TotalSize));
-            await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
-                MessageFactory.ScanLibraryProgressEvent(library.Id, progress));
+            //var progress = Math.Max(0, Math.Min(1, ((chunk + 1F) * chunkInfo.ChunkSize) / chunkInfo.TotalSize));
+            // await _eventHub.SendMessageAsync(SignalREvents.NotificationProgress,
+            //     MessageFactory.ScanLibraryProgressEvent(library.Id, progress));
} }
@ -435,7 +518,7 @@ public class ScannerService : IScannerService
foreach(var series in newSeries) foreach(var series in newSeries)
{ {
_logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName); _logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName);
UpdateSeries(series, parsedSeries, allPeople, allTags, allGenres, library.Type); await UpdateSeries(series, parsedSeries, allPeople, allTags, allGenres, library.Type);
_unitOfWork.SeriesRepository.Attach(series); _unitOfWork.SeriesRepository.Attach(series);
try try
{ {
@ -445,7 +528,7 @@ public class ScannerService : IScannerService
newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name); newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
// Inform UI of new series added // Inform UI of new series added
await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAdded, MessageFactory.SeriesAddedEvent(series.Id, series.Name, library.Id)); await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded, MessageFactory.SeriesAddedEvent(series.Id, series.Name, library.Id));
} }
catch (Exception ex) catch (Exception ex)
{ {
@ -453,23 +536,28 @@ public class ScannerService : IScannerService
series.Name, $"{series.Name}_{series.NormalizedName}_{series.LocalizedName}_{series.LibraryId}_{series.Format}"); series.Name, $"{series.Name}_{series.NormalizedName}_{series.LocalizedName}_{series.LibraryId}_{series.Format}");
} }
var progress = Math.Max(0F, Math.Min(1F, i * 1F / newSeries.Count)); //var progress = Math.Max(0F, Math.Min(1F, i * 1F / newSeries.Count));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress, // await _eventHub.SendMessageAsync(SignalREvents.NotificationProgress,
MessageFactory.ScanLibraryProgressEvent(library.Id, progress)); // MessageFactory.ScanLibraryProgressEvent(library.Id, progress));
i++; i++;
} }
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended));
_logger.LogInformation( _logger.LogInformation(
"[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}", "[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name); newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
} }
private void UpdateSeries(Series series, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, private async Task UpdateSeries(Series series, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries,
ICollection<Person> allPeople, ICollection<Tag> allTags, ICollection<Genre> allGenres, LibraryType libraryType) ICollection<Person> allPeople, ICollection<Tag> allTags, ICollection<Genre> allGenres, LibraryType libraryType)
{ {
try try
{ {
_logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName); _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
//await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.DbUpdateProgressEvent(series, ProgressEventType.Started));
//await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.DbUpdateProgressEvent(series, ProgressEventType.Updated));
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(series.Library.Name, ProgressEventType.Ended, series.Name));
// Get all associated ParsedInfos to the series. This includes infos that use a different filename that matches Series LocalizedName // Get all associated ParsedInfos to the series. This includes infos that use a different filename that matches Series LocalizedName
var parsedInfos = ParseScannedFiles.GetInfosByName(parsedSeries, series); var parsedInfos = ParseScannedFiles.GetInfosByName(parsedSeries, series);
@ -484,6 +572,8 @@ public class ScannerService : IScannerService
} }
series.OriginalName ??= parsedInfos[0].Series; series.OriginalName ??= parsedInfos[0].Series;
series.SortName ??= parsedInfos[0].SeriesSort; series.SortName ??= parsedInfos[0].SeriesSort;
//await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.DbUpdateProgressEvent(series, ProgressEventType.Updated));
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(series.Library.Name, ProgressEventType.Ended, series.Name));
UpdateSeriesMetadata(series, allPeople, allGenres, allTags, libraryType); UpdateSeriesMetadata(series, allPeople, allGenres, allTags, libraryType);
} }
@ -491,6 +581,8 @@ public class ScannerService : IScannerService
{ {
_logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name); _logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name);
} }
//await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.DbUpdateProgressEvent(series, ProgressEventType.Ended));
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(series.Library.Name, ProgressEventType.Ended, series.Name));
} }
public static IEnumerable<Series> FindSeriesNotOnDisk(IEnumerable<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries) public static IEnumerable<Series> FindSeriesNotOnDisk(IEnumerable<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
@ -498,6 +590,13 @@ public class ScannerService : IScannerService
return existingSeries.Where(es => !ParserInfoHelpers.SeriesHasMatchingParserInfoFormat(es, parsedSeries)); return existingSeries.Where(es => !ParserInfoHelpers.SeriesHasMatchingParserInfoFormat(es, parsedSeries));
} }
private async Task RemoveAbandonedMetadataKeys()
{
await _unitOfWork.TagRepository.RemoveAllTagNoLongerAssociated();
await _unitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
await _unitOfWork.GenreRepository.RemoveAllGenreNoLongerAssociated();
}
private static void UpdateSeriesMetadata(Series series, ICollection<Person> allPeople, ICollection<Genre> allGenres, ICollection<Tag> allTags, LibraryType libraryType) private static void UpdateSeriesMetadata(Series series, ICollection<Person> allPeople, ICollection<Genre> allGenres, ICollection<Tag> allTags, LibraryType libraryType)
{ {
@ -605,6 +704,7 @@ public class ScannerService : IScannerService
_unitOfWork.VolumeRepository.Add(volume); _unitOfWork.VolumeRepository.Add(volume);
} }
// TODO: Here we can put a signalR update
_logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray(); var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
UpdateChapters(volume, infos); UpdateChapters(volume, infos);

@ -22,13 +22,13 @@ public class SiteThemeService : ISiteThemeService
{ {
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub; private readonly IEventHub _eventHub;
public SiteThemeService(IDirectoryService directoryService, IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub) public SiteThemeService(IDirectoryService directoryService, IUnitOfWork unitOfWork, IEventHub eventHub)
{ {
_directoryService = directoryService; _directoryService = directoryService;
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_messageHub = messageHub; _eventHub = eventHub;
} }
/// <summary> /// <summary>
@ -59,8 +59,6 @@ public class SiteThemeService : ISiteThemeService
.Where(name => !reservedNames.Contains(Parser.Parser.Normalize(name))).ToList(); .Where(name => !reservedNames.Contains(Parser.Parser.Normalize(name))).ToList();
var allThemes = (await _unitOfWork.SiteThemeRepository.GetThemes()).ToList(); var allThemes = (await _unitOfWork.SiteThemeRepository.GetThemes()).ToList();
var totalThemesToIterate = themeFiles.Count;
var themeIteratedCount = 0;
// First remove any files from allThemes that are User Defined and not on disk // First remove any files from allThemes that are User Defined and not on disk
var userThemes = allThemes.Where(t => t.Provider == ThemeProvider.User).ToList(); var userThemes = allThemes.Where(t => t.Provider == ThemeProvider.User).ToList();
@ -68,15 +66,11 @@ public class SiteThemeService : ISiteThemeService
{ {
var filepath = Parser.Parser.NormalizePath( var filepath = Parser.Parser.NormalizePath(
_directoryService.FileSystem.Path.Join(_directoryService.SiteThemeDirectory, userTheme.FileName)); _directoryService.FileSystem.Path.Join(_directoryService.SiteThemeDirectory, userTheme.FileName));
-                if (!_directoryService.FileSystem.File.Exists(filepath))
-                {
-                    // I need to do the removal different. I need to update all userpreferences to use DefaultTheme
-                    allThemes.Remove(userTheme);
-                    await RemoveTheme(userTheme);
-                    await _messageHub.Clients.All.SendAsync(SignalREvents.SiteThemeProgress,
-                        MessageFactory.SiteThemeProgressEvent(1, totalThemesToIterate, userTheme.FileName, 0F));
-                }
+                if (_directoryService.FileSystem.File.Exists(filepath)) continue;
+
+                // I need to do the removal different. I need to update all user preferences to use DefaultTheme
+                allThemes.Remove(userTheme);
+                await RemoveTheme(userTheme);
             }
// Add new custom themes // Add new custom themes
@ -85,11 +79,8 @@ public class SiteThemeService : ISiteThemeService
{ {
var themeName = var themeName =
Parser.Parser.Normalize(_directoryService.FileSystem.Path.GetFileNameWithoutExtension(themeFile)); Parser.Parser.Normalize(_directoryService.FileSystem.Path.GetFileNameWithoutExtension(themeFile));
if (allThemeNames.Contains(themeName)) if (allThemeNames.Contains(themeName)) continue;
{
themeIteratedCount += 1;
continue;
}
_unitOfWork.SiteThemeRepository.Add(new SiteTheme() _unitOfWork.SiteThemeRepository.Add(new SiteTheme()
{ {
Name = _directoryService.FileSystem.Path.GetFileNameWithoutExtension(themeFile), Name = _directoryService.FileSystem.Path.GetFileNameWithoutExtension(themeFile),
@ -98,9 +89,9 @@ public class SiteThemeService : ISiteThemeService
Provider = ThemeProvider.User, Provider = ThemeProvider.User,
IsDefault = false, IsDefault = false,
}); });
-                await _messageHub.Clients.All.SendAsync(SignalREvents.SiteThemeProgress,
-                    MessageFactory.SiteThemeProgressEvent(themeIteratedCount, totalThemesToIterate, themeName, themeIteratedCount / (totalThemesToIterate * 1.0f)));
-                themeIteratedCount += 1;
+                await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+                    MessageFactory.SiteThemeProgressEvent(_directoryService.FileSystem.Path.GetFileName(themeFile), themeName, ProgressEventType.Updated));
             }
@ -109,8 +100,8 @@ public class SiteThemeService : ISiteThemeService
                 await _unitOfWork.CommitAsync();
             }
-            await _messageHub.Clients.All.SendAsync(SignalREvents.SiteThemeProgress,
-                MessageFactory.SiteThemeProgressEvent(totalThemesToIterate, totalThemesToIterate, "", 1F));
+            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+                MessageFactory.SiteThemeProgressEvent("", "", ProgressEventType.Ended));
} }

@ -53,7 +53,7 @@ public interface IVersionUpdaterService
public class VersionUpdaterService : IVersionUpdaterService public class VersionUpdaterService : IVersionUpdaterService
{ {
private readonly ILogger<VersionUpdaterService> _logger; private readonly ILogger<VersionUpdaterService> _logger;
private readonly IHubContext<MessageHub> _messageHub; private readonly IEventHub _eventHub;
private readonly IPresenceTracker _tracker; private readonly IPresenceTracker _tracker;
private readonly Markdown _markdown = new MarkdownDeep.Markdown(); private readonly Markdown _markdown = new MarkdownDeep.Markdown();
#pragma warning disable S1075 #pragma warning disable S1075
@ -61,10 +61,10 @@ public class VersionUpdaterService : IVersionUpdaterService
private const string GithubAllReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases"; private const string GithubAllReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases";
#pragma warning restore S1075 #pragma warning restore S1075
public VersionUpdaterService(ILogger<VersionUpdaterService> logger, IHubContext<MessageHub> messageHub, IPresenceTracker tracker) public VersionUpdaterService(ILogger<VersionUpdaterService> logger, IEventHub eventHub, IPresenceTracker tracker)
{ {
_logger = logger; _logger = logger;
_messageHub = messageHub; _eventHub = eventHub;
_tracker = tracker; _tracker = tracker;
FlurlHttp.ConfigureClient(GithubLatestReleasesUrl, cli => FlurlHttp.ConfigureClient(GithubLatestReleasesUrl, cli =>
@ -117,26 +117,22 @@ public class VersionUpdaterService : IVersionUpdaterService
{ {
if (update == null) return; if (update == null) return;
var admins = await _tracker.GetOnlineAdmins();
var updateVersion = new Version(update.CurrentVersion); var updateVersion = new Version(update.CurrentVersion);
if (BuildInfo.Version < updateVersion) if (BuildInfo.Version < updateVersion)
{ {
_logger.LogInformation("Server is out of date. Current: {CurrentVersion}. Available: {AvailableUpdate}", BuildInfo.Version, updateVersion); _logger.LogInformation("Server is out of date. Current: {CurrentVersion}. Available: {AvailableUpdate}", BuildInfo.Version, updateVersion);
await SendEvent(update, admins); await _eventHub.SendMessageAsync(MessageFactory.UpdateAvailable, MessageFactory.UpdateVersionEvent(update),
true);
} }
else if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") == Environments.Development) else if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") == Environments.Development)
{ {
_logger.LogInformation("Server is up to date. Current: {CurrentVersion}", BuildInfo.Version); _logger.LogInformation("Server is up to date. Current: {CurrentVersion}", BuildInfo.Version);
await SendEvent(update, admins); await _eventHub.SendMessageAsync(MessageFactory.UpdateAvailable, MessageFactory.UpdateVersionEvent(update),
true);
} }
} }
private async Task SendEvent(UpdateNotificationDto update, IReadOnlyList<string> admins)
{
await _messageHub.Clients.Users(admins).SendAsync(SignalREvents.UpdateAvailable, MessageFactory.UpdateVersionEvent(update));
}
private static async Task<GithubReleaseMetadata> GetGithubRelease() private static async Task<GithubReleaseMetadata> GetGithubRelease()
{ {

API/SignalR/EventHub.cs (new file, 44 lines added)
@ -0,0 +1,44 @@
using System.Threading.Tasks;
using API.Data;
using API.SignalR.Presence;
using Microsoft.AspNetCore.SignalR;
namespace API.SignalR;
/// <summary>
/// Responsible for ushering events to the UI and providing a simple DI hook for sending data
/// </summary>
public interface IEventHub
{
Task SendMessageAsync(string method, SignalRMessage message, bool onlyAdmins = true);
}
public class EventHub : IEventHub
{
private readonly IHubContext<MessageHub> _messageHub;
private readonly IPresenceTracker _presenceTracker;
private readonly IUnitOfWork _unitOfWork;
public EventHub(IHubContext<MessageHub> messageHub, IPresenceTracker presenceTracker, IUnitOfWork unitOfWork)
{
_messageHub = messageHub;
_presenceTracker = presenceTracker;
_unitOfWork = unitOfWork;
// TODO: When sending a message, queue the message up and on re-connect, reply the queued messages. Queue messages expire on a rolling basis (rolling array)
}
public async Task SendMessageAsync(string method, SignalRMessage message, bool onlyAdmins = true)
{
// TODO: If libraryId and NOT onlyAdmins, then perform RBS check before sending the event
var users = _messageHub.Clients.All;
if (onlyAdmins)
{
var admins = await _presenceTracker.GetOnlineAdmins();
            users = _messageHub.Clients.Users(admins); // narrow the audience to online admins; the result must be assigned for the filter to take effect
}
await users.SendAsync(method, message);
}
}
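
For consumers, the pattern is to inject IEventHub and pair a MessageFactory event-name constant with its factory method. A small sketch of a hypothetical service reporting progress around a long-running task (SomeMaintenanceService and RunAsync are illustrative names only; the factory and constant names come from this diff):

    public class SomeMaintenanceService
    {
        private readonly IEventHub _eventHub;

        public SomeMaintenanceService(IEventHub eventHub)
        {
            _eventHub = eventHub;
        }

        public async Task RunAsync(string libraryName)
        {
            // The events widget shows an active event until the matching "ended" arrives
            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                MessageFactory.LibraryScanProgressEvent(libraryName, ProgressEventType.Started));

            // ... long-running work, optionally emitting ProgressEventType.Updated along the way ...

            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                MessageFactory.LibraryScanProgressEvent(libraryName, ProgressEventType.Ended));
        }
    }
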

@ -1,16 +1,90 @@
using System; using System;
using System.Diagnostics;
using System.IO;
using System.Threading; using System.Threading;
using API.DTOs.Update; using API.DTOs.Update;
using API.Entities;
namespace API.SignalR namespace API.SignalR
{ {
public static class MessageFactory public static class MessageFactory
{ {
/// <summary>
/// An update is available for the Kavita instance
/// </summary>
public const string UpdateAvailable = "UpdateAvailable";
/// <summary>
/// Used to tell when a scan series completes
/// </summary>
public const string ScanSeries = "ScanSeries";
/// <summary>
/// Event sent out during Refresh Metadata for progress tracking
/// </summary>
private const string CoverUpdateProgress = "CoverUpdateProgress";
/// <summary>
/// Series is added to server
/// </summary>
public const string SeriesAdded = "SeriesAdded";
/// <summary>
/// Series is removed from server
/// </summary>
public const string SeriesRemoved = "SeriesRemoved";
/// <summary>
/// When a user connects/disconnects from the server
/// </summary>
public const string OnlineUsers = "OnlineUsers";
/// <summary>
/// When a series is added to a collection
/// </summary>
public const string SeriesAddedToCollection = "SeriesAddedToCollection";
/// <summary>
/// When an error occurs during a scan library task
/// </summary>
public const string ScanLibraryError = "ScanLibraryError";
/// <summary>
/// Event sent out during backing up the database
/// </summary>
private const string BackupDatabaseProgress = "BackupDatabaseProgress";
/// <summary>
/// Event sent out during cleaning up temp and cache folders
/// </summary>
private const string CleanupProgress = "CleanupProgress";
/// <summary>
/// Event sent out during downloading of files
/// </summary>
private const string DownloadProgress = "DownloadProgress";
/// <summary>
/// A cover was updated
/// </summary>
public const string CoverUpdate = "CoverUpdate";
/// <summary>
/// A custom site theme was removed or added
/// </summary>
private const string SiteThemeProgress = "SiteThemeProgress";
/// <summary>
/// A type of event that has progress (determinate or indeterminate).
/// The underlying event will have a name to give details on how to handle.
/// </summary>
public const string NotificationProgress = "NotificationProgress";
/// <summary>
/// Event sent out when Scan Loop is parsing a file
/// </summary>
private const string FileScanProgress = "FileScanProgress";
/// <summary>
/// A generic error that can occur in background processing
/// </summary>
public const string Error = "Error";
/// <summary>
/// When DB updates are occurring during a library/series scan
/// </summary>
private const string ScanProgress = "ScanProgress";
public static SignalRMessage ScanSeriesEvent(int seriesId, string seriesName) public static SignalRMessage ScanSeriesEvent(int seriesId, string seriesName)
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.ScanSeries, Name = ScanSeries,
Body = new Body = new
{ {
SeriesId = seriesId, SeriesId = seriesId,
@ -23,7 +97,7 @@ namespace API.SignalR
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.SeriesAdded, Name = SeriesAdded,
Body = new Body = new
{ {
SeriesId = seriesId, SeriesId = seriesId,
@ -37,7 +111,7 @@ namespace API.SignalR
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.SeriesRemoved, Name = SeriesRemoved,
Body = new Body = new
{ {
SeriesId = seriesId, SeriesId = seriesId,
@ -47,11 +121,15 @@ namespace API.SignalR
}; };
} }
public static SignalRMessage ScanLibraryProgressEvent(int libraryId, float progress) public static SignalRMessage CoverUpdateProgressEvent(int libraryId, float progress, string eventType, string subtitle = "")
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.ScanLibraryProgress, Name = CoverUpdateProgress,
Title = "Refreshing Covers",
SubTitle = subtitle,
EventType = eventType,
Progress = ProgressType.Determinate,
Body = new Body = new
{ {
LibraryId = libraryId, LibraryId = libraryId,
@ -61,37 +139,40 @@ namespace API.SignalR
}; };
} }
-        public static SignalRMessage RefreshMetadataProgressEvent(int libraryId, float progress)
-        {
-            return new SignalRMessage()
-            {
-                Name = SignalREvents.RefreshMetadataProgress,
-                Body = new
-                {
-                    LibraryId = libraryId,
-                    Progress = progress,
-                    EventTime = DateTime.Now
-                }
-            };
-        }
-
-        public static SignalRMessage BackupDatabaseProgressEvent(float progress)
-        {
-            return new SignalRMessage()
-            {
-                Name = SignalREvents.BackupDatabaseProgress,
+        public static SignalRMessage BackupDatabaseProgressEvent(float progress, string subtitle = "")
+        {
+            return new SignalRMessage()
+            {
+                Name = BackupDatabaseProgress,
+                Title = "Backing up Database",
+                SubTitle = subtitle,
+                EventType = progress switch
+                {
+                    0f => "started",
+                    1f => "ended",
+                    _ => "updated"
+                },
+                Progress = ProgressType.Determinate,
                 Body = new
                 {
                     Progress = progress
                 }
             };
         }
public static SignalRMessage CleanupProgressEvent(float progress) public static SignalRMessage CleanupProgressEvent(float progress, string subtitle = "")
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.CleanupProgress, Name = CleanupProgress,
Title = "Performing Cleanup",
SubTitle = subtitle,
EventType = progress switch
{
0f => "started",
1f => "ended",
_ => "updated"
},
Progress = ProgressType.Determinate,
Body = new Body = new
{ {
Progress = progress Progress = progress
@ -100,21 +181,26 @@ namespace API.SignalR
} }
public static SignalRMessage UpdateVersionEvent(UpdateNotificationDto update) public static SignalRMessage UpdateVersionEvent(UpdateNotificationDto update)
{ {
return new SignalRMessage return new SignalRMessage
{ {
Name = SignalREvents.UpdateAvailable, Name = UpdateAvailable,
Title = "Update Available",
SubTitle = update.UpdateTitle,
EventType = ProgressEventType.Single,
Progress = ProgressType.None,
Body = update Body = update
}; };
} }
public static SignalRMessage SeriesAddedToCollection(int tagId, int seriesId) public static SignalRMessage SeriesAddedToCollectionEvent(int tagId, int seriesId)
{ {
return new SignalRMessage return new SignalRMessage
{ {
Name = SignalREvents.UpdateAvailable, Name = SeriesAddedToCollection,
Progress = ProgressType.None,
EventType = ProgressEventType.Single,
Body = new Body = new
{ {
TagId = tagId, TagId = tagId,
@ -123,11 +209,15 @@ namespace API.SignalR
}; };
} }
public static SignalRMessage ScanLibraryError(int libraryId) public static SignalRMessage ScanLibraryErrorEvent(int libraryId, string libraryName)
{ {
return new SignalRMessage return new SignalRMessage
{ {
Name = SignalREvents.ScanLibraryError, Name = ScanLibraryError,
Title = "Error",
SubTitle = $"Error Scanning {libraryName}",
Progress = ProgressType.None,
EventType = ProgressEventType.Single,
Body = new Body = new
{ {
LibraryId = libraryId, LibraryId = libraryId,
@ -135,11 +225,15 @@ namespace API.SignalR
}; };
} }
public static SignalRMessage DownloadProgressEvent(string username, string downloadName, float progress) public static SignalRMessage DownloadProgressEvent(string username, string downloadName, float progress, string eventType = "updated")
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.DownloadProgress, Name = DownloadProgress,
Title = $"Downloading {downloadName}",
SubTitle = $"{username} is downloading {downloadName}",
EventType = eventType,
Progress = ProgressType.Determinate,
Body = new Body = new
{ {
UserName = username, UserName = username,
@ -149,11 +243,73 @@ namespace API.SignalR
}; };
} }
/// <summary>
/// Represents a file being scanned by Kavita for processing and grouping
/// </summary>
/// <remarks>Does not have a progress as it's unknown how many files there are. Instead sends -1 to represent indeterminate</remarks>
/// <param name="filename"></param>
/// <param name="libraryName"></param>
/// <param name="eventType"></param>
/// <returns></returns>
public static SignalRMessage FileScanProgressEvent(string filename, string libraryName, string eventType)
{
return new SignalRMessage()
{
Name = FileScanProgress,
Title = $"Scanning {libraryName}",
SubTitle = Path.GetFileName(filename),
EventType = eventType,
Progress = ProgressType.Indeterminate,
Body = new
{
Title = $"Scanning {libraryName}",
Subtitle = filename,
Filename = filename,
EventTime = DateTime.Now,
}
};
}
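
Presumably the per-file scan loop in ParseScannedFiles (which now receives IEventHub, per the ScanFiles change earlier in this diff) emits this event wrapped in NotificationProgress. A hedged sketch of that call site, not the actual implementation; filePath and libraryName are placeholder variables:

    // Inside the per-file loop of the scan (illustrative call site)
    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
        MessageFactory.FileScanProgressEvent(filePath, libraryName, ProgressEventType.Updated));
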
public static SignalRMessage DbUpdateProgressEvent(Series series, string eventType)
{
// TODO: I want this as a detail of a Scanning Series and we can put more information like Volume or Chapter here
return new SignalRMessage()
{
Name = ScanProgress,
Title = $"Scanning {series.Library.Name}",
SubTitle = series.Name,
EventType = eventType,
Progress = ProgressType.Indeterminate,
Body = new
{
Title = "Updating Series",
SubTitle = series.Name
}
};
}
public static SignalRMessage LibraryScanProgressEvent(string libraryName, string eventType, string seriesName = "")
{
// TODO: I want this as a detail of a Scanning Series and we can put more information like Volume or Chapter here
return new SignalRMessage()
{
Name = ScanProgress,
Title = $"Scanning {libraryName}",
SubTitle = seriesName,
EventType = eventType,
Progress = ProgressType.Indeterminate,
Body = null
};
}
public static SignalRMessage CoverUpdateEvent(int id, string entityType) public static SignalRMessage CoverUpdateEvent(int id, string entityType)
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.CoverUpdate, Name = CoverUpdate,
Title = "Updating Cover",
//SubTitle = series.Name, // TODO: Refactor this
Progress = ProgressType.None,
Body = new Body = new
{ {
Id = id, Id = id,
@ -162,17 +318,18 @@ namespace API.SignalR
}; };
} }
public static SignalRMessage SiteThemeProgressEvent(int themeIteratedCount, int totalThemesToIterate, string themeName, float progress) public static SignalRMessage SiteThemeProgressEvent(string subtitle, string themeName, string eventType)
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.SiteThemeProgress, Name = SiteThemeProgress,
Title = "Scanning Site Theme",
SubTitle = subtitle,
EventType = eventType,
Progress = ProgressType.Indeterminate,
Body = new Body = new
{ {
TotalUpdates = totalThemesToIterate,
CurrentCount = themeIteratedCount,
ThemeName = themeName, ThemeName = themeName,
Progress = progress
} }
}; };
} }

@ -1,6 +1,7 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.Extensions; using API.Extensions;
using API.SignalR.Presence; using API.SignalR.Presence;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
@ -36,6 +37,7 @@ namespace API.SignalR
public override async Task OnConnectedAsync() public override async Task OnConnectedAsync()
{ {
lock (Connections) lock (Connections)
{ {
Connections.Add(Context.ConnectionId); Connections.Add(Context.ConnectionId);
@ -44,7 +46,7 @@ namespace API.SignalR
await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId); await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers(); var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync(SignalREvents.OnlineUsers, currentUsers); await Clients.All.SendAsync(MessageFactory.OnlineUsers, currentUsers);
await base.OnConnectedAsync(); await base.OnConnectedAsync();
@ -60,7 +62,7 @@ namespace API.SignalR
await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId); await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers(); var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync(SignalREvents.OnlineUsers, currentUsers); await Clients.All.SendAsync(MessageFactory.OnlineUsers, currentUsers);
await base.OnDisconnectedAsync(exception); await base.OnDisconnectedAsync(exception);

@ -15,13 +15,20 @@ namespace API.SignalR.Presence
} }
internal class ConnectionDetail
{
public List<string> ConnectionIds { get; set; }
public bool IsAdmin { get; set; }
}
// TODO: This can respond to UserRoleUpdate events to handle online users
/// <summary> /// <summary>
/// This is a singleton service for tracking what users have a SignalR connection and their difference connectionIds /// This is a singleton service for tracking what users have a SignalR connection and their difference connectionIds
/// </summary> /// </summary>
public class PresenceTracker : IPresenceTracker public class PresenceTracker : IPresenceTracker
{ {
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private static readonly Dictionary<string, List<string>> OnlineUsers = new Dictionary<string, List<string>>(); private static readonly Dictionary<string, ConnectionDetail> OnlineUsers = new Dictionary<string, ConnectionDetail>();
public PresenceTracker(IUnitOfWork unitOfWork) public PresenceTracker(IUnitOfWork unitOfWork)
{ {
@ -30,20 +37,25 @@ namespace API.SignalR.Presence
public async Task UserConnected(string username, string connectionId) public async Task UserConnected(string username, string connectionId)
{ {
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username);
var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user);
lock (OnlineUsers) lock (OnlineUsers)
{ {
if (OnlineUsers.ContainsKey(username)) if (OnlineUsers.ContainsKey(username))
{ {
OnlineUsers[username].Add(connectionId); OnlineUsers[username].ConnectionIds.Add(connectionId);
} }
else else
{ {
OnlineUsers.Add(username, new List<string>() { connectionId }); OnlineUsers.Add(username, new ConnectionDetail()
{
ConnectionIds = new List<string>() {connectionId},
IsAdmin = isAdmin
});
} }
} }
// Update the last active for the user // Update the last active for the user
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username);
user.LastActive = DateTime.Now; user.LastActive = DateTime.Now;
await _unitOfWork.CommitAsync(); await _unitOfWork.CommitAsync();
} }
@ -54,9 +66,9 @@ namespace API.SignalR.Presence
{ {
if (!OnlineUsers.ContainsKey(username)) return Task.CompletedTask; if (!OnlineUsers.ContainsKey(username)) return Task.CompletedTask;
OnlineUsers[username].Remove(connectionId); OnlineUsers[username].ConnectionIds.Remove(connectionId);
if (OnlineUsers[username].Count == 0) if (OnlineUsers[username].ConnectionIds.Count == 0)
{ {
OnlineUsers.Remove(username); OnlineUsers.Remove(username);
} }
@ -75,18 +87,16 @@ namespace API.SignalR.Presence
return Task.FromResult(onlineUsers); return Task.FromResult(onlineUsers);
} }
public async Task<string[]> GetOnlineAdmins() public Task<string[]> GetOnlineAdmins()
{ {
string[] onlineUsers; string[] onlineUsers;
lock (OnlineUsers) lock (OnlineUsers)
{ {
onlineUsers = OnlineUsers.OrderBy(k => k.Key).Select(k => k.Key).ToArray(); onlineUsers = OnlineUsers.Where(pair => pair.Value.IsAdmin).OrderBy(k => k.Key).Select(k => k.Key).ToArray();
} }
var admins = await _unitOfWork.UserRepository.GetAdminUsersAsync();
var result = admins.Select(a => a.UserName).Intersect(onlineUsers).ToArray();
return result; return Task.FromResult(onlineUsers);
} }
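
The TODO at the top of this hunk suggests the tracker could react to role-change events. A minimal sketch of such a hook (hypothetical method, not in this changeset) that would keep ConnectionDetail.IsAdmin current without forcing users to reconnect:

    public Task UpdateUserRole(string username, bool isAdmin)
    {
        lock (OnlineUsers)
        {
            if (OnlineUsers.TryGetValue(username, out var detail))
            {
                detail.IsAdmin = isAdmin;
            }
        }
        return Task.CompletedTask;
    }
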
public Task<List<string>> GetConnectionsForUser(string username) public Task<List<string>> GetConnectionsForUser(string username)
@ -94,7 +104,7 @@ namespace API.SignalR.Presence
List<string> connectionIds; List<string> connectionIds;
lock (OnlineUsers) lock (OnlineUsers)
{ {
connectionIds = OnlineUsers.GetValueOrDefault(username); connectionIds = OnlineUsers.GetValueOrDefault(username)?.ConnectionIds;
} }
return Task.FromResult(connectionIds); return Task.FromResult(connectionIds);

@ -0,0 +1,17 @@
namespace API.SignalR;
public static class ProgressEventType
{
public const string Started = "started";
public const string Updated = "updated";
/// <summary>
/// End of the update chain
/// </summary>
public const string Ended = "ended";
/// <summary>
/// Represents a single update
/// </summary>
public const string Single = "single";
}
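
The factory methods earlier in this diff (BackupDatabaseProgressEvent, CleanupProgressEvent) map a float to a phase with a switch on 0f/1f and string literals; written against these constants, that mapping would look like the following hypothetical helper (GetProgressEventType is an illustrative name, not part of this change):

    public static string GetProgressEventType(float progress) => progress switch
    {
        0f => ProgressEventType.Started,
        1f => ProgressEventType.Ended,
        _ => ProgressEventType.Updated
    };
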

@ -0,0 +1,21 @@
namespace API.SignalR;
/// <summary>
/// How progress should be represented on the UI
/// </summary>
public static class ProgressType
{
/// <summary>
/// Progress scales from 0F -> 1F
/// </summary>
public const string Determinate = "determinate";
/// <summary>
/// Progress has no understanding of quantity
/// </summary>
public const string Indeterminate = "indeterminate";
/// <summary>
/// No progress component to the event
/// </summary>
public const string None = "";
}

@ -1,63 +0,0 @@
namespace API.SignalR
{
public static class SignalREvents
{
/// <summary>
/// An update is available for the Kavita instance
/// </summary>
public const string UpdateAvailable = "UpdateAvailable";
/// <summary>
/// Used to tell when a scan series completes
/// </summary>
public const string ScanSeries = "ScanSeries";
/// <summary>
/// Event sent out during Refresh Metadata for progress tracking
/// </summary>
public const string RefreshMetadataProgress = "RefreshMetadataProgress";
/// <summary>
/// Series is added to server
/// </summary>
public const string SeriesAdded = "SeriesAdded";
/// <summary>
/// Series is removed from server
/// </summary>
public const string SeriesRemoved = "SeriesRemoved";
/// <summary>
/// Progress event for Scan library
/// </summary>
public const string ScanLibraryProgress = "ScanLibraryProgress";
/// <summary>
/// When a user is connects/disconnects from server
/// </summary>
public const string OnlineUsers = "OnlineUsers";
/// <summary>
/// When a series is added to a collection
/// </summary>
public const string SeriesAddedToCollection = "SeriesAddedToCollection";
/// <summary>
/// When an error occurs during a scan library task
/// </summary>
public const string ScanLibraryError = "ScanLibraryError";
/// <summary>
/// Event sent out during backing up the database
/// </summary>
public const string BackupDatabaseProgress = "BackupDatabaseProgress";
/// <summary>
/// Event sent out during cleaning up temp and cache folders
/// </summary>
public const string CleanupProgress = "CleanupProgress";
/// <summary>
/// Event sent out during downloading of files
/// </summary>
public const string DownloadProgress = "DownloadProgress";
/// <summary>
/// A cover was updated
/// </summary>
public const string CoverUpdate = "CoverUpdate";
/// <summary>
/// A custom site theme was removed or added
/// </summary>
public const string SiteThemeProgress = "SiteThemeProgress";
}
}

@ -1,14 +1,39 @@
namespace API.SignalR using System;
namespace API.SignalR
{ {
/// <summary> /// <summary>
/// Payload for SignalR messages to Frontend /// Payload for SignalR messages to Frontend
/// </summary> /// </summary>
public class SignalRMessage public class SignalRMessage
{ {
/// <summary>
/// Body of the event type
/// </summary>
public object Body { get; set; } public object Body { get; set; }
public string Name { get; set; } public string Name { get; set; }
/// <summary>
//[JsonIgnore] /// User friendly Title of the Event
//public ModelAction Action { get; set; } // This will be for when we add new flows /// </summary>
/// <example>Scanning Manga</example>
public string Title { get; set; } = string.Empty;
/// <summary>
/// User friendly subtitle. Should have extra info
/// </summary>
/// <example>C:/manga/Accel World V01.cbz</example>
public string SubTitle { get; set; } = string.Empty;
/// <summary>
/// Represents what this represents. started | updated | ended | single
/// <see cref="ProgressEventType"/>
/// </summary>
public string EventType { get; set; } = ProgressEventType.Updated;
/// <summary>
/// How should progress be represented. If Determinate, the Body MUST have a Progress float on it.
/// </summary>
public string Progress { get; set; } = ProgressType.None;
/// <summary>
/// When event took place
/// </summary>
public DateTime EventTime = DateTime.Now;
} }
} }
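
As the XML docs above state, a Determinate message must carry a Progress float (0F to 1F) in its Body. A minimal example honoring that contract, modeled on the BackupDatabaseProgressEvent factory earlier in this diff; the subtitle and progress values are illustrative:

    var message = new SignalRMessage
    {
        Name = "BackupDatabaseProgress",
        Title = "Backing up Database",
        SubTitle = "kavita.db",
        EventType = ProgressEventType.Updated,
        Progress = ProgressType.Determinate,
        Body = new
        {
            Progress = 0.45F // required whenever Progress is Determinate
        }
    };
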

@ -18,6 +18,7 @@ using Hangfire;
using Hangfire.MemoryStorage; using Hangfire.MemoryStorage;
using Kavita.Common; using Kavita.Common;
using Kavita.Common.EnvironmentInfo; using Kavita.Common.EnvironmentInfo;
using MediatR;
using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Http;
@ -129,6 +130,8 @@ namespace API
// Add IHostedService for startup tasks // Add IHostedService for startup tasks
// Any services that should be bootstrapped go here // Any services that should be bootstrapped go here
services.AddHostedService<StartupTasksHostedService>(); services.AddHostedService<StartupTasksHostedService>();
services.AddMediatR(typeof(Startup));
} }
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline. // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.

@ -0,0 +1,12 @@
/**
* Represents a file being scanned during a Library Scan
*/
export interface FileScanProgressEvent {
// libraryId: number;
// libraryName: string;
// fileName: string;
title: string;
subtitle: string;
eventTime: string;
}

@ -0,0 +1,40 @@
export interface NotificationContainer<T> {
/**
* Represents underlying type of event
*/
type: string;
/**
* How many events are in this object
*/
size: number;
events: Array<T>;
}
export interface ActivityNotification {
type: string; // library.update.section
/**
* If this notification has some sort of cancellable operation
*/
cancellable: boolean;
userId: number;
/**
* Main action title ie) Scanning LIBRARY_NAME
*/
title: string;
/**
* Detail information about action. ie) Series Name
*/
subtitle: string;
/**
* Progress of this action [0-100]
*/
progress: number;
/**
* Any additional context backend needs to send to UI
*/
context: {
libraryId: number;
};
}

@ -0,0 +1,30 @@
export interface NotificationProgressEvent {
/**
* Payload of the event subtype
*/
body: any;
/**
* Subtype event
*/
name: string;
/**
* Title to display in events widget
*/
title: string;
/**
* Optional subtitle to display. Defaults to empty string
*/
subTitle: string;
/**
* Type of event. Helps events widget to understand how to handle said event
*/
eventType: 'single' | 'started' | 'updated' | 'ended';
/**
* Type of progress. Helps widget understand how to display spinner
*/
progress: 'none' | 'indeterminate' | 'determinate';
/**
* When event was sent
*/
eventTime: string;
}

@ -2,4 +2,10 @@ export interface ProgressEvent {
libraryId: number; libraryId: number;
progress: number; progress: number;
eventTime: string; eventTime: string;
// New fields
/**
* Event type
*/
name: string;
} }

@ -1,7 +1,3 @@
export interface SiteThemeProgressEvent { export interface SiteThemeProgressEvent {
totalUpdates: number;
currentCount: number;
themeName: string; themeName: string;
progress: number;
eventTime: string;
} }

@ -1,19 +1,16 @@
import { EventEmitter, Injectable } from '@angular/core'; import { Injectable } from '@angular/core';
import { Router } from '@angular/router'; import { Router } from '@angular/router';
import { HubConnection, HubConnectionBuilder } from '@microsoft/signalr'; import { HubConnection, HubConnectionBuilder } from '@microsoft/signalr';
import { ToastrService } from 'ngx-toastr'; import { ToastrService } from 'ngx-toastr';
import { BehaviorSubject, ReplaySubject } from 'rxjs'; import { BehaviorSubject, ReplaySubject } from 'rxjs';
import { environment } from 'src/environments/environment'; import { environment } from 'src/environments/environment';
import { ProgressEvent } from '../_models/events/scan-library-progress-event'; import { NotificationProgressEvent } from '../_models/events/notification-progress-event';
import { ScanSeriesEvent } from '../_models/events/scan-series-event';
import { SeriesAddedEvent } from '../_models/events/series-added-event';
import { SiteThemeProgressEvent } from '../_models/events/site-theme-progress-event'; import { SiteThemeProgressEvent } from '../_models/events/site-theme-progress-event';
import { User } from '../_models/user'; import { User } from '../_models/user';
export enum EVENTS { export enum EVENTS {
UpdateAvailable = 'UpdateAvailable', UpdateAvailable = 'UpdateAvailable',
ScanSeries = 'ScanSeries', ScanSeries = 'ScanSeries',
RefreshMetadataProgress = 'RefreshMetadataProgress',
SeriesAdded = 'SeriesAdded', SeriesAdded = 'SeriesAdded',
SeriesRemoved = 'SeriesRemoved', SeriesRemoved = 'SeriesRemoved',
ScanLibraryProgress = 'ScanLibraryProgress', ScanLibraryProgress = 'ScanLibraryProgress',
@ -21,8 +18,22 @@ export enum EVENTS {
SeriesAddedToCollection = 'SeriesAddedToCollection', SeriesAddedToCollection = 'SeriesAddedToCollection',
ScanLibraryError = 'ScanLibraryError', ScanLibraryError = 'ScanLibraryError',
BackupDatabaseProgress = 'BackupDatabaseProgress', BackupDatabaseProgress = 'BackupDatabaseProgress',
/**
* A subtype of NotificationProgress that represents maintenance cleanup on server-owned resources
*/
CleanupProgress = 'CleanupProgress', CleanupProgress = 'CleanupProgress',
/**
   * A subtype of NotificationProgress that represents a user downloading a file or group of files
*/
DownloadProgress = 'DownloadProgress', DownloadProgress = 'DownloadProgress',
/**
* A generic progress event
*/
NotificationProgress = 'NotificationProgress',
/**
* A subtype of NotificationProgress that represents the underlying file being processed during a scan
*/
FileScanProgress = 'FileScanProgress',
/** /**
* A custom user site theme is added or removed during a scan * A custom user site theme is added or removed during a scan
*/ */
@ -30,7 +41,11 @@ export enum EVENTS {
/** /**
* A cover is updated * A cover is updated
*/ */
CoverUpdate = 'CoverUpdate' CoverUpdate = 'CoverUpdate',
/**
* A subtype of NotificationProgress that represents a file being processed for cover image extraction
*/
CoverUpdateProgress = 'CoverUpdateProgress',
} }
export interface Message<T> { export interface Message<T> {
@ -38,6 +53,7 @@ export interface Message<T> {
payload: T; payload: T;
} }
@Injectable({ @Injectable({
providedIn: 'root' providedIn: 'root'
}) })
@ -46,19 +62,36 @@ export class MessageHubService {
private hubConnection!: HubConnection; private hubConnection!: HubConnection;
private messagesSource = new ReplaySubject<Message<any>>(1); private messagesSource = new ReplaySubject<Message<any>>(1);
-  public messages$ = this.messagesSource.asObservable();
   private onlineUsersSource = new BehaviorSubject<string[]>([]);
-  onlineUsers$ = this.onlineUsersSource.asObservable();
-
-  public scanSeries: EventEmitter<ScanSeriesEvent> = new EventEmitter<ScanSeriesEvent>();
-  public scanLibrary: EventEmitter<ProgressEvent> = new EventEmitter<ProgressEvent>(); // TODO: Refactor this name to be generic
-  public seriesAdded: EventEmitter<SeriesAddedEvent> = new EventEmitter<SeriesAddedEvent>();
+  /**
+   * Any events that come from the backend
+   */
+  public messages$ = this.messagesSource.asObservable();
+  /**
+   * Users that are online
+   */
+  public onlineUsers$ = this.onlineUsersSource.asObservable();
isAdmin: boolean = false; isAdmin: boolean = false;
constructor(private toastr: ToastrService, private router: Router) { constructor(private toastr: ToastrService, private router: Router) {
}
/**
* Tests that an event is of the type passed
* @param event
* @param eventType
* @returns
*/
public isEventType(event: Message<any>, eventType: EVENTS) {
if (event.event == EVENTS.NotificationProgress) {
const notification = event.payload as NotificationProgressEvent;
return notification.eventType.toLowerCase() == eventType.toLowerCase();
}
return event.event === eventType;
} }
createHubConnection(user: User, isAdmin: boolean) { createHubConnection(user: User, isAdmin: boolean) {
@ -85,7 +118,6 @@ export class MessageHubService {
event: EVENTS.ScanSeries, event: EVENTS.ScanSeries,
payload: resp.body payload: resp.body
}); });
this.scanSeries.emit(resp.body);
}); });
this.hubConnection.on(EVENTS.ScanLibraryProgress, resp => { this.hubConnection.on(EVENTS.ScanLibraryProgress, resp => {
@ -93,34 +125,13 @@ export class MessageHubService {
event: EVENTS.ScanLibraryProgress, event: EVENTS.ScanLibraryProgress,
payload: resp.body payload: resp.body
}); });
this.scanLibrary.emit(resp.body);
}); });
this.hubConnection.on(EVENTS.BackupDatabaseProgress, resp => {
this.messagesSource.next({
event: EVENTS.BackupDatabaseProgress,
payload: resp.body
});
});
this.hubConnection.on(EVENTS.CleanupProgress, resp => { this.hubConnection.on(EVENTS.NotificationProgress, (resp: NotificationProgressEvent) => {
this.messagesSource.next({ this.messagesSource.next({
event: EVENTS.CleanupProgress, event: EVENTS.NotificationProgress,
payload: resp.body payload: resp
});
});
this.hubConnection.on(EVENTS.DownloadProgress, resp => {
this.messagesSource.next({
event: EVENTS.DownloadProgress,
payload: resp.body
});
});
this.hubConnection.on(EVENTS.RefreshMetadataProgress, resp => {
this.messagesSource.next({
event: EVENTS.RefreshMetadataProgress,
payload: resp.body
}); });
}); });
@ -144,6 +155,7 @@ export class MessageHubService {
payload: resp.body payload: resp.body
}); });
if (this.isAdmin) { if (this.isAdmin) {
// TODO: Just show the error, RBS is done in eventhub
this.toastr.error('Library Scan had a critical error. Some series were not saved. Check logs'); this.toastr.error('Library Scan had a critical error. Some series were not saved. Check logs');
} }
}); });
@ -153,7 +165,6 @@ export class MessageHubService {
event: EVENTS.SeriesAdded, event: EVENTS.SeriesAdded,
payload: resp.body payload: resp.body
}); });
this.seriesAdded.emit(resp.body);
}); });
this.hubConnection.on(EVENTS.SeriesRemoved, resp => { this.hubConnection.on(EVENTS.SeriesRemoved, resp => {
@ -163,14 +174,6 @@ export class MessageHubService {
}); });
}); });
// this.hubConnection.on(EVENTS.RefreshMetadata, resp => {
// this.messagesSource.next({
// event: EVENTS.RefreshMetadata,
// payload: resp.body
// });
// this.refreshMetadata.emit(resp.body); // TODO: Remove this
// });
this.hubConnection.on(EVENTS.CoverUpdate, resp => { this.hubConnection.on(EVENTS.CoverUpdate, resp => {
this.messagesSource.next({ this.messagesSource.next({
event: EVENTS.CoverUpdate, event: EVENTS.CoverUpdate,
@ -195,5 +198,5 @@ export class MessageHubService {
sendMessage(methodName: string, body?: any) { sendMessage(methodName: string, body?: any) {
return this.hubConnection.invoke(methodName, body); return this.hubConnection.invoke(methodName, body);
} }
} }

@@ -2,12 +2,13 @@ import { Component, OnDestroy, OnInit } from '@angular/core';
 import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
 import { ToastrService } from 'ngx-toastr';
 import { Subject } from 'rxjs';
-import { take, takeUntil } from 'rxjs/operators';
+import { take, takeUntil, takeWhile } from 'rxjs/operators';
 import { ConfirmService } from 'src/app/shared/confirm.service';
-import { ProgressEvent } from 'src/app/_models/events/scan-library-progress-event';
+import { NotificationProgressEvent } from 'src/app/_models/events/notification-progress-event';
+import { ProgressEvent } from 'src/app/_models/events/progress-event';
 import { Library, LibraryType } from 'src/app/_models/library';
 import { LibraryService } from 'src/app/_services/library.service';
-import { EVENTS, MessageHubService } from 'src/app/_services/message-hub.service';
+import { EVENTS, Message, MessageHubService } from 'src/app/_services/message-hub.service';
 import { LibraryEditorModalComponent } from '../_modals/library-editor-modal/library-editor-modal.component';

 @Component({
@@ -37,18 +38,20 @@ export class ManageLibraryComponent implements OnInit, OnDestroy {
     this.getLibraries();

     // when a progress event comes in, show it on the UI next to library
-    this.hubService.messages$.pipe(takeUntil(this.onDestroy)).subscribe((event) => {
-      if (event.event !== EVENTS.ScanLibraryProgress) return;
+    this.hubService.messages$.pipe(takeUntil(this.onDestroy), takeWhile(event => event.event === EVENTS.NotificationProgress))
+      .subscribe((event: Message<NotificationProgressEvent>) => {
+      if (event.event !== EVENTS.NotificationProgress && (event.payload as NotificationProgressEvent).name === EVENTS.ScanSeries) return;
       console.log('scan event: ', event.payload);
-      const scanEvent = event.payload as ProgressEvent;
+      // TODO: Refactor this to use EventyType on NotificationProgress interface rather than float comparison
+      const scanEvent = event.payload.body as ProgressEvent;
       this.scanInProgress[scanEvent.libraryId] = {progress: scanEvent.progress !== 1};
       if (scanEvent.progress === 0) {
         this.scanInProgress[scanEvent.libraryId].timestamp = scanEvent.eventTime;
       }

-      if (this.scanInProgress[scanEvent.libraryId].progress === false && scanEvent.progress === 1) {
+      if (this.scanInProgress[scanEvent.libraryId].progress === false && (scanEvent.progress === 1 || event.payload.eventType === 'ended')) {
         this.libraryService.getLibraries().pipe(take(1)).subscribe(libraries => {
           const newLibrary = libraries.find(lib => lib.id === scanEvent.libraryId);
           const existingLibrary = this.libraries.find(lib => lib.id === scanEvent.libraryId);
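A sketch of the NotificationProgressEvent shape implied by the usages above and in the events widget below; the field list is inferred from this diff, so treat it as an approximation of the model file rather than a copy of it:

    // Inferred shape; names come from how this diff reads the payload.
    export interface NotificationProgressEvent {
      name: string;       // the underlying event, e.g. EVENTS.ScanLibraryProgress
      title: string;      // heading rendered in the events widget
      subTitle: string;   // e.g. the file currently being processed
      eventType: 'single' | 'started' | 'updated' | 'ended';
      progress: string;   // 'none' | 'indeterminate', or a determinate mode whose value lives in body.progress
      body: any;          // original payload, e.g. a ProgressEvent with libraryId/progress/eventTime
    }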

View File

@@ -13,7 +13,7 @@ import { Series } from '../_models/series';
 import { FilterEvent, SeriesFilter } from '../_models/series-filter';
 import { ActionItem, Action } from '../_services/action-factory.service';
 import { ActionService } from '../_services/action.service';
-import { MessageHubService } from '../_services/message-hub.service';
+import { EVENTS, Message, MessageHubService } from '../_services/message-hub.service';
 import { SeriesService } from '../_services/series.service';

 @Component({
@@ -82,7 +82,8 @@ export class AllSeriesComponent implements OnInit, OnDestroy {
   }

   ngOnInit(): void {
-    this.hubService.seriesAdded.pipe(debounceTime(6000), takeUntil(this.onDestroy)).subscribe((event: SeriesAddedEvent) => {
+    this.hubService.messages$.pipe(debounceTime(6000), takeUntil(this.onDestroy)).subscribe((event: Message<any>) => {
+      if (event.event !== EVENTS.SeriesAdded) return;
       this.loadPage();
     });
   }

View File

@@ -108,7 +108,7 @@ export class CollectionDetailComponent implements OnInit, OnDestroy {
   ngOnInit(): void {
     this.collectionTagActions = this.actionFactoryService.getCollectionTagActions(this.handleCollectionActionCallback.bind(this));

-    this.messageHub.messages$.pipe(takeWhile(event => event.event === EVENTS.SeriesAddedToCollection), takeUntil(this.onDestory), debounceTime(2000)).subscribe(event => {
+    this.messageHub.messages$.pipe(takeUntil(this.onDestory), debounceTime(2000)).subscribe(event => {
       if (event.event == EVENTS.SeriesAddedToCollection) {
         const collectionEvent = event.payload as SeriesAddedToCollectionEvent;
         if (collectionEvent.tagId === this.collectionTag.id) {

View File

@@ -14,7 +14,7 @@ import { FilterEvent, SeriesFilter } from '../_models/series-filter';
 import { Action, ActionFactoryService, ActionItem } from '../_services/action-factory.service';
 import { ActionService } from '../_services/action.service';
 import { LibraryService } from '../_services/library.service';
-import { MessageHubService } from '../_services/message-hub.service';
+import { EVENTS, MessageHubService } from '../_services/message-hub.service';
 import { SeriesService } from '../_services/series.service';

 @Component({
@@ -92,12 +92,13 @@ export class LibraryDetailComponent implements OnInit, OnDestroy {
     [this.filterSettings.presets, this.filterSettings.openByDefault] = this.utilityService.filterPresetsFromUrl(this.route.snapshot, this.seriesService.createSeriesFilter());
     this.filterSettings.presets.libraries = [this.libraryId];
-    //this.loadPage();
   }

   ngOnInit(): void {
-    this.hubService.seriesAdded.pipe(takeWhile(event => event.libraryId === this.libraryId), debounceTime(6000), takeUntil(this.onDestroy)).subscribe((event: SeriesAddedEvent) => {
+    this.hubService.messages$.pipe(debounceTime(6000), takeUntil(this.onDestroy)).subscribe((event) => {
+      if (event.event !== EVENTS.SeriesAdded) return;
+      const seriesAdded = event.payload as SeriesAddedEvent;
+      if (seriesAdded.libraryId !== this.libraryId) return;
       this.loadPage();
     });
   }
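The same filter-then-cast pattern now appears in AllSeriesComponent, LibraryDetailComponent and RecentlyAddedComponent. A small operator along these lines (hypothetical, not part of this commit) could collapse the repetition; it only assumes the Message envelope and EVENTS constants already shown above:

    import { OperatorFunction } from 'rxjs';
    import { filter, map } from 'rxjs/operators';
    import { Message } from 'src/app/_services/message-hub.service';

    // Hypothetical helper: narrow messages$ to one event type and unwrap the payload.
    export function onEvent<T>(eventName: string): OperatorFunction<Message<any>, T> {
      return source => source.pipe(
        filter(message => message.event === eventName),
        map(message => message.payload as T)
      );
    }

    // Usage sketch inside LibraryDetailComponent.ngOnInit:
    // this.hubService.messages$.pipe(onEvent<SeriesAddedEvent>(EVENTS.SeriesAdded),
    //   debounceTime(6000), takeUntil(this.onDestroy))
    //   .subscribe(added => { if (added.libraryId === this.libraryId) this.loadPage(); });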

View File

@@ -1,25 +1,90 @@
 <ng-container *ngIf="isAdmin">
-  <button type="button" class="btn btn-icon {{(progressEventsSource.getValue().length > 0 || updateAvailable) ? 'colored' : ''}}"
+  <button type="button" class="btn btn-icon {{activeEvents > 0 ? 'colored' : ''}}"
     [ngbPopover]="popContent" title="Activity" placement="bottom" [popoverClass]="'nav-events'">
     <i aria-hidden="true" class="fa fa-wave-square nav"></i>
   </button>
   <ng-template #popContent>
     <ul class="list-group list-group-flush dark-menu">
-      <li class="list-group-item dark-menu-item" *ngFor="let event of progressEvents$ | async">
-        <div class="spinner-border text-primary small-spinner"
-             role="status" title="Started at {{event.timestamp | date: 'short'}}"
-             attr.aria-valuetext="{{prettyPrintProgress(event.progress)}}%" [attr.aria-valuenow]="prettyPrintProgress(event.progress)">
-          <span class="visually-hidden">Scan for {{event.libraryName}} in progress</span>
-        </div>
-        {{prettyPrintProgress(event.progress)}}%
-        {{prettyPrintEvent(event.eventType, event)}} {{event.libraryName}}
-      </li>
-      <li class="list-group-item dark-menu-item" *ngIf="progressEventsSource.getValue().length === 0 && !updateAvailable">Not much going on here</li>
-      <li class="list-group-item dark-menu-item update-available" *ngIf="updateAvailable" (click)="handleUpdateAvailableClick()">
-        <i class="fa fa-chevron-circle-up" aria-hidden="true"></i>&nbsp;Update available
-      </li>
+      <ng-container *ngIf="debugMode">
+        <li class="list-group-item dark-menu-item">
+          <!-- <div class="spinner-grow text-primary small-spinner" role="status"></div> -->
+          <div class="h6 mb-1">Title goes here</div>
+          <div class="accent-text mb-1">Subtitle goes here</div>
+          <div class="progress-container row g-0 align-items-center">
+            <div class="progress" style="height: 5px;">
+              <div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" style="width: 100%" [attr.aria-valuenow]="100" aria-valuemin="0" aria-valuemax="100"></div>
+            </div>
+          </div>
+        </li>
+        <li class="list-group-item dark-menu-item">
+          <div class="h6 mb-1">Title goes here</div>
+          <div class="accent-text mb-1">Subtitle goes here</div>
+        </li>
+        <li class="list-group-item dark-menu-item">
+          <div>
+            <div class="h6 mb-1">Scanning Books</div>
+            <div class="accent-text mb-1">E:\\Books\\Demon King Daimaou\\Demon King Daimaou - Volume 11.epub</div>
+            <div class="progress-container row g-0 align-items-center">
+              <div class="col-2">{{prettyPrintProgress(0.1)}}%</div>
+              <div class="col-10 progress" style="height: 5px;">
+                <div class="progress-bar" role="progressbar" [ngStyle]="{'width': 0.1 * 100 + '%'}" [attr.aria-valuenow]="0.1 * 100" aria-valuemin="0" aria-valuemax="100"></div>
+              </div>
+            </div>
+          </div>
+        </li>
+      </ng-container>
+
+      <!-- Progress Events-->
+      <ng-container *ngIf="progressEvents$ | async as progressUpdates">
+        <ng-container *ngFor="let message of progressUpdates">
+          <li class="list-group-item dark-menu-item" *ngIf="message.progress === 'indeterminate' || message.progress === 'none'; else progressEvent">
+            <div class="h6 mb-1">{{message.title}}</div>
+            <div class="accent-text mb-1" *ngIf="message.subTitle !== ''">{{message.subTitle}}</div>
+            <div class="progress-container row g-0 align-items-center">
+              <div class="progress" style="height: 5px;" *ngIf="message.progress === 'indeterminate'">
+                <div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" style="width: 100%" [attr.aria-valuenow]="100" aria-valuemin="0" aria-valuemax="100"></div>
+              </div>
+            </div>
+          </li>
+          <ng-template #progressEvent>
+            <li class="list-group-item dark-menu-item">
+              <div class="h6 mb-1">{{message.title}}</div>
+              <div class="accent-text mb-1" *ngIf="message.subTitle !== ''">{{message.subTitle}}</div>
+              <div class="progress-container row g-0 align-items-center">
+                <div class="col-2">{{prettyPrintProgress(message.body.progress) + '%'}}</div>
+                <div class="col-10 progress" style="height: 5px;">
+                  <div class="progress-bar" role="progressbar" [ngStyle]="{'width': message.body.progress * 100 + '%'}" [attr.aria-valuenow]="message.body.progress * 100" aria-valuemin="0" aria-valuemax="100"></div>
+                </div>
+              </div>
+            </li>
+          </ng-template>
+        </ng-container>
+      </ng-container>
+
+      <!-- Single updates (Informational/Update available)-->
+      <ng-container *ngIf="singleUpdates$ | async as singleUpdates">
+        <ng-container *ngFor="let singleUpdate of singleUpdates">
+          <li class="list-group-item dark-menu-item update-available" *ngIf="singleUpdate.name === EVENTS.UpdateAvailable" (click)="handleUpdateAvailableClick(singleUpdate)">
+            <i class="fa fa-chevron-circle-up" aria-hidden="true"></i>&nbsp;Update available
+          </li>
+          <li class="list-group-item dark-menu-item update-available" *ngIf="singleUpdate.name !== EVENTS.UpdateAvailable">
+            <div>{{singleUpdate.title}}</div>
+            <div class="accent-text" *ngIf="singleUpdate.subTitle !== ''">{{singleUpdate.subTitle}}</div>
+          </li>
+        </ng-container>
+      </ng-container>
+
+      <!-- Online Users -->
+      <ng-container *ngIf="messageHub.onlineUsers$ | async as onlineUsers">
+        <li class="list-group-item dark-menu-item" *ngIf="onlineUsers.length > 1">
+          <div>{{onlineUsers.length}} Users online</div>
+        </li>
+        <li class="list-group-item dark-menu-item" *ngIf="activeEvents < 1 && onlineUsers.length <= 1">Not much going on here</li>
+      </ng-container>
     </ul>
   </ng-template>
 </ng-container>

View File

@@ -8,7 +8,7 @@
 .dark-menu-item {
   color: var(--body-text-color);
   background-color: rgb(1, 4, 9);
-  border-color: rgba(1, 4, 9, 0.5);
+  border-color: rgba(53, 53, 53, 0.5);
 }

 // Popovers need to be their own component
@@ -16,17 +16,37 @@
   border-bottom-color: transparent;
 }

-.nav-events {
+::ng-deep .nav-events {
+  background-color: var(--navbar-bg-color);
+
+  .popover-body {
+    min-width: 250px;
+    max-width: 250px;
+    padding: 0px;
+    box-shadow: 0px 0px 12px rgb(0 0 0 / 75%);
+  }
+
+  .popover {
+    min-width: 300px;
+  }
 }

-// .nav-events {
-//   background-color: white;
-// }
+.progress-container {
+  width: 100%;
+}
+
+.progress {
+  padding: 0;
+}
+
+.accent-text {
+  width: 100%;
+  text-overflow: ellipsis;
+  overflow:hidden;
+  white-space:nowrap;
+}

 .btn:focus, .btn:hover {
-  box-shadow: 0 0 0 0.1rem rgba(255, 255, 255, 1); // TODO: Used in nav as well, move to dark for btn-icon focus
+  box-shadow: 0 0 0 0.1rem var(--navbar-btn-hover-outline-color);
 }

 .small-spinner {
@@ -36,9 +56,6 @@
-.nav-events .popover-body {
-  padding: 0px;
-}

 .btn-icon {
   color: white;

View File

@@ -1,25 +1,16 @@
-import { Component, Input, OnDestroy, OnInit } from '@angular/core';
+import { Component, Input, OnDestroy, OnInit, ViewChild } from '@angular/core';
 import { NgbModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap';
 import { BehaviorSubject, Subject } from 'rxjs';
 import { takeUntil } from 'rxjs/operators';
 import { UpdateNotificationModalComponent } from '../shared/update-notification/update-notification-modal.component';
-import { ProgressEvent } from '../_models/events/scan-library-progress-event';
+import { NotificationProgressEvent } from '../_models/events/notification-progress-event';
+import { UpdateVersionEvent } from '../_models/events/update-version-event';
 import { User } from '../_models/user';
 import { AccountService } from '../_services/account.service';
-import { LibraryService } from '../_services/library.service';
 import { EVENTS, Message, MessageHubService } from '../_services/message-hub.service';

-interface ProcessedEvent {
-  eventType: string;
-  timestamp?: string;
-  progress: number;
-  libraryId: number;
-  libraryName: string;
-}
-
-type ProgressType = EVENTS.ScanLibraryProgress | EVENTS.RefreshMetadataProgress | EVENTS.BackupDatabaseProgress | EVENTS.CleanupProgress;
-
-const acceptedEvents = [EVENTS.ScanLibraryProgress, EVENTS.RefreshMetadataProgress, EVENTS.BackupDatabaseProgress, EVENTS.CleanupProgress, EVENTS.DownloadProgress, EVENTS.SiteThemeProgress];
-
 // TODO: Rename this to events widget
 @Component({
@@ -28,37 +19,48 @@ const acceptedEvents = [EVENTS.ScanLibraryProgress, EVENTS.RefreshMetadataProgre
   styleUrls: ['./nav-events-toggle.component.scss']
 })
 export class NavEventsToggleComponent implements OnInit, OnDestroy {

   @Input() user!: User;
   isAdmin: boolean = false;

   private readonly onDestroy = new Subject<void>();

   /**
-   * Events that come through and are merged (ie progress event gets merged into a progress event)
+   * Progress events (Event Type: 'started', 'ended', 'updated' that have progress property)
    */
-  progressEventsSource = new BehaviorSubject<ProcessedEvent[]>([]);
+  progressEventsSource = new BehaviorSubject<NotificationProgressEvent[]>([]);
   progressEvents$ = this.progressEventsSource.asObservable();

-  updateAvailable: boolean = false;
-  updateBody: any;
+  singleUpdateSource = new BehaviorSubject<NotificationProgressEvent[]>([]);
+  singleUpdates$ = this.singleUpdateSource.asObservable();
   private updateNotificationModalRef: NgbModalRef | null = null;

-  constructor(private messageHub: MessageHubService, private libraryService: LibraryService, private modalService: NgbModal, private accountService: AccountService) { }
+  activeEvents: number = 0;
+
+  debugMode: boolean = false;
+
+  get EVENTS() {
+    return EVENTS;
+  }
+
+  constructor(public messageHub: MessageHubService, private modalService: NgbModal, private accountService: AccountService) { }

   ngOnDestroy(): void {
     this.onDestroy.next();
     this.onDestroy.complete();
     this.progressEventsSource.complete();
+    this.singleUpdateSource.complete();
   }

   ngOnInit(): void {
+    // Debounce for testing. Kavita's too fast
     this.messageHub.messages$.pipe(takeUntil(this.onDestroy)).subscribe(event => {
-      if (acceptedEvents.includes(event.event)) {
-        this.processProgressEvent(event, event.event);
-      } else if (event.event === EVENTS.UpdateAvailable) {
-        this.updateAvailable = true;
-        this.updateBody = event.payload;
+      if (event.event.endsWith('error')) {
+        // TODO: Show an error handle
+      } else if (event.event === EVENTS.NotificationProgress) {
+        this.processNotificationProgressEvent(event);
       }
     });
     this.accountService.currentUser$.pipe(takeUntil(this.onDestroy)).subscribe(user => {
@@ -70,32 +72,49 @@ export class NavEventsToggleComponent implements OnInit, OnDestroy {
     });
   }

-  processProgressEvent(event: Message<ProgressEvent>, eventType: string) {
-    const scanEvent = event.payload as ProgressEvent;
-
-    this.libraryService.getLibraryNames().subscribe(names => {
-      const data = this.progressEventsSource.getValue();
-      const index = data.findIndex(item => item.eventType === eventType && item.libraryId === event.payload.libraryId);
-      if (index >= 0) {
-        data.splice(index, 1);
-      }
-
-      if (scanEvent.progress !== 1) {
-        const libraryName = names[scanEvent.libraryId] || '';
-        const newEvent = {eventType: eventType, timestamp: scanEvent.eventTime, progress: scanEvent.progress, libraryId: scanEvent.libraryId, libraryName, rawBody: event.payload};
-        data.push(newEvent);
-      }
-
-      this.progressEventsSource.next(data);
-    });
-  }
+  processNotificationProgressEvent(event: Message<NotificationProgressEvent>) {
+    const message = event.payload as NotificationProgressEvent;
+    let data;
+
+    switch (event.payload.eventType) {
+      case 'single':
+        const values = this.singleUpdateSource.getValue();
+        values.push(message);
+        this.singleUpdateSource.next(values);
+        this.activeEvents += 1;
+        break;
+      case 'started':
+        data = this.progressEventsSource.getValue();
+        data.push(message);
+        this.progressEventsSource.next(data);
+        this.activeEvents += 1;
+        break;
+      case 'updated':
+        data = this.progressEventsSource.getValue();
+        const index = data.findIndex(m => m.name === message.name);
+        if (index < 0) {
+          data.push(message);
+        } else {
+          data[index] = message;
+        }
+        this.progressEventsSource.next(data);
+        break;
+      case 'ended':
+        data = this.progressEventsSource.getValue();
+        data = data.filter(m => m.name !== message.name); // This does not work // && m.title !== message.title
+        this.progressEventsSource.next(data);
+        this.activeEvents = Math.max(this.activeEvents - 1, 0);
+        break;
+      default:
+        break;
+    }
+  }

-  handleUpdateAvailableClick() {
+  handleUpdateAvailableClick(message: NotificationProgressEvent) {
     if (this.updateNotificationModalRef != null) { return; }
     this.updateNotificationModalRef = this.modalService.open(UpdateNotificationModalComponent, { scrollable: true, size: 'lg' });
-    this.updateNotificationModalRef.componentInstance.updateData = this.updateBody;
+    this.updateNotificationModalRef.componentInstance.updateData = message.body as UpdateVersionEvent;
     this.updateNotificationModalRef.closed.subscribe(() => {
       this.updateNotificationModalRef = null;
     });
@@ -107,16 +126,4 @@ export class NavEventsToggleComponent implements OnInit, OnDestroy {
   prettyPrintProgress(progress: number) {
     return Math.trunc(progress * 100);
   }
-
-  prettyPrintEvent(eventType: string, event: any) {
-    switch(eventType) {
-      case (EVENTS.ScanLibraryProgress): return 'Scanning ';
-      case (EVENTS.RefreshMetadataProgress): return 'Refreshing Covers for ';
-      case (EVENTS.CleanupProgress): return 'Clearing Cache';
-      case (EVENTS.BackupDatabaseProgress): return 'Backing up Database';
-      case (EVENTS.DownloadProgress): return event.rawBody.userName.charAt(0).toUpperCase() + event.rawBody.userName.substr(1) + ' is downloading ' + event.rawBody.downloadName;
-      default: return eventType;
-    }
-  }
 }
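To make the lifecycle handled by processNotificationProgressEvent concrete, here is an illustrative sequence for one library scan. It leans on the payload shape sketched earlier; the literal values, the widget instance, and the event name string are made up for the example, not taken from the server:

    // Illustrative only; widget is an instance of NavEventsToggleComponent.
    const started = {
      name: 'ScanLibraryProgress', title: 'Scanning Books', subTitle: '',
      eventType: 'started', progress: 'indeterminate', body: { progress: 0 }
    } as NotificationProgressEvent;

    widget.processNotificationProgressEvent({ event: EVENTS.NotificationProgress, payload: started });
    // activeEvents === 1; one entry appears in progressEvents$

    widget.processNotificationProgressEvent({
      event: EVENTS.NotificationProgress,
      payload: { ...started, eventType: 'updated', body: { progress: 0.5 } }
    });
    // still one entry: 'updated' replaces the item with the same name

    widget.processNotificationProgressEvent({
      event: EVENTS.NotificationProgress,
      payload: { ...started, eventType: 'ended' }
    });
    // entry removed and activeEvents drops back to 0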

View File

@@ -12,7 +12,7 @@ import { Series } from '../_models/series';
 import { FilterEvent, SeriesFilter } from '../_models/series-filter';
 import { Action } from '../_services/action-factory.service';
 import { ActionService } from '../_services/action.service';
-import { MessageHubService } from '../_services/message-hub.service';
+import { EVENTS, Message, MessageHubService } from '../_services/message-hub.service';
 import { SeriesService } from '../_services/series.service';

 /**
@@ -63,7 +63,10 @@ export class RecentlyAddedComponent implements OnInit, OnDestroy {
   }

   ngOnInit() {
-    this.hubService.seriesAdded.pipe(takeWhile(event => event.libraryId === this.libraryId), debounceTime(6000), takeUntil(this.onDestroy)).subscribe((event: SeriesAddedEvent) => {
+    this.hubService.messages$.pipe(debounceTime(6000), takeUntil(this.onDestroy)).subscribe((event) => {
+      if (event.event !== EVENTS.SeriesAdded) return;
+      const seriesAdded = event.payload as SeriesAddedEvent;
+      if (seriesAdded.libraryId !== this.libraryId) return;
       this.loadPage();
     });
   }

View File

@@ -4,7 +4,7 @@ import { ActivatedRoute, Router } from '@angular/router';
 import { NgbModal, NgbNavChangeEvent, NgbRatingConfig } from '@ng-bootstrap/ng-bootstrap';
 import { ToastrService } from 'ngx-toastr';
 import { forkJoin, Subject } from 'rxjs';
-import { finalize, take, takeUntil, takeWhile } from 'rxjs/operators';
+import { finalize, map, take, takeUntil, takeWhile } from 'rxjs/operators';
 import { BulkSelectionService } from '../cards/bulk-selection.service';
 import { CardDetailsModalComponent } from '../cards/_modals/card-details-modal/card-details-modal.component';
 import { EditSeriesModalComponent } from '../cards/_modals/edit-series-modal/edit-series-modal.component';
@@ -185,12 +185,13 @@ export class SeriesDetailComponent implements OnInit, OnDestroy {
       return;
     }

-    this.messageHub.scanSeries.pipe(takeUntil(this.onDestroy)).subscribe((event: ScanSeriesEvent) => {
-      if (event.seriesId == this.series.id)
-        this.loadSeries(seriesId);
-        this.seriesImage = this.imageService.randomize(this.imageService.getSeriesCoverImage(this.series.id));
-        this.toastr.success('Scan series completed');
-    });
+    // this.messageHub.messages$.pipe(takeUntil(this.onDestroy), takeWhile(e => this.messageHub.isEventType(e, EVENTS.ScanSeries))).subscribe((e) => {
+    //   const event = e.payload as ScanSeriesEvent;
+    //   if (event.seriesId == this.series.id)
+    //     this.loadSeries(seriesId);
+    //     this.seriesImage = this.imageService.randomize(this.imageService.getSeriesCoverImage(this.series.id));
+    //   this.toastr.success('Scan series completed');
+    // });

     this.messageHub.messages$.pipe(takeUntil(this.onDestroy)).subscribe(event => {
       if (event.event === EVENTS.SeriesRemoved) {
@@ -203,6 +204,7 @@ export class SeriesDetailComponent implements OnInit, OnDestroy {
         const seriesCoverUpdatedEvent = event.payload as ScanSeriesEvent;
         if (seriesCoverUpdatedEvent.seriesId === this.series.id) {
           this.loadSeries(seriesId);
+          this.seriesImage = this.imageService.randomize(this.imageService.getSeriesCoverImage(this.series.id)); // NOTE: Is this needed as cover update will update the image for us
         }
       }
     });

View File

@@ -56,7 +56,6 @@ export class ImageComponent implements OnChanges, OnDestroy {
         //...seriesId=123&random=
         const id = tokens[0].replace(enityType + 'Id=', '');
         if (id === (updateEvent.id + '')) {
-          console.log('Image url: ', this.imageUrl, ' matches update event: ', updateEvent);
           this.imageUrl = this.imageService.randomize(this.imageUrl);
         }
       }

View File

@@ -5,6 +5,7 @@ import { DomSanitizer } from '@angular/platform-browser';
 import { map, ReplaySubject, Subject, takeUntil } from 'rxjs';
 import { environment } from 'src/environments/environment';
 import { ConfirmService } from './shared/confirm.service';
+import { NotificationProgressEvent } from './_models/events/notification-progress-event';
 import { SiteThemeProgressEvent } from './_models/events/site-theme-progress-event';
 import { SiteTheme, ThemeProvider } from './_models/preferences/site-theme';
 import { EVENTS, MessageHubService } from './_services/message-hub.service';
@@ -41,10 +42,13 @@ export class ThemeService implements OnDestroy {
     this.getThemes();

     messageHub.messages$.pipe(takeUntil(this.onDestroy)).subscribe(message => {
-      if (message.event === EVENTS.SiteThemeProgress) {
-        if ((message.payload as SiteThemeProgressEvent).progress === 1) {
-          this.getThemes().subscribe(() => {});
-        }
+      if (message.event !== EVENTS.NotificationProgress) return;
+      const notificationEvent = (message.payload as NotificationProgressEvent);
+      if (notificationEvent.name !== EVENTS.SiteThemeProgress) return;
+
+      if (notificationEvent.eventType === 'ended') {
+        this.getThemes().subscribe(() => {});
       }
     });
   }
@@ -59,7 +63,6 @@ export class ThemeService implements OnDestroy {
   }

   isDarkTheme() {
-    console.log('color scheme: ', getComputedStyle(this.document.body).getPropertyValue('--color-scheme').trim().toLowerCase());
     return this.getColorScheme().toLowerCase() === 'dark';
   }
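Under the same assumptions as the earlier sketches, the theme-refresh subscription could also be written with the hypothetical onEvent helper shown above; this is a sketch, not part of the commit:

    // Sketch reusing the hypothetical onEvent helper; filter/takeUntil are standard RxJS operators.
    messageHub.messages$.pipe(
      onEvent<NotificationProgressEvent>(EVENTS.NotificationProgress),
      filter(evt => evt.name === EVENTS.SiteThemeProgress && evt.eventType === 'ended'),
      takeUntil(this.onDestroy)
    ).subscribe(() => this.getThemes().subscribe(() => {}));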

View File

@@ -34,6 +34,7 @@
 @import './theme/components/slider';
 @import './theme/components/radios';
 @import './theme/components/selects';
+@import './theme/components/progress';
 @import './theme/utilities/utilities';

View File

@@ -1,5 +1,22 @@
+.popover {
+  background-color: var(--popover-bg-color);
+  border-color: var(--popover-border-color);
+}
+
+.bs-popover-bottom {
+  > .popover-arrow {
+    &::before {
+      border-bottom-color: var(--popover-outerarrow-color);
+    }
+    &::after {
+      border-bottom-color: var(--popover-arrow-color);
+    }
+  }
+}
+
 .popover-body {
   background-color: var(--popover-body-bg-color);
   color: var(--popover-body-text-color)
 }

View File

@@ -0,0 +1,13 @@
+.progress {
+  background-color: var(--progress-bg-color);
+}
+
+.progress-bar {
+  background-color: var(--progress-bar-color);
+}
+
+.progress-bar-striped {
+  background-image: var(--progress-striped-animated-color);
+  background-color: unset;
+}

View File

@@ -96,6 +96,10 @@
   /* Popover */
   --popover-body-bg-color: var(--navbar-bg-color);
   --popover-body-text-color: var(--navbar-text-color);
+  --popover-outerarrow-color: transparent;
+  --popover-arrow-color: transparent;
+  --popover-bg-color: black;
+  --popover-border-color: black;

   /* Pagination */
   --pagination-active-link-border-color: var(--primary-color);
@@ -106,9 +110,14 @@
   --pagination-link-bg-color: rgba(1, 4, 9, 0.5);
   --pagination-focus-border-color: var(--primary-color);

+  /* Progress Bar */
+  --progress-striped-animated-color: linear-gradient(45deg, rgba(74,198,148, 0.75) 25%, rgba(51, 138, 103, 0.75) 25%, rgba(51, 138, 103, 0.75) 50%, rgba(74,198,148, 0.75) 50%, rgba(74,198,148, 0.75) 75%, rgba(51, 138, 103, 0.75) 75%, rgba(51, 138, 103, 0.75));
+  --progress-bg-color: var(--nav-header-bg-color);
+  --progress-bar-color: var(--primary-color-dark-shade);
+
   /* Dropdown */
   --dropdown-item-hover-text-color: white;
-  --dropdown-item-hover-bg-color: var(--primary-color);
+  --dropdown-item-hover-bg-color: var(--primary-color-dark-shade);
   --dropdown-item-text-color: var(--navbar-text-color);
   --dropdown-item-bg-color: var(--navbar-bg-color);
   --dropdown-overlay-color: rgba(0,0,0,0.5);