diff --git a/API.Tests/Parsers/DefaultParserTests.cs b/API.Tests/Parsers/DefaultParserTests.cs
index fcedc779e..9dc926ef5 100644
--- a/API.Tests/Parsers/DefaultParserTests.cs
+++ b/API.Tests/Parsers/DefaultParserTests.cs
@@ -123,7 +123,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
+        filepath = @"E:/Manga/Beelzebub/Beelzebub_01_[Noodles].zip";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Beelzebub", Volumes = Parser.LooseLeafVolume,
@@ -132,7 +132,7 @@ public class DefaultParserTests
         });
 
         // Note: Lots of duplicates here. I think I can move them to the ParserTests itself
-        filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
+        filepath = @"E:/Manga/Ichinensei ni Nacchattara/Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Ichinensei ni Nacchattara", Volumes = "1",
@@ -140,7 +140,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
+        filepath = @"E:/Manga/Tenjo Tenge (Color)/Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
@@ -148,7 +148,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
+        filepath = @"E:/Manga/Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)/Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
@@ -156,7 +156,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
+        filepath = @"E:/Manga/Dorohedoro/Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Dorohedoro", Volumes = "1", Edition = "",
@@ -164,7 +164,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
+        filepath = @"E:/Manga/APOSIMZ/APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "APOSIMZ", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -172,7 +172,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
+        filepath = @"E:/Manga/Corpse Party Musume/Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Kedouin Makoto - Corpse Party Musume", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -180,7 +180,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
+        filepath = @"E:/Manga/Goblin Slayer/Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Goblin Slayer - Brand New Day", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -188,7 +188,7 @@ public class DefaultParserTests
             FullFilePath = filepath
         });
 
-        filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
+        filepath = @"E:/Manga/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Summer Time Rendering", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, Edition = "",
@@ -196,7 +196,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = true
         });
 
-        filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
+        filepath = @"E:/Manga/Seraph of the End/Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Seraph of the End - Vampire Reign", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -204,7 +204,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         });
 
-        filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
+        filepath = @"E:/Manga/Kono Subarashii Sekai ni Bakuen wo!/Vol. 00 Ch. 000.cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
@@ -212,7 +212,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         });
 
-        filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
+        filepath = @"E:/Manga/Toukyou Akazukin/Vol. 01 Ch. 001.cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
@@ -221,10 +221,10 @@ public class DefaultParserTests
         });
 
         // If an image is cover exclusively, ignore it
-        filepath = @"E:\Manga\Seraph of the End\cover.png";
+        filepath = @"E:/Manga/Seraph of the End/cover.png";
         expected.Add(filepath, null);
 
-        filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
+        filepath = @"E:/Manga/The Beginning After the End/Chapter 001.cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "The Beginning After the End", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -232,7 +232,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         });
 
-        filepath = @"E:\Manga\Air Gear\Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
+        filepath = @"E:/Manga/Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
@@ -240,7 +240,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         });
 
-        filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
+        filepath = @"E:/Manga/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
@@ -279,17 +279,17 @@ public class DefaultParserTests
     //[Fact]
     public void Parse_ParseInfo_Manga_ImageOnly()
     {
-        // Images don't have root path as E:\Manga, but rather as the path of the folder
+        // Images don't have root path as E:/Manga, but rather as the path of the folder
         // Note: Fallback to folder will parse Monster #8 and get Monster
-        var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
+        var filepath = @"E:/Manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg";
         var expectedInfo2 = new ParserInfo
         {
             Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
             Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
             FullFilePath = filepath, IsSpecial = false
         };
 
-        var actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Manga, null);
+        var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, null);
         Assert.NotNull(actual2);
         _testOutputHelper.WriteLine($"Validating {filepath}");
         Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -307,7 +307,7 @@ public class DefaultParserTests
         Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
         _testOutputHelper.WriteLine("FullFilePath ✓");
 
-        filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
+        filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Vol19/ch. 186/Vol. 19 p106.gif";
         expectedInfo2 = new ParserInfo
         {
             Series = "Just Images the second", Volumes = "19", Edition = "",
@@ -315,7 +315,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         };
 
-        actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga",LibraryType.Manga, null);
+        actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga",LibraryType.Manga, null);
         Assert.NotNull(actual2);
         _testOutputHelper.WriteLine($"Validating {filepath}");
         Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -333,7 +333,7 @@ public class DefaultParserTests
         Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
         _testOutputHelper.WriteLine("FullFilePath ✓");
 
-        filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
+        filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Blank Folder/Vol19/ch. 186/Vol. 19 p106.gif";
         expectedInfo2 = new ParserInfo
         {
             Series = "Just Images the second", Volumes = "19", Edition = "",
@@ -341,7 +341,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         };
 
-        actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Manga, null);
+        actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, null);
         Assert.NotNull(actual2);
         _testOutputHelper.WriteLine($"Validating {filepath}");
         Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -448,7 +448,7 @@ public class DefaultParserTests
         });
 
         // Fallback test with bad naming
-        filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
+        filepath = @"E:/Comics/Comics/Babe/Babe Vol.1 #1-4/Babe 01.cbr";
        expected.Add(filepath, new ParserInfo
         {
             Series = "Babe", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -456,7 +456,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         });
 
-        filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
+        filepath = @"E:/Comics/Comics/Publisher/Batman the Detective (2021)/Batman the Detective - v6 - 11 - (2021).cbr";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Batman the Detective", Volumes = "6", Edition = "",
@@ -464,7 +464,7 @@ public class DefaultParserTests
             FullFilePath = filepath, IsSpecial = false
         });
 
-        filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
+        filepath = @"E:/Comics/Comics/Batman - The Man Who Laughs #1 (2005)/Batman - The Man Who Laughs #1 (2005).cbr";
         expected.Add(filepath, new ParserInfo
         {
             Series = "Batman - The Man Who Laughs", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs
index b4b86dccf..eb467ab9f 100644
--- a/API/Controllers/LibraryController.cs
+++ b/API/Controllers/LibraryController.cs
@@ -166,11 +166,36 @@ public class LibraryController : BaseApiController
         return Ok(_directoryService.ListDirectory(path));
     }
 
+    /// <summary>
+    /// Return a specific library
+    /// </summary>
+    /// <returns></returns>
+    [Authorize(Policy = "RequireAdminRole")]
+    [HttpGet]
+    public async Task<ActionResult<LibraryDto>> GetLibrary(int libraryId)
+    {
+        var username = User.GetUsername();
+        if (string.IsNullOrEmpty(username)) return Unauthorized();
+
+        var cacheKey = CacheKey + username;
+        var result = await _libraryCacheProvider.GetAsync<IEnumerable<LibraryDto>>(cacheKey);
+        if (result.HasValue)
+        {
+            return Ok(result.Value.FirstOrDefault(l => l.Id == libraryId));
+        }
+
+        var ret = (await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(username)).ToList();
+        await _libraryCacheProvider.SetAsync(cacheKey, ret, TimeSpan.FromHours(24));
+        _logger.LogDebug("Caching libraries for {Key}", cacheKey);
+
+        return Ok(ret.Find(l => l.Id == libraryId));
+    }
+
     /// <summary>
     /// Return all libraries in the Server
     /// </summary>
     /// <returns></returns>
-    [HttpGet]
+    [HttpGet("libraries")]
     public async Task<ActionResult<IEnumerable<LibraryDto>>> GetLibraries()
     {
         var username = User.GetUsername();
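
The new GetLibrary endpoint is a cache-aside read: check the per-user cache entry, fall back to the repository on a miss, then populate the cache under the same per-user key so the next call hits (GetAsync and SetAsync must agree on cacheKey for the cache to ever be warm). A minimal sketch of the same pattern, written against IMemoryCache rather than Kavita's actual cache provider; LibraryInfo and the loader delegate are stand-ins for illustration:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Caching.Memory;

public record LibraryInfo(int Id, string Name);

public class LibraryLookup
{
    private readonly IMemoryCache _cache;
    private readonly Func<string, List<LibraryInfo>> _load; // stand-in for the repository call

    public LibraryLookup(IMemoryCache cache, Func<string, List<LibraryInfo>> load)
    {
        _cache = cache;
        _load = load;
    }

    public LibraryInfo? GetLibrary(string username, int libraryId)
    {
        var cacheKey = "library_" + username; // one cached list per user

        // Cache hit: answer straight from the cached per-user list
        if (_cache.TryGetValue(cacheKey, out List<LibraryInfo>? libraries))
            return libraries!.FirstOrDefault(l => l.Id == libraryId);

        // Cache miss: load once, store under the SAME key, then answer
        var loaded = _load(username);
        _cache.Set(cacheKey, loaded, TimeSpan.FromHours(24));
        return loaded.FirstOrDefault(l => l.Id == libraryId);
    }
}
```
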
diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs
index d4e1ed59b..802edebf2 100644
--- a/API/Controllers/ServerController.cs
+++ b/API/Controllers/ServerController.cs
@@ -221,18 +221,18 @@ public class ServerController : BaseApiController
     /// </summary>
     /// <returns></returns>
     [HttpGet("jobs")]
-    public ActionResult<IEnumerable<JobDto>> GetJobs()
+    public async Task<ActionResult<IEnumerable<JobDto>>> GetJobs()
     {
-        var recurringJobs = JobStorage.Current.GetConnection().GetRecurringJobs().Select(
-            dto =>
-            new JobDto() {
-                Id = dto.Id,
-                Title = dto.Id.Replace('-', ' '),
-                Cron = dto.Cron,
-                LastExecutionUtc = dto.LastExecution.HasValue ? new DateTime(dto.LastExecution.Value.Ticks, DateTimeKind.Utc) : null
-            });
+        var jobDtoTasks = JobStorage.Current.GetConnection().GetRecurringJobs().Select(async dto =>
+            new JobDto()
+            {
+                Id = dto.Id,
+                Title = await _localizationService.Translate(User.GetUserId(), dto.Id),
+                Cron = dto.Cron,
+                LastExecutionUtc = dto.LastExecution.HasValue ? new DateTime(dto.LastExecution.Value.Ticks, DateTimeKind.Utc) : null
+            });
 
-        return Ok(recurringJobs);
+        return Ok(await Task.WhenAll(jobDtoTasks));
     }
 
     /// <summary>
diff --git a/API/Data/ManualMigrations/MigrateMangaFilePath.cs b/API/Data/ManualMigrations/MigrateMangaFilePath.cs
new file mode 100644
index 000000000..ccf9aa773
--- /dev/null
+++ b/API/Data/ManualMigrations/MigrateMangaFilePath.cs
@@ -0,0 +1,45 @@
+using System;
+using System.Threading.Tasks;
+using API.Entities;
+using API.Services.Tasks.Scanner.Parser;
+using Kavita.Common.EnvironmentInfo;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+
+namespace API.Data.ManualMigrations;
+
+/// <summary>
+/// v0.8.0 ensured that MangaFile Path is normalized. This will normalize existing data to avoid churn.
+/// </summary>
+public static class MigrateMangaFilePath
+{
+    public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
+    {
+        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateMangaFilePath"))
+        {
+            return;
+        }
+
+        logger.LogCritical(
+            "Running MigrateMangaFilePath migration - Please be patient, this may take some time. This is not an error");
+
+        foreach (var file in dataContext.MangaFile)
+        {
+            file.FilePath = Parser.NormalizePath(file.FilePath);
+        }
+
+        await dataContext.SaveChangesAsync();
+
+        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
+        {
+            Name = "MigrateMangaFilePath",
+            ProductVersion = BuildInfo.Version.ToString(),
+            RanAt = DateTime.UtcNow
+        });
+        await dataContext.SaveChangesAsync();
+
+        logger.LogCritical(
+            "Running MigrateMangaFilePath migration - Completed. This is not an error");
+    }
+}
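
Everything in this migration funnels through Parser.NormalizePath, the same helper the parsers below now apply at parse time, so stored paths and freshly parsed paths compare equal regardless of which OS produced them. A hedged sketch of the behavior being relied on (the real implementation lives in API.Services.Tasks.Scanner.Parser.Parser and may handle more cases):

```csharp
public static class PathNormalizer
{
    // Assumption: normalization here means forward slashes only, matching the
    // E:\Manga -> E:/Manga rewrites in the DefaultParserTests data above.
    public static string Normalize(string path) =>
        string.IsNullOrEmpty(path) ? path : path.Replace('\\', '/');
}
```

With that shape, `PathNormalizer.Normalize(@"E:\Manga\Beelzebub")` yields `E:/Manga/Beelzebub`, which is exactly the rewrite applied to the test fixtures.
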
diff --git a/API/Data/ManualMigrations/MigrateWantToReadExport.cs b/API/Data/ManualMigrations/MigrateWantToReadExport.cs
index eb788f1e8..95a86c370 100644
--- a/API/Data/ManualMigrations/MigrateWantToReadExport.cs
+++ b/API/Data/ManualMigrations/MigrateWantToReadExport.cs
@@ -20,6 +20,7 @@ public static class MigrateWantToReadExport
     {
         try
         {
+
             if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateWantToReadExport"))
             {
                 return;
diff --git a/API/Helpers/OrderableHelper.cs b/API/Helpers/OrderableHelper.cs
index d936eb588..3313ca658 100644
--- a/API/Helpers/OrderableHelper.cs
+++ b/API/Helpers/OrderableHelper.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
 using API.Entities;
 
 namespace API.Helpers;
@@ -46,6 +47,7 @@ public static class OrderableHelper
 
     public static void ReorderItems(List<ReadingListItem> items, int readingListItemId, int toPosition)
     {
+        if (toPosition < 0) throw new ArgumentException("toPosition cannot be less than 0");
         var item = items.Find(r => r.Id == readingListItemId);
         if (item != null)
         {
diff --git a/API/I18N/en.json b/API/I18N/en.json
index f24e76d9d..9d10a5c55 100644
--- a/API/I18N/en.json
+++ b/API/I18N/en.json
@@ -200,8 +200,19 @@
     "volume-num": "Volume {0}",
     "book-num": "Book {0}",
     "issue-num": "Issue {0}{1}",
-    "chapter-num": "Chapter {0}"
-
+    "chapter-num": "Chapter {0}",
+    "check-updates": "Check Updates",
+    "license-check": "License Check",
+    "process-scrobbling-events": "Process Scrobbling Events",
+    "report-stats": "Report Stats",
+    "check-scrobbling-tokens": "Check Scrobbling Tokens",
+    "cleanup": "Cleanup",
+    "process-processed-scrobbling-events": "Process Processed Scrobbling Events",
+    "remove-from-want-to-read": "Want to Read Cleanup",
+    "scan-libraries": "Scan Libraries",
+    "kavita+-data-refresh": "Kavita+ Data Refresh",
+    "backup": "Backup",
+    "update-yearly-stats": "Update Yearly Stats"
 }
diff --git a/API/Program.cs b/API/Program.cs
index 548e57859..51b09c1c7 100644
--- a/API/Program.cs
+++ b/API/Program.cs
@@ -88,7 +88,7 @@ public class Program
         }
 
         // Apply Before manual migrations that need to run before actual migrations
-        try
+        if (isDbCreated)
         {
             Task.Run(async () =>
                 {
@@ -96,17 +96,22 @@ public class Program
                     logger.LogInformation("Running Migrations");
 
                     // v0.7.14
-                    await MigrateWantToReadExport.Migrate(context, directoryService, logger);
+                    try
+                    {
+                        await MigrateWantToReadExport.Migrate(context, directoryService, logger);
+                    }
+                    catch (Exception ex)
+                    {
+                        /* Swallow */
+                    }
 
                     await unitOfWork.CommitAsync();
                     logger.LogInformation("Running Migrations - complete");
                 }).GetAwaiter()
                 .GetResult();
         }
-        catch (Exception ex)
-        {
-            logger.LogCritical(ex, "An error occurred during migration");
-        }
+
 
         await context.Database.MigrateAsync();
diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs
index 873545742..120cbf3f7 100644
--- a/API/Services/ArchiveService.cs
+++ b/API/Services/ArchiveService.cs
@@ -353,7 +353,15 @@ public class ArchiveService : IArchiveService
         {
             var tempPath = Path.Join(tempLocation, _directoryService.FileSystem.Path.GetFileNameWithoutExtension(_directoryService.FileSystem.FileInfo.New(path).Name));
             progressCallback(Tuple.Create(_directoryService.FileSystem.FileInfo.New(path).Name, (1.0f * totalFiles) / count));
-            ExtractArchive(path, tempPath);
+            if (Tasks.Scanner.Parser.Parser.IsArchive(path))
+            {
+                ExtractArchive(path, tempPath);
+            }
+            else
+            {
+                _directoryService.CopyFileToDirectory(path, tempPath);
+            }
+
             count++;
         }
     }
@@ -392,7 +400,7 @@ public class ArchiveService : IArchiveService
             return false;
         }
 
-        if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath) || Tasks.Scanner.Parser.Parser.IsEpub(archivePath)) return true;
+        if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath)) return true;
 
         _logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath);
         return false;
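
The ArchiveService change above stops assuming every queued file is an archive: archives are still unpacked, while anything else is copied through so a mixed download keeps all of its files. A small sketch of that extract-or-copy split with plain System.IO; IsArchive here is a simplified stand-in for Kavita's Parser.IsArchive:

```csharp
using System;
using System.IO;

public static class ExtractOrCopy
{
    // Simplified extension check; the real Parser.IsArchive is regex-based.
    private static bool IsArchive(string path) =>
        Path.GetExtension(path).ToLowerInvariant()
            is ".zip" or ".cbz" or ".rar" or ".cbr" or ".7z" or ".cb7";

    public static void Stage(string path, string tempPath, Action<string, string> extractArchive)
    {
        if (IsArchive(path))
        {
            extractArchive(path, tempPath); // unpack into the temp folder
        }
        else
        {
            // Non-archive (e.g. a PDF or EPUB): copy it through unchanged
            Directory.CreateDirectory(tempPath);
            File.Copy(path, Path.Combine(tempPath, Path.GetFileName(path)), overwrite: true);
        }
    }
}
```
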
diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs
index 9d50b1fe9..b9c76df1f 100644
--- a/API/Services/BookService.cs
+++ b/API/Services/BookService.cs
@@ -781,7 +781,7 @@ public class BookService : IBookService
     /// </summary>
     public ParserInfo? ParseInfo(string filePath)
     {
-        if (!Parser.IsEpub(filePath)) return null;
+        if (!Parser.IsEpub(filePath) || !_directoryService.FileSystem.File.Exists(filePath)) return null;
 
         try
         {
@@ -848,7 +848,7 @@ public class BookService : IBookService
                     Format = MangaFormat.Epub,
                     Filename = Path.GetFileName(filePath),
                     Title = specialName?.Trim() ?? string.Empty,
-                    FullFilePath = filePath,
+                    FullFilePath = Parser.NormalizePath(filePath),
                     IsSpecial = false,
                     Series = series.Trim(),
                     SeriesSort = series.Trim(),
@@ -870,7 +870,7 @@ public class BookService : IBookService
             Format = MangaFormat.Epub,
             Filename = Path.GetFileName(filePath),
             Title = epubBook.Title.Trim(),
-            FullFilePath = filePath,
+            FullFilePath = Parser.NormalizePath(filePath),
             IsSpecial = false,
             Series = epubBook.Title.Trim(),
             Volumes = Parser.LooseLeafVolume,
diff --git a/API/Services/Plus/ScrobblingService.cs b/API/Services/Plus/ScrobblingService.cs
index c3ff607cc..801ac2b33 100644
--- a/API/Services/Plus/ScrobblingService.cs
+++ b/API/Services/Plus/ScrobblingService.cs
@@ -440,22 +440,25 @@ public class ScrobblingService : IScrobblingService
         // Might want to log this under ScrobbleError
         if (response.ErrorMessage != null && response.ErrorMessage.Contains("Too Many Requests"))
         {
-            _logger.LogInformation("Hit Too many requests, sleeping to regain requests");
+            _logger.LogInformation("Hit Too many requests, sleeping to regain requests and retrying");
             await Task.Delay(TimeSpan.FromMinutes(10));
-        } else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unauthorized"))
+            return await PostScrobbleUpdate(data, license, evt);
+        }
+        if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unauthorized"))
         {
             _logger.LogCritical("Kavita+ responded with Unauthorized. Please check your subscription");
             await _licenseService.HasActiveLicense(true);
             evt.IsErrored = true;
             evt.ErrorDetails = "Kavita+ subscription no longer active";
             throw new KavitaException("Kavita+ responded with Unauthorized. Please check your subscription");
-        } else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Access token is invalid"))
+        }
+        if (response.ErrorMessage != null && response.ErrorMessage.Contains("Access token is invalid"))
         {
             evt.IsErrored = true;
             evt.ErrorDetails = AccessTokenErrorMessage;
             throw new KavitaException("Access token is invalid");
         }
-        else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
+        if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
         {
             // Log the Series name and Id in ScrobbleErrors
             _logger.LogInformation("Kavita+ was unable to match the series");
@@ -490,10 +493,6 @@ public class ScrobblingService : IScrobblingService
             evt.IsErrored = true;
             evt.ErrorDetails = "Review was unable to be saved due to upstream requirements";
         }
-
-        evt.IsErrored = true;
-        _logger.LogError("Scrobbling failed due to {ErrorMessage}: {SeriesName}", response.ErrorMessage, data.SeriesName);
-        throw new KavitaException($"Scrobbling failed due to {response.ErrorMessage}: {data.SeriesName}");
     }
 
     return response.RateLeft;
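
The Too Many Requests branch now retries by re-entering PostScrobbleUpdate after a ten-minute delay instead of falling through. A sketch of that shape; the bounded attempt count is an addition of this sketch (the diff itself recurses without an explicit cap):

```csharp
using System;
using System.Threading.Tasks;

public static class RateLimitRetry
{
    // call returns (rateLimited, value); retry while rate limited, up to maxAttempts.
    public static async Task<T> Run<T>(Func<Task<(bool RateLimited, T Value)>> call, int maxAttempts = 3)
    {
        for (var attempt = 1; ; attempt++)
        {
            var (rateLimited, value) = await call();
            if (!rateLimited || attempt >= maxAttempts) return value;

            // Same back-off the service uses before retrying
            await Task.Delay(TimeSpan.FromMinutes(10));
        }
    }
}
```
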
diff --git a/API/Services/ReadingItemService.cs b/API/Services/ReadingItemService.cs
index cd1c508e2..34360efa5 100644
--- a/API/Services/ReadingItemService.cs
+++ b/API/Services/ReadingItemService.cs
@@ -38,11 +38,12 @@ public class ReadingItemService : IReadingItemService
         _directoryService = directoryService;
         _logger = logger;
 
-        _comicVineParser = new ComicVineParser(directoryService);
         _imageParser = new ImageParser(directoryService);
-        _bookParser = new BookParser(directoryService, bookService, _basicParser);
-        _pdfParser = new PdfParser(directoryService);
         _basicParser = new BasicParser(directoryService, _imageParser);
+        _bookParser = new BookParser(directoryService, bookService, _basicParser);
+        _comicVineParser = new ComicVineParser(directoryService);
+        _pdfParser = new PdfParser(directoryService);
+
     }
 
     /// <summary>
@@ -73,14 +74,22 @@ public class ReadingItemService : IReadingItemService
     /// <param name="type">Library type to determine parsing to perform</param>
     public ParserInfo? ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
     {
-        var info = Parse(path, rootPath, libraryRoot, type);
-        if (info == null)
+        try
         {
-            _logger.LogError("Unable to parse any meaningful information out of file {FilePath}", path);
+            var info = Parse(path, rootPath, libraryRoot, type);
+            if (info == null)
+            {
+                _logger.LogError("Unable to parse any meaningful information out of file {FilePath}", path);
+                return null;
+            }
+
+            return info;
+        }
+        catch (Exception ex)
+        {
+            _logger.LogError(ex, "There was an exception when parsing file {FilePath}", path);
             return null;
         }
-
-        return info;
     }
 
     /// <summary>
diff --git a/API/Services/SeriesService.cs b/API/Services/SeriesService.cs
index e6cd2b3e3..c19ef8878 100644
--- a/API/Services/SeriesService.cs
+++ b/API/Services/SeriesService.cs
@@ -578,6 +578,13 @@ public class SeriesService : ISeriesService
         return !chapter.IsSpecial && chapter.MinNumber.IsNot(Parser.DefaultChapterNumber);
     }
 
+    /// <summary>
+    /// Should the volume be included and if so, this renames
+    /// </summary>
+    /// <param name="volume"></param>
+    /// <param name="libraryType"></param>
+    /// <param name="volumeLabel"></param>
+    /// <returns></returns>
     public static bool RenameVolumeName(VolumeDto volume, LibraryType libraryType, string volumeLabel = "Volume")
     {
         if (libraryType is LibraryType.Book or LibraryType.LightNovel)
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index 83f9eee67..c9ae918ec 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -336,7 +336,7 @@ public class TaskScheduler : ITaskScheduler
         _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
         BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force, true));
         // When we do a scan, force cache to re-unpack in case page numbers change
-        BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheAndTempDirectories());
+        BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
     }
 
     public void TurnOnScrobbling(int userId = 0)
diff --git a/API/Services/Tasks/BackupService.cs b/API/Services/Tasks/BackupService.cs
index 765d3ca73..60e0e8dc3 100644
--- a/API/Services/Tasks/BackupService.cs
+++ b/API/Services/Tasks/BackupService.cs
@@ -104,8 +104,13 @@ public class BackupService : IBackupService
         _directoryService.ExistOrCreate(tempDirectory);
         _directoryService.ClearDirectory(tempDirectory);
 
+        await SendProgress(0.1F, "Copying config files");
         _directoryService.CopyFilesToDirectory(
-            _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory);
+            _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)), tempDirectory);
+
+        // Copy any csv's as those are used for manual migrations
+        _directoryService.CopyFilesToDirectory(
+            _directoryService.GetFilesWithCertainExtensions(_directoryService.ConfigDirectory, @"\.csv"), tempDirectory);
 
         await SendProgress(0.2F, "Copying logs");
         CopyLogsToBackupDirectory(tempDirectory);
diff --git a/API/Services/Tasks/CleanupService.cs b/API/Services/Tasks/CleanupService.cs
index 3aaa2c837..6271df312 100644
--- a/API/Services/Tasks/CleanupService.cs
+++ b/API/Services/Tasks/CleanupService.cs
@@ -20,6 +20,7 @@ public interface ICleanupService
     Task Cleanup();
     Task CleanupDbEntries();
     void CleanupCacheAndTempDirectories();
+    void CleanupCacheDirectory();
     Task DeleteSeriesCoverImages();
     Task DeleteChapterCoverImages();
     Task DeleteTagCoverImages();
@@ -178,6 +179,23 @@ public class CleanupService : ICleanupService
         _logger.LogInformation("Cache and temp directory purged");
     }
 
+    public void CleanupCacheDirectory()
+    {
+        _logger.LogInformation("Performing cleanup of Cache directories");
+        _directoryService.ExistOrCreate(_directoryService.CacheDirectory);
+
+        try
+        {
+            _directoryService.ClearDirectory(_directoryService.CacheDirectory);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup");
+        }
+
+        _logger.LogInformation("Cache directory purged");
+    }
+
     /// <summary>
     /// Removes Database backups older than configured total backups. If all backups are older than total backups days, only the latest is kept.
     /// </summary>
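
BackupService above now also sweeps the .csv files that manual migrations (such as MigrateWantToReadExport) leave in the config directory, using Kavita's GetFilesWithCertainExtensions helper with a `\.csv` pattern. A hedged standalone equivalent with plain System.IO:

```csharp
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;

public static class ConfigBackup
{
    // Keep files whose extension matches the given regex, case-insensitively,
    // e.g. FilesWithExtensions(configDir, @"\.csv")
    public static string[] FilesWithExtensions(string dir, string extensionRegex) =>
        Directory.GetFiles(dir)
            .Where(f => Regex.IsMatch(Path.GetExtension(f), extensionRegex, RegexOptions.IgnoreCase))
            .ToArray();
}
```
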
diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs
index 84c45f07a..293b37c96 100644
--- a/API/Services/Tasks/Scanner/ParseScannedFiles.cs
+++ b/API/Services/Tasks/Scanner/ParseScannedFiles.cs
@@ -170,6 +170,7 @@ public class ParseScannedFiles
             library.Folders.FirstOrDefault(f => Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ?? folderPath;
 
+
         if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
         {
             result.Add(new ScanResult()
@@ -313,6 +314,7 @@ public class ParseScannedFiles
         await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
             MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));
 
         var processedScannedSeries = new List<ScannedSeriesResult>();
+        //var processedScannedSeries = new ConcurrentBag<ScannedSeriesResult>();
         foreach (var folderPath in folders)
         {
             try
@@ -321,45 +323,15 @@ public class ParseScannedFiles
 
                 foreach (var scanResult in scanResults)
                 {
-                    // scanResult is updated with the parsed infos
-                    await ProcessScanResult(scanResult, seriesPaths, library);
-
-                    // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
-                    var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
-
-                    // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
-                    MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
-
-                    // Combine everything into scannedSeries
-                    foreach (var info in scanResult.ParserInfos)
-                    {
-                        try
-                        {
-                            TrackSeries(scannedSeries, info);
-                        }
-                        catch (Exception ex)
-                        {
-                            _logger.LogError(ex,
-                                "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
-                                info?.FullFilePath);
-                        }
-                    }
-
-                    foreach (var series in scannedSeries.Keys)
-                    {
-                        if (scannedSeries[series].Count <= 0) continue;
-
-                        UpdateSortOrder(scannedSeries, series);
-
-                        processedScannedSeries.Add(new ScannedSeriesResult()
-                        {
-                            HasChanged = scanResult.HasChanged,
-                            ParsedSeries = series,
-                            ParsedInfos = scannedSeries[series]
-                        });
-                    }
+                    await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
                 }
 
+                // This reduced a 1.1k series networked scan by a little more than 1 hour, but the order series were added to Kavita was not alphabetical
+                // await Task.WhenAll(scanResults.Select(async scanResult =>
+                // {
+                //     await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
+                // }));
+
             }
             catch (ArgumentException ex)
             {
@@ -369,10 +341,52 @@ public class ParseScannedFiles
         await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
             MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));
 
-        return processedScannedSeries;
+        return processedScannedSeries.ToList();
     }
 
+    private async Task ParseAndTrackSeries(Library library, IDictionary<string, IList<SeriesModified>> seriesPaths, ScanResult scanResult,
+        List<ScannedSeriesResult> processedScannedSeries)
+    {
+        // scanResult is updated with the parsed infos
+        await ProcessScanResult(scanResult, seriesPaths, library); // NOTE: This may be able to be parallelized
+
+        // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
+        var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
+
+        // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
+        MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
+
+        // Combine everything into scannedSeries
+        foreach (var info in scanResult.ParserInfos)
+        {
+            try
+            {
+                TrackSeries(scannedSeries, info);
+            }
+            catch (Exception ex)
+            {
+                _logger.LogError(ex,
+                    "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
+                    info?.FullFilePath);
+            }
+        }
+
+        foreach (var series in scannedSeries.Keys)
+        {
+            if (scannedSeries[series].Count <= 0) continue;
+
+            UpdateSortOrder(scannedSeries, series);
+
+            processedScannedSeries.Add(new ScannedSeriesResult()
+            {
+                HasChanged = scanResult.HasChanged,
+                ParsedSeries = series,
+                ParsedInfos = scannedSeries[series]
+            });
+        }
+    }
+
     /// <summary>
     /// For a given ScanResult, sets the ParserInfos on the result
     /// </summary>
diff --git a/API/Services/Tasks/Scanner/Parser/BasicParser.cs b/API/Services/Tasks/Scanner/Parser/BasicParser.cs
index 9df68abb9..363d4aaff 100644
--- a/API/Services/Tasks/Scanner/Parser/BasicParser.cs
+++ b/API/Services/Tasks/Scanner/Parser/BasicParser.cs
@@ -27,7 +27,7 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
         Filename = Path.GetFileName(filePath),
         Format = Parser.ParseFormat(filePath),
         Title = Parser.RemoveExtensionIfSupported(fileName),
-        FullFilePath = filePath,
+        FullFilePath = Parser.NormalizePath(filePath),
         Series = string.Empty,
         ComicInfo = comicInfo
     };
diff --git a/API/Services/Tasks/Scanner/Parser/BookParser.cs b/API/Services/Tasks/Scanner/Parser/BookParser.cs
index 9c7ed9a16..8c7c00b83 100644
--- a/API/Services/Tasks/Scanner/Parser/BookParser.cs
+++ b/API/Services/Tasks/Scanner/Parser/BookParser.cs
@@ -3,13 +3,15 @@ using API.Entities.Enums;
 
 namespace API.Services.Tasks.Scanner.Parser;
 
-public class BookParser(IDirectoryService directoryService, IBookService bookService, IDefaultParser basicParser) : DefaultParser(directoryService)
+public class BookParser(IDirectoryService directoryService, IBookService bookService, BasicParser basicParser) : DefaultParser(directoryService)
 {
     public override ParserInfo Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo comicInfo = null)
     {
         var info = bookService.ParseInfo(filePath);
         if (info == null) return null;
 
+        info.ComicInfo = comicInfo;
+
         // This catches when original library type is Manga/Comic and when parsing with non
         if (Parser.ParseVolume(info.Series) != Parser.LooseLeafVolume) // Shouldn't this be info.Volume != DefaultVolume?
         {
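
The BookParser signature change above (IDefaultParser to the concrete BasicParser) pairs with the constructor reorder in ReadingItemService earlier in this diff: BookParser consumes _basicParser as its fallback, so _basicParser must be assigned before BookParser is constructed; in the old order it was assigned last, so BookParser received a still-null reference. A minimal sketch of the ordering dependency with stand-in types:

```csharp
public class BasicParser { }

public class BookParser
{
    public BasicParser Fallback { get; }
    public BookParser(BasicParser fallback) => Fallback = fallback;
}

public class ReadingItemServiceSketch
{
    private readonly BasicParser _basicParser;
    private readonly BookParser _bookParser;

    public ReadingItemServiceSketch()
    {
        _basicParser = new BasicParser();           // must exist first...
        _bookParser = new BookParser(_basicParser); // ...because it is consumed here
    }
}
```
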
diff --git a/API/Services/Tasks/Scanner/Parser/ComicVineParser.cs b/API/Services/Tasks/Scanner/Parser/ComicVineParser.cs
index 79830c86c..f16b93643 100644
--- a/API/Services/Tasks/Scanner/Parser/ComicVineParser.cs
+++ b/API/Services/Tasks/Scanner/Parser/ComicVineParser.cs
@@ -34,7 +34,7 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
         Filename = Path.GetFileName(filePath),
         Format = Parser.ParseFormat(filePath),
         Title = Parser.RemoveExtensionIfSupported(fileName)!,
-        FullFilePath = filePath,
+        FullFilePath = Parser.NormalizePath(filePath),
         Series = string.Empty,
         ComicInfo = comicInfo,
         Chapters = Parser.ParseComicChapter(fileName),
@@ -102,4 +102,33 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
     {
         return type == LibraryType.ComicVine;
     }
+
+    private new static void UpdateFromComicInfo(ParserInfo info)
+    {
+        if (info.ComicInfo == null) return;
+
+        if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
+        {
+            info.Volumes = info.ComicInfo.Volume;
+        }
+        if (string.IsNullOrEmpty(info.LocalizedSeries) && !string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
+        {
+            info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
+        }
+        if (!string.IsNullOrEmpty(info.ComicInfo.Number))
+        {
+            info.Chapters = info.ComicInfo.Number;
+            if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
+            {
+                info.IsSpecial = false;
+                info.Volumes = $"{Parser.SpecialVolumeNumber}";
+            }
+        }
+
+        // Patch in SeriesSort from ComicInfo
+        if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
+        {
+            info.SeriesSort = info.ComicInfo.TitleSort.Trim();
+        }
+    }
 }
diff --git a/API/Services/Tasks/Scanner/Parser/ImageParser.cs b/API/Services/Tasks/Scanner/Parser/ImageParser.cs
index a1227d841..4834a6ed5 100644
--- a/API/Services/Tasks/Scanner/Parser/ImageParser.cs
+++ b/API/Services/Tasks/Scanner/Parser/ImageParser.cs
@@ -21,7 +21,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
         ComicInfo = comicInfo,
         Format = MangaFormat.Image,
         Filename = Path.GetFileName(filePath),
-        FullFilePath = filePath,
+        FullFilePath = Parser.NormalizePath(filePath),
         Title = fileName,
     };
     ParseFromFallbackFolders(filePath, libraryRoot, LibraryType.Image, ref ret);
diff --git a/API/Services/Tasks/Scanner/Parser/PdfParser.cs b/API/Services/Tasks/Scanner/Parser/PdfParser.cs
index 1b90a95e7..da71124a9 100644
--- a/API/Services/Tasks/Scanner/Parser/PdfParser.cs
+++ b/API/Services/Tasks/Scanner/Parser/PdfParser.cs
@@ -14,7 +14,7 @@ public class PdfParser(IDirectoryService directoryService) : DefaultParser(direc
         Filename = Path.GetFileName(filePath),
         Format = Parser.ParseFormat(filePath),
         Title = Parser.RemoveExtensionIfSupported(fileName)!,
-        FullFilePath = filePath,
+        FullFilePath = Parser.NormalizePath(filePath),
         Series = string.Empty,
         ComicInfo = comicInfo,
         Chapters = type == LibraryType.Comic
diff --git a/API/Services/Tasks/Scanner/ProcessSeries.cs b/API/Services/Tasks/Scanner/ProcessSeries.cs
index dcddcee85..8f9a58088 100644
--- a/API/Services/Tasks/Scanner/ProcessSeries.cs
+++ b/API/Services/Tasks/Scanner/ProcessSeries.cs
@@ -203,15 +203,16 @@ public class ProcessSeries : IProcessSeries
 
         // Process reading list after commit as we need to commit per list
-        BackgroundJob.Enqueue(() => _readingListService.CreateReadingListsFromSeries(library.Id, series.Id));
+        await _readingListService.CreateReadingListsFromSeries(library.Id, series.Id);
 
         if (seriesAdded)
         {
             // See if any recommendations can link up to the series and pre-fetch external metadata for the series
             _logger.LogInformation("Linking up External Recommendations new series (if applicable)");
-            BackgroundJob.Enqueue(() =>
-                _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+            // BackgroundJob.Enqueue(() =>
+            //     _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+            await _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type);
 
             await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
                 MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId), false);
@@ -232,9 +233,11 @@ public class ProcessSeries : IProcessSeries
         var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
         await _metadataService.GenerateCoversForSeries(series, settings.EncodeMediaAs, settings.CoverImageSize);
-        BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+        // BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+        await _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate);
     }
 
+
     private async Task ReportDuplicateSeriesLookup(Library library, ParserInfo firstInfo, Exception ex)
     {
         var seriesCollisions = await _unitOfWork.SeriesRepository.GetAllSeriesByAnyName(firstInfo.LocalizedSeries, string.Empty, library.Id, firstInfo.Format);
@@ -581,7 +584,7 @@ public class ProcessSeries : IProcessSeries
     {
         // TODO: Push this to UI in some way
         if (!ex.Message.Equals("Sequence contains more than one matching element")) throw;
-        _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
+        _logger.LogCritical(ex, "[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
         throw new KavitaException(
             $"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan");
     }
@@ -705,7 +708,7 @@ public class ProcessSeries : IProcessSeries
     {
         // Ensure we remove any files that no longer exist AND order
         existingChapter.Files = existingChapter.Files
-            .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
+            .Where(f => parsedInfos.Any(p => Parser.Parser.NormalizePath(p.FullFilePath) == Parser.Parser.NormalizePath(f.FilePath)))
             .OrderByNatural(f => f.FilePath).ToList();
         existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);
     }
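
The UpdateChapters fix at the end of ProcessSeries compares stored FilePath values against freshly parsed FullFilePath values only after pushing both through NormalizePath; without that, every pre-0.8.0 backslash path would fail the match and its files would be pruned. A tiny self-contained demonstration, with Normalize as assumed earlier:

```csharp
using System;
using System.Linq;

var existingFiles = new[] { @"E:\Manga\Dorohedoro\v01.cbz", @"E:\Manga\Dorohedoro\v02.cbz" };
var parsedPaths   = new[] { "E:/Manga/Dorohedoro/v01.cbz" }; // fresh scan, already normalized

// Without normalizing both sides this keeps nothing; with it, v01 survives the prune.
var kept = existingFiles
    .Where(f => parsedPaths.Any(p => Normalize(p) == Normalize(f)))
    .ToList();

Console.WriteLine(string.Join(", ", kept)); // E:\Manga\Dorohedoro\v01.cbz

static string Normalize(string p) => p.Replace('\\', '/');
```
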
diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs
index e50ff7b7f..ddc319e70 100644
--- a/API/Services/Tasks/ScannerService.cs
+++ b/API/Services/Tasks/ScannerService.cs
@@ -325,7 +325,7 @@ public class ScannerService : IScannerService
         await _metadataService.RemoveAbandonedMetadataKeys();
 
         BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(existingChapterIdsToClean));
-        BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
+        BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
     }
 
     private void TrackFoundSeriesAndFiles(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, IList<ScannedSeriesResult> seenSeries)
     {
@@ -485,7 +485,8 @@ public class ScannerService : IScannerService
     public async Task ScanLibrary(int libraryId, bool forceUpdate = false, bool isSingleScan = true)
     {
         var sw = Stopwatch.StartNew();
-        var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
+        var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId,
+            LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
 
         var libraryFolderPaths = library!.Folders.Select(fp => fp.Path).ToList();
         if (!await CheckMounts(library.Name, libraryFolderPaths)) return;
@@ -501,48 +502,16 @@ public class ScannerService : IScannerService
         }
 
-        var totalFiles = 0;
-        var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
-
         var (scanElapsedTime, processedSeries) = await ScanFiles(library, libraryFolderPaths,
             shouldUseLibraryScan, forceUpdate);
 
+        var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
         TrackFoundSeriesAndFiles(parsedSeries, processedSeries);
 
         // We need to remove any keys where there is no actual parser info
-        var toProcess = parsedSeries.Keys
-            .Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
-            .ToList();
+        var totalFiles = await ProcessParsedSeries(forceUpdate, parsedSeries, library, scanElapsedTime);
 
-        if (toProcess.Count > 0)
-        {
-            // This grabs all the shared entities, like tags, genre, people. To be solved later in this refactor on how to not have blocking access.
-            await _processSeries.Prime();
-        }
-
-        var tasks = new List<Task>();
-        foreach (var pSeries in toProcess)
-        {
-            totalFiles += parsedSeries[pSeries].Count;
-            //tasks.Add();
-            await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate);
-        }
-
-        //await Task.WhenAll(tasks);
-
-
-        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
-            MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
-
-        _logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
-
-        var time = DateTime.Now;
-        foreach (var folderPath in library.Folders)
-        {
-            folderPath.UpdateLastScanned(time);
-        }
-
-        library.UpdateLastScanned(time);
+        UpdateLastScanned(library);
 
         _unitOfWork.LibraryRepository.Update(library);
@@ -566,28 +535,7 @@ public class ScannerService : IScannerService
                 totalFiles, parsedSeries.Count, sw.ElapsedMilliseconds, library.Name);
         }
 
-        try
-        {
-            // Could I delete anything in a Library's Series where the LastScan date is before scanStart?
-            // NOTE: This implementation is expensive
-            _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
-            var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
-            _logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
-                removedSeries.Count, removedSeries.Select(s => s.Name));
-            _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
-
-            await _unitOfWork.CommitAsync();
-
-            foreach (var s in removedSeries)
-            {
-                await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
-                    MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
-            }
-        }
-        catch (Exception ex)
-        {
-            _logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
-        }
+        await RemoveSeriesNotFound(parsedSeries, library);
     }
     else
     {
@@ -598,7 +546,77 @@ public class ScannerService : IScannerService
         await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
             MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, string.Empty));
         await _metadataService.RemoveAbandonedMetadataKeys();
 
-        BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
+        BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
+    }
+
+    private async Task RemoveSeriesNotFound(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library)
+    {
+        try
+        {
+            // Could I delete anything in a Library's Series where the LastScan date is before scanStart?
+            // NOTE: This implementation is expensive
+            _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
+            var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
+            _logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
+                removedSeries.Count, removedSeries.Select(s => s.Name));
+            _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
+
+            await _unitOfWork.CommitAsync();
+
+            foreach (var s in removedSeries)
+            {
+                await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
+                    MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
+            }
+        }
+        catch (Exception ex)
+        {
+            _logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
+        }
+    }
+
+    private async Task<int> ProcessParsedSeries(bool forceUpdate, Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library, long scanElapsedTime)
+    {
+        var toProcess = parsedSeries.Keys
+            .Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
+            .ToList();
+
+        if (toProcess.Count > 0)
+        {
+            // This grabs all the shared entities, like tags, genre, people. To be solved later in this refactor on how to not have blocking access.
+            await _processSeries.Prime();
+        }
+
+        var totalFiles = 0;
+        //var tasks = new List<Task>();
+        foreach (var pSeries in toProcess)
+        {
+            totalFiles += parsedSeries[pSeries].Count;
+            //tasks.Add(_processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate));
+            // We can't do Task.WhenAll because of concurrency issues.
+            await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate);
+        }
+
+        //await Task.WhenAll(tasks);
+
+        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+            MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
+
+        _logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
+
+        return totalFiles;
+    }
+
+    private static void UpdateLastScanned(Library library)
+    {
+        var time = DateTime.Now;
+        foreach (var folderPath in library.Folders)
+        {
+            folderPath.UpdateLastScanned(time);
+        }
+
+        library.UpdateLastScanned(time);
     }
 
     private async Task<Tuple<long, IList<ScannedSeriesResult>>> ScanFiles(Library library, IEnumerable<string> dirs,
diff --git a/API/Startup.cs b/API/Startup.cs
index a1aec4ab6..be3fa29e6 100644
--- a/API/Startup.cs
+++ b/API/Startup.cs
@@ -260,6 +260,7 @@ public class Startup
             await MigrateLooseLeafChapters.Migrate(dataContext, unitOfWork, directoryService, logger);
             await MigrateChapterFields.Migrate(dataContext, unitOfWork, logger);
             await MigrateChapterRange.Migrate(dataContext, unitOfWork, logger);
+            await MigrateMangaFilePath.Migrate(dataContext, logger);
 
             // Update the version in the DB after all migrations are run
             var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);
diff --git a/UI/Web/src/app/_services/library.service.ts b/UI/Web/src/app/_services/library.service.ts
index 75abf3a03..f4d7dbc34 100644
--- a/UI/Web/src/app/_services/library.service.ts
+++ b/UI/Web/src/app/_services/library.service.ts
@@ -32,7 +32,7 @@ export class LibraryService {
       return of(this.libraryNames);
     }
 
-    return this.httpClient.get<Library[]>(this.baseUrl + 'library').pipe(map(libraries => {
+    return this.httpClient.get<Library[]>(this.baseUrl + 'library/libraries').pipe(map(libraries => {
       this.libraryNames = {};
       libraries.forEach(lib => {
        if (this.libraryNames !== undefined) {
@@ -47,7 +47,7 @@ export class LibraryService {
     if (this.libraryNames != undefined && this.libraryNames.hasOwnProperty(libraryId)) {
       return of(this.libraryNames[libraryId]);
     }
-    return this.httpClient.get<Library[]>(this.baseUrl + 'library').pipe(map(l => {
+    return this.httpClient.get<Library[]>(this.baseUrl + 'library/libraries').pipe(map(l => {
       this.libraryNames = {};
       l.forEach(lib => {
         if (this.libraryNames !== undefined) {
@@ -75,8 +75,12 @@ export class LibraryService {
     return this.httpClient.get<JumpKey[]>(this.baseUrl + 'library/jump-bar?libraryId=' + libraryId);
   }
 
+  getLibrary(libraryId: number) {
+    return this.httpClient.get<Library>(this.baseUrl + 'library?libraryId=' + libraryId);
+  }
+
   getLibraries() {
-    return this.httpClient.get<Library[]>(this.baseUrl + 'library');
+    return this.httpClient.get<Library[]>(this.baseUrl + 'library/libraries');
   }
 
   updateLibrariesForMember(username: string, selectedLibraries: Library[]) {
diff --git a/UI/Web/src/app/admin/manage-tasks-settings/manage-tasks-settings.component.ts b/UI/Web/src/app/admin/manage-tasks-settings/manage-tasks-settings.component.ts
index 68d07fd21..69448ad1d 100644
--- a/UI/Web/src/app/admin/manage-tasks-settings/manage-tasks-settings.component.ts
+++ b/UI/Web/src/app/admin/manage-tasks-settings/manage-tasks-settings.component.ts
@@ -55,13 +55,6 @@ export class ManageTasksSettingsComponent implements OnInit {
       api: this.serverService.convertMedia(),
       successMessage: 'convert-media-task-success'
     },
-    // I removed this as it's not really needed, given that External Recs are the only thing that fill this cache now
-    // {
-    //   name: 'bust-cache-task',
-    //   description: 'bust-cache-task-desc',
-    //   api: this.serverService.bustCache(),
-    //   successMessage: 'bust-cache-task-success'
-    // },
     {
       name: 'bust-locale-task',
       description: 'bust-locale-task-desc',
@@ -245,20 +238,20 @@ export class ManageTasksSettingsComponent implements OnInit {
     modelSettings.taskScan = this.settingsForm.get('taskScan')?.value;
     modelSettings.taskCleanup = this.settingsForm.get('taskCleanup')?.value;
 
-    if (this.serverSettings.taskBackup === this.customOption) {
+    if (modelSettings.taskBackup === this.customOption) {
       modelSettings.taskBackup = this.settingsForm.get('taskBackupCustom')?.value;
     }
 
-    if (this.serverSettings.taskScan === this.customOption) {
+    if (modelSettings.taskScan === this.customOption) {
       modelSettings.taskScan = this.settingsForm.get('taskScanCustom')?.value;
     }
 
-    if (this.serverSettings.taskScan === this.customOption) {
+    if (modelSettings.taskCleanup === this.customOption) {
       modelSettings.taskCleanup = this.settingsForm.get('taskCleanupCustom')?.value;
     }
 
-    this.settingsService.updateServerSettings(modelSettings).pipe(take(1)).subscribe(async (settings: ServerSettings) => {
+    this.settingsService.updateServerSettings(modelSettings).pipe(take(1)).subscribe((settings: ServerSettings) => {
       this.serverSettings = settings;
       this.resetForm();
       this.recurringTasks$ = this.serverService.getRecurringJobs().pipe(shareReplay());
diff --git a/UI/Web/src/app/dashboard/_components/dashboard.component.html b/UI/Web/src/app/dashboard/_components/dashboard.component.html
index 47a2cb9d3..21bf468ab 100644
--- a/UI/Web/src/app/dashboard/_components/dashboard.component.html
+++ b/UI/Web/src/app/dashboard/_components/dashboard.component.html
@@ -7,7 +7,7 @@
 @if (isAdmin) {
-  {{t('no-libraries')}} {{t('server-settings-link')}}
+  {{t('no-libraries')}} {{t('server-settings-link')}}.
 } @else {
diff --git a/UI/Web/src/app/library-detail/library-detail.component.ts b/UI/Web/src/app/library-detail/library-detail.component.ts
index 635b2a3cd..e5bd3e604 100644
--- a/UI/Web/src/app/library-detail/library-detail.component.ts
+++ b/UI/Web/src/app/library-detail/library-detail.component.ts
@@ -231,7 +231,23 @@ export class LibraryDetailComponent implements OnInit {
 
   async handleAction(action: ActionItem<Library>, library: Library) {
     let lib: Partial<Library> = library;
     if (library === undefined) {
-      lib = {id: this.libraryId, name: this.libraryName};
+      //lib = {id: this.libraryId, name: this.libraryName}; // BUG: We need the whole library for editLibrary
+      this.libraryService.getLibrary(this.libraryId).subscribe(async library => {
+        switch (action.action) {
+          case(Action.Scan):
+            await this.actionService.scanLibrary(library);
+            break;
+          case(Action.RefreshMetadata):
+            await this.actionService.refreshMetadata(library);
+            break;
+          case(Action.Edit):
+            this.actionService.editLibrary(library);
+            break;
+          default:
+            break;
+        }
+      });
+      return;
     }
     switch (action.action) {
       case(Action.Scan):
diff --git a/UI/Web/src/app/sidenav/_modals/library-settings-modal/library-settings-modal.component.html b/UI/Web/src/app/sidenav/_modals/library-settings-modal/library-settings-modal.component.html
index 22884717e..b3771bbb6 100644
--- a/UI/Web/src/app/sidenav/_modals/library-settings-modal/library-settings-modal.component.html
+++ b/UI/Web/src/app/sidenav/_modals/library-settings-modal/library-settings-modal.component.html
@@ -33,11 +33,11 @@
-
+
 @if(IsKavitaPlusEligible) {
   {{t('kavitaplus-eligible-label')}}
-
+
 }
 {{t('type-tooltip')}}
diff --git a/UI/Web/src/app/statistics/_components/reading-activity/reading-activity.component.html b/UI/Web/src/app/statistics/_components/reading-activity/reading-activity.component.html
index a2ef443a8..53c16644b 100644
--- a/UI/Web/src/app/statistics/_components/reading-activity/reading-activity.component.html
+++ b/UI/Web/src/app/statistics/_components/reading-activity/reading-activity.component.html
@@ -11,7 +11,7 @@
diff --git a/UI/Web/src/app/statistics/_components/server-stats/server-stats.component.html b/UI/Web/src/app/statistics/_components/server-stats/server-stats.component.html
index cd90281d5..5d82541a2 100644
--- a/UI/Web/src/app/statistics/_components/server-stats/server-stats.component.html
+++ b/UI/Web/src/app/statistics/_components/server-stats/server-stats.component.html
@@ -4,7 +4,7 @@
-    {{stats.seriesCount | compactNumber}} Series
+    {{t('series-count', {num: stats.seriesCount | number})}}
@@ -13,7 +13,7 @@
-    {{stats.volumeCount | compactNumber}} Volumes
+    {{t('volume-count', {num: stats.volumeCount | number})}}
@@ -22,7 +22,7 @@
-    {{stats.totalFiles | compactNumber}} Files
+    {{t('file-count', {num: stats.totalFiles | number})}}
@@ -39,7 +39,7 @@
-
+    {{t('genre-count', {num: stats.totalGenres | compactNumber})}}
@@ -88,7 +88,7 @@
-
+
diff --git a/UI/Web/src/assets/langs/en.json b/UI/Web/src/assets/langs/en.json
index e372b65f4..97a9eb504 100644
--- a/UI/Web/src/assets/langs/en.json
+++ b/UI/Web/src/assets/langs/en.json
@@ -706,7 +706,7 @@
     "description": "Complete the form to register an admin account",
     "username-label": "{{common.username}}",
     "email-label": "{{common.email}}",
-    "email-tooltip": "Email does not need to be a real address, but provides access to forgot password. It is not sent outside the server unless forgot password is used without a custom email service host.",
+    "email-tooltip": "Email does not need to be a real address, but provides access to forgot password. It is not sent outside the server unless Kavita emails you.",
     "password-label": "{{common.password}}",
     "required-field": "{{validation.required-field}}",
     "valid-email": "{{validation.valid-email}}",
@@ -1779,6 +1779,7 @@
     "y-axis-label": "Hours Read",
     "no-data": "No Reading Progress",
     "time-frame-label": "Time Frame",
+    "all-users": "All Users",
     "this-week": "{{time-periods.this-week}}",
     "last-7-days": "{{time-periods.last-7-days}}",
     "last-30-days": "{{time-periods.last-30-days}}",
@@ -1841,6 +1842,9 @@
     "popular-libraries-title": "Popular Libraries",
     "popular-series-title": "Popular Series",
     "recently-read-title": "Recently Read",
+    "series-count": "{{num}} Series",
+    "volume-count": "{{num}} Volumes",
+    "file-count": "{{num}} Files",
     "genre-count": "{{num}} Genres",
     "tag-count": "{{num}} Tags",
     "people-count": "{{num}} People",
diff --git a/openapi.json b/openapi.json
index 10b37c81f..4ee08b420 100644
--- a/openapi.json
+++ b/openapi.json
@@ -2502,6 +2502,46 @@
       }
     },
     "/api/Library": {
+      "get": {
+        "tags": [
+          "Library"
+        ],
+        "summary": "Return a specific library",
+        "parameters": [
+          {
+            "name": "libraryId",
+            "in": "query",
+            "schema": {
+              "type": "integer",
+              "format": "int32"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Success",
+            "content": {
+              "text/plain": {
+                "schema": {
+                  "$ref": "#/components/schemas/LibraryDto"
+                }
+              },
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/LibraryDto"
+                }
+              },
+              "text/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/LibraryDto"
+                }
+              }
+            }
+          }
+        }
+      }
+    },
+    "/api/Library/libraries": {
       "get": {
         "tags": [
           "Library"