From b0df67cdda0bd04a91db850bc2ff187816b210df Mon Sep 17 00:00:00 2001
From: Joseph Milazzo
Date: Thu, 22 Jul 2021 21:13:24 -0500
Subject: [PATCH] PDF Support + MORE!!!! (#416)

# Added
- Added: Support for PDFs within Kavita. PDFs will open in the Manga reader and you can read through them as images. PDFs are heavier than archives, so they may take longer to open for reading. (Fixes #187)

# Changed
- Changed: Major change in how Kavita libraries work. Kavita libraries now allow mixed media types, meaning you can have raw images, archives, epubs, and PDFs all within your Manga library. If the same Series exists across two different media types, they will be kept separate and an icon will be shown to help you identify the type. The correct reader will open regardless of which library you are on. Note: Nightly users need to delete their Raw Images libraries before updating.

# Fixed
- Fixed: An issue where checking if a file was modified since the last scan always returned true, meaning we did more I/O than was needed (Fixes #415)
- Fixed: There wasn't enough spacing on the top menu bar in the Manga reader
- Fixed: A bug where the user preferences dark mode control always showed as enabled, even if you were not using dark mode

# Dev stuff
- For image extraction, if there is only 1 image we extract just that file; otherwise we extract only the images
- Refactored all the Parser code out of the ScannerService into a self-contained class. A new instance should be created for each scan, allowing multiple tasks to run without any chance of crossover.

* Fixed indentation for .cs files
* Fixed an issue where the logic for determining whether a file had been modified was not working and always reported modified, meaning we were doing more file I/O than needed.
* Implemented the ability to have PDF books. No reader functionality yet.
* Implemented a basic form of scanning for PDF files. Reworked image-based libraries to remove the need for a separate, special library type; raw images now work within the Manga/Comic library.
* Removed the old library types.
* Removed some extra code around the old raw image library types.
* Fully implemented PDF support in Kavita using docnet. Removed the old libraries we tried that did not work. PDFs take about 200ms to save the file to disk, so they are much slower than reading archives.
* Refactored Libraries so that they can contain any supported file extension and the UI decides which reader to use.
* Reworked the Series parsing code. We now use a separate instance for each task call, so there should be no crossover if 2 tasks are running at the same time. Second, we now store Format on the Series, so we can have duplicate Series with the same name but a different file type underneath.
* Fixed PDF transparency issues
  - Used this code to fix an issue when a PDF page doesn't have a background: https://github.com/GowenGit/docnet/issues/8#issuecomment-538985672
  - This also fixes the same issue for cover images
* Fixed an issue where, if a raw image was in a directory with non-image files, those files would get moved to the cache when trying to open the file.
* For image extraction, if there is only 1 image, just copy that to the cache instead of every other image in the directory.
* Add some spacing to the top menu bar * Added an icon to the card to showcase the type of file * Added a tag badge to the series detail page * Fixed a bug in user preferences where dark mode control would default to true, even if you weren't on it * Fixed some tests up * Some code smells Co-authored-by: Robbie Davis --- .editorconfig | 3 - API.Tests/Parser/ParserTest.cs | 32 +- API.Tests/Services/ScannerServiceTests.cs | 184 ++-- API/API.csproj | 2 + API/Controllers/.editorconfig | 0 API/Controllers/BookController.cs | 47 +- API/DTOs/SeriesDto.cs | 6 +- API/Data/DbFactory.cs | 2 +- ...210722223304_AddedSeriesFormat.Designer.cs | 872 ++++++++++++++++++ .../20210722223304_AddedSeriesFormat.cs | 44 + .../Migrations/DataContextModelSnapshot.cs | 5 +- API/Entities/Chapter.cs | 2 +- API/Entities/Enums/LibraryType.cs | 4 - API/Entities/Enums/MangaFormat.cs | 8 +- API/Entities/MangaFile.cs | 7 +- API/Entities/Series.cs | 14 +- API/Extensions/FileInfoExtensions.cs | 4 +- API/Extensions/ParserInfoListExtensions.cs | 16 +- API/Extensions/SeriesExtensions.cs | 14 + API/Extensions/VolumeListExtensions.cs | 5 +- API/Interfaces/Services/IBookService.cs | 6 + API/Interfaces/Services/IDirectoryService.cs | 2 +- API/Parser/Parser.cs | 27 +- API/Services/BookService.cs | 149 ++- API/Services/CacheService.cs | 10 +- API/Services/DirectoryService.cs | 12 +- API/Services/MetadataService.cs | 330 +++---- .../Tasks/Scanner/ParseScannedFiles.cs | 198 ++++ API/Services/Tasks/ScannerService.cs | 204 +--- API/Startup.cs | 1 - .../edit-series-modal.component.html | 1 + UI/Web/src/app/_models/library.ts | 2 - UI/Web/src/app/_models/manga-format.ts | 3 +- UI/Web/src/app/_models/series.ts | 2 + .../manage-library.component.ts | 4 - .../manga-reader/manga-reader.component.html | 2 +- .../series-detail.component.html | 15 +- .../series-detail/series-detail.component.ts | 12 +- .../app/shared/_services/utility.service.ts | 30 + .../shared/card-item/card-item.component.html | 3 +- .../shared/card-item/card-item.component.ts | 10 +- .../shared/tag-badge/tag-badge.component.scss | 4 +- .../user-preferences.component.ts | 4 +- 43 files changed, 1725 insertions(+), 577 deletions(-) delete mode 100644 API/Controllers/.editorconfig create mode 100644 API/Data/Migrations/20210722223304_AddedSeriesFormat.Designer.cs create mode 100644 API/Data/Migrations/20210722223304_AddedSeriesFormat.cs create mode 100644 API/Services/Tasks/Scanner/ParseScannedFiles.cs diff --git a/.editorconfig b/.editorconfig index 23e3b61fc..cd6d2ec06 100644 --- a/.editorconfig +++ b/.editorconfig @@ -8,9 +8,6 @@ indent_size = 4 insert_final_newline = true trim_trailing_whitespace = true -[*.cs] -indent_size = 3 - [*.ts] quote_type = single indent_size = 2 diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs index 4a1a1babd..5857a50c9 100644 --- a/API.Tests/Parser/ParserTest.cs +++ b/API.Tests/Parser/ParserTest.cs @@ -5,7 +5,7 @@ namespace API.Tests.Parser { public class ParserTests { - + [Theory] [InlineData("Beastars - SP01", true)] [InlineData("Beastars SP01", true)] @@ -44,8 +44,8 @@ namespace API.Tests.Parser { Assert.Equal(expected, CleanTitle(input)); } - - + + // [Theory] // //[InlineData("@font-face{font-family:\"PaytoneOne\";src:url(\"..\\/Fonts\\/PaytoneOne.ttf\")}", "@font-face{font-family:\"PaytoneOne\";src:url(\"PaytoneOne.ttf\")}")] // [InlineData("@font-face{font-family:\"PaytoneOne\";src:url(\"..\\/Fonts\\/PaytoneOne.ttf\")}", "..\\/Fonts\\/PaytoneOne.ttf")] @@ -60,7 +60,7 @@ namespace API.Tests.Parser // 
Assert.Equal(!string.IsNullOrEmpty(expected), FontSrcUrlRegex.Match(input).Success); // } - + [Theory] [InlineData("test.cbz", true)] [InlineData("test.cbr", true)] @@ -72,10 +72,10 @@ namespace API.Tests.Parser { Assert.Equal(expected, IsArchive(input)); } - + [Theory] [InlineData("test.epub", true)] - [InlineData("test.pdf", false)] + [InlineData("test.pdf", true)] [InlineData("test.mobi", false)] [InlineData("test.djvu", false)] [InlineData("test.zip", false)] @@ -86,7 +86,7 @@ namespace API.Tests.Parser { Assert.Equal(expected, IsBook(input)); } - + [Theory] [InlineData("test.epub", true)] [InlineData("test.EPUB", true)] @@ -111,7 +111,7 @@ namespace API.Tests.Parser // { // Assert.Equal(expected, ParseEdition(input)); // } - + // [Theory] // [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)] // [InlineData("Beelzebub_Omake_June_2012_RHS", true)] @@ -124,7 +124,7 @@ namespace API.Tests.Parser // { // Assert.Equal(expected, ParseMangaSpecial(input) != ""); // } - + [Theory] [InlineData("12-14", 12)] [InlineData("24", 24)] @@ -147,8 +147,8 @@ namespace API.Tests.Parser { Assert.Equal(expected, Normalize(input)); } - - + + [Theory] [InlineData("test.jpg", true)] @@ -160,7 +160,7 @@ namespace API.Tests.Parser { Assert.Equal(expected, IsImage(filename)); } - + [Theory] [InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")] [InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")] @@ -173,10 +173,10 @@ namespace API.Tests.Parser Assert.NotNull(actual); return; } - + Assert.Equal(expectedSeries, actual.Series); } - + [Theory] [InlineData("Love Hina - Special.jpg", false)] [InlineData("folder.jpg", true)] @@ -190,7 +190,7 @@ namespace API.Tests.Parser { Assert.Equal(expected, IsCoverImage(inputPath)); } - + [Theory] [InlineData("__MACOSX/Love Hina - Special.jpg", true)] [InlineData("TEST/Love Hina - Special.jpg", false)] @@ -201,4 +201,4 @@ namespace API.Tests.Parser Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath)); } } -} \ No newline at end of file +} diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs index e8034cdca..8eb9bc60f 100644 --- a/API.Tests/Services/ScannerServiceTests.cs +++ b/API.Tests/Services/ScannerServiceTests.cs @@ -7,11 +7,13 @@ using System.Linq; using System.Threading.Tasks; using API.Data; using API.Entities; +using API.Entities.Enums; using API.Interfaces; using API.Interfaces.Services; using API.Parser; using API.Services; using API.Services.Tasks; +using API.Services.Tasks.Scanner; using API.Tests.Helpers; using AutoMapper; using Microsoft.Data.Sqlite; @@ -47,15 +49,6 @@ namespace API.Tests.Services _context = new DataContext(contextOptions); Task.Run(SeedDb).GetAwaiter().GetResult(); - - //BackgroundJob.Enqueue is what I need to mock or something (it's static...) 
- // ICacheService cacheService, ILogger logger, IScannerService scannerService, - // IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService, - // IBackgroundJobClient jobClient - //var taskScheduler = new TaskScheduler(Substitute.For(), Substitute.For>(), Substitute.For<) - - - // Substitute.For>() - Not needed because only for UserService IUnitOfWork unitOfWork = new UnitOfWork(_context, Substitute.For(), null); @@ -82,66 +75,64 @@ namespace API.Tests.Services return await _context.SaveChangesAsync() > 0; } - // [Fact] - // public void Test() - // { - // _scannerService.ScanLibrary(1, false); - // - // var series = _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1).Result.Series; - // } - [Fact] public void FindSeriesNotOnDisk_Should_RemoveNothing_Test() { - var infos = new Dictionary>(); + var infos = new Dictionary>(); - AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"}); - AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1"}); - AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10"}); + AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive}); + AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive}); + AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive}); - var existingSeries = new List(); - existingSeries.Add(new Series() + var existingSeries = new List { - Name = "Cage of Eden", - LocalizedName = "Cage of Eden", - OriginalName = "Cage of Eden", - NormalizedName = API.Parser.Parser.Normalize("Cage of Eden"), - Metadata = new SeriesMetadata() - }); - existingSeries.Add(new Series() - { - Name = "Darker Than Black", - LocalizedName = "Darker Than Black", - OriginalName = "Darker Than Black", - NormalizedName = API.Parser.Parser.Normalize("Darker Than Black"), - Metadata = new SeriesMetadata() - }); + new Series() + { + Name = "Cage of Eden", + LocalizedName = "Cage of Eden", + OriginalName = "Cage of Eden", + NormalizedName = API.Parser.Parser.Normalize("Cage of Eden"), + Metadata = new SeriesMetadata(), + Format = MangaFormat.Archive + }, + new Series() + { + Name = "Darker Than Black", + LocalizedName = "Darker Than Black", + OriginalName = "Darker Than Black", + NormalizedName = API.Parser.Parser.Normalize("Darker Than Black"), + Metadata = new SeriesMetadata(), + Format = MangaFormat.Archive + } + }; Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos)); } - [Theory] - [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")] - [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")] - [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")] - [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")] - public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected) - { - var collectedSeries = new ConcurrentDictionary>(); - foreach (var seriesName in existingSeriesNames) - { - AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName}); - } - - var actualName = _scannerService.MergeName(collectedSeries, new ParserInfo() - { - Series = parsedInfoName - }); - - Assert.Equal(expected, actualName); - } + // TODO: Figure out how to do this with ParseScannedFiles + // [Theory] + // [InlineData(new [] {"Darker than Black"}, "Darker than Black", 
"Darker than Black")] + // [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")] + // [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")] + // [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")] + // public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected) + // { + // var collectedSeries = new ConcurrentDictionary>(); + // foreach (var seriesName in existingSeriesNames) + // { + // AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName, Format = MangaFormat.Archive}); + // } + // + // var actualName = new ParseScannedFiles(_bookService, _logger).MergeName(collectedSeries, new ParserInfo() + // { + // Series = parsedInfoName, + // Format = MangaFormat.Archive + // }); + // + // Assert.Equal(expected, actualName); + // } [Fact] public void RemoveMissingSeries_Should_RemoveSeries() @@ -162,11 +153,19 @@ namespace API.Tests.Services Assert.Equal(missingSeries.Count, removeCount); } - private void AddToParsedInfo(IDictionary> collectedSeries, ParserInfo info) + private void AddToParsedInfo(IDictionary> collectedSeries, ParserInfo info) { + var existingKey = collectedSeries.Keys.FirstOrDefault(ps => + ps.Format == info.Format && ps.NormalizedName == API.Parser.Parser.Normalize(info.Series)); + existingKey ??= new ParsedSeries() + { + Format = info.Format, + Name = info.Series, + NormalizedName = API.Parser.Parser.Normalize(info.Series) + }; if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>)) { - ((ConcurrentDictionary>) collectedSeries).AddOrUpdate(info.Series, new List() {info}, (_, oldValue) => + ((ConcurrentDictionary>) collectedSeries).AddOrUpdate(existingKey, new List() {info}, (_, oldValue) => { oldValue ??= new List(); if (!oldValue.Contains(info)) @@ -179,84 +178,25 @@ namespace API.Tests.Services } else { - if (!collectedSeries.ContainsKey(info.Series)) + if (!collectedSeries.ContainsKey(existingKey)) { - collectedSeries.Add(info.Series, new List() {info}); + collectedSeries.Add(existingKey, new List() {info}); } else { - var list = collectedSeries[info.Series]; + var list = collectedSeries[existingKey]; if (!list.Contains(info)) { list.Add(info); } - collectedSeries[info.Series] = list; + collectedSeries[existingKey] = list; } } } - - - // [Fact] - // public void ExistingOrDefault_Should_BeFromLibrary() - // { - // var allSeries = new List() - // { - // new Series() {Id = 2, Name = "Darker Than Black"}, - // new Series() {Id = 3, Name = "Darker Than Black - Some Extension"}, - // new Series() {Id = 4, Name = "Akame Ga Kill"}, - // }; - // Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black").Id); - // Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker than Black").Id); - // } - // - // [Fact] - // public void ExistingOrDefault_Should_BeFromAllSeries() - // { - // var allSeries = new List() - // { - // new Series() {Id = 2, Name = "Darker Than Black"}, - // new Series() {Id = 3, Name = "Darker Than Black - Some Extension"}, - // new Series() {Id = 4, Name = "Akame Ga Kill"}, - // }; - // Assert.Equal(3, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black - Some Extension").Id); - // } - // - // [Fact] - // public void ExistingOrDefault_Should_BeNull() - // { - // var allSeries = new List() - // { - // new Series() {Id = 2, Name = "Darker Than Black"}, - // new Series() {Id = 3, Name = 
"Darker Than Black - Some Extension"}, - // new Series() {Id = 4, Name = "Akame Ga Kill"}, - // }; - // Assert.Null(ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Non existing series")); - // } - - [Fact] - public void Should_CreateSeries_Test() - { - // var allSeries = new List(); - // var parsedSeries = new Dictionary>(); - // - // parsedSeries.Add("Darker Than Black", new List() - // { - // new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker Than Black", Volumes = "1"}, - // new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker than Black", Volumes = "2"} - // }); - // - // _scannerService.UpsertSeries(_libraryMock, parsedSeries, allSeries); - // - // Assert.Equal(1, _libraryMock.Series.Count); - // Assert.Equal(2, _libraryMock.Series.ElementAt(0).Volumes.Count); - // _testOutputHelper.WriteLine(_libraryMock.ToString()); - Assert.True(true); - } - private static DbConnection CreateInMemoryDatabase() { var connection = new SqliteConnection("Filename=:memory:"); diff --git a/API/API.csproj b/API/API.csproj index c912a6b01..ab727275b 100644 --- a/API/API.csproj +++ b/API/API.csproj @@ -31,6 +31,7 @@ + @@ -57,6 +58,7 @@ runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/API/Controllers/.editorconfig b/API/Controllers/.editorconfig deleted file mode 100644 index e69de29bb..000000000 diff --git a/API/Controllers/BookController.cs b/API/Controllers/BookController.cs index 84f9338d7..0efa6c71e 100644 --- a/API/Controllers/BookController.cs +++ b/API/Controllers/BookController.cs @@ -45,7 +45,7 @@ namespace API.Controllers var key = BookService.CleanContentKeys(file); if (!book.Content.AllFiles.ContainsKey(key)) return BadRequest("File was not found in book"); - + var bookFile = book.Content.AllFiles[key]; var content = await bookFile.ReadContentAsBytesAsync(); @@ -62,7 +62,7 @@ namespace API.Controllers var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); using var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); var mappings = await _bookService.CreateKeyToPageMappingAsync(book); - + var navItems = await book.GetNavigationAsync(); var chaptersList = new List(); @@ -70,9 +70,8 @@ namespace API.Controllers { if (navigationItem.NestedItems.Count > 0) { - _logger.LogDebug("Header: {Header}", navigationItem.Title); - var nestedChapters = new List(); - + var nestedChapters = new List(); + foreach (var nestedChapter in navigationItem.NestedItems) { if (nestedChapter.Link == null) continue; @@ -93,7 +92,7 @@ namespace API.Controllers { var item = new BookChapterItem() { - Title = navigationItem.Title, + Title = navigationItem.Title, Children = nestedChapters }; if (nestedChapters.Count > 0) @@ -110,7 +109,7 @@ namespace API.Controllers chaptersList.Add(new BookChapterItem() { Title = navigationItem.Title, - Page = mappings[groupKey], + Page = mappings[groupKey], Children = nestedChapters }); } @@ -123,14 +122,14 @@ namespace API.Controllers // Generate from TOC var tocPage = book.Content.Html.Keys.FirstOrDefault(k => k.ToUpper().Contains("TOC")); if (tocPage == null) return Ok(chaptersList); - + // Find all anchor tags, for each anchor we get inner text, to lower then titlecase on UI. 
Get href and generate page content var doc = new HtmlDocument(); var content = await book.Content.Html[tocPage].ReadContentAsync(); doc.LoadHtml(content); var anchors = doc.DocumentNode.SelectNodes("//a"); if (anchors == null) return Ok(chaptersList); - + foreach (var anchor in anchors) { if (anchor.Attributes.Contains("href")) @@ -162,11 +161,11 @@ namespace API.Controllers } } } - + } return Ok(chaptersList); } - + [HttpGet("{chapterId}/book-page")] public async Task> GetBookPage(int chapterId, [FromQuery] int page) { @@ -186,10 +185,10 @@ namespace API.Controllers { var content = await contentFileRef.ReadContentAsync(); if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return Ok(content); - + // In more cases than not, due to this being XML not HTML, we need to escape the script tags. content = BookService.EscapeTags(content); - + doc.LoadHtml(content); var body = doc.DocumentNode.SelectSingleNode("//body"); @@ -219,7 +218,7 @@ namespace API.Controllers body.PrependChild(HtmlNode.CreateNode($"")); } } - + var styleNodes = doc.DocumentNode.SelectNodes("/html/head/link"); if (styleNodes != null) { @@ -239,7 +238,7 @@ namespace API.Controllers key = correctedKey; } - + var styleContent = await _bookService.ScopeStyles(await book.Content.Css[key].ReadContentAsync(), apiBase, book.Content.Css[key].FileName, book); body.PrependChild(HtmlNode.CreateNode($"")); } @@ -253,14 +252,14 @@ namespace API.Controllers BookService.UpdateLinks(anchor, mappings, page); } } - + var images = doc.DocumentNode.SelectNodes("//img"); if (images != null) { foreach (var image in images) { if (image.Name != "img") continue; - + // Need to do for xlink:href if (image.Attributes["src"] != null) { @@ -278,14 +277,14 @@ namespace API.Controllers } } } - + images = doc.DocumentNode.SelectNodes("//image"); if (images != null) { foreach (var image in images) { if (image.Name != "image") continue; - + if (image.Attributes["xlink:href"] != null) { var imageFile = image.Attributes["xlink:href"].Value; @@ -302,7 +301,7 @@ namespace API.Controllers } } } - + // Check if any classes on the html node (some r2l books do this) and move them to body tag for scoping var htmlNode = doc.DocumentNode.SelectSingleNode("//html"); if (htmlNode != null && htmlNode.Attributes.Contains("class")) @@ -313,9 +312,9 @@ namespace API.Controllers // I actually need the body tag itself for the classes, so i will create a div and put the body stuff there. return Ok($"
<div class=\"{htmlNode.Attributes["class"].Value}\">{body.InnerHtml}</div>
"); } - - - return Ok(body.InnerHtml); + + + return Ok(body.InnerHtml); } counter++; @@ -324,4 +323,4 @@ namespace API.Controllers return BadRequest("Could not find the appropriate html for that page"); } } -} \ No newline at end of file +} diff --git a/API/DTOs/SeriesDto.cs b/API/DTOs/SeriesDto.cs index 0f8f4263c..933bf0408 100644 --- a/API/DTOs/SeriesDto.cs +++ b/API/DTOs/SeriesDto.cs @@ -1,4 +1,5 @@ using System; +using API.Entities.Enums; namespace API.DTOs { @@ -23,10 +24,11 @@ namespace API.DTOs /// Review from logged in user. Calculated at API-time. /// public string UserReview { get; set; } - + public MangaFormat Format { get; set; } + public DateTime Created { get; set; } public int LibraryId { get; set; } public string LibraryName { get; set; } } -} \ No newline at end of file +} diff --git a/API/Data/DbFactory.cs b/API/Data/DbFactory.cs index 804cd75bb..877aa7581 100644 --- a/API/Data/DbFactory.cs +++ b/API/Data/DbFactory.cs @@ -45,7 +45,7 @@ namespace API.Data { Number = specialTreatment ? "0" : Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty, Range = specialTreatment ? info.Filename : info.Chapters, - Title = (specialTreatment && info.Format == MangaFormat.Book) + Title = (specialTreatment && info.Format == MangaFormat.Epub) ? info.Title : specialTitle, Files = new List(), diff --git a/API/Data/Migrations/20210722223304_AddedSeriesFormat.Designer.cs b/API/Data/Migrations/20210722223304_AddedSeriesFormat.Designer.cs new file mode 100644 index 000000000..dff2d3868 --- /dev/null +++ b/API/Data/Migrations/20210722223304_AddedSeriesFormat.Designer.cs @@ -0,0 +1,872 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210722223304_AddedSeriesFormat")] + partial class AddedSeriesFormat + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + 
b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("SiteDarkMode") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + 
b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId", "Format") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + 
b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + 
b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + 
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210722223304_AddedSeriesFormat.cs b/API/Data/Migrations/20210722223304_AddedSeriesFormat.cs new file mode 100644 index 000000000..f236b6ec2 --- /dev/null +++ b/API/Data/Migrations/20210722223304_AddedSeriesFormat.cs @@ -0,0 +1,44 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class AddedSeriesFormat : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropIndex( + name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId", + table: "Series"); + + migrationBuilder.AddColumn( + name: "Format", + table: "Series", + type: "INTEGER", + nullable: false, + defaultValue: 2); + + migrationBuilder.CreateIndex( + name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId_Format", + table: "Series", + columns: new[] { "Name", "NormalizedName", "LocalizedName", "LibraryId", "Format" }, + unique: true); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropIndex( + name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId_Format", + table: "Series"); + + migrationBuilder.DropColumn( + name: "Format", + table: "Series"); + + migrationBuilder.CreateIndex( + name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId", + table: "Series", + columns: new[] { "Name", "NormalizedName", "LocalizedName", "LibraryId" }, + unique: true); + } + } +} diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs b/API/Data/Migrations/DataContextModelSnapshot.cs index c6d49fc2a..ebf940768 100644 --- a/API/Data/Migrations/DataContextModelSnapshot.cs +++ b/API/Data/Migrations/DataContextModelSnapshot.cs @@ -412,6 +412,9 @@ namespace API.Data.Migrations b.Property("Created") .HasColumnType("TEXT"); + b.Property("Format") + .HasColumnType("INTEGER"); + b.Property("LastModified") .HasColumnType("TEXT"); @@ -443,7 +446,7 @@ namespace API.Data.Migrations b.HasIndex("LibraryId"); - b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId", "Format") .IsUnique(); b.ToTable("Series"); diff --git a/API/Entities/Chapter.cs b/API/Entities/Chapter.cs index 31f4dc513..39ce68477 100644 --- a/API/Entities/Chapter.cs +++ b/API/Entities/Chapter.cs @@ -49,7 +49,7 @@ namespace API.Entities { Number = "0"; 
} - Title = (IsSpecial && info.Format == MangaFormat.Book) + Title = (IsSpecial && info.Format == MangaFormat.Epub) ? info.Title : Range; diff --git a/API/Entities/Enums/LibraryType.cs b/API/Entities/Enums/LibraryType.cs index baaceb9ca..23bb8df25 100644 --- a/API/Entities/Enums/LibraryType.cs +++ b/API/Entities/Enums/LibraryType.cs @@ -10,9 +10,5 @@ namespace API.Entities.Enums Comic = 1, [Description("Book")] Book = 2, - [Description("Images (Manga)")] - MangaImages = 3, - [Description("Images (Comic)")] - ComicImages = 4 } } diff --git a/API/Entities/Enums/MangaFormat.cs b/API/Entities/Enums/MangaFormat.cs index 121aa3e1c..3688449fd 100644 --- a/API/Entities/Enums/MangaFormat.cs +++ b/API/Entities/Enums/MangaFormat.cs @@ -10,7 +10,9 @@ namespace API.Entities.Enums Archive = 1, [Description("Unknown")] Unknown = 2, - [Description("Book")] - Book = 3 + [Description("EPUB")] + Epub = 3, + [Description("PDF")] + Pdf = 4 } -} \ No newline at end of file +} diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs index 2efb76bfb..85aa4d5b1 100644 --- a/API/Entities/MangaFile.cs +++ b/API/Entities/MangaFile.cs @@ -2,7 +2,6 @@ using System; using System.IO; using API.Entities.Enums; -using API.Extensions; namespace API.Entities { @@ -26,11 +25,11 @@ namespace API.Entities // Relationship Mapping public Chapter Chapter { get; set; } public int ChapterId { get; set; } - + // Methods public bool HasFileBeenModified() { - return new FileInfo(FilePath).DoesLastWriteMatch(LastModified); + return !File.GetLastWriteTime(FilePath).Equals(LastModified); } } -} \ No newline at end of file +} diff --git a/API/Entities/Series.cs b/API/Entities/Series.cs index 4d8a48be4..cbb1bba6e 100644 --- a/API/Entities/Series.cs +++ b/API/Entities/Series.cs @@ -1,11 +1,12 @@ using System; using System.Collections.Generic; +using API.Entities.Enums; using API.Entities.Interfaces; using Microsoft.EntityFrameworkCore; namespace API.Entities { - [Index(nameof(Name), nameof(NormalizedName), nameof(LocalizedName), nameof(LibraryId), IsUnique = true)] + [Index(nameof(Name), nameof(NormalizedName), nameof(LocalizedName), nameof(LibraryId), nameof(Format), IsUnique = true)] public class Series : IEntityDate { public int Id { get; set; } @@ -22,7 +23,7 @@ namespace API.Entities /// public string SortName { get; set; } /// - /// Name in Japanese. By default, will be same as Name. + /// Name in Japanese. By default, will be same as Name. 
/// public string LocalizedName { get; set; } /// @@ -40,7 +41,12 @@ namespace API.Entities /// Sum of all Volume page counts /// public int Pages { get; set; } - + + /// + /// The type of all the files attached to this series + /// + public MangaFormat Format { get; set; } = MangaFormat.Unknown; + public SeriesMetadata Metadata { get; set; } // Relationships @@ -49,4 +55,4 @@ namespace API.Entities public int LibraryId { get; set; } } -} \ No newline at end of file +} diff --git a/API/Extensions/FileInfoExtensions.cs b/API/Extensions/FileInfoExtensions.cs index 82f6e663f..6730b6f05 100644 --- a/API/Extensions/FileInfoExtensions.cs +++ b/API/Extensions/FileInfoExtensions.cs @@ -9,10 +9,10 @@ namespace API.Extensions { return comparison.Equals(fileInfo.LastWriteTime); } - + public static bool IsLastWriteLessThan(this FileInfo fileInfo, DateTime comparison) { return fileInfo.LastWriteTime < comparison; } } -} \ No newline at end of file +} diff --git a/API/Extensions/ParserInfoListExtensions.cs b/API/Extensions/ParserInfoListExtensions.cs index 2043583bb..0ea098b20 100644 --- a/API/Extensions/ParserInfoListExtensions.cs +++ b/API/Extensions/ParserInfoListExtensions.cs @@ -1,6 +1,7 @@ using System.Collections.Generic; using System.Linq; using API.Entities; +using API.Entities.Enums; using API.Parser; namespace API.Extensions @@ -26,8 +27,19 @@ namespace API.Extensions /// public static bool HasInfo(this IList infos, Chapter chapter) { - return chapter.IsSpecial ? infos.Any(v => v.Filename == chapter.Range) + return chapter.IsSpecial ? infos.Any(v => v.Filename == chapter.Range) : infos.Any(v => v.Chapters == chapter.Range); } + + /// + /// Returns the MangaFormat that is common to all the files. Unknown if files are mixed (should never happen) or no infos + /// + /// + /// + public static MangaFormat GetFormat(this IList infos) + { + if (infos.Count == 0) return MangaFormat.Unknown; + return infos.DistinctBy(x => x.Format).First().Format; + } } -} \ No newline at end of file +} diff --git a/API/Extensions/SeriesExtensions.cs b/API/Extensions/SeriesExtensions.cs index 376abc525..cedeb3905 100644 --- a/API/Extensions/SeriesExtensions.cs +++ b/API/Extensions/SeriesExtensions.cs @@ -2,6 +2,8 @@ using System.Linq; using API.Entities; using API.Parser; +using API.Services; +using API.Services.Tasks.Scanner; namespace API.Extensions { @@ -19,6 +21,18 @@ namespace API.Extensions || name == series.Name || name == series.LocalizedName || name == series.OriginalName || Parser.Parser.Normalize(name) == Parser.Parser.Normalize(series.OriginalName)); } + /// + /// Checks against all the name variables of the Series if it matches anything in the list. 
Includes a check against the Format of the Series + /// + /// + /// + /// + public static bool NameInList(this Series series, IEnumerable list) + { + return list.Any(name => Parser.Parser.Normalize(name.Name) == series.NormalizedName || Parser.Parser.Normalize(name.Name) == Parser.Parser.Normalize(series.Name) + || name.Name == series.Name || name.Name == series.LocalizedName || name.Name == series.OriginalName || Parser.Parser.Normalize(name.Name) == Parser.Parser.Normalize(series.OriginalName) && series.Format == name.Format); + } + /// /// Checks against all the name variables of the Series if it matches the /// diff --git a/API/Extensions/VolumeListExtensions.cs b/API/Extensions/VolumeListExtensions.cs index 5b50d382f..3647cc21c 100644 --- a/API/Extensions/VolumeListExtensions.cs +++ b/API/Extensions/VolumeListExtensions.cs @@ -23,6 +23,7 @@ namespace API.Extensions /// public static Volume GetCoverImage(this IList volumes, LibraryType libraryType) { + // TODO: Refactor this to use MangaFormat Epub instead if (libraryType == LibraryType.Book) { return volumes.OrderBy(x => x.Number).FirstOrDefault(); @@ -30,9 +31,9 @@ namespace API.Extensions if (volumes.Any(x => x.Number != 0)) { - return volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); + return volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); } return volumes.OrderBy(x => x.Number).FirstOrDefault(); } } -} \ No newline at end of file +} diff --git a/API/Interfaces/Services/IBookService.cs b/API/Interfaces/Services/IBookService.cs index 63d0aa712..b3afc13a8 100644 --- a/API/Interfaces/Services/IBookService.cs +++ b/API/Interfaces/Services/IBookService.cs @@ -22,5 +22,11 @@ namespace API.Interfaces.Services Task ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book); string GetSummaryInfo(string filePath); ParserInfo ParseInfo(string filePath); + /// + /// Extracts a PDF file's pages as images to an target directory + /// + /// + /// Where the files will be extracted to. If doesn't exist, will be created. 
+ void ExtractPdfImages(string fileFilePath, string targetDirectory); } } diff --git a/API/Interfaces/Services/IDirectoryService.cs b/API/Interfaces/Services/IDirectoryService.cs index 638d8eb71..4e6979bd5 100644 --- a/API/Interfaces/Services/IDirectoryService.cs +++ b/API/Interfaces/Services/IDirectoryService.cs @@ -27,6 +27,6 @@ namespace API.Interfaces.Services SearchOption searchOption = SearchOption.TopDirectoryOnly); void CopyFileToDirectory(string fullFilePath, string targetDirectory); - public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName); + public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "*"); } } diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index 2bc120700..9c6fccbe9 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -12,9 +12,14 @@ namespace API.Parser public const string DefaultChapter = "0"; public const string DefaultVolume = "0"; - public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|\.cb7|\.cbt"; - public const string BookFileExtensions = @"\.epub"; public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; + public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|\.cb7|\.cbt"; + public const string BookFileExtensions = @"\.epub|\.pdf"; + public const string MangaComicFileExtensions = ArchiveFileExtensions + "|" + ImageFileExtensions + @"|\.pdf"; + + public const string SupportedExtensions = + ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions; + public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?[\\w\\d/\\._-]+)([\"|'];?)", RegexOptions.IgnoreCase | RegexOptions.Compiled); @@ -422,7 +427,7 @@ namespace API.Parser var fileName = Path.GetFileName(filePath); ParserInfo ret; - if (type == LibraryType.Book) + if (IsEpub(filePath)) { ret = new ParserInfo() { @@ -448,7 +453,7 @@ namespace API.Parser }; } - if (type is LibraryType.ComicImages or LibraryType.MangaImages) + if (IsImage(filePath)) { // Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders. ret.Volumes = DefaultVolume; @@ -456,7 +461,7 @@ namespace API.Parser ret.Series = string.Empty; } - if (ret.Series == string.Empty || (type is LibraryType.ComicImages or LibraryType.MangaImages)) + if (ret.Series == string.Empty || IsImage(filePath)) { // Try to parse information out of each folder all the way to rootPath ParseFromFallbackFolders(filePath, rootPath, type, ref ret); @@ -511,8 +516,8 @@ namespace API.Parser var folder = fallbackFolders[i]; if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue; - var parsedVolume = (type is LibraryType.Manga or LibraryType.MangaImages) ? ParseVolume(folder) : ParseComicVolume(folder); - var parsedChapter = (type is LibraryType.Manga or LibraryType.MangaImages) ? ParseChapter(folder) : ParseComicChapter(folder); + var parsedVolume = type is LibraryType.Manga ? ParseVolume(folder) : ParseComicVolume(folder); + var parsedChapter = type is LibraryType.Manga ? 
ParseChapter(folder) : ParseComicChapter(folder); if (!parsedVolume.Equals(DefaultVolume) || !parsedChapter.Equals(DefaultChapter)) { @@ -548,7 +553,8 @@ namespace API.Parser { if (IsArchive(filePath)) return MangaFormat.Archive; if (IsImage(filePath)) return MangaFormat.Image; - if (IsBook(filePath)) return MangaFormat.Book; + if (IsEpub(filePath)) return MangaFormat.Epub; + if (IsPdf(filePath)) return MangaFormat.Pdf; return MangaFormat.Unknown; } @@ -936,5 +942,10 @@ namespace API.Parser { return Path.GetExtension(filePath).ToLower() == ".epub"; } + + public static bool IsPdf(string filePath) + { + return Path.GetExtension(filePath).ToLower() == ".pdf"; + } } } diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index 36c59a789..513ff8db8 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -1,7 +1,10 @@ using System; using System.Collections.Generic; +using System.Drawing; +using System.Drawing.Imaging; using System.IO; using System.Linq; +using System.Runtime.InteropServices; using System.Text; using System.Text.RegularExpressions; using System.Threading.Tasks; @@ -9,11 +12,14 @@ using System.Web; using API.Entities.Enums; using API.Interfaces.Services; using API.Parser; +using Docnet.Core; +using Docnet.Core.Models; using ExCSS; using HtmlAgilityPack; using Microsoft.Extensions.Logging; -using NetVips; using VersOne.Epub; +using Image = NetVips.Image; +using Point = System.Drawing.Point; namespace API.Services { @@ -25,6 +31,7 @@ namespace API.Services public BookService(ILogger logger) { _logger = logger; + } private static bool HasClickableHrefPart(HtmlNode anchor) @@ -157,7 +164,8 @@ namespace API.Services public string GetSummaryInfo(string filePath) { - if (!IsValidFile(filePath)) return string.Empty; + if (!IsValidFile(filePath) || Parser.Parser.IsPdf(filePath)) return string.Empty; + try { @@ -182,18 +190,24 @@ namespace API.Services if (Parser.Parser.IsBook(filePath)) return true; - _logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB", filePath); + _logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB/PDF", filePath); return false; } public int GetNumberOfPages(string filePath) { - if (!IsValidFile(filePath) || !Parser.Parser.IsEpub(filePath)) return 0; + if (!IsValidFile(filePath)) return 0; try { - using var epubBook = EpubReader.OpenBook(filePath); - return epubBook.Content.Html.Count; + if (Parser.Parser.IsPdf(filePath)) + { + using var docReader = DocLib.Instance.GetDocReader(filePath, new PageDimensions(1080, 1920)); + return docReader.GetPageCount(); + } + + using var epubBook = EpubReader.OpenBook(filePath); + return epubBook.Content.Html.Count; } catch (Exception ex) { @@ -231,14 +245,16 @@ namespace API.Services /// /// Parses out Title from book. Chapters and Volumes will always be "0". If there is any exception reading book (malformed books) - /// then null is returned. + /// then null is returned. 
This expects only an epub file /// /// /// public ParserInfo ParseInfo(string filePath) { - try - { + if (!Parser.Parser.IsEpub(filePath)) return null; + + try + { using var epubBook = EpubReader.OpenBook(filePath); // If the epub has the following tags, we can group the books as Volumes @@ -301,9 +317,9 @@ namespace API.Services } return new ParserInfo() { - Chapters = "0", - Edition = "", - Format = MangaFormat.Book, + Chapters = Parser.Parser.DefaultChapter, + Edition = string.Empty, + Format = MangaFormat.Epub, Filename = Path.GetFileName(filePath), Title = specialName, FullFilePath = filePath, @@ -320,23 +336,63 @@ namespace API.Services return new ParserInfo() { - Chapters = "0", - Edition = "", - Format = MangaFormat.Book, + Chapters = Parser.Parser.DefaultChapter, + Edition = string.Empty, + Format = MangaFormat.Epub, Filename = Path.GetFileName(filePath), Title = epubBook.Title, FullFilePath = filePath, IsSpecial = false, Series = epubBook.Title, - Volumes = "0" + Volumes = Parser.Parser.DefaultVolume }; - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); - } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); + } - return null; + return null; + } + + private static void AddBytesToBitmap(Bitmap bmp, byte[] rawBytes) + { + var rect = new Rectangle(0, 0, bmp.Width, bmp.Height); + + var bmpData = bmp.LockBits(rect, ImageLockMode.WriteOnly, bmp.PixelFormat); + var pNative = bmpData.Scan0; + + Marshal.Copy(rawBytes, 0, pNative, rawBytes.Length); + bmp.UnlockBits(bmpData); + } + + public void ExtractPdfImages(string fileFilePath, string targetDirectory) + { + DirectoryService.ExistOrCreate(targetDirectory); + + using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); + var pages = docReader.GetPageCount(); + for (var pageNumber = 0; pageNumber < pages; pageNumber++) + { + using var pageReader = docReader.GetPageReader(pageNumber); + var rawBytes = pageReader.GetImage(); + var width = pageReader.GetPageWidth(); + var height = pageReader.GetPageHeight(); + using var doc = new Bitmap(width, height, PixelFormat.Format32bppArgb); + using var bmp = new Bitmap(width, height, PixelFormat.Format32bppArgb); + AddBytesToBitmap(bmp, rawBytes); + for (int y = 0; y < bmp.Height; y++) + { + bmp.SetPixel(bmp.Width - 1, y, bmp.GetPixel(bmp.Width - 2, y)); + } + var g = Graphics.FromImage(doc); + g.FillRegion(Brushes.White, new Region(new Rectangle(0, 0, width, height))); + g.DrawImage(bmp, new Point(0, 0)); + g.Save(); + using var stream = new MemoryStream(); + doc.Save(stream, ImageFormat.Jpeg); + File.WriteAllBytes(Path.Combine(targetDirectory, "Page-" + pageNumber + ".png"), stream.ToArray()); + } } @@ -344,6 +400,11 @@ namespace API.Services { if (!IsValidFile(fileFilePath)) return Array.Empty(); + if (Parser.Parser.IsPdf(fileFilePath)) + { + return GetPdfCoverImage(fileFilePath, createThumbnail); + } + using var epubBook = EpubReader.OpenBook(fileFilePath); @@ -374,6 +435,50 @@ namespace API.Services return Array.Empty(); } + private byte[] GetPdfCoverImage(string fileFilePath, bool createThumbnail) + { + try + { + using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); + if (docReader.GetPageCount() == 0) return Array.Empty(); + + using var pageReader = docReader.GetPageReader(0); + var rawBytes = pageReader.GetImage(); + var width = 
pageReader.GetPageWidth(); + var height = pageReader.GetPageHeight(); + using var doc = new Bitmap(width, height, PixelFormat.Format32bppArgb); + using var bmp = new Bitmap(width, height, PixelFormat.Format32bppArgb); + AddBytesToBitmap(bmp, rawBytes); + for (int y = 0; y < bmp.Height; y++) + { + bmp.SetPixel(bmp.Width - 1, y, bmp.GetPixel(bmp.Width - 2, y)); + } + var g = Graphics.FromImage(doc); + g.FillRegion(Brushes.White, new Region(new Rectangle(0, 0, width, height))); + g.DrawImage(bmp, new Point(0, 0)); + g.Save(); + using var stream = new MemoryStream(); + doc.Save(stream, ImageFormat.Jpeg); + stream.Seek(0, SeekOrigin.Begin); + + if (createThumbnail) + { + using var thumbnail = Image.ThumbnailStream(stream, MetadataService.ThumbnailWidth); + return thumbnail.WriteToBuffer(".png"); + } + + return stream.ToArray(); + } + catch (Exception ex) + { + _logger.LogWarning(ex, + "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", + fileFilePath); + } + + return Array.Empty(); + } + private static string RemoveWhiteSpaceFromStylesheets(string body) { body = Regex.Replace(body, @"[a-zA-Z]+#", "#"); diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index bc3533481..39c9e963b 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -18,16 +18,18 @@ namespace API.Services private readonly IUnitOfWork _unitOfWork; private readonly IArchiveService _archiveService; private readonly IDirectoryService _directoryService; + private readonly IBookService _bookService; private readonly NumericComparer _numericComparer; public static readonly string CacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "cache/")); public CacheService(ILogger logger, IUnitOfWork unitOfWork, IArchiveService archiveService, - IDirectoryService directoryService) + IDirectoryService directoryService, IBookService bookService) { _logger = logger; _unitOfWork = unitOfWork; _archiveService = archiveService; _directoryService = directoryService; + _bookService = bookService; _numericComparer = new NumericComparer(); } @@ -58,7 +60,8 @@ namespace API.Services if (files.Count > 0 && files[0].Format == MangaFormat.Image) { DirectoryService.ExistOrCreate(extractPath); - _directoryService.CopyDirectoryToDirectory(Path.GetDirectoryName(files[0].FilePath), extractPath); + var pattern = (files.Count == 1) ? 
(@"\" + Path.GetExtension(files[0].FilePath)) : Parser.Parser.ImageFileExtensions; + _directoryService.CopyDirectoryToDirectory(Path.GetDirectoryName(files[0].FilePath), extractPath, pattern); extractDi.Flatten(); return chapter; } @@ -73,6 +76,9 @@ namespace API.Services if (file.Format == MangaFormat.Archive) { _archiveService.ExtractArchive(file.FilePath, Path.Join(extractPath, extraPath)); + } else if (file.Format == MangaFormat.Pdf) + { + _bookService.ExtractPdfImages(file.FilePath, Path.Join(extractPath, extraPath)); } } diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index cc7f2b6ca..8345b32d1 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -113,7 +113,15 @@ namespace API.Services } } - public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName) + /// + /// Copies a Directory with all files and subdirectories to a target location + /// + /// + /// + /// Defaults to *, meaning all files + /// + /// + public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "*") { if (string.IsNullOrEmpty(sourceDirName)) return false; @@ -136,7 +144,7 @@ namespace API.Services Directory.CreateDirectory(destDirName); // Get the files in the directory and copy them to the new location. - var files = dir.GetFiles(); + var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => new FileInfo(n)); foreach (var file in files) { var tempPath = Path.Combine(destDirName, file.Name); diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index 7d9b143b5..3be35cd86 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -16,190 +16,194 @@ namespace API.Services { public class MetadataService : IMetadataService { - private readonly IUnitOfWork _unitOfWork; - private readonly ILogger _logger; - private readonly IArchiveService _archiveService; - private readonly IBookService _bookService; - private readonly IDirectoryService _directoryService; - private readonly IImageService _imageService; - private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); - public static readonly int ThumbnailWidth = 320; // 153w x 230h + private readonly IUnitOfWork _unitOfWork; + private readonly ILogger _logger; + private readonly IArchiveService _archiveService; + private readonly IBookService _bookService; + private readonly IDirectoryService _directoryService; + private readonly IImageService _imageService; + private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); + public static readonly int ThumbnailWidth = 320; // 153w x 230h - public MetadataService(IUnitOfWork unitOfWork, ILogger logger, - IArchiveService archiveService, IBookService bookService, IDirectoryService directoryService, IImageService imageService) - { - _unitOfWork = unitOfWork; - _logger = logger; - _archiveService = archiveService; - _bookService = bookService; - _directoryService = directoryService; - _imageService = imageService; - } + public MetadataService(IUnitOfWork unitOfWork, ILogger logger, + IArchiveService archiveService, IBookService bookService, IDirectoryService directoryService, IImageService imageService) + { + _unitOfWork = unitOfWork; + _logger = logger; + _archiveService = archiveService; + _bookService = bookService; + _directoryService = directoryService; + _imageService = imageService; + } - private static bool ShouldFindCoverImage(byte[] coverImage, bool forceUpdate = false) - { 
- return forceUpdate || coverImage == null || !coverImage.Any(); - } + private static bool ShouldFindCoverImage(byte[] coverImage, bool forceUpdate = false) + { + return forceUpdate || coverImage == null || !coverImage.Any(); + } - private byte[] GetCoverImage(MangaFile file, bool createThumbnail = true) - { - switch (file.Format) - { - case MangaFormat.Book: - return _bookService.GetCoverImage(file.FilePath, createThumbnail); - case MangaFormat.Image: - var coverImage = _imageService.GetCoverFile(file); - return _imageService.GetCoverImage(coverImage, createThumbnail); - default: - return _archiveService.GetCoverImage(file.FilePath, createThumbnail); - } - } + private byte[] GetCoverImage(MangaFile file, bool createThumbnail = true) + { + switch (file.Format) + { + case MangaFormat.Pdf: + case MangaFormat.Epub: + return _bookService.GetCoverImage(file.FilePath, createThumbnail); + case MangaFormat.Image: + var coverImage = _imageService.GetCoverFile(file); + return _imageService.GetCoverImage(coverImage, createThumbnail); + case MangaFormat.Archive: + return _archiveService.GetCoverImage(file.FilePath, createThumbnail); + default: + return Array.Empty(); + } + } - public void UpdateMetadata(Chapter chapter, bool forceUpdate) - { - var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); - if (ShouldFindCoverImage(chapter.CoverImage, forceUpdate) && firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) - { - chapter.Files ??= new List(); - chapter.CoverImage = GetCoverImage(firstFile); - } - } + public void UpdateMetadata(Chapter chapter, bool forceUpdate) + { + var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); + if (ShouldFindCoverImage(chapter.CoverImage, forceUpdate) && firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) + { + chapter.Files ??= new List(); + chapter.CoverImage = GetCoverImage(firstFile); + } + } - public void UpdateMetadata(Volume volume, bool forceUpdate) - { - if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate)) - { - volume.Chapters ??= new List(); - var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).FirstOrDefault(); + public void UpdateMetadata(Volume volume, bool forceUpdate) + { + if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate)) + { + volume.Chapters ??= new List(); + var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).FirstOrDefault(); - // Skip calculating Cover Image (I/O) if the chapter already has it set - if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage)) - { - var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault(); - if (firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) + // Skip calculating Cover Image (I/O) if the chapter already has it set + if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage)) { - volume.CoverImage = GetCoverImage(firstFile); + var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault(); + if (firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) + { + volume.CoverImage = GetCoverImage(firstFile); + } } - } - else - { - volume.CoverImage = firstChapter.CoverImage; - } - } - } - - public void UpdateMetadata(Series series, bool forceUpdate) - { - if (series == null) return; - if 
(ShouldFindCoverImage(series.CoverImage, forceUpdate)) - { - series.Volumes ??= new List(); - var firstCover = series.Volumes.GetCoverImage(series.Library.Type); - byte[] coverImage = null; - if (firstCover == null && series.Volumes.Any()) - { - // If firstCover is null and one volume, the whole series is Chapters under Vol 0. - if (series.Volumes.Count == 1) + else { - coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer) - .FirstOrDefault(c => !c.IsSpecial)?.CoverImage; + volume.CoverImage = firstChapter.CoverImage; + } + } + } + + public void UpdateMetadata(Series series, bool forceUpdate) + { + if (series == null) return; + if (ShouldFindCoverImage(series.CoverImage, forceUpdate)) + { + series.Volumes ??= new List(); + var firstCover = series.Volumes.GetCoverImage(series.Library.Type); + byte[] coverImage = null; + if (firstCover == null && series.Volumes.Any()) + { + // If firstCover is null and one volume, the whole series is Chapters under Vol 0. + if (series.Volumes.Count == 1) + { + coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer) + .FirstOrDefault(c => !c.IsSpecial)?.CoverImage; + } + + if (coverImage == null) + { + coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer) + .FirstOrDefault()?.CoverImage; + } + } + series.CoverImage = firstCover?.CoverImage ?? coverImage; + } + + UpdateSeriesSummary(series, forceUpdate); + } + + private void UpdateSeriesSummary(Series series, bool forceUpdate) + { + if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return; + + var isBook = series.Library.Type == LibraryType.Book; + var firstVolume = series.Volumes.FirstWithChapters(isBook); + var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles(); + + var firstFile = firstChapter?.Files.FirstOrDefault(); + if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return; + if (Parser.Parser.IsPdf(firstFile.FilePath)) return; + + var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath); + if (string.IsNullOrEmpty(series.Summary)) + { + series.Summary = summary; + } + + firstFile.LastModified = DateTime.Now; + } + + + public void RefreshMetadata(int libraryId, bool forceUpdate = false) + { + var sw = Stopwatch.StartNew(); + var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); + + // TODO: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used + _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name); + foreach (var series in library.Series) + { + foreach (var volume in series.Volumes) + { + foreach (var chapter in volume.Chapters) + { + UpdateMetadata(chapter, forceUpdate); + } + + UpdateMetadata(volume, forceUpdate); } - if (coverImage == null) - { - coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer) - .FirstOrDefault()?.CoverImage; - } - } - series.CoverImage = firstCover?.CoverImage ?? 
coverImage; - } - - UpdateSeriesSummary(series, forceUpdate); - } - - private void UpdateSeriesSummary(Series series, bool forceUpdate) - { - if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return; - - var isBook = series.Library.Type == LibraryType.Book; - var firstVolume = series.Volumes.FirstWithChapters(isBook); - var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles(); - - // NOTE: This suffers from code changes not taking effect due to stale data - var firstFile = firstChapter?.Files.FirstOrDefault(); - if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return; - var summary = isBook ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath); - if (string.IsNullOrEmpty(series.Summary)) - { - series.Summary = summary; - } - - firstFile.LastModified = DateTime.Now; - } + UpdateMetadata(series, forceUpdate); + _unitOfWork.SeriesRepository.Update(series); + } - public void RefreshMetadata(int libraryId, bool forceUpdate = false) - { - var sw = Stopwatch.StartNew(); - var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); + if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) + { + _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds); + } + } - // TODO: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used - _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name); - foreach (var series in library.Series) - { - foreach (var volume in series.Volumes) - { + + public void RefreshMetadataForSeries(int libraryId, int seriesId) + { + var sw = Stopwatch.StartNew(); + var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); + + var series = library.Series.SingleOrDefault(s => s.Id == seriesId); + if (series == null) + { + _logger.LogError("Series {SeriesId} was not found on Library {LibraryName}", seriesId, libraryId); + return; + } + _logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name); + foreach (var volume in series.Volumes) + { foreach (var chapter in volume.Chapters) { - UpdateMetadata(chapter, forceUpdate); + UpdateMetadata(chapter, true); } - UpdateMetadata(volume, forceUpdate); - } + UpdateMetadata(volume, true); + } - UpdateMetadata(series, forceUpdate); - _unitOfWork.SeriesRepository.Update(series); - } + UpdateMetadata(series, true); + _unitOfWork.SeriesRepository.Update(series); - if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) - { - _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds); - } - } - - - public void RefreshMetadataForSeries(int libraryId, int seriesId) - { - var sw = Stopwatch.StartNew(); - var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); - - var series = library.Series.SingleOrDefault(s => s.Id == seriesId); - if (series == null) - { - _logger.LogError("Series {SeriesId} was not found on Library {LibraryName}", seriesId, libraryId); - return; - } - _logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name); - foreach (var volume in series.Volumes) - { - foreach (var chapter in volume.Chapters) - { 
- UpdateMetadata(chapter, true); - } - - UpdateMetadata(volume, true); - } - - UpdateMetadata(series, true); - _unitOfWork.SeriesRepository.Update(series); - - - if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) - { - _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); - } - } + if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) + { + _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); + } + } } } diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs new file mode 100644 index 000000000..c646d1c60 --- /dev/null +++ b/API/Services/Tasks/Scanner/ParseScannedFiles.cs @@ -0,0 +1,198 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using API.Entities.Enums; +using API.Interfaces.Services; +using API.Parser; +using Microsoft.Extensions.Logging; + +namespace API.Services.Tasks.Scanner +{ + public class ParsedSeries + { + public string Name { get; init; } + public string NormalizedName { get; init; } + public MangaFormat Format { get; init; } + } + + + public class ParseScannedFiles + { + private readonly ConcurrentDictionary> _scannedSeries; + private readonly IBookService _bookService; + private readonly ILogger _logger; + + /// + /// An instance of a pipeline for processing files and returning a Map of Series -> ParserInfos. + /// Each instance is separate from other threads, allowing for no cross over. + /// + /// + /// + public ParseScannedFiles(IBookService bookService, ILogger logger) + { + _bookService = bookService; + _logger = logger; + _scannedSeries = new ConcurrentDictionary>(); + } + + /// + /// Processes files found during a library scan. + /// Populates a collection of for DB updates later. + /// + /// Path of a file + /// + /// Library type to determine parsing to perform + private void ProcessFile(string path, string rootPath, LibraryType type) + { + ParserInfo info; + + if (Parser.Parser.IsEpub(path)) + { + info = _bookService.ParseInfo(path); + } + else + { + info = Parser.Parser.Parse(path, rootPath, type); + } + + if (info == null) + { + _logger.LogWarning("[Scanner] Could not parse series from {Path}", path); + return; + } + + if (Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume) + { + info = Parser.Parser.Parse(path, rootPath, type); + var info2 = _bookService.ParseInfo(path); + info.Merge(info2); + } + + TrackSeries(info); + } + + /// + /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing. 
+ /// This will check if the name matches an existing series name (multiple fields) + /// + /// + private void TrackSeries(ParserInfo info) + { + if (info.Series == string.Empty) return; + + // Check if normalized info.Series already exists and if so, update info to use that name instead + info.Series = MergeName(info); + + var existingKey = _scannedSeries.Keys.FirstOrDefault(ps => + ps.Format == info.Format && ps.NormalizedName == Parser.Parser.Normalize(info.Series)); + existingKey ??= new ParsedSeries() + { + Format = info.Format, + Name = info.Series, + NormalizedName = Parser.Parser.Normalize(info.Series) + }; + + + + _scannedSeries.AddOrUpdate(existingKey, new List() {info}, (_, oldValue) => + { + oldValue ??= new List(); + if (!oldValue.Contains(info)) + { + oldValue.Add(info); + } + + return oldValue; + }); + } + + /// + /// Using a normalized name from the passed ParserInfo, this checks against all found series so far and if an existing one exists with + /// same normalized name, it merges into the existing one. This is important as some manga may have a slight difference with punctuation or capitalization. + /// + /// + /// + public string MergeName(ParserInfo info) + { + var normalizedSeries = Parser.Parser.Normalize(info.Series); + _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries); + var existingName = + _scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries && p.Key.Format == info.Format) + .Key; + if (existingName != null && !string.IsNullOrEmpty(existingName.Name)) + { + _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName.Name); + return existingName.Name; + } + + return info.Series; + } + + /// + /// + /// + /// Type of library. Used for selecting the correct file extensions to search for and parsing files + /// The folders to scan. By default, this should be library.Folders, however it can be overwritten to restrict folders + /// Total files scanned + /// Time it took to scan and parse files + /// + public Dictionary> ScanLibrariesForSeries(LibraryType libraryType, IEnumerable folders, out int totalFiles, + out long scanElapsedTime) + { + var sw = Stopwatch.StartNew(); + totalFiles = 0; + var searchPattern = GetLibrarySearchPattern(libraryType); + foreach (var folderPath in folders) + { + try + { + totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath, (f) => + { + try + { + ProcessFile(f, folderPath, libraryType); + } + catch (FileNotFoundException exception) + { + _logger.LogError(exception, "The file {Filename} could not be found", f); + } + }, searchPattern, _logger); + } + catch (ArgumentException ex) + { + _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath); + } + } + + scanElapsedTime = sw.ElapsedMilliseconds; + _logger.LogInformation("Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles, + scanElapsedTime); + + return SeriesWithInfos(); + } + + /// + /// Given the Library Type, returns the regex pattern that restricts which files types will be found during a file scan. 
+ /// + /// + /// + private static string GetLibrarySearchPattern(LibraryType libraryType) + { + return Parser.Parser.SupportedExtensions; + } + + /// + /// Returns any series where there were parsed infos + /// + /// + private Dictionary> SeriesWithInfos() + { + var filtered = _scannedSeries.Where(kvp => kvp.Value.Count > 0); + var series = filtered.ToDictionary(v => v.Key, v => v.Value); + return series; + } + } +} diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 7febcd9d3..763d3e712 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -14,6 +14,7 @@ using API.Extensions; using API.Interfaces; using API.Interfaces.Services; using API.Parser; +using API.Services.Tasks.Scanner; using Hangfire; using Microsoft.Extensions.Logging; @@ -26,7 +27,6 @@ namespace API.Services.Tasks private readonly IArchiveService _archiveService; private readonly IMetadataService _metadataService; private readonly IBookService _bookService; - private ConcurrentDictionary> _scannedSeries; private readonly NaturalSortComparer _naturalSort; public ScannerService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService, @@ -50,9 +50,8 @@ namespace API.Services.Tasks var dirs = FindHighestDirectoriesFromFiles(library, files); _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name); - // TODO: We can't have a global variable if multiple scans are taking place. Refactor this. - _scannedSeries = new ConcurrentDictionary>(); - var parsedSeries = ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime); + var scanner = new ParseScannedFiles(_bookService, _logger); + var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime); // If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue // Hence we clear out anything but what we selected for @@ -137,7 +136,6 @@ namespace API.Services.Tasks [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public void ScanLibrary(int libraryId, bool forceUpdate) { - _scannedSeries = new ConcurrentDictionary>(); Library library; try { @@ -152,7 +150,9 @@ namespace API.Services.Tasks } _logger.LogInformation("Beginning file scan on {LibraryName}", library.Name); - var series = ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime); + var scanner = new ParseScannedFiles(_bookService, _logger); + var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime); + foreach (var folderPath in library.Folders) { folderPath.LastScanned = DateTime.Now; @@ -188,75 +188,7 @@ namespace API.Services.Tasks _logger.LogInformation("Removed {Count} abandoned progress rows", cleanedUp); } - /// - /// - /// - /// Type of library. Used for selecting the correct file extensions to search for and parsing files - /// The folders to scan. 
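
Read together with the ScannerService changes below, the intended call pattern for the new class looks roughly like the following. Only ParseScannedFiles, ScanLibrariesForSeries and the ParsedSeries key come from the patch; the surrounding class, method and namespaces are assumptions based on how the patch itself wires things up:

```csharp
using System.Linq;
using API.Entities;
using API.Interfaces.Services;
using API.Services.Tasks.Scanner;
using Microsoft.Extensions.Logging;

// Hypothetical caller, mirroring how ScannerService consumes the new pipeline.
public class ScanExample
{
    private readonly IBookService _bookService;
    private readonly ILogger<ScanExample> _logger;

    public ScanExample(IBookService bookService, ILogger<ScanExample> logger)
    {
        _bookService = bookService;
        _logger = logger;
    }

    public void ScanSingleLibrary(Library library)
    {
        // One ParseScannedFiles per scan task: the parsed-series map lives on the
        // instance, so two scans running at the same time no longer share state.
        var scanner = new ParseScannedFiles(_bookService, _logger);
        var parsedSeries = scanner.ScanLibrariesForSeries(
            library.Type,
            library.Folders.Select(f => f.Path),
            out var totalFiles,
            out var scanElapsedTime);

        _logger.LogInformation("Scanned {Total} files in {Time} ms", totalFiles, scanElapsedTime);

        // Keys are ParsedSeries (name, normalized name, MangaFormat), so the same
        // title in archive form and in PDF form stays two separate series.
        foreach (var (key, infos) in parsedSeries)
        {
            _logger.LogInformation("{Series} ({Format}): {Count} files", key.Name, key.Format, infos.Count);
        }
    }
}
```
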
By default, this should be library.Folders, however it can be overwritten to restrict folders - /// Total files scanned - /// Time it took to scan and parse files - /// - private Dictionary> ScanLibrariesForSeries(LibraryType libraryType, IEnumerable folders, out int totalFiles, - out long scanElapsedTime) - { - var sw = Stopwatch.StartNew(); - totalFiles = 0; - var searchPattern = GetLibrarySearchPattern(libraryType); - foreach (var folderPath in folders) - { - try - { - totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath, (f) => - { - try - { - ProcessFile(f, folderPath, libraryType); - } - catch (FileNotFoundException exception) - { - _logger.LogError(exception, "The file {Filename} could not be found", f); - } - }, searchPattern, _logger); - } - catch (ArgumentException ex) - { - _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath); - } - } - - scanElapsedTime = sw.ElapsedMilliseconds; - _logger.LogInformation("Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles, - scanElapsedTime); - - return SeriesWithInfos(_scannedSeries); - } - - private static string GetLibrarySearchPattern(LibraryType libraryType) - { - var searchPattern = libraryType switch - { - LibraryType.Book => Parser.Parser.BookFileExtensions, - LibraryType.MangaImages or LibraryType.ComicImages => Parser.Parser.ImageFileExtensions, - _ => Parser.Parser.ArchiveFileExtensions - }; - - return searchPattern; - } - - /// - /// Returns any series where there were parsed infos - /// - /// - /// - private static Dictionary> SeriesWithInfos(IDictionary> scannedSeries) - { - var filtered = scannedSeries.Where(kvp => kvp.Value.Count > 0); - var series = filtered.ToDictionary(v => v.Key, v => v.Value); - return series; - } - - - private void UpdateLibrary(Library library, Dictionary> parsedSeries) + private void UpdateLibrary(Library library, Dictionary> parsedSeries) { if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries)); @@ -276,16 +208,18 @@ namespace API.Services.Tasks // Add new series that have parsedInfos foreach (var (key, infos) in parsedSeries) { - // Key is normalized already + // Key is normalized already Series existingSeries; try { - existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key); + existingSeries = library.Series.SingleOrDefault(s => + (s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName) + && (s.Format == key.Format || s.Format == MangaFormat.Unknown)); } catch (Exception e) { _logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. 
You can manually delete the entity via UI and rescan to fix it", key); - var duplicateSeries = library.Series.Where(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key).ToList(); + var duplicateSeries = library.Series.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList(); foreach (var series in duplicateSeries) { _logger.LogCritical("{Key} maps with {Series}", key, series.OriginalName); @@ -296,12 +230,14 @@ namespace API.Services.Tasks if (existingSeries == null) { existingSeries = DbFactory.Series(infos[0].Series); + existingSeries.Format = key.Format; library.Series.Add(existingSeries); } existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name); existingSeries.OriginalName ??= infos[0].Series; existingSeries.Metadata ??= DbFactory.SeriesMetadata(new List()); + existingSeries.Format = key.Format; } // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series @@ -311,7 +247,7 @@ namespace API.Services.Tasks try { _logger.LogInformation("Processing series {SeriesName}", series.OriginalName); - UpdateVolumes(series, parsedSeries[Parser.Parser.Normalize(series.OriginalName)].ToArray()); + UpdateVolumes(series, GetInfosByName(parsedSeries, series).ToArray()); series.Pages = series.Volumes.Sum(v => v.Pages); } catch (Exception ex) @@ -321,9 +257,24 @@ namespace API.Services.Tasks }); } - public IEnumerable FindSeriesNotOnDisk(ICollection existingSeries, Dictionary> parsedSeries) + private static IList GetInfosByName(Dictionary> parsedSeries, Series series) { - var foundSeries = parsedSeries.Select(s => s.Key).ToList(); + // TODO: Move this into a common place + var existingKey = parsedSeries.Keys.FirstOrDefault(ps => + ps.Format == series.Format && ps.NormalizedName == Parser.Parser.Normalize(series.OriginalName)); + existingKey ??= new ParsedSeries() + { + Format = series.Format, + Name = series.OriginalName, + NormalizedName = Parser.Parser.Normalize(series.OriginalName) + }; + + return parsedSeries[existingKey]; + } + + public IEnumerable FindSeriesNotOnDisk(ICollection existingSeries, Dictionary> parsedSeries) + { + var foundSeries = parsedSeries.Select(s => s.Key.Name).ToList(); return existingSeries.Where(es => !es.NameInList(foundSeries)); } @@ -364,8 +315,6 @@ namespace API.Services.Tasks series.Volumes.Add(volume); } - // NOTE: Instead of creating and adding? Why Not Merge a new volume into an existing, so no matter what, new properties,etc get propagated? - _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray(); UpdateChapters(volume, infos); @@ -473,88 +422,6 @@ namespace API.Services.Tasks } } - /// - /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing. 
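
The effect of keying scanned series by normalized name plus format can be shown in isolation. Everything in this snippet is an illustrative stand-in (including the simplified Normalize), not code from the patch:

```csharp
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

// Stand-alone illustration of why series are tracked by normalized name *and* format:
// the same title in two formats stays two series, while punctuation/casing variants merge.
public static class SeriesKeyDemo
{
    // Rough stand-in for Parser.Normalize: drop punctuation/whitespace, lower-case.
    private static string Normalize(string name) =>
        Regex.Replace(name, "[^a-zA-Z0-9]", string.Empty).ToLowerInvariant();

    public static void Main()
    {
        var seen = new Dictionary<(string NormalizedName, string Format), List<string>>();

        void Track(string series, string format, string file)
        {
            var key = (Normalize(series), format);
            if (!seen.TryGetValue(key, out var files))
            {
                files = new List<string>();
                seen[key] = files;
            }
            files.Add(file);
        }

        Track("Foo Bar", "Archive", "Foo Bar v01.cbz");
        Track("Foo-Bar", "Archive", "Foo-Bar v02.cbz"); // merges: same normalized name + format
        Track("Foo Bar", "Pdf",     "Foo Bar.pdf");     // separate: different format

        foreach (var ((name, format), files) in seen)
        {
            Console.WriteLine($"{name} [{format}]: {files.Count} file(s)");
        }
        // foobar [Archive]: 2 file(s)
        // foobar [Pdf]: 1 file(s)
    }
}
```
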
- /// This will check if the name matches an existing series name (multiple fields) - /// - /// - private void TrackSeries(ParserInfo info) - { - if (info.Series == string.Empty) return; - - // Check if normalized info.Series already exists and if so, update info to use that name instead - info.Series = MergeName(_scannedSeries, info); - - _scannedSeries.AddOrUpdate(Parser.Parser.Normalize(info.Series), new List() {info}, (_, oldValue) => - { - oldValue ??= new List(); - if (!oldValue.Contains(info)) - { - oldValue.Add(info); - } - - return oldValue; - }); - } - - /// - /// Using a normalized name from the passed ParserInfo, this checks against all found series so far and if an existing one exists with - /// same normalized name, it merges into the existing one. This is important as some manga may have a slight difference with punctuation or capitalization. - /// - /// - /// - /// - public string MergeName(ConcurrentDictionary> collectedSeries, ParserInfo info) - { - var normalizedSeries = Parser.Parser.Normalize(info.Series); - _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries); - var existingName = collectedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries) - .Key; - if (!string.IsNullOrEmpty(existingName)) - { - _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName); - return existingName; - } - - return info.Series; - } - - /// - /// Processes files found during a library scan. - /// Populates a collection of for DB updates later. - /// - /// Path of a file - /// - /// Library type to determine parsing to perform - private void ProcessFile(string path, string rootPath, LibraryType type) - { - ParserInfo info; - - if (type == LibraryType.Book && Parser.Parser.IsEpub(path)) - { - info = _bookService.ParseInfo(path); - } - else - { - info = Parser.Parser.Parse(path, rootPath, type); - } - - if (info == null) - { - _logger.LogWarning("[Scanner] Could not parse series from {Path}", path); - return; - } - - if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume) - { - info = Parser.Parser.Parse(path, rootPath, type); - var info2 = _bookService.ParseInfo(path); - info.Merge(info2); - } - - TrackSeries(info); - } - private MangaFile CreateMangaFile(ParserInfo info) { switch (info.Format) @@ -568,7 +435,8 @@ namespace API.Services.Tasks Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) }; } - case MangaFormat.Book: + case MangaFormat.Pdf: + case MangaFormat.Epub: { return new MangaFile() { @@ -601,9 +469,9 @@ namespace API.Services.Tasks if (existingFile != null) { existingFile.Format = info.Format; - if (!existingFile.HasFileBeenModified() && existingFile.Pages > 0) + if (existingFile.HasFileBeenModified() || existingFile.Pages == 0) { - existingFile.Pages = existingFile.Format == MangaFormat.Book + existingFile.Pages = (existingFile.Format == MangaFormat.Epub || existingFile.Format == MangaFormat.Pdf) ? 
_bookService.GetNumberOfPages(info.FullFilePath) : _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); } diff --git a/API/Startup.cs b/API/Startup.cs index 61a104c1c..f5cbfc734 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -7,7 +7,6 @@ using API.Services; using API.Services.HostedServices; using Hangfire; using Hangfire.MemoryStorage; -using Kavita.Common; using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; diff --git a/UI/Web/src/app/_modals/edit-series-modal/edit-series-modal.component.html b/UI/Web/src/app/_modals/edit-series-modal/edit-series-modal.component.html index fe11d2451..a9c2e559c 100644 --- a/UI/Web/src/app/_modals/edit-series-modal/edit-series-modal.component.html +++ b/UI/Web/src/app/_modals/edit-series-modal/edit-series-modal.component.html @@ -99,6 +99,7 @@
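
The page-count condition changed in the hunk above is easier to read in isolation. This is a hedged stand-in: MangaFileInfo and the two delegates are hypothetical substitutes for MangaFile, BookService.GetNumberOfPages and ArchiveService.GetNumberOfPagesFromArchive:

```csharp
using System;

// Illustrative sketch of the page-count refresh rule from the ScannerService hunk above.
public class MangaFileInfo
{
    public bool IsEpubOrPdf { get; set; }
    public int Pages { get; set; }
    public DateTime LastModified { get; set; }
    public DateTime LastWriteOnDisk { get; set; }

    public bool HasFileBeenModified() => LastWriteOnDisk > LastModified;
}

public static class PageCountRefresh
{
    public static void Refresh(MangaFileInfo file, Func<int> countBookPages, Func<int> countArchivePages)
    {
        // The old check was inverted (it re-counted files that had *not* changed);
        // the patched rule only pays the I/O cost when the file changed on disk
        // or no page count has been stored yet.
        if (!file.HasFileBeenModified() && file.Pages > 0) return;

        file.Pages = file.IsEpubOrPdf
            ? countBookPages()       // EPUB/PDF: BookService counts pages
            : countArchivePages();   // CBZ/CBR/...: ArchiveService counts entries
    }
}
```
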

Information

Library: {{libraryName | titlecase}}
+
Format: {{utilityService.mangaFormat(series.format)}}

Volumes

diff --git a/UI/Web/src/app/_models/library.ts b/UI/Web/src/app/_models/library.ts index f4998747a..5b9f8ec28 100644 --- a/UI/Web/src/app/_models/library.ts +++ b/UI/Web/src/app/_models/library.ts @@ -2,8 +2,6 @@ export enum LibraryType { Manga = 0, Comic = 1, Book = 2, - MangaImages = 3, - ComicImages = 4 } export interface Library { diff --git a/UI/Web/src/app/_models/manga-format.ts b/UI/Web/src/app/_models/manga-format.ts index 21fd7d450..7f992314a 100644 --- a/UI/Web/src/app/_models/manga-format.ts +++ b/UI/Web/src/app/_models/manga-format.ts @@ -2,5 +2,6 @@ export enum MangaFormat { IMAGE = 0, ARCHIVE = 1, UNKNOWN = 2, - BOOK = 3 + EPUB = 3, + PDF = 4 } \ No newline at end of file diff --git a/UI/Web/src/app/_models/series.ts b/UI/Web/src/app/_models/series.ts index 73b843bfe..d48839ea6 100644 --- a/UI/Web/src/app/_models/series.ts +++ b/UI/Web/src/app/_models/series.ts @@ -1,3 +1,4 @@ +import { MangaFormat } from './manga-format'; import { Volume } from './volume'; export interface Series { @@ -15,4 +16,5 @@ export interface Series { userReview: string; // User review libraryId: number; created: string; // DateTime when entity was created + format: MangaFormat; } diff --git a/UI/Web/src/app/admin/manage-library/manage-library.component.ts b/UI/Web/src/app/admin/manage-library/manage-library.component.ts index 3825d25c8..f3353ed7f 100644 --- a/UI/Web/src/app/admin/manage-library/manage-library.component.ts +++ b/UI/Web/src/app/admin/manage-library/manage-library.component.ts @@ -88,10 +88,6 @@ export class ManageLibraryComponent implements OnInit, OnDestroy { return 'Comic'; case LibraryType.Manga: return 'Manga'; - case LibraryType.MangaImages: - return 'Images (Manga)'; - case LibraryType.ComicImages: - return 'Images (Comic)'; } } diff --git a/UI/Web/src/app/manga-reader/manga-reader.component.html b/UI/Web/src/app/manga-reader/manga-reader.component.html index 9b44da93b..f37d8a94c 100644 --- a/UI/Web/src/app/manga-reader/manga-reader.component.html +++ b/UI/Web/src/app/manga-reader/manga-reader.component.html @@ -1,6 +1,6 @@
-
+
-

{{series?.name}}

+
+ +

+ {{series?.name}} +

+
+
+
+
Type
+
+
+  {{utilityService.mangaFormat(series.format)}} +
+
diff --git a/UI/Web/src/app/series-detail/series-detail.component.ts b/UI/Web/src/app/series-detail/series-detail.component.ts index af72632b4..fec46f46f 100644 --- a/UI/Web/src/app/series-detail/series-detail.component.ts +++ b/UI/Web/src/app/series-detail/series-detail.component.ts @@ -14,6 +14,7 @@ import { EditSeriesModalComponent } from '../_modals/edit-series-modal/edit-seri import { ReviewSeriesModalComponent } from '../_modals/review-series-modal/review-series-modal.component'; import { Chapter } from '../_models/chapter'; import { LibraryType } from '../_models/library'; +import { MangaFormat } from '../_models/manga-format'; import { Series } from '../_models/series'; import { SeriesMetadata } from '../_models/series-metadata'; import { Volume } from '../_models/volume'; @@ -70,10 +71,14 @@ export class SeriesDetailComponent implements OnInit { return LibraryType; } + get MangaFormat(): typeof MangaFormat { + return MangaFormat; + } + constructor(private route: ActivatedRoute, private seriesService: SeriesService, - ratingConfig: NgbRatingConfig, private router: Router, + private ratingConfig: NgbRatingConfig, private router: Router, private modalService: NgbModal, public readerService: ReaderService, - private utilityService: UtilityService, private toastr: ToastrService, + public utilityService: UtilityService, private toastr: ToastrService, private accountService: AccountService, public imageService: ImageService, private actionFactoryService: ActionFactoryService, private libraryService: LibraryService, private confirmService: ConfirmService, private naturalSort: NaturalSortService, @@ -330,7 +335,8 @@ export class SeriesDetailComponent implements OnInit { this.toastr.error('There are no pages. Kavita was not able to read this archive.'); return; } - if (this.libraryType === LibraryType.Book) { + + if (chapter.files.length > 0 && chapter.files[0].format === MangaFormat.EPUB) { this.router.navigate(['library', this.libraryId, 'series', this.series?.id, 'book', chapter.id]); } else { this.router.navigate(['library', this.libraryId, 'series', this.series?.id, 'manga', chapter.id]); diff --git a/UI/Web/src/app/shared/_services/utility.service.ts b/UI/Web/src/app/shared/_services/utility.service.ts index bf6eefe9c..8d67bb7b6 100644 --- a/UI/Web/src/app/shared/_services/utility.service.ts +++ b/UI/Web/src/app/shared/_services/utility.service.ts @@ -55,4 +55,34 @@ export class UtilityService { return cleaned; } + mangaFormat(format: MangaFormat): string { + switch (format) { + case MangaFormat.EPUB: + return 'EPUB'; + case MangaFormat.ARCHIVE: + return 'Archive'; + case MangaFormat.IMAGE: + return 'Image'; + case MangaFormat.PDF: + return 'PDF'; + case MangaFormat.UNKNOWN: + return 'Unknown'; + } + } + + mangaFormatIcon(format: MangaFormat): string { + switch (format) { + case MangaFormat.EPUB: + return 'fa-book'; + case MangaFormat.ARCHIVE: + return 'fa-file-archive'; + case MangaFormat.IMAGE: + return 'fa-image'; + case MangaFormat.PDF: + return 'fa-file-pdf'; + case MangaFormat.UNKNOWN: + return 'fa-question'; + } + } + } diff --git a/UI/Web/src/app/shared/card-item/card-item.component.html b/UI/Web/src/app/shared/card-item/card-item.component.html index 0fa307f33..ffb22adb0 100644 --- a/UI/Web/src/app/shared/card-item/card-item.component.html +++ b/UI/Web/src/app/shared/card-item/card-item.component.html @@ -17,7 +17,8 @@ - {{title}} + {{utilityService.mangaFormat(format)}} +  {{title}} (promoted) diff --git a/UI/Web/src/app/shared/card-item/card-item.component.ts 
b/UI/Web/src/app/shared/card-item/card-item.component.ts index 212fa5078..dc631681b 100644 --- a/UI/Web/src/app/shared/card-item/card-item.component.ts +++ b/UI/Web/src/app/shared/card-item/card-item.component.ts @@ -3,11 +3,13 @@ import { Subject } from 'rxjs'; import { takeUntil } from 'rxjs/operators'; import { Chapter } from 'src/app/_models/chapter'; import { CollectionTag } from 'src/app/_models/collection-tag'; +import { MangaFormat } from 'src/app/_models/manga-format'; import { Series } from 'src/app/_models/series'; import { Volume } from 'src/app/_models/volume'; import { ActionItem } from 'src/app/_services/action-factory.service'; import { ImageService } from 'src/app/_services/image.service'; import { LibraryService } from 'src/app/_services/library.service'; +import { UtilityService } from '../_services/utility.service'; @Component({ selector: 'app-card-item', @@ -28,10 +30,15 @@ export class CardItemComponent implements OnInit, OnDestroy { libraryName: string | undefined = undefined; // Library name item belongs to libraryId: number | undefined = undefined; supressArchiveWarning: boolean = false; // This will supress the cannot read archive warning when total pages is 0 + format: MangaFormat = MangaFormat.UNKNOWN; + + get MangaFormat(): typeof MangaFormat { + return MangaFormat; + } private readonly onDestroy = new Subject(); - constructor(public imageSerivce: ImageService, private libraryService: LibraryService) { + constructor(public imageSerivce: ImageService, private libraryService: LibraryService, public utilityService: UtilityService) { } ngOnInit(): void { @@ -47,6 +54,7 @@ export class CardItemComponent implements OnInit, OnDestroy { } }); } + this.format = (this.entity as Series).format; } ngOnDestroy() { diff --git a/UI/Web/src/app/shared/tag-badge/tag-badge.component.scss b/UI/Web/src/app/shared/tag-badge/tag-badge.component.scss index 6cb5f83b6..5a05bed5c 100644 --- a/UI/Web/src/app/shared/tag-badge/tag-badge.component.scss +++ b/UI/Web/src/app/shared/tag-badge/tag-badge.component.scss @@ -7,7 +7,7 @@ $bdr-color: #f2f2f2; background-color: $bg-color; transition: all .3s ease-out; margin: 3px 5px 3px 0px; - padding: 2px 15px; + padding: 2px 10px; border-radius: 6px; font-size: 14px; border: 1px solid $bdr-color; @@ -15,10 +15,8 @@ $bdr-color: #f2f2f2; cursor: default; i { - color: $primary-color; font-size: 14px; font-weight: bold; - margin-left: 10px; margin-right: 0px; cursor: pointer; } diff --git a/UI/Web/src/app/user-preferences/user-preferences.component.ts b/UI/Web/src/app/user-preferences/user-preferences.component.ts index 2f66704c2..e9c694f89 100644 --- a/UI/Web/src/app/user-preferences/user-preferences.component.ts +++ b/UI/Web/src/app/user-preferences/user-preferences.component.ts @@ -71,9 +71,9 @@ export class UserPreferencesComponent implements OnInit, OnDestroy { this.settingsForm.addControl('bookReaderLineSpacing', new FormControl(user.preferences.bookReaderLineSpacing, [])); this.settingsForm.addControl('bookReaderMargin', new FormControl(user.preferences.bookReaderMargin, [])); this.settingsForm.addControl('bookReaderReadingDirection', new FormControl(user.preferences.bookReaderReadingDirection, [])); - this.settingsForm.addControl('bookReaderTapToPaginate', new FormControl(user.preferences.siteDarkMode || false, [])); + this.settingsForm.addControl('bookReaderTapToPaginate', new FormControl(!!user.preferences.siteDarkMode, [])); - this.settingsForm.addControl('siteDarkMode', new FormControl(user.preferences.siteDarkMode || true, [])); + 
this.settingsForm.addControl('siteDarkMode', new FormControl(!!user.preferences.siteDarkMode, [])); } });
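
For reference, the PDF rendering path added to BookService earlier in this patch boils down to the following condensed sketch. The Docnet calls (DocLib.Instance.GetDocReader, PageDimensions, GetPageReader, GetImage, GetPageWidth/Height) are the ones the patch uses; the output naming and the simplified white-background compositing are illustrative, not the service's exact code:

```csharp
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using Docnet.Core;
using Docnet.Core.Models;

// Docnet returns a raw BGRA pixel buffer with no background, so the page is
// composited onto a white bitmap before being written out; otherwise pages
// without their own background render black/transparent.
public static class PdfPageRenderSketch
{
    public static void RenderFirstPage(string pdfPath, string outputPng)
    {
        using var docReader = DocLib.Instance.GetDocReader(pdfPath, new PageDimensions(1080, 1920));
        if (docReader.GetPageCount() == 0) return;

        using var pageReader = docReader.GetPageReader(0);
        var rawBytes = pageReader.GetImage();   // BGRA32 pixels for the rendered page
        var width = pageReader.GetPageWidth();
        var height = pageReader.GetPageHeight();

        // Copy the raw pixels into a 32bpp ARGB bitmap.
        using var page = new Bitmap(width, height, PixelFormat.Format32bppArgb);
        var rect = new Rectangle(0, 0, width, height);
        var data = page.LockBits(rect, ImageLockMode.WriteOnly, page.PixelFormat);
        Marshal.Copy(rawBytes, 0, data.Scan0, rawBytes.Length);
        page.UnlockBits(data);

        // Flatten onto white, then save. The service itself buffers the result
        // to a MemoryStream before writing it into the cache directory.
        using var composed = new Bitmap(width, height, PixelFormat.Format32bppArgb);
        using (var g = Graphics.FromImage(composed))
        {
            g.Clear(Color.White);
            g.DrawImage(page, new Point(0, 0));
        }
        composed.Save(outputPng, ImageFormat.Png);
    }
}
```
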