diff --git a/API.Tests/ParserTest.cs b/API.Tests/ParserTest.cs
index 0190e6dc7..2aa0f30ff 100644
--- a/API.Tests/ParserTest.cs
+++ b/API.Tests/ParserTest.cs
@@ -50,6 +50,8 @@ namespace API.Tests
         [InlineData("VanDread-v01-c001[MD].zip", "1")]
         [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
         [InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
+        [InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
+        [InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
         public void ParseVolumeTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseVolume(filename));
@@ -178,9 +180,9 @@ namespace API.Tests
         [Theory]
         [InlineData("test.cbz", true)]
-        [InlineData("test.cbr", false)]
+        [InlineData("test.cbr", true)]
         [InlineData("test.zip", true)]
-        [InlineData("test.rar", false)]
+        [InlineData("test.rar", true)]
         [InlineData("test.rar.!qb", false)]
         [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)]
         public void IsArchiveTest(string input, bool expected)
diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs
index 8f30a675b..e448ddf1c 100644
--- a/API.Tests/Services/ArchiveServiceTests.cs
+++ b/API.Tests/Services/ArchiveServiceTests.cs
@@ -1,20 +1,25 @@
-using System.IO;
+using System.Diagnostics;
+using System.IO;
 using System.IO.Compression;
+using API.Archive;
 using API.Interfaces.Services;
 using API.Services;
 using Microsoft.Extensions.Logging;
 using NSubstitute;
 using Xunit;
+using Xunit.Abstractions;
 
 namespace API.Tests.Services
 {
     public class ArchiveServiceTests
     {
+        private readonly ITestOutputHelper _testOutputHelper;
         private readonly IArchiveService _archiveService;
         private readonly ILogger<ArchiveService> _logger = Substitute.For<ILogger<ArchiveService>>();
 
-        public ArchiveServiceTests()
+        public ArchiveServiceTests(ITestOutputHelper testOutputHelper)
         {
+            _testOutputHelper = testOutputHelper;
             _archiveService = new ArchiveService(_logger);
         }
 
@@ -33,8 +38,8 @@ namespace API.Tests.Services
 
         [Theory]
         [InlineData("non existent file.zip", false)]
-        [InlineData("wrong extension.rar", false)]
-        [InlineData("empty.zip", false)]
+        [InlineData("winrar.rar", true)]
+        [InlineData("empty.zip", true)]
         [InlineData("flat file.zip", true)]
         [InlineData("file in folder in folder.zip", true)]
         [InlineData("file in folder.zip", true)]
@@ -47,7 +52,7 @@ namespace API.Tests.Services
 
         [Theory]
         [InlineData("non existent file.zip", 0)]
-        [InlineData("wrong extension.rar", 0)]
+        [InlineData("winrar.rar", 0)]
         [InlineData("empty.zip", 0)]
         [InlineData("flat file.zip", 1)]
         [InlineData("file in folder in folder.zip", 1)]
@@ -56,18 +61,89 @@ namespace API.Tests.Services
         public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected)
         {
             var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
+            var sw = Stopwatch.StartNew();
             Assert.Equal(expected, _archiveService.GetNumberOfPagesFromArchive(Path.Join(testDirectory, archivePath)));
+            _testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
         }
+
+
+        [Theory]
+        [InlineData("non existent file.zip", ArchiveLibrary.NotSupported)]
+        [InlineData("winrar.rar", ArchiveLibrary.SharpCompress)]
+        [InlineData("empty.zip", ArchiveLibrary.Default)]
+        [InlineData("flat file.zip", ArchiveLibrary.Default)]
+        [InlineData("file in folder in folder.zip", ArchiveLibrary.Default)]
+        [InlineData("file in folder.zip", ArchiveLibrary.Default)]
+        [InlineData("file in folder_alt.zip", ArchiveLibrary.Default)]
+        public void CanOpenArchive(string archivePath, ArchiveLibrary expected)
+        {
+            var sw = Stopwatch.StartNew();
+            var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
+
+            Assert.Equal(expected, _archiveService.CanOpen(Path.Join(testDirectory, archivePath)));
+            _testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
+        }
+
+
+        [Theory]
+        [InlineData("non existent file.zip", 0)]
+        [InlineData("winrar.rar", 0)]
+        [InlineData("empty.zip", 0)]
+        [InlineData("flat file.zip", 1)]
+        [InlineData("file in folder in folder.zip", 1)]
+        [InlineData("file in folder.zip", 1)]
+        [InlineData("file in folder_alt.zip", 1)]
+        public void CanExtractArchive(string archivePath, int expectedFileCount)
+        {
+
+            var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
+            var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction");
+
+            DirectoryService.ClearAndDeleteDirectory(extractDirectory);
+
+            Stopwatch sw = Stopwatch.StartNew();
+            _archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory);
+            var di1 = new DirectoryInfo(extractDirectory);
+            Assert.Equal(expectedFileCount, di1.Exists ? di1.GetFiles().Length : 0);
+            _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
+
+            DirectoryService.ClearAndDeleteDirectory(extractDirectory);
+        }
+
+
         [Theory]
         [InlineData("v10.cbz", "v10.expected.jpg")]
         [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")]
         [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")]
+        //[InlineData("png.zip", "png.PNG")]
         public void GetCoverImageTest(string inputFile, string expectedOutputFile)
         {
             var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages");
             var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile));
+            Stopwatch sw = Stopwatch.StartNew();
             Assert.Equal(expectedBytes, _archiveService.GetCoverImage(Path.Join(testDirectory, inputFile)));
+            _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
+        }
+
+        [Theory]
+        [InlineData("06_v01[DMM].zip")]
+        public void CanParseCoverImage(string inputFile)
+        {
+            var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
+            Assert.NotEmpty(_archiveService.GetCoverImage(Path.Join(testDirectory, inputFile)));
+        }
+
+        [Fact]
+        public void ShouldHaveComicInfo()
+        {
+            var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
+            var archive = Path.Join(testDirectory, "file in folder.zip");
+            var summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?";
+
+            Assert.Equal(summaryInfo, _archiveService.GetSummaryInfo(archive));
+        }
     }
}
\ No newline at end of file
diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs
index bdb2f4b62..2072dae1f 100644
--- a/API.Tests/Services/CacheServiceTests.cs
+++ b/API.Tests/Services/CacheServiceTests.cs
@@ -1,15 +1,4 @@
-using System.Collections.Generic;
-using System.IO;
-using API.Data;
-using API.Entities;
-using API.Interfaces;
-using API.Interfaces.Services;
-using API.Services;
-using Microsoft.Extensions.Logging;
-using NSubstitute;
-using Xunit;
-
-namespace API.Tests.Services
+namespace API.Tests.Services
 {
     public class CacheServiceTests
     {
@@ -70,7 +59,7 @@ namespace API.Tests.Services
             // //         Chapter = 0,
             // //         FilePath = archivePath,
             // //         Format = MangaFormat.Archive,
-            // //         NumberOfPages = 1,
+            // //         Pages = 1,
             // //     }
             // // },
             // // Name = "1",
diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip b/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip
new file mode 100644
index 000000000..8fa84e0ac
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/winrar.rar b/API.Tests/Services/Test Data/ArchiveService/Archives/winrar.rar
new file mode 100644
index 000000000..2193513af
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/winrar.rar differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/ComicInfos/file in folder.zip b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/file in folder.zip
new file mode 100644
index 000000000..feee23120
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/file in folder.zip differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/Thumbnails/001.jpg b/API.Tests/Services/Test Data/ArchiveService/Thumbnails/001.jpg
new file mode 100644
index 000000000..575b9e556
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Thumbnails/001.jpg differ
diff --git a/API/API.csproj b/API/API.csproj
index 51bb3becf..b3ac07324 100644
--- a/API/API.csproj
+++ b/API/API.csproj
@@ -28,6 +28,7 @@
+    <PackageReference Include="SharpCompress" Version="0.28.1" />
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
diff --git a/API/Archive/ArchiveLibrary.cs b/API/Archive/ArchiveLibrary.cs
new file mode 100644
index 000000000..2d05a7a55
--- /dev/null
+++ b/API/Archive/ArchiveLibrary.cs
@@ -0,0 +1,12 @@
+namespace API.Archive
+{
+    /// <summary>
+    /// Represents which library should handle opening this archive
+    /// </summary>
+    public enum ArchiveLibrary
+    {
+        NotSupported = 0,
+        SharpCompress = 1,
+        Default = 2
+    }
+}
\ No newline at end of file
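The ArchiveLibrary enum above is what the rest of this change dispatches on: CanOpen probes a file once and callers branch on the result. A minimal consumer sketch — the SafePageCount helper is hypothetical; only IArchiveService, CanOpen, GetNumberOfPagesFromArchive and ArchiveLibrary come from this commit:

    // Hypothetical caller sketch: dispatch on the library CanOpen reports.
    public static int SafePageCount(IArchiveService archiveService, string archivePath)
    {
        switch (archiveService.CanOpen(archivePath))
        {
            case ArchiveLibrary.Default:        // readable via System.IO.Compression (zip/cbz)
            case ArchiveLibrary.SharpCompress:  // rar/cbr and friends need SharpCompress
                return archiveService.GetNumberOfPagesFromArchive(archivePath);
            default:                            // ArchiveLibrary.NotSupported
                return 0;
        }
    }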
diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs
index 924242c9e..04d0bf9d5 100644
--- a/API/Controllers/AccountController.cs
+++ b/API/Controllers/AccountController.cs
@@ -10,7 +10,6 @@ using API.Extensions;
 using API.Interfaces;
 using API.Interfaces.Services;
 using AutoMapper;
-using Microsoft.AspNetCore.Authorization;
 using Microsoft.AspNetCore.Identity;
 using Microsoft.AspNetCore.Mvc;
 using Microsoft.EntityFrameworkCore;
diff --git a/API/Controllers/FallbackController.cs b/API/Controllers/FallbackController.cs
index 82ed0e3f6..56962a3d6 100644
--- a/API/Controllers/FallbackController.cs
+++ b/API/Controllers/FallbackController.cs
@@ -6,6 +6,7 @@ namespace API.Controllers
 {
     public class FallbackController : Controller
     {
+        // ReSharper disable once S4487
        private readonly ITaskScheduler _taskScheduler;
 
        public FallbackController(ITaskScheduler taskScheduler)
diff --git a/API/Controllers/ImageController.cs b/API/Controllers/ImageController.cs
index c117abc9e..62fbd51ae 100644
--- a/API/Controllers/ImageController.cs
+++ b/API/Controllers/ImageController.cs
@@ -1,28 +1,16 @@
-using System.IO;
-using System.Linq;
-using System.Threading.Tasks;
-using API.DTOs;
+using System.Threading.Tasks;
 using API.Extensions;
 using API.Interfaces;
-using API.Interfaces.Services;
 using Microsoft.AspNetCore.Mvc;
-using Microsoft.Extensions.Logging;
 
 namespace API.Controllers
 {
     public class ImageController : BaseApiController
     {
-        private readonly IDirectoryService _directoryService;
-        private readonly ICacheService _cacheService;
-        private readonly ILogger<ImageController> _logger;
         private readonly IUnitOfWork _unitOfWork;
 
-        public ImageController(IDirectoryService directoryService, ICacheService cacheService,
-            ILogger<ImageController> logger, IUnitOfWork unitOfWork)
+        public ImageController(IUnitOfWork unitOfWork)
         {
-            _directoryService = directoryService;
-            _cacheService = cacheService;
-            _logger = logger;
             _unitOfWork = unitOfWork;
         }
diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs
index 4b8c776df..bc085114b 100644
--- a/API/Controllers/LibraryController.cs
+++ b/API/Controllers/LibraryController.cs
@@ -147,7 +147,6 @@ namespace API.Controllers
         [HttpPost("scan")]
         public ActionResult Scan(int libraryId)
         {
-            // TODO: We shouldn't queue up a job if one is already in progress
             _taskScheduler.ScanLibrary(libraryId);
             return Ok();
         }
diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs
index 21dce8411..d157980ad 100644
--- a/API/Controllers/ReaderController.cs
+++ b/API/Controllers/ReaderController.cs
@@ -35,7 +35,7 @@ namespace API.Controllers
             var chapter = await _cacheService.Ensure(chapterId);
             if (chapter == null) return BadRequest("There was an issue finding image file for reading");
 
-            var (path, mangaFile) = await _cacheService.GetCachedPagePath(chapter, page);
+            var (path, _) = await _cacheService.GetCachedPagePath(chapter, page);
             if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}");
 
             var content = await _directoryService.ReadFileAsync(path);
@@ -53,7 +53,7 @@ namespace API.Controllers
             var chapter = await _cacheService.Ensure(chapterId);
             if (chapter == null) return BadRequest("There was an issue finding image file for reading");
 
-            var (path, mangaFile) = await _cacheService.GetCachedPagePath(chapter, 0);
+            var (_, mangaFile) = await _cacheService.GetCachedPagePath(chapter, 0);
             return Ok(mangaFile.FilePath);
         }
diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs
index c1e4c9b73..e63be3eb5 100644
--- a/API/Controllers/SeriesController.cs
+++ b/API/Controllers/SeriesController.cs
@@ -105,9 +105,9 @@ namespace API.Controllers
 
             if (series == null) return BadRequest("Series does not exist");
 
-            // TODO: check if new name isn't an existing series
-            var existingSeries = await _unitOfWork.SeriesRepository.GetSeriesByNameAsync(updateSeries.Name); // NOTE: This isnt checking library
-            if (existingSeries != null && existingSeries.Id != series.Id)
+            // TODO: Ensure we check against Library for Series Name change
+            var existingSeries = await _unitOfWork.SeriesRepository.GetSeriesByNameAsync(updateSeries.Name);
+            if (existingSeries != null && existingSeries.Id != series.Id )
             {
                 return BadRequest("A series already exists with this name. Name must be unique.");
             }
@@ -115,8 +115,7 @@ namespace API.Controllers
             series.LocalizedName = updateSeries.LocalizedName;
             series.SortName = updateSeries.SortName;
             series.Summary = updateSeries.Summary;
-            //series.CoverImage = updateSeries.CoverImage;
-
+
             _unitOfWork.SeriesRepository.Update(series);
 
             if (await _unitOfWork.Complete())
@@ -139,16 +138,5 @@ namespace API.Controllers
             var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
             return Ok(await _unitOfWork.SeriesRepository.GetInProgress(user.Id, libraryId, limit));
         }
-
-        [HttpGet("continue-reading")]
-        public async Task<ActionResult<IEnumerable<InProgressChapterDto>>> GetContinueReading(int libraryId = 0, int limit = 20)
-        {
-            var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
-            return Ok(await _unitOfWork.VolumeRepository.GetContinueReading(user.Id, libraryId, limit));
-        }
-
-
-
-
     }
}
\ No newline at end of file
diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs
index 36491de4a..2889e2317 100644
--- a/API/Controllers/ServerController.cs
+++ b/API/Controllers/ServerController.cs
@@ -5,6 +5,7 @@ using System.Threading.Tasks;
 using API.Extensions;
 using API.Interfaces;
 using API.Interfaces.Services;
+using API.Services;
 using Microsoft.AspNetCore.Authorization;
 using Microsoft.AspNetCore.Mvc;
 using Microsoft.Extensions.Configuration;
@@ -21,17 +22,15 @@ namespace API.Controllers
         private readonly IConfiguration _config;
         private readonly IDirectoryService _directoryService;
         private readonly IBackupService _backupService;
-        private readonly ITaskScheduler _taskScheduler;
 
         public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config,
-            IDirectoryService directoryService, IBackupService backupService, ITaskScheduler taskScheduler)
+            IDirectoryService directoryService, IBackupService backupService)
         {
             _applicationLifetime = applicationLifetime;
             _logger = logger;
             _config = config;
             _directoryService = directoryService;
             _backupService = backupService;
-            _taskScheduler = taskScheduler;
         }
 
         [HttpPost("restart")]
@@ -52,7 +51,7 @@ namespace API.Controllers
             var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
             var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
-            _directoryService.ExistOrCreate(tempLocation);
+            DirectoryService.ExistOrCreate(tempLocation);
             if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
             {
                 return BadRequest("Unable to copy files to temp directory for log download.");
@@ -70,7 +69,7 @@ namespace API.Controllers
             }
 
             var fileBytes = await _directoryService.ReadFileAsync(zipPath);
-            _directoryService.ClearAndDeleteDirectory(tempLocation);
+            DirectoryService.ClearAndDeleteDirectory(tempLocation);
             (new FileInfo(zipPath)).Delete();
 
             return File(fileBytes, "application/zip", Path.GetFileName(zipPath));
diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs
index 2e0bbbfeb..d149aa0d4 100644
--- a/API/Controllers/SettingsController.cs
+++ b/API/Controllers/SettingsController.cs
@@ -10,7 +10,6 @@ using API.Helpers.Converters;
 using API.Interfaces;
 using Microsoft.AspNetCore.Authorization;
 using Microsoft.AspNetCore.Mvc;
-using Microsoft.EntityFrameworkCore.Internal;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.Logging;
 
@@ -110,7 +109,7 @@ namespace API.Controllers
         [HttpGet("log-levels")]
         public ActionResult<IEnumerable<string>> GetLogLevels()
         {
-            return Ok(new string[] {"Trace", "Debug", "Information", "Warning", "Critical", "None"});
+            return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical", "None"});
         }
     }
}
\ No newline at end of file
diff --git a/API/DTOs/BookmarkDto.cs b/API/DTOs/BookmarkDto.cs
index de7f1b6a7..e2a1c6c2d 100644
--- a/API/DTOs/BookmarkDto.cs
+++ b/API/DTOs/BookmarkDto.cs
@@ -1,4 +1,4 @@
-namespace API.Data
+namespace API.DTOs
 {
     public class BookmarkDto
     {
diff --git a/API/DTOs/MangaFileDto.cs b/API/DTOs/MangaFileDto.cs
index d7f5d5034..786f85df7 100644
--- a/API/DTOs/MangaFileDto.cs
+++ b/API/DTOs/MangaFileDto.cs
@@ -5,7 +5,7 @@ namespace API.DTOs
     public class MangaFileDto
     {
         public string FilePath { get; init; }
-        public int NumberOfPages { get; init; }
+        public int Pages { get; init; }
         public MangaFormat Format { get; init; }
     }
diff --git a/API/Data/LibraryRepository.cs b/API/Data/LibraryRepository.cs
index c33c42281..9a46b44a4 100644
--- a/API/Data/LibraryRepository.cs
+++ b/API/Data/LibraryRepository.cs
@@ -1,5 +1,4 @@
 using System;
-using System.Collections;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
diff --git a/API/Data/Migrations/20210322212724_MangaFileToPages.Designer.cs b/API/Data/Migrations/20210322212724_MangaFileToPages.Designer.cs
new file mode 100644
index 000000000..f5d2d7ef9
--- /dev/null
+++ b/API/Data/Migrations/20210322212724_MangaFileToPages.Designer.cs
@@ -0,0 +1,733 @@
+// <auto-generated />
+using System;
+using API.Data;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+
+namespace API.Data.Migrations
+{
+    [DbContext(typeof(DataContext))]
+    [Migration("20210322212724_MangaFileToPages")]
+    partial class MangaFileToPages
+    {
+        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        {
+#pragma warning disable 612, 618
+            modelBuilder
+                .HasAnnotation("ProductVersion", "5.0.1");
+
+            modelBuilder.Entity("API.Entities.AppRole", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("ConcurrencyStamp")
+                        .IsConcurrencyToken()
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Name")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("NormalizedName")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("NormalizedName")
+                        .IsUnique()
+                        .HasDatabaseName("RoleNameIndex");
+
+                    b.ToTable("AspNetRoles");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUser", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("AccessFailedCount")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("ConcurrencyStamp")
+                        .IsConcurrencyToken()
+                        .HasColumnType("TEXT");
+
+                    b.Property<DateTime>("Created")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Email")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.Property<bool>("EmailConfirmed")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<DateTime>("LastActive")
+                        .HasColumnType("TEXT");
+
+                    b.Property<bool>("LockoutEnabled")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<DateTimeOffset?>("LockoutEnd")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("NormalizedEmail")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("NormalizedUserName")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("PasswordHash")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("PhoneNumber")
+                        .HasColumnType("TEXT");
+
+                    b.Property<bool>("PhoneNumberConfirmed")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<uint>("RowVersion")
+                        .IsConcurrencyToken()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("SecurityStamp")
+                        .HasColumnType("TEXT");
+
+                    b.Property<bool>("TwoFactorEnabled")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("UserName")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("NormalizedEmail")
+                        .HasDatabaseName("EmailIndex");
+
+                    b.HasIndex("NormalizedUserName")
+                        .IsUnique()
+                        .HasDatabaseName("UserNameIndex");
+
+                    b.ToTable("AspNetUsers");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("AppUserId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<bool>("HideReadOnDetails")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("PageSplitOption")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("ReadingDirection")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("ScalingOption")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("AppUserId")
+                        .IsUnique();
+
+                    b.ToTable("AppUserPreferences");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserProgress", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("AppUserId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("ChapterId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<DateTime>("Created")
+                        .HasColumnType("TEXT");
+
+                    b.Property<DateTime>("LastModified")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("PagesRead")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("SeriesId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("VolumeId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("AppUserId");
+
+                    b.ToTable("AppUserProgresses");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserRating", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("AppUserId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("Rating")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("Review")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("SeriesId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("AppUserId");
+
+                    b.ToTable("AppUserRating");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserRole", b =>
+                {
+                    b.Property<int>("UserId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("RoleId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("UserId", "RoleId");
+
+                    b.HasIndex("RoleId");
+
+                    b.ToTable("AspNetUserRoles");
+                });
+
+            modelBuilder.Entity("API.Entities.Chapter", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<byte[]>("CoverImage")
+                        .HasColumnType("BLOB");
+
+                    b.Property<DateTime>("Created")
+                        .HasColumnType("TEXT");
+
+                    b.Property<DateTime>("LastModified")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Number")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("Pages")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("Range")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("VolumeId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("VolumeId");
+
+                    b.ToTable("Chapter");
+                });
+
+            modelBuilder.Entity("API.Entities.FolderPath", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<DateTime>("LastScanned")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("LibraryId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("Path")
+                        .HasColumnType("TEXT");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("LibraryId");
+
+                    b.ToTable("FolderPath");
+                });
+
+            modelBuilder.Entity("API.Entities.Library", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("CoverImage")
+                        .HasColumnType("TEXT");
+
+                    b.Property<DateTime>("Created")
+                        .HasColumnType("TEXT");
+
+                    b.Property<DateTime>("LastModified")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Name")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("Type")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.ToTable("Library");
+                });
+
+            modelBuilder.Entity("API.Entities.MangaFile", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("ChapterId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("FilePath")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("Format")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("Pages")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("ChapterId");
+
+                    b.ToTable("MangaFile");
+                });
+
+            modelBuilder.Entity("API.Entities.Series", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<byte[]>("CoverImage")
+                        .HasColumnType("BLOB");
+
+                    b.Property<DateTime>("Created")
+                        .HasColumnType("TEXT");
+
+                    b.Property<DateTime>("LastModified")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("LibraryId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("LocalizedName")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Name")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("NormalizedName")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("OriginalName")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("Pages")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("SortName")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Summary")
+                        .HasColumnType("TEXT");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("LibraryId");
+
+                    b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId")
+                        .IsUnique();
+
+                    b.ToTable("Series");
+                });
+
+            modelBuilder.Entity("API.Entities.ServerSetting", b =>
+                {
+                    b.Property<int>("Key")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<uint>("RowVersion")
+                        .IsConcurrencyToken()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("Value")
+                        .HasColumnType("TEXT");
+
+                    b.HasKey("Key");
+
+                    b.ToTable("ServerSetting");
+                });
+
+            modelBuilder.Entity("API.Entities.Volume", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<byte[]>("CoverImage")
+                        .HasColumnType("BLOB");
+
+                    b.Property<DateTime>("Created")
+                        .HasColumnType("TEXT");
+
+                    b.Property<bool>("IsSpecial")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<DateTime>("LastModified")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Name")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("Number")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("Pages")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("SeriesId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("SeriesId");
+
+                    b.ToTable("Volume");
+                });
+
+            modelBuilder.Entity("AppUserLibrary", b =>
+                {
+                    b.Property<int>("AppUsersId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<int>("LibrariesId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("AppUsersId", "LibrariesId");
+
+                    b.HasIndex("LibrariesId");
+
+                    b.ToTable("AppUserLibrary");
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("ClaimType")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("ClaimValue")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("RoleId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("RoleId");
+
+                    b.ToTable("AspNetRoleClaims");
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("ClaimType")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("ClaimValue")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("UserId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("UserId");
+
+                    b.ToTable("AspNetUserClaims");
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
+                {
+                    b.Property<string>("LoginProvider")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("ProviderKey")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("ProviderDisplayName")
+                        .HasColumnType("TEXT");
+
+                    b.Property<int>("UserId")
+                        .HasColumnType("INTEGER");
+
+                    b.HasKey("LoginProvider", "ProviderKey");
+
+                    b.HasIndex("UserId");
+
+                    b.ToTable("AspNetUserLogins");
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
+                {
+                    b.Property<int>("UserId")
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("LoginProvider")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Name")
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Value")
+                        .HasColumnType("TEXT");
+
+                    b.HasKey("UserId", "LoginProvider", "Name");
+
+                    b.ToTable("AspNetUserTokens");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
+                {
+                    b.HasOne("API.Entities.AppUser", "AppUser")
+                        .WithOne("UserPreferences")
+                        .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("AppUser");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserProgress", b =>
+                {
+                    b.HasOne("API.Entities.AppUser", "AppUser")
+                        .WithMany("Progresses")
+                        .HasForeignKey("AppUserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("AppUser");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserRating", b =>
+                {
+                    b.HasOne("API.Entities.AppUser", "AppUser")
+                        .WithMany("Ratings")
+                        .HasForeignKey("AppUserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("AppUser");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUserRole", b =>
+                {
+                    b.HasOne("API.Entities.AppRole", "Role")
+                        .WithMany("UserRoles")
+                        .HasForeignKey("RoleId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.HasOne("API.Entities.AppUser", "User")
+                        .WithMany("UserRoles")
+                        .HasForeignKey("UserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Role");
+
+                    b.Navigation("User");
+                });
+
+            modelBuilder.Entity("API.Entities.Chapter", b =>
+                {
+                    b.HasOne("API.Entities.Volume", "Volume")
+                        .WithMany("Chapters")
+                        .HasForeignKey("VolumeId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Volume");
+                });
+
+            modelBuilder.Entity("API.Entities.FolderPath", b =>
+                {
+                    b.HasOne("API.Entities.Library", "Library")
+                        .WithMany("Folders")
+                        .HasForeignKey("LibraryId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Library");
+                });
+
+            modelBuilder.Entity("API.Entities.MangaFile", b =>
+                {
+                    b.HasOne("API.Entities.Chapter", "Chapter")
+                        .WithMany("Files")
+                        .HasForeignKey("ChapterId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Chapter");
+                });
+
+            modelBuilder.Entity("API.Entities.Series", b =>
+                {
+                    b.HasOne("API.Entities.Library", "Library")
+                        .WithMany("Series")
+                        .HasForeignKey("LibraryId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Library");
+                });
+
+            modelBuilder.Entity("API.Entities.Volume", b =>
+                {
+                    b.HasOne("API.Entities.Series", "Series")
+                        .WithMany("Volumes")
+                        .HasForeignKey("SeriesId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Series");
+                });
+
+            modelBuilder.Entity("AppUserLibrary", b =>
+                {
+                    b.HasOne("API.Entities.AppUser", null)
+                        .WithMany()
+                        .HasForeignKey("AppUsersId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.HasOne("API.Entities.Library", null)
+                        .WithMany()
+                        .HasForeignKey("LibrariesId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
+                {
+                    b.HasOne("API.Entities.AppRole", null)
+                        .WithMany()
+                        .HasForeignKey("RoleId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
+                {
+                    b.HasOne("API.Entities.AppUser", null)
+                        .WithMany()
+                        .HasForeignKey("UserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
+                {
+                    b.HasOne("API.Entities.AppUser", null)
+                        .WithMany()
+                        .HasForeignKey("UserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+                });
+
+            modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
+                {
+                    b.HasOne("API.Entities.AppUser", null)
+                        .WithMany()
+                        .HasForeignKey("UserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+                });
+
+            modelBuilder.Entity("API.Entities.AppRole", b =>
+                {
+                    b.Navigation("UserRoles");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUser", b =>
+                {
+                    b.Navigation("Progresses");
+
+                    b.Navigation("Ratings");
+
+                    b.Navigation("UserPreferences");
+
+                    b.Navigation("UserRoles");
+                });
+
+            modelBuilder.Entity("API.Entities.Chapter", b =>
+                {
+                    b.Navigation("Files");
+                });
+
+            modelBuilder.Entity("API.Entities.Library", b =>
+                {
+                    b.Navigation("Folders");
+
+                    b.Navigation("Series");
+                });
+
+            modelBuilder.Entity("API.Entities.Series", b =>
+                {
+                    b.Navigation("Volumes");
+                });
+
+            modelBuilder.Entity("API.Entities.Volume", b =>
+                {
+                    b.Navigation("Chapters");
+                });
+#pragma warning restore 612, 618
+        }
+    }
+}
diff --git a/API/Data/Migrations/20210322212724_MangaFileToPages.cs b/API/Data/Migrations/20210322212724_MangaFileToPages.cs
new file mode 100644
index 000000000..63fecfb72
--- /dev/null
+++ b/API/Data/Migrations/20210322212724_MangaFileToPages.cs
@@ -0,0 +1,23 @@
+using Microsoft.EntityFrameworkCore.Migrations;
+
+namespace API.Data.Migrations
+{
+    public partial class MangaFileToPages : Migration
+    {
+        protected override void Up(MigrationBuilder migrationBuilder)
+        {
+            migrationBuilder.RenameColumn(
+                name: "NumberOfPages",
+                table: "MangaFile",
+                newName: "Pages");
+        }
+
+        protected override void Down(MigrationBuilder migrationBuilder)
+        {
+            migrationBuilder.RenameColumn(
+                name: "Pages",
+                table: "MangaFile",
+                newName: "NumberOfPages");
+        }
+    }
+}
diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs b/API/Data/Migrations/DataContextModelSnapshot.cs
index c0fcd6f87..11dff82eb 100644
--- a/API/Data/Migrations/DataContextModelSnapshot.cs
+++ b/API/Data/Migrations/DataContextModelSnapshot.cs
@@ -318,7 +318,7 @@ namespace API.Data.Migrations
                     b.Property<int>("Format")
                         .HasColumnType("INTEGER");
 
-                    b.Property<int>("NumberOfPages")
+                    b.Property<int>("Pages")
                         .HasColumnType("INTEGER");
 
                     b.HasKey("Id");
diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs
index 52be7dac7..84b904b51 100644
--- a/API/Data/SeriesRepository.cs
+++ b/API/Data/SeriesRepository.cs
@@ -1,6 +1,4 @@
-using System;
-using System.Collections;
-using System.Collections.Generic;
+using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
 using System.Threading.Tasks;
@@ -12,7 +10,6 @@ using API.Interfaces;
 using AutoMapper;
 using AutoMapper.QueryableExtensions;
 using Microsoft.EntityFrameworkCore;
-using Microsoft.EntityFrameworkCore.Internal;
 using Microsoft.Extensions.Logging;
 
 namespace API.Data
@@ -207,8 +204,7 @@ namespace API.Data
                 .Include(s => s.Volumes)
                 .ThenInclude(v => v.Chapters)
                 .ToListAsync();
-
-            // TODO: refactor this
+
             IList<int> chapterIds = new List<int>();
             foreach (var s in series)
             {
@@ -306,7 +302,6 @@ namespace API.Data
         /// <returns></returns>
         public async Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, int limit)
         {
-            // TODO: Idea: Put Total PagesRead and as return so that we can show a progress bar for full series read progress
             var series = await _context.Series
                 .Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new
                 {
diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs
index aa3c9ab5f..ae2f909a9 100644
--- a/API/Data/UnitOfWork.cs
+++ b/API/Data/UnitOfWork.cs
@@ -26,7 +26,7 @@ namespace API.Data
 
         public IUserRepository UserRepository => new UserRepository(_context, _userManager);
         public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper);
-        public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper, _logger);
+        public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper);
 
         public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper);
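The continue-reading query removed in the next file chains .DistinctBy(...), which is not part of LINQ until .NET 6; on the .NET 5 target above (ProductVersion 5.0.1) it would have come from an extension roughly like this sketch (assumption: the project supplied something equivalent, e.g. via MoreLINQ — this is not the removed code itself):

    using System;
    using System.Collections.Generic;

    public static class EnumerableExtensions
    {
        // First occurrence of each key wins, preserving source order.
        public static IEnumerable<TSource> DistinctBy<TSource, TKey>(
            this IEnumerable<TSource> source, Func<TSource, TKey> keySelector)
        {
            var seen = new HashSet<TKey>();
            foreach (var element in source)
            {
                if (seen.Add(keySelector(element)))
                    yield return element;
            }
        }
    }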
diff --git a/API/Data/VolumeRepository.cs b/API/Data/VolumeRepository.cs
index f0e183805..6b9e541ea 100644
--- a/API/Data/VolumeRepository.cs
+++ b/API/Data/VolumeRepository.cs
@@ -1,15 +1,12 @@
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading.Tasks;
-using API.Comparators;
 using API.DTOs;
 using API.Entities;
-using API.Extensions;
 using API.Interfaces;
 using AutoMapper;
 using AutoMapper.QueryableExtensions;
 using Microsoft.EntityFrameworkCore;
-using Microsoft.Extensions.Logging;
 
 namespace API.Data
 {
@@ -17,13 +14,11 @@ namespace API.Data
     {
         private readonly DataContext _context;
         private readonly IMapper _mapper;
-        private readonly ILogger _logger;
 
-        public VolumeRepository(DataContext context, IMapper mapper, ILogger logger)
+        public VolumeRepository(DataContext context, IMapper mapper)
         {
             _context = context;
             _mapper = mapper;
-            _logger = logger;
         }
 
         public void Update(Volume volume)
@@ -89,123 +84,5 @@ namespace API.Data
                 .AsNoTracking()
                 .ToListAsync();
         }
-
-        /// <summary>
-        /// Gets the first (ordered) volume/chapter in a series where the user has progress on it. Only completed volumes/chapters, next entity shouldn't
-        /// have any read progress on it.
-        /// </summary>
-        /// <param name="userId"></param>
-        /// <param name="libraryId"></param>
-        /// <param name="limit"></param>
-        /// <returns></returns>
-        public async Task<IEnumerable<InProgressChapterDto>> GetContinueReading(int userId, int libraryId, int limit)
-        {
-            /** TODO: Fix this SQL
-             * SELECT * FROM
-             (
-                 SELECT * FROM Chapter C WHERE C.VolumeId IN (SELECT Id from Volume where SeriesId = 1912)
-             ) C INNER JOIN AppUserProgresses AUP ON AUP.ChapterId = C.Id
-             INNER JOIN Series S ON AUP.SeriesId = S.Id
-             WHERE AUP.AppUserId = 1 AND AUP.PagesRead < C.Pages
-             */
-            _logger.LogInformation("Get Continue Reading");
-            var volumeQuery = _context.Volume
-                .Join(_context.AppUserProgresses, v => v.Id, aup => aup.VolumeId, (volume, progress) => new
-                {
-                    volume,
-                    progress
-                })
-                .Where(arg => arg.volume.SeriesId == arg.progress.SeriesId && arg.progress.AppUserId == userId)
-                .AsNoTracking()
-                .Select(arg => new
-                {
-                    VolumeId = arg.volume.Id,
-                    VolumeNumber = arg.volume.Number
-                }); // I think doing a join on this would be better
-
-            var volumeIds = (await volumeQuery.ToListAsync()).Select(s => s.VolumeId);
-
-            var chapters2 = await _context.Chapter.Where(c => volumeIds.Contains(c.VolumeId))
-                .Join(_context.AppUserProgresses, chapter => chapter.Id, aup => aup.ChapterId, (chapter, progress) =>
-                    new
-                    {
-                        chapter,
-                        progress
-                    })
-                .Join(_context.Series, arg => arg.progress.SeriesId, s => s.Id, (arg, series) => new
-                {
-                    Chapter = arg.chapter,
-                    Progress = arg.progress,
-                    Series = series
-                })
-                .Where(o => o.Progress.AppUserId == userId && o.Progress.PagesRead < o.Series.Pages)
-                .Select(arg => new
-                {
-                    Chapter = arg.Chapter,
-                    Progress = arg.Progress,
-                    SeriesId = arg.Series.Id,
-                    SeriesName = arg.Series.Name,
-                    LibraryId = arg.Series.LibraryId,
-                    TotalPages = arg.Series.Pages
-                })
-                .OrderByDescending(d => d.Progress.LastModified)
-                .Take(limit)
-                .ToListAsync();
-
-            return chapters2
-                .OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
-                .DistinctBy(p => p.SeriesId)
-                .Select(arg => new InProgressChapterDto()
-                {
-                    Id = arg.Chapter.Id,
-                    Number = arg.Chapter.Number,
-                    Range = arg.Chapter.Range,
-                    SeriesId = arg.Progress.SeriesId,
-                    SeriesName = arg.SeriesName,
-                    LibraryId = arg.LibraryId,
-                    Pages = arg.Chapter.Pages,
-                    VolumeId = arg.Chapter.VolumeId
-                });
-
-
-
-            // var chapters = await _context.Chapter
-            //     .Join(_context.AppUserProgresses, c => c.Id, p => p.ChapterId,
-            //         (chapter, progress) =>
-            //             new
-            //             {
-            //                 Chapter = chapter,
-            //                 Progress = progress
-            //             })
-            //     .Join(_context.Series, arg => arg.Progress.SeriesId, series => series.Id, (arg, series) =>
-            //         new
-            //         {
-            //             arg.Chapter,
-            //             arg.Progress,
-            //             Series = series,
-            //             VolumeIds = _context.Volume.Where(v => v.SeriesId == series.Id).Select(s => s.Id).ToList()
-            //         })
-            //     .AsNoTracking()
-            //     .Where(arg => arg.Progress.AppUserId == userId
-            //                   && arg.Progress.PagesRead < arg.Chapter.Pages
-            //                   && arg.VolumeIds.Contains(arg.Progress.VolumeId))
-            //     .OrderByDescending(d => d.Progress.LastModified)
-            //     .Take(limit)
-            //     .ToListAsync();
-
-            // return chapters
-            //     .OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
-            //     .DistinctBy(p => p.Series.Id)
-            //     .Select(arg => new InProgressChapterDto()
-            //     {
-            //         Id = arg.Chapter.Id,
-            //         Number = arg.Chapter.Number,
-            //         Range = arg.Chapter.Range,
-            //         SeriesId = arg.Progress.SeriesId,
-            //         SeriesName = arg.Series.Name,
-            //         LibraryId = arg.Series.LibraryId,
-            //         Pages = arg.Chapter.Pages,
-            //     });
-        }
     }
}
\ No newline at end of file
diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs
index c4471949a..1c0dd4266 100644
--- a/API/Entities/MangaFile.cs
+++ b/API/Entities/MangaFile.cs
@@ -13,7 +13,7 @@ namespace API.Entities
         /// <summary>
         /// Number of pages for the given file
         /// </summary>
-        public int NumberOfPages { get; set; } // TODO: Refactor this to Pages
+        public int Pages { get; set; }
         public MangaFormat Format { get; set; }
 
         // Relationship Mapping
diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs
index a9be9e0cb..69e5a4a68 100644
--- a/API/Extensions/ApplicationServiceExtensions.cs
+++ b/API/Extensions/ApplicationServiceExtensions.cs
@@ -32,10 +32,7 @@ namespace API.Extensions
 
             services.AddDbContext<DataContext>(options =>
             {
-                options.UseSqlite(config.GetConnectionString("DefaultConnection"), builder =>
-                {
-                    //builder.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery);
-                });
+                options.UseSqlite(config.GetConnectionString("DefaultConnection"));
             });
 
             services.AddLogging(loggingBuilder =>
diff --git a/API/Extensions/LeftJoinExtensions.cs b/API/Extensions/LeftJoinExtensions.cs
deleted file mode 100644
index c4ea979a4..000000000
--- a/API/Extensions/LeftJoinExtensions.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Linq.Expressions;
-using System.Reflection;
-
-namespace API.Extensions
-{
-    public static class LeftJoinExtensions
-{
-    public static IQueryable<TResult> LeftJoin<TOuter, TInner, TKey, TResult>(
-        this IQueryable<TOuter> outer,
-        IQueryable<TInner> inner,
-        Expression<Func<TOuter, TKey>> outerKeySelector,
-        Expression<Func<TInner, TKey>> innerKeySelector,
-        Expression<Func<TOuter, TInner, TResult>> resultSelector)
-    {
-        MethodInfo groupJoin = typeof (Queryable).GetMethods()
-            .Single(m => m.ToString() == "System.Linq.IQueryable`1[TResult] GroupJoin[TOuter,TInner,TKey,TResult](System.Linq.IQueryable`1[TOuter], System.Collections.Generic.IEnumerable`1[TInner], System.Linq.Expressions.Expression`1[System.Func`2[TOuter,TKey]], System.Linq.Expressions.Expression`1[System.Func`2[TInner,TKey]], System.Linq.Expressions.Expression`1[System.Func`3[TOuter,System.Collections.Generic.IEnumerable`1[TInner],TResult]])")
-            .MakeGenericMethod(typeof (TOuter), typeof (TInner), typeof (TKey), typeof (LeftJoinIntermediate<TOuter, TInner>));
-        MethodInfo selectMany = typeof (Queryable).GetMethods()
-            .Single(m => m.ToString() == "System.Linq.IQueryable`1[TResult] SelectMany[TSource,TCollection,TResult](System.Linq.IQueryable`1[TSource], System.Linq.Expressions.Expression`1[System.Func`2[TSource,System.Collections.Generic.IEnumerable`1[TCollection]]], System.Linq.Expressions.Expression`1[System.Func`3[TSource,TCollection,TResult]])")
-            .MakeGenericMethod(typeof (LeftJoinIntermediate<TOuter, TInner>), typeof (TInner), typeof (TResult));
-
-        var groupJoinResultSelector = (Expression<Func<TOuter, IEnumerable<TInner>, LeftJoinIntermediate<TOuter, TInner>>>)
-            ((oneOuter, manyInners) => new LeftJoinIntermediate<TOuter, TInner> {OneOuter = oneOuter, ManyInners = manyInners});
-
-        MethodCallExpression exprGroupJoin = Expression.Call(groupJoin, outer.Expression, inner.Expression, outerKeySelector, innerKeySelector, groupJoinResultSelector);
-
-        var selectManyCollectionSelector = (Expression<Func<LeftJoinIntermediate<TOuter, TInner>, IEnumerable<TInner>>>)
-            (t => t.ManyInners.DefaultIfEmpty());
-
-        ParameterExpression paramUser = resultSelector.Parameters.First();
-
-        ParameterExpression paramNew = Expression.Parameter(typeof (LeftJoinIntermediate<TOuter, TInner>), "t");
-        MemberExpression propExpr = Expression.Property(paramNew, "OneOuter");
-
-        LambdaExpression selectManyResultSelector = Expression.Lambda(new Replacer(paramUser, propExpr).Visit(resultSelector.Body), paramNew, resultSelector.Parameters.Skip(1).First());
-
-        MethodCallExpression exprSelectMany = Expression.Call(selectMany, exprGroupJoin, selectManyCollectionSelector, selectManyResultSelector);
-
-        return outer.Provider.CreateQuery<TResult>(exprSelectMany);
-    }
-
-    private class LeftJoinIntermediate<TOuter, TInner>
-    {
-        public TOuter OneOuter { get; set; }
-        public IEnumerable<TInner> ManyInners { get; set; }
-    }
-
-    private class Replacer : ExpressionVisitor
-    {
-        private readonly ParameterExpression _oldParam;
-        private readonly Expression _replacement;
-
-        public Replacer(ParameterExpression oldParam, Expression replacement)
-        {
-            _oldParam = oldParam;
-            _replacement = replacement;
-        }
-
-        public override Expression Visit(Expression exp)
-        {
-            if (exp == _oldParam)
-            {
-                return _replacement;
-            }
-
-            return base.Visit(exp);
-        }
-    }
-}
-}
\ No newline at end of file
diff --git a/API/Interfaces/ISeriesRepository.cs b/API/Interfaces/ISeriesRepository.cs
index 647469e69..34a1715dc 100644
--- a/API/Interfaces/ISeriesRepository.cs
+++ b/API/Interfaces/ISeriesRepository.cs
@@ -18,6 +18,7 @@ namespace API.Interfaces
         /// </summary>
         /// <param name="libraryId"></param>
         /// <param name="userId"></param>
+        /// <param name="userParams"></param>
         /// <returns></returns>
         Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams);
diff --git a/API/Interfaces/IVolumeRepository.cs b/API/Interfaces/IVolumeRepository.cs
index bec554fde..faf18abb8 100644
--- a/API/Interfaces/IVolumeRepository.cs
+++ b/API/Interfaces/IVolumeRepository.cs
@@ -13,6 +13,5 @@ namespace API.Interfaces
         Task<IList<MangaFile>> GetFilesForChapter(int chapterId);
         Task<IEnumerable<ChapterDto>> GetChaptersAsync(int volumeId);
         Task<byte[]> GetChapterCoverImageAsync(int chapterId);
-        Task<IEnumerable<InProgressChapterDto>> GetContinueReading(int userId, int libraryId, int limit);
     }
}
\ No newline at end of file
diff --git a/API/Interfaces/Services/IArchiveService.cs b/API/Interfaces/Services/IArchiveService.cs
index 1c0a638db..aa5df49e2 100644
--- a/API/Interfaces/Services/IArchiveService.cs
+++ b/API/Interfaces/Services/IArchiveService.cs
@@ -1,16 +1,16 @@
 using System.IO.Compression;
-using API.Entities;
+using API.Archive;
 
 namespace API.Interfaces.Services
 {
     public interface IArchiveService
     {
-        bool ArchiveNeedsFlattening(ZipArchive archive);
         void ExtractArchive(string archivePath, string extractPath);
         int GetNumberOfPagesFromArchive(string archivePath);
-        byte[] GetCoverImage(string filepath, bool createThumbnail = false);
+        byte[] GetCoverImage(string archivePath, bool createThumbnail = false);
         bool IsValidArchive(string archivePath);
         string GetSummaryInfo(string archivePath);
-
+        ArchiveLibrary CanOpen(string archivePath);
+        bool ArchiveNeedsFlattening(ZipArchive archive);
    }
}
\ No newline at end of file
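The deleted LeftJoinExtensions rebuilt GroupJoin/SelectMany calls through reflection and expression rewriting. The same left join can be written directly in LINQ query syntax, which is presumably why the helper could go; a sketch with a hypothetical DataContext ('context') and simplified result shape:

    // Plain-LINQ left join replacing the reflection-based helper (sketch).
    var withProgress =
        from s in context.Series
        join p in context.AppUserProgresses on s.Id equals p.SeriesId into joined
        from p in joined.DefaultIfEmpty()   // p is null where no progress row exists
        select new { SeriesId = s.Id, PagesRead = (int?)p.PagesRead ?? 0 };

GroupJoin followed by DefaultIfEmpty is exactly what query-syntax 'join ... into' compiles to, so EF Core translates it to a SQL LEFT JOIN without any custom plumbing.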
diff --git a/API/Interfaces/Services/IDirectoryService.cs b/API/Interfaces/Services/IDirectoryService.cs
index 1437df69b..f9e8345bd 100644
--- a/API/Interfaces/Services/IDirectoryService.cs
+++ b/API/Interfaces/Services/IDirectoryService.cs
@@ -1,7 +1,6 @@
 using System.Collections.Generic;
 using System.IO;
 using System.Threading.Tasks;
-using API.DTOs;
 
 namespace API.Interfaces.Services
 {
@@ -20,27 +19,7 @@ namespace API.Interfaces.Services
         /// <param name="path"></param>
         /// <returns></returns>
         string[] GetFilesWithExtension(string path, string searchPatternExpression = "");
-        /// <summary>
-        /// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases.
-        /// </summary>
-        /// <param name="directoryPath"></param>
-        /// <returns></returns>
-        bool ExistOrCreate(string directoryPath);
-
         Task<string> ReadFileAsync(string path);
-
-        /// <summary>
-        /// Deletes all files within the directory, then the directory itself.
-        /// </summary>
-        /// <param name="directoryPath"></param>
-        void ClearAndDeleteDirectory(string directoryPath);
-        /// <summary>
-        /// Deletes all files within the directory.
-        /// </summary>
-        /// <param name="directoryPath"></param>
-        /// <returns></returns>
-        void ClearDirectory(string directoryPath);
-
         bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath);
 
         bool Exists(string directory);
diff --git a/API/Middleware/ExceptionMiddleware.cs b/API/Middleware/ExceptionMiddleware.cs
index 8c76b8644..8badfeb96 100644
--- a/API/Middleware/ExceptionMiddleware.cs
+++ b/API/Middleware/ExceptionMiddleware.cs
@@ -25,14 +25,13 @@ namespace API.Middleware
 
         public async Task InvokeAsync(HttpContext context)
         {
-            // BUG: I think Hangfire timeouts are triggering the middleware to hijack an API call
             try
             {
                 await _next(context); // downstream middlewares or http call
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, ex.Message);
+                _logger.LogError(ex, "There was an exception");
                 context.Response.ContentType = "application/json";
                 context.Response.StatusCode = (int) HttpStatusCode.InternalServerError;
diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs
index 27b6309c1..af0383a81 100644
--- a/API/Parser/Parser.cs
+++ b/API/Parser/Parser.cs
@@ -8,8 +8,8 @@ namespace API.Parser
 {
     public static class Parser
     {
-        public static readonly string MangaFileExtensions = @"\.cbz|\.zip"; // |\.rar|\.cbr
-        public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg|\.gif";
+        public static readonly string MangaFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar\.gz|\.7zip";
+        public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg";
         private static readonly string XmlRegexExtensions = @"\.xml";
         private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
         private static readonly Regex MangaFileRegex = new Regex(MangaFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
@@ -26,6 +26,10 @@ namespace API.Parser
             new Regex(
                 @"(?<Series>.*)(\b|_)v(?<Volume>\d+(-\d+)?)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
+            // Kodomo no Jikan vol. 10
+            new Regex(
+                @"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
             new Regex(
                 @"(vol\.? ?)(?<Volume>0*[1-9]+)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
@@ -130,7 +134,7 @@ namespace API.Parser
             // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz
             new Regex(
-                @"^(?!Vol)(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?",
+                @"^(?!Vol)(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Tower Of God S01 014 (CBT) (digital).cbz
             new Regex(
@@ -399,20 +403,17 @@ namespace API.Parser
 
         public static bool IsArchive(string filePath)
         {
-            var fileInfo = new FileInfo(filePath);
-            return MangaFileRegex.IsMatch(fileInfo.Extension);
+            return MangaFileRegex.IsMatch(Path.GetExtension(filePath));
         }
 
         public static bool IsImage(string filePath)
         {
-            var fileInfo = new FileInfo(filePath);
-            return ImageRegex.IsMatch(fileInfo.Extension);
+            return ImageRegex.IsMatch(Path.GetExtension(filePath));
         }
 
         public static bool IsXml(string filePath)
         {
-            var fileInfo = new FileInfo(filePath);
-            return XmlRegex.IsMatch(fileInfo.Extension);
+            return XmlRegex.IsMatch(Path.GetExtension(filePath));
         }
 
         public static float MinimumNumberFromRange(string range)
diff --git a/API/Program.cs b/API/Program.cs
index aa6c98e56..ca814beb9 100644
--- a/API/Program.cs
+++ b/API/Program.cs
@@ -2,8 +2,6 @@ using System;
 using System.Threading.Tasks;
 using API.Data;
 using API.Entities;
-using API.Interfaces;
-using API.Interfaces.Services;
 using API.Services;
 using Microsoft.AspNetCore.Hosting;
 using Microsoft.AspNetCore.Identity;
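The added volume pattern can be exercised directly against the filenames from the new ParserTest cases; a quick standalone check (group names mirror the pattern above):

    using System;
    using System.Text.RegularExpressions;

    var volumeRegex = new Regex(
        @"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
        RegexOptions.IgnoreCase | RegexOptions.Compiled);

    var match = volumeRegex.Match("Kodomo no Jikan vol. 10.cbz");
    Console.WriteLine(match.Groups["Volume"].Value);         // 10
    Console.WriteLine(match.Groups["Series"].Value.Trim());  // Kodomo no Jikan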
diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs
index c54be44c2..3cda2b4a1 100644
--- a/API/Services/ArchiveService.cs
+++ b/API/Services/ArchiveService.cs
@@ -1,14 +1,18 @@
 using System;
+using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.IO.Compression;
 using System.Linq;
 using System.Xml.Serialization;
+using API.Archive;
 using API.Extensions;
 using API.Interfaces.Services;
 using API.Services.Tasks;
 using Microsoft.Extensions.Logging;
-using NetVips;
+using SharpCompress.Archives;
+using SharpCompress.Common;
+using Image = NetVips.Image;
 
 namespace API.Services
 {
@@ -25,18 +29,70 @@ namespace API.Services
             _logger = logger;
         }
 
+        /// <summary>
+        /// Checks if a file can be opened. Requires up to 2 opens of the filestream.
+        /// </summary>
+        /// <param name="archivePath"></param>
+        /// <returns></returns>
+        public ArchiveLibrary CanOpen(string archivePath)
+        {
+            if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath)) return ArchiveLibrary.NotSupported;
+
+            try
+            {
+                using var a2 = ZipFile.OpenRead(archivePath);
+                return ArchiveLibrary.Default;
+            }
+            catch (Exception)
+            {
+                try
+                {
+                    using var a1 = ArchiveFactory.Open(archivePath);
+                    return ArchiveLibrary.SharpCompress;
+                }
+                catch (Exception)
+                {
+                    return ArchiveLibrary.NotSupported;
+                }
+            }
+        }
+
         public int GetNumberOfPagesFromArchive(string archivePath)
         {
-            if (!IsValidArchive(archivePath)) return 0;
+            if (!IsValidArchive(archivePath))
+            {
+                _logger.LogError("Archive {ArchivePath} could not be found", archivePath);
+                return 0;
+            }
 
             try
             {
-                using ZipArchive archive = ZipFile.OpenRead(archivePath);
-                return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
+                var libraryHandler = CanOpen(archivePath);
+                switch (libraryHandler)
+                {
+                    case ArchiveLibrary.Default:
+                    {
+                        _logger.LogDebug("Using default compression handling");
+                        using ZipArchive archive = ZipFile.OpenRead(archivePath);
+                        return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
+                    }
+                    case ArchiveLibrary.SharpCompress:
+                    {
+                        _logger.LogDebug("Using SharpCompress compression handling");
+                        using var archive = ArchiveFactory.Open(archivePath);
+                        return archive.Entries.Count(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key));
+                    }
+                    case ArchiveLibrary.NotSupported:
+                        _logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath);
+                        return 0;
+                    default:
+                        _logger.LogError("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
+                        return 0;
+                }
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
+                _logger.LogError(ex, "[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
                 return 0;
             }
         }
@@ -46,58 +102,87 @@ namespace API.Services
         /// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless
         /// a folder.extension exists in the root directory of the compressed file.
         /// </summary>
-        /// <param name="filepath"></param>
+        /// <param name="archivePath"></param>
         /// <param name="createThumbnail">Create a smaller variant of file extracted from archive. Archive images are usually 1MB each.</param>
         /// <returns></returns>
-        public byte[] GetCoverImage(string filepath, bool createThumbnail = false)
+        public byte[] GetCoverImage(string archivePath, bool createThumbnail = false)
         {
+            if (archivePath == null || !IsValidArchive(archivePath)) return Array.Empty<byte>();
             try
             {
-                if (!IsValidArchive(filepath)) return Array.Empty<byte>();
+                var libraryHandler = CanOpen(archivePath);
+                switch (libraryHandler)
+                {
+                    case ArchiveLibrary.Default:
+                    {
+                        _logger.LogDebug("Using default compression handling");
+                        using var archive = ZipFile.OpenRead(archivePath);
+                        var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
+                        var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
+                        var entry = folder ?? entries[0];
 
-                using var archive = ZipFile.OpenRead(filepath);
-                if (!archive.HasFiles()) return Array.Empty<byte>();
-
-                var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
-                var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
-                var entry = folder ?? entries[0];
-
-                return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
+                        return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
+                    }
+                    case ArchiveLibrary.SharpCompress:
+                    {
+                        _logger.LogDebug("Using SharpCompress compression handling");
+                        using var archive = ArchiveFactory.Open(archivePath);
+                        return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
+                    }
+                    case ArchiveLibrary.NotSupported:
+                        _logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath);
+                        return Array.Empty<byte>();
+                    default:
+                        _logger.LogError("[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath);
+                        return Array.Empty<byte>();
+                }
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "There was an exception when reading archive stream: {Filepath}. Defaulting to no cover image", filepath);
+                _logger.LogError(ex, "[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath);
             }
 
             return Array.Empty<byte>();
         }
 
-        private byte[] CreateThumbnail(ZipArchiveEntry entry)
+        private byte[] FindCoverImage(IEnumerable<IArchiveEntry> entries, bool createThumbnail)
         {
-            try
+            var images = entries.ToList();
+            foreach (var entry in images)
             {
-                using var stream = entry.Open();
-                using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth);
-                return thumbnail.WriteToBuffer(".jpg"); // TODO: Validate this code works with .png files
-            }
-            catch (Exception ex)
-            {
-                _logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entry.FullName);
+                if (Path.GetFileNameWithoutExtension(entry.Key).ToLower() == "folder")
+                {
+                    using var ms = new MemoryStream();
+                    entry.WriteTo(ms);
+                    ms.Position = 0;
+                    return createThumbnail ? CreateThumbnail(ms.ToArray(), Path.GetExtension(entry.Key)) : ms.ToArray();
+                }
             }
 
+            if (images.Any())
+            {
+                var entry = images.OrderBy(e => e.Key).FirstOrDefault();
+                if (entry == null) return Array.Empty<byte>();
+                using var ms = new MemoryStream();
+                entry.WriteTo(ms);
+                ms.Position = 0;
+                var data = ms.ToArray();
+                return createThumbnail ? CreateThumbnail(data, Path.GetExtension(entry.Key)) : data;
+            }
+
             return Array.Empty<byte>();
         }
-
+
         private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry)
         {
             using var stream = entry.Open();
             using var ms = new MemoryStream();
-            stream.CopyTo(ms); // TODO: Check if we can use CopyToAsync here
+            stream.CopyTo(ms);
             var data = ms.ToArray();
             return data;
         }
-
+
         /// <summary>
         /// Given an archive stream, will assess whether directory needs to be flattened so that the extracted archive files are directly
         /// under extract path and not nested in subfolders. See Flatten method.
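The next hunk adds a buffer-based CreateThumbnail alongside the existing stream-based one; both funnel into NetVips. The core calls, reduced to a standalone sketch (the 320px width mirrors what a constant like ThumbnailWidth would plausibly hold — an assumption, since its value is not shown in this diff):

    using NetVips;

    // Decode + resize in one call; the extension passed to WriteToBuffer picks the encoder.
    static byte[] ThumbnailFromBytes(byte[] imageBytes, int width = 320)
    {
        using var thumb = Image.ThumbnailBuffer(imageBytes, width);
        return thumb.WriteToBuffer(".jpg");
    }

Image.ThumbnailBuffer avoids decoding the full-resolution image when the format supports shrink-on-load, which is why both thumbnail paths route through it rather than resizing a fully decoded bitmap.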
@@ -108,75 +193,173 @@ namespace API.Services { // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName return archive.Entries.Count > 0 && - !Path.HasExtension(archive.Entries.ElementAt(0).FullName) || - archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar)); + !Path.HasExtension(archive.Entries.ElementAt(0).FullName) || + archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar)); + } + + private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg") + { + if (!formatExtension.StartsWith(".")) + { + formatExtension = "." + formatExtension; + } + // TODO: Validate if jpeg is same as jpg + try + { + using var thumbnail = Image.ThumbnailBuffer(entry, ThumbnailWidth); + return thumbnail.WriteToBuffer(formatExtension); + } + catch (Exception ex) + { + _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image"); + } + + return Array.Empty(); + } + + private byte[] CreateThumbnail(ZipArchiveEntry entry, string formatExtension = ".jpg") + { + if (!formatExtension.StartsWith(".")) + { + formatExtension = $".{formatExtension}"; + } + try + { + using var stream = entry.Open(); + using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth); + return thumbnail.WriteToBuffer(formatExtension); // TODO: Validate this code works with .png files + } + catch (Exception ex) + { + _logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entry.FullName); + } + + return Array.Empty(); } /// - /// Test if the archive path exists and there are images inside it. This will log as an error. + /// Test if the archive path exists and an archive /// /// /// public bool IsValidArchive(string archivePath) { - try + if (!File.Exists(archivePath)) { - if (!File.Exists(archivePath)) - { - _logger.LogError("Archive {ArchivePath} could not be found", archivePath); - return false; - } - - if (!Parser.Parser.IsArchive(archivePath)) - { - _logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath); - return false; - } - - using var archive = ZipFile.OpenRead(archivePath); - if (archive.Entries.Any(e => Parser.Parser.IsImage(e.FullName))) return true; - _logger.LogError("Archive {ArchivePath} contains no images", archivePath); - } - catch (Exception ex) - { - _logger.LogError(ex, "Unable to validate archive ({ArchivePath}) due to problem opening archive", archivePath); + _logger.LogError("Archive {ArchivePath} could not be found", archivePath); + return false; } + + if (Parser.Parser.IsArchive(archivePath)) return true; + + _logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath); return false; + } + + private static ComicInfo FindComicInfoXml(IEnumerable entries) + { + foreach (var entry in entries) + { + if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key)) + { + using var ms = new MemoryStream(); + entry.WriteTo(ms); + ms.Position = 0; + + + var serializer = new XmlSerializer(typeof(ComicInfo)); + var info = (ComicInfo) serializer.Deserialize(ms); + return info; + } + } + + + return null; } public string GetSummaryInfo(string archivePath) { - var summary = ""; + var summary = string.Empty; if (!IsValidArchive(archivePath)) return summary; - - using var archive = ZipFile.OpenRead(archivePath); - if (!archive.HasFiles()) return summary; - - var info = 
archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName)); - if (info == null) return summary; - - // Parse XML file + + ComicInfo info = null; try { - using var stream = info.Open(); - var serializer = new XmlSerializer(typeof(ComicInfo)); - ComicInfo comicInfo = - (ComicInfo)serializer.Deserialize(stream); + if (!File.Exists(archivePath)) return summary; - if (comicInfo != null) + var libraryHandler = CanOpen(archivePath); + switch (libraryHandler) { - return comicInfo.Summary; + case ArchiveLibrary.Default: + { + _logger.LogDebug("Using default compression handling"); + using var archive = ZipFile.OpenRead(archivePath); + var entry = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName)); + if (entry != null) + { + using var stream = entry.Open(); + var serializer = new XmlSerializer(typeof(ComicInfo)); + info = (ComicInfo) serializer.Deserialize(stream); + } + break; + } + case ArchiveLibrary.SharpCompress: + { + _logger.LogDebug("Using SharpCompress compression handling"); + using var archive = ArchiveFactory.Open(archivePath); + info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key))); + break; + } + case ArchiveLibrary.NotSupported: + _logger.LogError("[GetSummaryInfo] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); + return summary; + default: + _logger.LogError("[GetSummaryInfo] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); + return summary; } - } - catch (AggregateException ex) - { - _logger.LogError(ex, "There was an issue parsing ComicInfo.xml from {ArchivePath}", archivePath); - } + if (info != null) + { + return info.Summary; + } + + _logger.LogError("[GetSummaryInfo] Could not parse archive file: {Filepath}", archivePath); + } + catch (Exception ex) + { + _logger.LogError(ex, "[GetSummaryInfo] There was an exception when reading archive stream: {Filepath}", archivePath); + } + return summary; } + private static void ExtractArchiveEntities(IEnumerable entries, string extractPath) + { + DirectoryService.ExistOrCreate(extractPath); + foreach (var entry in entries) + { + entry.WriteToDirectory(extractPath, new ExtractionOptions() + { + ExtractFullPath = false, + Overwrite = false + }); + } + } + + private void ExtractArchiveEntries(ZipArchive archive, string extractPath) + { + var needsFlattening = ArchiveNeedsFlattening(archive); + if (!archive.HasFiles() && !needsFlattening) return; + + archive.ExtractToDirectory(extractPath, true); + if (needsFlattening) + { + _logger.LogDebug("Extracted archive is nested in root folder, flattening..."); + new DirectoryInfo(extractPath).Flatten(); + } + } + /// /// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists, /// will return that without performing an extraction. Returns empty string if there are any invalidations which would @@ -189,27 +372,44 @@ namespace API.Services { if (!IsValidArchive(archivePath)) return; - if (Directory.Exists(extractPath)) + if (Directory.Exists(extractPath)) return; + + var sw = Stopwatch.StartNew(); + + try { - _logger.LogDebug("Archive {ArchivePath} has already been extracted. 
Returning existing folder", archivePath); + var libraryHandler = CanOpen(archivePath); + switch (libraryHandler) + { + case ArchiveLibrary.Default: + { + _logger.LogDebug("Using default compression handling"); + using var archive = ZipFile.OpenRead(archivePath); + ExtractArchiveEntries(archive, extractPath); + break; + } + case ArchiveLibrary.SharpCompress: + { + _logger.LogDebug("Using SharpCompress compression handling"); + using var archive = ArchiveFactory.Open(archivePath); + ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath); + break; + } + case ArchiveLibrary.NotSupported: + _logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); + return; + default: + _logger.LogError("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); + return; + } + + } + catch (Exception e) + { + _logger.LogError(e, "There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); return; } - - Stopwatch sw = Stopwatch.StartNew(); - using ZipArchive archive = ZipFile.OpenRead(archivePath); - var needsFlattening = ArchiveNeedsFlattening(archive); - if (!archive.HasFiles() && !needsFlattening) return; - - archive.ExtractToDirectory(extractPath, true); _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds); - - if (needsFlattening) - { - sw = Stopwatch.StartNew(); - _logger.LogInformation("Extracted archive is nested in root folder, flattening..."); - new DirectoryInfo(extractPath).Flatten(); - _logger.LogInformation("Flattened in {ElapsedMilliseconds} milliseconds", sw.ElapsedMilliseconds); - } } } } \ No newline at end of file diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index 549139fe4..1a847fdf1 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -32,7 +32,7 @@ namespace API.Services public void EnsureCacheDirectory() { _logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory); - if (!_directoryService.ExistOrCreate(CacheDirectory)) + if (!DirectoryService.ExistOrCreate(CacheDirectory)) { _logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory); } @@ -106,7 +106,7 @@ namespace API.Services var chapterFiles = chapter.Files ?? await _unitOfWork.VolumeRepository.GetFilesForChapter(chapter.Id); foreach (var mangaFile in chapterFiles) { - if (page <= (mangaFile.NumberOfPages + pagesSoFar)) + if (page <= (mangaFile.Pages + pagesSoFar)) { var path = GetCachePath(chapter.Id); var files = _directoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions); @@ -121,7 +121,7 @@ namespace API.Services return (files.ElementAt(page - pagesSoFar), mangaFile); } - pagesSoFar += mangaFile.NumberOfPages; + pagesSoFar += mangaFile.Pages; } return ("", null); diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 6ae953802..7ba691bc9 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -71,7 +71,12 @@ namespace API.Services return !Directory.Exists(path) ? Array.Empty() : Directory.GetFiles(path); } - public bool ExistOrCreate(string directoryPath) + /// + /// Returns true if the path exists and is a directory. If path does not exist, this will create it. 
Returns false in all fail cases. + /// + /// + /// + public static bool ExistOrCreate(string directoryPath) { var di = new DirectoryInfo(directoryPath); if (di.Exists) return true; @@ -79,16 +84,21 @@ namespace API.Services { Directory.CreateDirectory(directoryPath); } - catch (Exception ex) + catch (Exception) { - _logger.LogError(ex, "There was an issue creating directory: {Directory}", directoryPath); return false; } return true; } - public void ClearAndDeleteDirectory(string directoryPath) + /// + /// Deletes all files within the directory, then the directory itself. + /// + /// + public static void ClearAndDeleteDirectory(string directoryPath) { + if (!Directory.Exists(directoryPath)) return; + DirectoryInfo di = new DirectoryInfo(directoryPath); ClearDirectory(directoryPath); @@ -96,7 +106,12 @@ namespace API.Services di.Delete(true); } - public void ClearDirectory(string directoryPath) + /// + /// Deletes all files within the directory. + /// + /// + /// + public static void ClearDirectory(string directoryPath) { var di = new DirectoryInfo(directoryPath); if (!di.Exists) return; @@ -235,7 +250,8 @@ namespace API.Services return ++localCount; }, (c) => { - Interlocked.Add(ref fileCount, c); + // ReSharper disable once AccessToModifiedClosure + Interlocked.Add(ref fileCount, c); }); } } diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index c762bdbf7..4c1dbc118 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -38,6 +38,7 @@ namespace API.Services } } + public void UpdateMetadata(Volume volume, bool forceUpdate) { if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate)) @@ -45,14 +46,23 @@ namespace API.Services // TODO: Create a custom sorter for Chapters so it's consistent across the application volume.Chapters ??= new List(); var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault(); + var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault(); - if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); + // Skip calculating Cover Image (I/O) if the chapter already has it set + if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage)) + { + if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); + } + else + { + volume.CoverImage = firstChapter.CoverImage; + } } } public void UpdateMetadata(Series series, bool forceUpdate) { - // TODO: this doesn't actually invoke finding a new cover. Also all these should be groupped ideally so we limit + // NOTE: this doesn't actually invoke finding a new cover. Also all these should be grouped ideally so we limit // disk I/O to one method. 
if (series == null) return; if (ShouldFindCoverImage(series.CoverImage, forceUpdate)) diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index 28241aaea..23e7040d0 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -1,5 +1,4 @@ using System.IO; -using System.Linq; using System.Threading.Tasks; using API.Entities.Enums; using API.Helpers.Converters; @@ -21,17 +20,13 @@ namespace API.Services private readonly IMetadataService _metadataService; private readonly IBackupService _backupService; private readonly ICleanupService _cleanupService; - private readonly IDirectoryService _directoryService; - public static BackgroundJobServer Client => new BackgroundJobServer(new BackgroundJobServerOptions() - { - WorkerCount = 1 - }); + public static BackgroundJobServer Client => new BackgroundJobServer(); public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService, - IDirectoryService directoryService, IWebHostEnvironment env) + IWebHostEnvironment env) { _cacheService = cacheService; _logger = logger; @@ -40,7 +35,6 @@ namespace API.Services _metadataService = metadataService; _backupService = backupService; _cleanupService = cleanupService; - _directoryService = directoryService; if (!env.IsDevelopment()) { @@ -58,9 +52,8 @@ namespace API.Services public void ScheduleTasks() { _logger.LogInformation("Scheduling reoccurring tasks"); - - string setting = null; - setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value; + + string setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value; if (setting != null) { _logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting); @@ -87,7 +80,7 @@ namespace API.Services public void ScanLibrary(int libraryId, bool forceUpdate = false) { - + // TODO: We shouldn't queue up a job if one is already in progress _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate)); BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change @@ -107,7 +100,7 @@ namespace API.Services public void CleanupTemp() { var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); - BackgroundJob.Enqueue((() => _directoryService.ClearDirectory(tempDirectory))); + BackgroundJob.Enqueue((() => DirectoryService.ClearDirectory(tempDirectory))); } public void BackupDatabase() diff --git a/API/Services/Tasks/BackupService.cs b/API/Services/Tasks/BackupService.cs index ee8b3cf89..a5b115c3f 100644 --- a/API/Services/Tasks/BackupService.cs +++ b/API/Services/Tasks/BackupService.cs @@ -3,7 +3,6 @@ using System.Collections.Generic; using System.IO; using System.IO.Compression; using System.Linq; -using System.Threading; using System.Threading.Tasks; using API.Entities.Enums; using API.Extensions; @@ -55,7 +54,7 @@ namespace API.Services.Tasks var files = maxRollingFiles > 0 ? 
_directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log") - : new string[] {"kavita.log"}; + : new[] {"kavita.log"}; return files; } @@ -66,7 +65,7 @@ namespace API.Services.Tasks var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value; _logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory); - if (!_directoryService.ExistOrCreate(backupDirectory)) + if (!DirectoryService.ExistOrCreate(backupDirectory)) { _logger.LogError("Could not write to {BackupDirectory}; aborting backup", backupDirectory); return; @@ -82,8 +81,8 @@ namespace API.Services.Tasks } var tempDirectory = Path.Join(_tempDirectory, dateString); - _directoryService.ExistOrCreate(tempDirectory); - _directoryService.ClearDirectory(tempDirectory); + DirectoryService.ExistOrCreate(tempDirectory); + DirectoryService.ClearDirectory(tempDirectory); _directoryService.CopyFilesToDirectory( _backupFiles.Select(file => Path.Join(Directory.GetCurrentDirectory(), file)).ToList(), tempDirectory); @@ -96,7 +95,7 @@ namespace API.Services.Tasks _logger.LogError(ex, "There was an issue when archiving library backup"); } - _directoryService.ClearAndDeleteDirectory(tempDirectory); + DirectoryService.ClearAndDeleteDirectory(tempDirectory); _logger.LogInformation("Database backup completed"); } diff --git a/API/Services/Tasks/CleanupService.cs b/API/Services/Tasks/CleanupService.cs index a98e33cbf..aaa7eba9b 100644 --- a/API/Services/Tasks/CleanupService.cs +++ b/API/Services/Tasks/CleanupService.cs @@ -11,14 +11,12 @@ namespace API.Services.Tasks public class CleanupService : ICleanupService { private readonly ICacheService _cacheService; - private readonly IDirectoryService _directoryService; private readonly ILogger _logger; private readonly IBackupService _backupService; - public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger logger, IBackupService backupService) + public CleanupService(ICacheService cacheService, ILogger logger, IBackupService backupService) { _cacheService = cacheService; - _directoryService = directoryService; _logger = logger; _backupService = backupService; } @@ -28,7 +26,7 @@ namespace API.Services.Tasks { _logger.LogInformation("Cleaning temp directory"); var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); - _directoryService.ClearDirectory(tempDirectory); + DirectoryService.ClearDirectory(tempDirectory); _logger.LogInformation("Cleaning cache directory"); _cacheService.Cleanup(); _logger.LogInformation("Cleaning old database backups"); diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 61842eac8..2c107a989 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -33,7 +33,7 @@ namespace API.Services.Tasks _metadataService = metadataService; } - //[DisableConcurrentExecution(timeoutInSeconds: 5)] + [DisableConcurrentExecution(timeoutInSeconds: 5)] [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public void ScanLibraries() { @@ -64,7 +64,7 @@ namespace API.Services.Tasks _scannedSeries = null; } - //[DisableConcurrentExecution(5)] + [DisableConcurrentExecution(5)] [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public void ScanLibrary(int libraryId, bool forceUpdate) { @@ -193,6 +193,7 @@ namespace API.Services.Tasks series.Pages = 
series.Volumes.Sum(v => v.Pages); _metadataService.UpdateMetadata(series, _forceUpdate); }); + foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now; } @@ -224,7 +225,7 @@ namespace API.Services.Tasks _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); UpdateChapters(volume, infos); volume.Pages = volume.Chapters.Sum(c => c.Pages); - _metadataService.UpdateMetadata(volume, _forceUpdate); + _metadataService.UpdateMetadata(volume, _forceUpdate); } @@ -284,7 +285,7 @@ namespace API.Services.Tasks AddOrUpdateFileForChapter(chapter, info); chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + ""; chapter.Range = info.Chapters; - chapter.Pages = chapter.Files.Sum(f => f.NumberOfPages); + chapter.Pages = chapter.Files.Sum(f => f.Pages); _metadataService.UpdateMetadata(chapter, _forceUpdate); } @@ -350,7 +351,7 @@ namespace API.Services.Tasks { FilePath = info.FullFilePath, Format = info.Format, - NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) + Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) }; } @@ -361,7 +362,7 @@ namespace API.Services.Tasks if (existingFile != null) { existingFile.Format = info.Format; - existingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); + existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); } else {