diff --git a/API.Tests/API.Tests.csproj b/API.Tests/API.Tests.csproj
index 95af52570..11597cb99 100644
--- a/API.Tests/API.Tests.csproj
+++ b/API.Tests/API.Tests.csproj
@@ -25,7 +25,7 @@
-
+
diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs
index 73463be5c..eeb7ae560 100644
--- a/API.Tests/Services/ScannerServiceTests.cs
+++ b/API.Tests/Services/ScannerServiceTests.cs
@@ -1,7 +1,115 @@
-namespace API.Tests.Services
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using API.Entities;
+using API.Interfaces;
+using API.Services;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Services
 {
-    public class ScannerService
+    public class ScannerServiceTests
     {
+        private readonly ScannerService _scannerService;
+        private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
+        private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
+        private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
+        //private readonly IDirectoryService _directoryService = Substitute.For<IDirectoryService>();
+        private Library _libraryMock;
+
+        public ScannerServiceTests()
+        {
+            _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService);
+            _libraryMock = new Library()
+            {
+                Id = 1,
+                Name = "Manga",
+                Folders = new List<FolderPath>()
+                {
+                    new FolderPath()
+                    {
+                        Id = 1,
+                        LastScanned = DateTime.Now,
+                        LibraryId = 1,
+                        Path = "E:/Manga"
+                    }
+                },
+                LastModified = DateTime.Now,
+                Series = new List<Series>()
+                {
+                    new Series()
+                    {
+                        Id = 0,
+                        Name = "Darker Than Black"
+                    }
+                }
+            };
+
+        }
+
+        [Fact]
+        public void ExistingOrDefault_Should_BeFromLibrary()
+        {
+            var allSeries = new List<Series>()
+            {
+                new Series() {Id = 2, Name = "Darker Than Black"},
+                new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
+                new Series() {Id = 4, Name = "Akame Ga Kill"},
+            };
+            Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black").Id);
+        }
+
+        [Fact]
+        public void ExistingOrDefault_Should_BeFromAllSeries()
+        {
+            var allSeries = new List<Series>()
+            {
+                new Series() {Id = 2, Name = "Darker Than Black"},
+                new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
+                new Series() {Id = 4, Name = "Akame Ga Kill"},
+            };
+            Assert.Equal(3, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black - Some Extension").Id);
+        }
+
+        [Fact]
+        public void ExistingOrDefault_Should_BeNull()
+        {
+            var allSeries = new List<Series>()
+            {
+                new Series() {Id = 2, Name = "Darker Than Black"},
+                new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
+                new Series() {Id = 4, Name = "Akame Ga Kill"},
+            };
+            Assert.Null(ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Non existing series"));
+        }
+
+        // [Fact]
+        // public void ScanLibrary_Should_Skip()
+        // {
+        //
+        Library lib = new Library()
+        {
+            Id = 1,
+            Name = "Darker Than Black",
+            Folders = new List<FolderPath>()
+            {
+                new FolderPath()
+                {
+                    Id = 1,
+                    LastScanned = DateTime.Now,
+                    LibraryId = 1,
+                    Path = "E:/Manga"
+                }
+            },
+            LastModified = DateTime.Now
+        };
+        //
+        // _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1).Returns(lib);
+        //
+        // _scannerService.ScanLibrary(1, false);
+        // }
     }
 }
\ No newline at end of file
diff --git a/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo.xml b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo.xml
new file mode 100644
index 000000000..03aa39370
--- /dev/null
+++ b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<ComicInfo xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <Volume>v01</Volume>
+  <Series>BTOOOM!</Series>
+  <Web>https://www.comixology.com/BTOOOM/digital-comic/450184</Web>
+  <Summary>By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?</Summary>
+  <Notes>Scraped metadata from Comixology [CMXDB450184]</Notes>
+  <Publisher>Yen Press</Publisher>
+  <Genre>Manga, Movies &amp; TV</Genre>
+  <PageCount>194</PageCount>
+  <LanguageISO>en</LanguageISO>
+
+</ComicInfo>
diff --git a/API/API.csproj b/API/API.csproj
index 8c3278449..008455438 100644
--- a/API/API.csproj
+++ b/API/API.csproj
@@ -11,6 +11,7 @@
+
diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs
index 3ecd6bf8a..b068ffa0b 100644
--- a/API/Controllers/LibraryController.cs
+++ b/API/Controllers/LibraryController.cs
@@ -145,6 +145,7 @@ namespace API.Controllers
         [HttpPost("scan")]
         public ActionResult Scan(int libraryId)
         {
+            // TODO: We shouldn't queue up a job if one is already in progress
             _taskScheduler.ScanLibrary(libraryId);
             return Ok();
         }
diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs
index 78a16f015..e5bb04fa5 100644
--- a/API/Controllers/SeriesController.cs
+++ b/API/Controllers/SeriesController.cs
@@ -70,14 +70,7 @@ namespace API.Controllers
         {
             return Ok(await _unitOfWork.VolumeRepository.GetChapterDtoAsync(chapterId));
         }
-        
-        [Authorize(Policy = "RequireAdminRole")]
-        [HttpPost("scan")]
-        public ActionResult Scan(int libraryId, int seriesId)
-        {
-            _taskScheduler.ScanSeries(libraryId, seriesId);
-            return Ok();
-        }
+        
         [HttpPost("update-rating")]
         public async Task<ActionResult> UpdateSeriesRating(UpdateSeriesRatingDto updateSeriesRatingDto)
diff --git a/API/Data/Migrations/20210207231256_SeriesNormalizedName.Designer.cs b/API/Data/Migrations/20210207231256_SeriesNormalizedName.Designer.cs
new file mode 100644
index 000000000..04c5c3d3d
--- /dev/null
+++ b/API/Data/Migrations/20210207231256_SeriesNormalizedName.Designer.cs
@@ -0,0 +1,721 @@
+// <auto-generated />
+using System;
+using API.Data;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+
+namespace API.Data.Migrations
+{
+    [DbContext(typeof(DataContext))]
+    [Migration("20210207231256_SeriesNormalizedName")]
+    partial class SeriesNormalizedName
+    {
+        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        {
+#pragma warning disable 612, 618
+            modelBuilder
+                .HasAnnotation("ProductVersion", "5.0.1");
+
+            modelBuilder.Entity("API.Entities.AppRole", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("INTEGER");
+
+                    b.Property<string>("ConcurrencyStamp")
+                        .IsConcurrencyToken()
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("Name")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.Property<string>("NormalizedName")
+                        .HasMaxLength(256)
+                        .HasColumnType("TEXT");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("NormalizedName")
+                        .IsUnique()
+                        .HasDatabaseName("RoleNameIndex");
+
+                    b.ToTable("AspNetRoles");
+                });
+
+            modelBuilder.Entity("API.Entities.AppUser", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
.HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("HideReadOnDetails") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + 
b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("NumberOfPages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + 
.HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + 
.WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210207231256_SeriesNormalizedName.cs b/API/Data/Migrations/20210207231256_SeriesNormalizedName.cs new file mode 100644 index 000000000..262583441 --- /dev/null +++ b/API/Data/Migrations/20210207231256_SeriesNormalizedName.cs @@ -0,0 +1,23 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class SeriesNormalizedName : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "NormalizedName", + table: "Series", + type: "TEXT", + nullable: true); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "NormalizedName", + table: "Series"); + } + } +} diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs b/API/Data/Migrations/DataContextModelSnapshot.cs index c5066809c..0042238ea 100644 --- a/API/Data/Migrations/DataContextModelSnapshot.cs +++ b/API/Data/Migrations/DataContextModelSnapshot.cs @@ -343,6 +343,9 @@ namespace API.Data.Migrations b.Property("Name") .HasColumnType("TEXT"); + b.Property("NormalizedName") + .HasColumnType("TEXT"); + b.Property("OriginalName") .HasColumnType("TEXT"); diff 
--git a/API/Entities/Series.cs b/API/Entities/Series.cs index f7e5f366e..04c38f75b 100644 --- a/API/Entities/Series.cs +++ b/API/Entities/Series.cs @@ -12,6 +12,10 @@ namespace API.Entities /// public string Name { get; set; } /// + /// Used internally for name matching. + /// + public string NormalizedName { get; set; } + /// /// The name used to sort the Series. By default, will be the same as Name. /// public string SortName { get; set; } diff --git a/API/Interfaces/IScannerService.cs b/API/Interfaces/IScannerService.cs index 87274e88b..10dd9303f 100644 --- a/API/Interfaces/IScannerService.cs +++ b/API/Interfaces/IScannerService.cs @@ -11,12 +11,5 @@ void ScanLibrary(int libraryId, bool forceUpdate); void ScanLibraries(); - - /// - /// Performs a forced scan of just a series folder. - /// - /// - /// - void ScanSeries(int libraryId, int seriesId); } } \ No newline at end of file diff --git a/API/Interfaces/ITaskScheduler.cs b/API/Interfaces/ITaskScheduler.cs index f7e13d7a6..1d4186e91 100644 --- a/API/Interfaces/ITaskScheduler.cs +++ b/API/Interfaces/ITaskScheduler.cs @@ -4,6 +4,5 @@ { void ScanLibrary(int libraryId, bool forceUpdate = false); void CleanupChapters(int[] chapterIds); - void ScanSeries(int libraryId, int seriesId); } } \ No newline at end of file diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 500605883..dcc2f313a 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -26,7 +26,7 @@ namespace API.Services public int GetNumberOfPagesFromArchive(string archivePath) { if (!IsValidArchive(archivePath)) return 0; - _logger.LogDebug($"Getting Page numbers from {archivePath}"); + //_logger.LogDebug($"Getting Page numbers from {archivePath}"); try { @@ -53,7 +53,7 @@ namespace API.Services try { if (!IsValidArchive(filepath)) return Array.Empty(); - _logger.LogDebug($"Extracting Cover image from {filepath}"); + //_logger.LogDebug($"Extracting Cover image from {filepath}"); using ZipArchive archive = ZipFile.OpenRead(filepath); if (!archive.HasFiles()) return Array.Empty(); diff --git a/API/Services/ScannerService.cs b/API/Services/ScannerService.cs index 985141250..48a8f9671 100644 --- a/API/Services/ScannerService.cs +++ b/API/Services/ScannerService.cs @@ -10,6 +10,7 @@ using API.Entities; using API.Entities.Enums; using API.Interfaces; using API.Parser; +using Hangfire; using Microsoft.Extensions.Logging; namespace API.Services @@ -20,6 +21,7 @@ namespace API.Services private readonly ILogger _logger; private readonly IArchiveService _archiveService; private ConcurrentDictionary> _scannedSeries; + private bool _forceUpdate; public ScannerService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService) { @@ -28,6 +30,7 @@ namespace API.Services _archiveService = archiveService; } + [DisableConcurrentExecution(timeoutInSeconds: 120)] public void ScanLibraries() { var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList(); @@ -37,9 +40,31 @@ namespace API.Services } } + private bool ShouldSkipFolderScan(FolderPath folder, ref int skippedFolders) + { + // NOTE: This solution isn't the best, but it has potential. We need to handle a few other cases so it works great. + return false; + + // if (/*_environment.IsProduction() && */!_forceUpdate && Directory.GetLastWriteTime(folder.Path) < folder.LastScanned) + // { + // _logger.LogDebug($"{folder.Path} hasn't been updated since last scan. 
Skipping."); + // skippedFolders += 1; + // return true; + // } + // + // return false; + } + + private void Cleanup() + { + _scannedSeries = null; + _forceUpdate = false; + } + + [DisableConcurrentExecution(timeoutInSeconds: 120)] public void ScanLibrary(int libraryId, bool forceUpdate) - { - + { + _forceUpdate = forceUpdate; var sw = Stopwatch.StartNew(); Library library; try @@ -60,14 +85,8 @@ namespace API.Services var skippedFolders = 0; foreach (var folderPath in library.Folders) { - // if (!forceUpdate && Directory.GetLastWriteTime(folderPath.Path) <= folderPath.LastScanned) - // { - // // NOTE: This solution isn't the best, but it has potential. We need to handle a few other cases so it works great. - // _logger.LogDebug($"{folderPath.Path} hasn't been updated since last scan. Skipping."); - // skippedFolders += 1; - // continue; - // } - + if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue; + try { totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) => { @@ -77,91 +96,110 @@ namespace API.Services } catch (FileNotFoundException exception) { - _logger.LogError(exception, "The file could not be found"); + _logger.LogError(exception, $"The file {f} could not be found"); } }); } catch (ArgumentException ex) { - _logger.LogError(ex, $"The directory '{folderPath}' does not exist"); + _logger.LogError(ex, $"The directory '{folderPath.Path}' does not exist"); } folderPath.LastScanned = DateTime.Now; } + var scanElapsedTime = sw.ElapsedMilliseconds; + _logger.LogInformation("Folders Scanned {0} files in {1} milliseconds", totalFiles, scanElapsedTime); + sw.Restart(); if (skippedFolders == library.Folders.Count) { _logger.LogInformation("All Folders were skipped due to no modifications to the directories."); _unitOfWork.LibraryRepository.Update(library); - _scannedSeries = null; _logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name); + Cleanup(); return; } + // Remove any series where there were no parsed infos var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0); var series = filtered.ToImmutableDictionary(v => v.Key, v => v.Value); - // Perform DB activities - var allSeries = UpsertSeries(libraryId, forceUpdate, series, library); - - // Remove series that are no longer on disk - RemoveSeriesNotOnDisk(allSeries, series, library); - - //foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now; + UpdateLibrary(libraryId, series, library); _unitOfWork.LibraryRepository.Update(library); if (Task.Run(() => _unitOfWork.Complete()).Result) { - _logger.LogInformation($"Scan completed on {library.Name}. Parsed {series.Keys.Count()} series."); + _logger.LogInformation($"Scan completed on {library.Name}. Parsed {series.Keys.Count()} series in {sw.ElapsedMilliseconds} ms."); } else { - _logger.LogError("There was a critical error that resulted in a failed scan. Please rescan."); + _logger.LogError("There was a critical error that resulted in a failed scan. 
Please check logs and rescan."); } - - _scannedSeries = null; - _logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name); + + _logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); + Cleanup(); } - private List UpsertSeries(int libraryId, bool forceUpdate, ImmutableDictionary> series, Library library) + private void UpdateLibrary(int libraryId, ImmutableDictionary> parsedSeries, Library library) { var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList(); - foreach (var seriesKey in series.Keys) + + _logger.LogInformation($"Updating Library {library.Name}"); + // Perform DB activities + UpsertSeries(library, parsedSeries, allSeries); + + // Remove series that are no longer on disk + RemoveSeriesNotOnDisk(allSeries, parsedSeries, library); + + foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now; + } + + private void UpsertSeries(Library library, ImmutableDictionary> parsedSeries, + IList allSeries) + { + // NOTE: This is a great point to break the parsing into threads and join back. Each thread can take X series. + foreach (var seriesKey in parsedSeries.Keys) { - var mangaSeries = allSeries.SingleOrDefault(s => s.Name == seriesKey) ?? new Series + var mangaSeries = ExistingOrDefault(library, allSeries, seriesKey) ?? new Series { Name = seriesKey, OriginalName = seriesKey, + NormalizedName = Parser.Parser.Normalize(seriesKey), SortName = seriesKey, Summary = "" }; + mangaSeries.NormalizedName = Parser.Parser.Normalize(seriesKey); + try { - mangaSeries = UpdateSeries(mangaSeries, series[seriesKey].ToArray(), forceUpdate); - _logger.LogInformation($"Created/Updated series {mangaSeries.Name} for {library.Name} library"); - library.Series ??= new List(); - library.Series.Add(mangaSeries); + UpdateSeries(ref mangaSeries, parsedSeries[seriesKey].ToArray()); + if (!library.Series.Any(s => s.NormalizedName == mangaSeries.NormalizedName)) + { + _logger.LogInformation($"Added series {mangaSeries.Name}"); + library.Series.Add(mangaSeries); + } + } catch (Exception ex) { _logger.LogError(ex, $"There was an error during scanning of library. {seriesKey} will be skipped."); } } - - return allSeries; } - private void RemoveSeriesNotOnDisk(List allSeries, ImmutableDictionary> series, Library library) + private void RemoveSeriesNotOnDisk(IEnumerable allSeries, ImmutableDictionary> series, Library library) { + _logger.LogInformation("Removing any series that are no longer on disk."); var count = 0; - foreach (var existingSeries in allSeries) + var foundSeries = series.Select(s => Parser.Parser.Normalize(s.Key)).ToList(); + var missingSeries = allSeries.Where(existingSeries => + !foundSeries.Contains(existingSeries.NormalizedName) || !series.ContainsKey(existingSeries.Name) || + !series.ContainsKey(existingSeries.OriginalName)); + foreach (var existingSeries in missingSeries) { - if (!series.ContainsKey(existingSeries.Name) || !series.ContainsKey(existingSeries.OriginalName)) - { - // Delete series, there is no file to backup any longer. - library.Series?.Remove(existingSeries); - count++; - } + // Delete series, there is no file to backup any longer. 
+ library.Series?.Remove(existingSeries); + count++; } _logger.LogInformation($"Removed {count} series that are no longer on disk"); } @@ -206,33 +244,33 @@ namespace API.Services TrackSeries(info); } - private Series UpdateSeries(Series series, ParserInfo[] infos, bool forceUpdate) + private void UpdateSeries(ref Series series, ParserInfo[] infos) { - var volumes = UpdateVolumesWithChapters(series, infos, forceUpdate); - series.Volumes = volumes; - series.Pages = volumes.Sum(v => v.Pages); - if (ShouldFindCoverImage(forceUpdate, series.CoverImage)) + _logger.LogInformation($"Updating entries for {series.Name}. {infos.Length} related files."); + + UpdateVolumes(series, infos); + series.Pages = series.Volumes.Sum(v => v.Pages); + + if (ShouldFindCoverImage(series.CoverImage)) { - var firstCover = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); - if (firstCover == null && volumes.Any()) + var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); + if (firstCover == null && series.Volumes.Any()) { - firstCover = volumes.FirstOrDefault(x => x.Number == 0); + firstCover = series.Volumes.FirstOrDefault(x => x.Number == 0); } series.CoverImage = firstCover?.CoverImage; } - if (string.IsNullOrEmpty(series.Summary) || forceUpdate) + + if (string.IsNullOrEmpty(series.Summary) || _forceUpdate) { series.Summary = ""; } - - return series; + _logger.LogDebug($"Created {series.Volumes.Count} volumes on {series.Name}"); } private MangaFile CreateMangaFile(ParserInfo info) { - _logger.LogDebug($"Creating File Entry for {info.FullFilePath}"); - return new MangaFile() { FilePath = info.FullFilePath, @@ -241,99 +279,138 @@ namespace API.Services }; } - private bool ShouldFindCoverImage(bool forceUpdate, byte[] coverImage) + private bool ShouldFindCoverImage(byte[] coverImage) { - return forceUpdate || coverImage == null || !coverImage.Any(); + return _forceUpdate || coverImage == null || !coverImage.Any(); } - /// - /// - /// - /// - /// - /// - /// - private ICollection UpdateChapters(Volume volume, IEnumerable infos, bool forceUpdate) - { - var chapters = new List(); + private void UpdateChapters(Volume volume, IEnumerable infos) // ICollection + { + volume.Chapters ??= new List(); foreach (var info in infos) { - volume.Chapters ??= new List(); - var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters) ?? - chapters.SingleOrDefault(v => v.Range == info.Chapters) ?? - new Chapter() - { - Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "", - Range = info.Chapters, - }; - - chapter.Files ??= new List(); - var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath); - if (existingFile != null) + try { - existingFile.Format = info.Format; - existingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); + var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters) ?? 
+ new Chapter() + { + Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "", + Range = info.Chapters, + }; + + AddOrUpdateFileForChapter(chapter, info); + chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + ""; + chapter.Range = info.Chapters; + + if (volume.Chapters.All(c => c.Range != info.Chapters)) + { + volume.Chapters.Add(chapter); + } } - else + catch (Exception ex) { - if (info.Format == MangaFormat.Archive) - { - chapter.Files.Add(CreateMangaFile(info)); - } - else - { - _logger.LogDebug($"Ignoring {info.Filename} as it is not an archive."); - } - + _logger.LogWarning(ex, $"There was an exception parsing {info.Series} - Volume {volume.Number}'s chapters. Skipping Chapter."); } - - chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + ""; - chapter.Range = info.Chapters; - - chapters.Add(chapter); } - foreach (var chapter in chapters) + foreach (var chapter in volume.Chapters) { chapter.Pages = chapter.Files.Sum(f => f.NumberOfPages); - if (ShouldFindCoverImage(forceUpdate, chapter.CoverImage)) + if (ShouldFindCoverImage(chapter.CoverImage)) { chapter.Files ??= new List(); var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); if (firstFile != null) chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); } } - - return chapters; + } + + private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info) + { + chapter.Files ??= new List(); + var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath); + if (existingFile != null) + { + existingFile.Format = info.Format; + existingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); + } + else + { + if (info.Format == MangaFormat.Archive) + { + chapter.Files.Add(CreateMangaFile(info)); + } + else + { + _logger.LogDebug($"Ignoring {info.Filename}. Non-archives are not supported yet."); + } + } + } + + public static Volume ExistingOrDefault(IList existingVolumes, ICollection volumes, string volumeName) + { + return volumes.SingleOrDefault(v => v.Name == volumeName) ?? existingVolumes.SingleOrDefault(v => v.Name == volumeName); + } + + public static Series ExistingOrDefault(Library library, IEnumerable allSeries, string seriesName) + { + var name = Parser.Parser.Normalize(seriesName); + library.Series ??= new List(); + return library.Series.SingleOrDefault(s => Parser.Parser.Normalize(s.Name) == name) ?? + allSeries.SingleOrDefault(s => Parser.Parser.Normalize(s.Name) == name); } - private ICollection UpdateVolumesWithChapters(Series series, ParserInfo[] infos, bool forceUpdate) + private void UpdateVolumes(Series series, ParserInfo[] infos) { - ICollection volumes = new List(); + series.Volumes ??= new List(); + _logger.LogDebug($"Updating Volumes for {series.Name}. {infos.Length} related files."); IList existingVolumes = _unitOfWork.SeriesRepository.GetVolumes(series.Id).ToList(); foreach (var info in infos) { - var volume = (existingVolumes.SingleOrDefault(v => v.Name == info.Volumes) ?? - volumes.SingleOrDefault(v => v.Name == info.Volumes)) ?? new Volume + try { - Name = info.Volumes, - Number = Parser.Parser.MinimumNumberFromRange(info.Volumes), - }; + var volume = ExistingOrDefault(existingVolumes, series.Volumes, info.Volumes) ?? 
new Volume + { + Name = info.Volumes, + Number = (int) Parser.Parser.MinimumNumberFromRange(info.Volumes), + IsSpecial = false, + Chapters = new List() + }; + + if (series.Volumes.Any(v => v.Name == volume.Name)) continue; + series.Volumes.Add(volume); + + } + catch (Exception ex) + { + _logger.LogError(ex, $"There was an exception when creating volume {info.Volumes}. Skipping volume."); + } + } + - - var chapters = UpdateChapters(volume, infos.Where(pi => pi.Volumes == volume.Name).ToArray(), forceUpdate); - volume.Chapters = chapters; - volume.Pages = chapters.Sum(c => c.Pages); - volumes.Add(volume); + foreach (var volume in series.Volumes) + { + try + { + var justVolumeInfos = infos.Where(pi => pi.Volumes == volume.Name).ToArray(); + UpdateChapters(volume, justVolumeInfos); + volume.Pages = volume.Chapters.Sum(c => c.Pages); + + _logger.LogDebug($"Created {volume.Chapters.Count} chapters on {series.Name} - Volume {volume.Name}"); + } catch (Exception ex) + { + _logger.LogError(ex, $"There was an exception when creating volume {volume.Name}. Skipping volume."); + } } - foreach (var volume in volumes) + + foreach (var volume in series.Volumes) { - if (ShouldFindCoverImage(forceUpdate, volume.CoverImage)) + if (ShouldFindCoverImage(volume.CoverImage)) { // TODO: Create a custom sorter for Chapters so it's consistent across the application var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault(); @@ -341,17 +418,6 @@ namespace API.Services if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); } } - - return volumes; } - - - - public void ScanSeries(int libraryId, int seriesId) - { - throw new NotImplementedException(); - } - - } } \ No newline at end of file diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index a5ad6f901..ace4c7889 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -12,7 +12,10 @@ namespace API.Services private readonly ICacheService _cacheService; private readonly ILogger _logger; private readonly IScannerService _scannerService; - public BackgroundJobServer Client => new BackgroundJobServer(); + public BackgroundJobServer Client => new BackgroundJobServer(new BackgroundJobServerOptions() + { + WorkerCount = 1 + }); public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, IUnitOfWork unitOfWork) { @@ -36,12 +39,6 @@ namespace API.Services } - public void ScanSeries(int libraryId, int seriesId) - { - _logger.LogInformation($"Enqueuing series scan for series: {seriesId}"); - BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId)); - } - public void ScanLibrary(int libraryId, bool forceUpdate = false) { _logger.LogInformation($"Enqueuing library scan for: {libraryId}");