A hefty refactor of the ScanLibrary code. There was significant fallout due to duplicate entities being created and SingleOrDefault calls failing.

This commit is contained in:
Joseph Milazzo 2021-02-08 12:03:52 -06:00
parent 39fa750d96
commit 9461b89725
15 changed files with 1075 additions and 153 deletions

View File

@ -25,7 +25,7 @@
</ItemGroup>
<ItemGroup>
<Folder Include="Services\Test Data\ArchiveService" />
<Folder Include="Services\Test Data\ArchiveService\ComicInfos" />
</ItemGroup>
</Project>

View File

@ -1,7 +1,115 @@
namespace API.Tests.Services
using System;
using System.Collections.Generic;
using System.Linq;
using API.Entities;
using API.Interfaces;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services
{
public class ScannerService
public class ScannerServiceTests
{
private readonly ScannerService _scannerService;
private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
//private readonly IDirectoryService _directoryService = Substitute.For<DirectoryService>();
private Library _libraryMock;
public ScannerServiceTests()
{
_scannerService = new ScannerService(_unitOfWork, _logger, _archiveService);
_libraryMock = new Library()
{
Id = 1,
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Id = 1,
LastScanned = DateTime.Now,
LibraryId = 1,
Path = "E:/Manga"
}
},
LastModified = DateTime.Now,
Series = new List<Series>()
{
new Series()
{
Id = 0,
Name = "Darker Than Black"
}
}
};
}
[Fact]
public void ExistingOrDefault_Should_BeFromLibrary()
{
var allSeries = new List<Series>()
{
new Series() {Id = 2, Name = "Darker Than Black"},
new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
new Series() {Id = 4, Name = "Akame Ga Kill"},
};
Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black").Id);
}
[Fact]
public void ExistingOrDefault_Should_BeFromAllSeries()
{
var allSeries = new List<Series>()
{
new Series() {Id = 2, Name = "Darker Than Black"},
new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
new Series() {Id = 4, Name = "Akame Ga Kill"},
};
Assert.Equal(3, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black - Some Extension").Id);
}
[Fact]
public void ExistingOrDefault_Should_BeNull()
{
var allSeries = new List<Series>()
{
new Series() {Id = 2, Name = "Darker Than Black"},
new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
new Series() {Id = 4, Name = "Akame Ga Kill"},
};
Assert.Null(ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Non existing series"));
}
// [Fact]
// public void ScanLibrary_Should_Skip()
// {
//
Library lib = new Library()
{
Id = 1,
Name = "Darker Than Black",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Id = 1,
LastScanned = DateTime.Now,
LibraryId = 1,
Path = "E:/Manga"
}
},
LastModified = DateTime.Now
};
//
// _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1).Returns(lib);
//
// _scannerService.ScanLibrary(1, false);
// }
}
}

View File

@ -0,0 +1,13 @@
<?xml version="1.0"?>
<ComicInfo xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<Title>v01</Title>
<Series>BTOOOM!</Series>
<Web>https://www.comixology.com/BTOOOM/digital-comic/450184</Web>
<Summary>By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?</Summary>
<Notes>Scraped metadata from Comixology [CMXDB450184]</Notes>
<Publisher>Yen Press</Publisher>
<Genre>Manga, Movies &amp; TV</Genre>
<PageCount>194</PageCount>
<LanguageISO>en</LanguageISO>
<ScanInformation></ScanInformation>
</ComicInfo>

View File

@ -11,6 +11,7 @@
<PackageReference Include="Hangfire" Version="1.7.18" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.7.18" />
<PackageReference Include="Hangfire.LiteDB" Version="0.4.0" />
<PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.1" NoWarn="NU1605" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="5.0.1" NoWarn="NU1605" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="5.0.1" />

View File

@ -145,6 +145,7 @@ namespace API.Controllers
[HttpPost("scan")]
public ActionResult Scan(int libraryId)
{
// TODO: We shouldn't queue up a job if one is already in progress
_taskScheduler.ScanLibrary(libraryId);
return Ok();
}

View File

@ -70,14 +70,7 @@ namespace API.Controllers
{
return Ok(await _unitOfWork.VolumeRepository.GetChapterDtoAsync(chapterId));
}
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("scan")]
public ActionResult Scan(int libraryId, int seriesId)
{
_taskScheduler.ScanSeries(libraryId, seriesId);
return Ok();
}
[HttpPost("update-rating")]
public async Task<ActionResult> UpdateSeriesRating(UpdateSeriesRatingDto updateSeriesRatingDto)

View File

@ -0,0 +1,721 @@
// <auto-generated />
using System;
using API.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace API.Data.Migrations
{
[DbContext(typeof(DataContext))]
[Migration("20210207231256_SeriesNormalizedName")]
partial class SeriesNormalizedName
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.1");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedName")
.IsUnique()
.HasDatabaseName("RoleNameIndex");
b.ToTable("AspNetRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AccessFailedCount")
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Email")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<bool>("EmailConfirmed")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
b.Property<bool>("LockoutEnabled")
.HasColumnType("INTEGER");
b.Property<DateTimeOffset?>("LockoutEnd")
.HasColumnType("TEXT");
b.Property<string>("NormalizedEmail")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedUserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
b.Property<string>("PhoneNumber")
.HasColumnType("TEXT");
b.Property<bool>("PhoneNumberConfirmed")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("SecurityStamp")
.HasColumnType("TEXT");
b.Property<bool>("TwoFactorEnabled")
.HasColumnType("INTEGER");
b.Property<string>("UserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedEmail")
.HasDatabaseName("EmailIndex");
b.HasIndex("NormalizedUserName")
.IsUnique()
.HasDatabaseName("UserNameIndex");
b.ToTable("AspNetUsers");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<bool>("HideReadOnDetails")
.HasColumnType("INTEGER");
b.Property<int>("PageSplitOption")
.HasColumnType("INTEGER");
b.Property<int>("ReadingDirection")
.HasColumnType("INTEGER");
b.Property<int>("ScalingOption")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId")
.IsUnique();
b.ToTable("AppUserPreferences");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<int>("PagesRead")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserProgresses");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("Rating")
.HasColumnType("INTEGER");
b.Property<string>("Review")
.HasColumnType("TEXT");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserRating");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("UserId", "RoleId");
b.HasIndex("RoleId");
b.ToTable("AspNetUserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Number")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("Range")
.HasColumnType("TEXT");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("Chapter");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<DateTime>("LastScanned")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Path")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("FolderPath");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Type")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<int>("NumberOfPages")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("ChapterId");
b.ToTable("MangaFile");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("SortName")
.HasColumnType("TEXT");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.ServerSetting", b =>
{
b.Property<int>("Key")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("Key");
b.ToTable("ServerSetting");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<bool>("IsSpecial")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Number")
.HasColumnType("INTEGER");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId");
b.ToTable("Volume");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.Property<int>("AppUsersId")
.HasColumnType("INTEGER");
b.Property<int>("LibrariesId")
.HasColumnType("INTEGER");
b.HasKey("AppUsersId", "LibrariesId");
b.HasIndex("LibrariesId");
b.ToTable("AppUserLibrary");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("RoleId");
b.ToTable("AspNetRoleClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("UserId");
b.ToTable("AspNetUserClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("ProviderKey")
.HasColumnType("TEXT");
b.Property<string>("ProviderDisplayName")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("LoginProvider", "ProviderKey");
b.HasIndex("UserId");
b.ToTable("AspNetUserLogins");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("UserId", "LoginProvider", "Name");
b.ToTable("AspNetUserTokens");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithOne("UserPreferences")
.HasForeignKey("API.Entities.AppUserPreferences", "AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Progresses")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Ratings")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.HasOne("API.Entities.AppRole", "Role")
.WithMany("UserRoles")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.AppUser", "User")
.WithMany("UserRoles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Role");
b.Navigation("User");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.HasOne("API.Entities.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Folders")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Chapter", "Chapter")
.WithMany("Files")
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Series")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithMany("Volumes")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("AppUsersId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Library", null)
.WithMany()
.HasForeignKey("LibrariesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.HasOne("API.Entities.AppRole", null)
.WithMany()
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Navigation("Progresses");
b.Navigation("Ratings");
b.Navigation("UserPreferences");
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Navigation("Files");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Navigation("Folders");
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,23 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
/// <summary>
/// EF Core migration that adds the <c>NormalizedName</c> column to the <c>Series</c> table.
/// The column is used internally for name matching during library scans
/// (see <c>Series.NormalizedName</c> and <c>Parser.Parser.Normalize</c>).
/// </summary>
public partial class SeriesNormalizedName : Migration
{
// Applies the migration: adds the nullable TEXT column "NormalizedName" to "Series".
// Nullable because existing rows have no value until the next scan populates it.
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "NormalizedName",
table: "Series",
type: "TEXT",
nullable: true);
}
// Reverts the migration: drops the "NormalizedName" column from "Series".
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "NormalizedName",
table: "Series");
}
}
}

View File

@ -343,6 +343,9 @@ namespace API.Data.Migrations
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");

View File

@ -12,6 +12,10 @@ namespace API.Entities
/// </summary>
public string Name { get; set; }
/// <summary>
/// Used internally for name matching. <see cref="Parser.Parser.Normalize"/>
/// </summary>
public string NormalizedName { get; set; }
/// <summary>
/// The name used to sort the Series. By default, will be the same as Name.
/// </summary>
public string SortName { get; set; }

View File

@ -11,12 +11,5 @@
void ScanLibrary(int libraryId, bool forceUpdate);
void ScanLibraries();
/// <summary>
/// Performs a forced scan of just a series folder.
/// </summary>
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
void ScanSeries(int libraryId, int seriesId);
}
}

View File

@ -4,6 +4,5 @@
{
void ScanLibrary(int libraryId, bool forceUpdate = false);
void CleanupChapters(int[] chapterIds);
void ScanSeries(int libraryId, int seriesId);
}
}

View File

@ -26,7 +26,7 @@ namespace API.Services
public int GetNumberOfPagesFromArchive(string archivePath)
{
if (!IsValidArchive(archivePath)) return 0;
_logger.LogDebug($"Getting Page numbers from {archivePath}");
//_logger.LogDebug($"Getting Page numbers from {archivePath}");
try
{
@ -53,7 +53,7 @@ namespace API.Services
try
{
if (!IsValidArchive(filepath)) return Array.Empty<byte>();
_logger.LogDebug($"Extracting Cover image from {filepath}");
//_logger.LogDebug($"Extracting Cover image from {filepath}");
using ZipArchive archive = ZipFile.OpenRead(filepath);
if (!archive.HasFiles()) return Array.Empty<byte>();

View File

@ -10,6 +10,7 @@ using API.Entities;
using API.Entities.Enums;
using API.Interfaces;
using API.Parser;
using Hangfire;
using Microsoft.Extensions.Logging;
namespace API.Services
@ -20,6 +21,7 @@ namespace API.Services
private readonly ILogger<ScannerService> _logger;
private readonly IArchiveService _archiveService;
private ConcurrentDictionary<string, List<ParserInfo>> _scannedSeries;
private bool _forceUpdate;
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService)
{
@ -28,6 +30,7 @@ namespace API.Services
_archiveService = archiveService;
}
[DisableConcurrentExecution(timeoutInSeconds: 120)]
public void ScanLibraries()
{
var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
@ -37,9 +40,31 @@ namespace API.Services
}
}
private bool ShouldSkipFolderScan(FolderPath folder, ref int skippedFolders)
{
// NOTE: This solution isn't the best, but it has potential. We need to handle a few other cases so it works great.
return false;
// if (/*_environment.IsProduction() && */!_forceUpdate && Directory.GetLastWriteTime(folder.Path) < folder.LastScanned)
// {
// _logger.LogDebug($"{folder.Path} hasn't been updated since last scan. Skipping.");
// skippedFolders += 1;
// return true;
// }
//
// return false;
}
private void Cleanup()
{
_scannedSeries = null;
_forceUpdate = false;
}
[DisableConcurrentExecution(timeoutInSeconds: 120)]
public void ScanLibrary(int libraryId, bool forceUpdate)
{
{
_forceUpdate = forceUpdate;
var sw = Stopwatch.StartNew();
Library library;
try
@ -60,14 +85,8 @@ namespace API.Services
var skippedFolders = 0;
foreach (var folderPath in library.Folders)
{
// if (!forceUpdate && Directory.GetLastWriteTime(folderPath.Path) <= folderPath.LastScanned)
// {
// // NOTE: This solution isn't the best, but it has potential. We need to handle a few other cases so it works great.
// _logger.LogDebug($"{folderPath.Path} hasn't been updated since last scan. Skipping.");
// skippedFolders += 1;
// continue;
// }
if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue;
try {
totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) =>
{
@ -77,91 +96,110 @@ namespace API.Services
}
catch (FileNotFoundException exception)
{
_logger.LogError(exception, "The file could not be found");
_logger.LogError(exception, $"The file {f} could not be found");
}
});
}
catch (ArgumentException ex) {
_logger.LogError(ex, $"The directory '{folderPath}' does not exist");
_logger.LogError(ex, $"The directory '{folderPath.Path}' does not exist");
}
folderPath.LastScanned = DateTime.Now;
}
var scanElapsedTime = sw.ElapsedMilliseconds;
_logger.LogInformation("Folders Scanned {0} files in {1} milliseconds", totalFiles, scanElapsedTime);
sw.Restart();
if (skippedFolders == library.Folders.Count)
{
_logger.LogInformation("All Folders were skipped due to no modifications to the directories.");
_unitOfWork.LibraryRepository.Update(library);
_scannedSeries = null;
_logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
Cleanup();
return;
}
// Remove any series where there were no parsed infos
var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0);
var series = filtered.ToImmutableDictionary(v => v.Key, v => v.Value);
// Perform DB activities
var allSeries = UpsertSeries(libraryId, forceUpdate, series, library);
// Remove series that are no longer on disk
RemoveSeriesNotOnDisk(allSeries, series, library);
//foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
UpdateLibrary(libraryId, series, library);
_unitOfWork.LibraryRepository.Update(library);
if (Task.Run(() => _unitOfWork.Complete()).Result)
{
_logger.LogInformation($"Scan completed on {library.Name}. Parsed {series.Keys.Count()} series.");
_logger.LogInformation($"Scan completed on {library.Name}. Parsed {series.Keys.Count()} series in {sw.ElapsedMilliseconds} ms.");
}
else
{
_logger.LogError("There was a critical error that resulted in a failed scan. Please rescan.");
_logger.LogError("There was a critical error that resulted in a failed scan. Please check logs and rescan.");
}
_scannedSeries = null;
_logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
_logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
Cleanup();
}
private List<Series> UpsertSeries(int libraryId, bool forceUpdate, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
private void UpdateLibrary(int libraryId, ImmutableDictionary<string, List<ParserInfo>> parsedSeries, Library library)
{
var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
foreach (var seriesKey in series.Keys)
_logger.LogInformation($"Updating Library {library.Name}");
// Perform DB activities
UpsertSeries(library, parsedSeries, allSeries);
// Remove series that are no longer on disk
RemoveSeriesNotOnDisk(allSeries, parsedSeries, library);
foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
}
private void UpsertSeries(Library library, ImmutableDictionary<string, List<ParserInfo>> parsedSeries,
IList<Series> allSeries)
{
// NOTE: This is a great point to break the parsing into threads and join back. Each thread can take X series.
foreach (var seriesKey in parsedSeries.Keys)
{
var mangaSeries = allSeries.SingleOrDefault(s => s.Name == seriesKey) ?? new Series
var mangaSeries = ExistingOrDefault(library, allSeries, seriesKey) ?? new Series
{
Name = seriesKey,
OriginalName = seriesKey,
NormalizedName = Parser.Parser.Normalize(seriesKey),
SortName = seriesKey,
Summary = ""
};
mangaSeries.NormalizedName = Parser.Parser.Normalize(seriesKey);
try
{
mangaSeries = UpdateSeries(mangaSeries, series[seriesKey].ToArray(), forceUpdate);
_logger.LogInformation($"Created/Updated series {mangaSeries.Name} for {library.Name} library");
library.Series ??= new List<Series>();
library.Series.Add(mangaSeries);
UpdateSeries(ref mangaSeries, parsedSeries[seriesKey].ToArray());
if (!library.Series.Any(s => s.NormalizedName == mangaSeries.NormalizedName))
{
_logger.LogInformation($"Added series {mangaSeries.Name}");
library.Series.Add(mangaSeries);
}
}
catch (Exception ex)
{
_logger.LogError(ex, $"There was an error during scanning of library. {seriesKey} will be skipped.");
}
}
return allSeries;
}
private void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
private void RemoveSeriesNotOnDisk(IEnumerable<Series> allSeries, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
{
_logger.LogInformation("Removing any series that are no longer on disk.");
var count = 0;
foreach (var existingSeries in allSeries)
var foundSeries = series.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
var missingSeries = allSeries.Where(existingSeries =>
!foundSeries.Contains(existingSeries.NormalizedName) || !series.ContainsKey(existingSeries.Name) ||
!series.ContainsKey(existingSeries.OriginalName));
foreach (var existingSeries in missingSeries)
{
if (!series.ContainsKey(existingSeries.Name) || !series.ContainsKey(existingSeries.OriginalName))
{
// Delete series, there is no file to backup any longer.
library.Series?.Remove(existingSeries);
count++;
}
// Delete series, there is no file to backup any longer.
library.Series?.Remove(existingSeries);
count++;
}
_logger.LogInformation($"Removed {count} series that are no longer on disk");
}
@ -206,33 +244,33 @@ namespace API.Services
TrackSeries(info);
}
private Series UpdateSeries(Series series, ParserInfo[] infos, bool forceUpdate)
private void UpdateSeries(ref Series series, ParserInfo[] infos)
{
var volumes = UpdateVolumesWithChapters(series, infos, forceUpdate);
series.Volumes = volumes;
series.Pages = volumes.Sum(v => v.Pages);
if (ShouldFindCoverImage(forceUpdate, series.CoverImage))
_logger.LogInformation($"Updating entries for {series.Name}. {infos.Length} related files.");
UpdateVolumes(series, infos);
series.Pages = series.Volumes.Sum(v => v.Pages);
if (ShouldFindCoverImage(series.CoverImage))
{
var firstCover = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
if (firstCover == null && volumes.Any())
var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
if (firstCover == null && series.Volumes.Any())
{
firstCover = volumes.FirstOrDefault(x => x.Number == 0);
firstCover = series.Volumes.FirstOrDefault(x => x.Number == 0);
}
series.CoverImage = firstCover?.CoverImage;
}
if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
if (string.IsNullOrEmpty(series.Summary) || _forceUpdate)
{
series.Summary = "";
}
return series;
_logger.LogDebug($"Created {series.Volumes.Count} volumes on {series.Name}");
}
private MangaFile CreateMangaFile(ParserInfo info)
{
_logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
return new MangaFile()
{
FilePath = info.FullFilePath,
@ -241,99 +279,138 @@ namespace API.Services
};
}
private bool ShouldFindCoverImage(bool forceUpdate, byte[] coverImage)
private bool ShouldFindCoverImage(byte[] coverImage)
{
return forceUpdate || coverImage == null || !coverImage.Any();
return _forceUpdate || coverImage == null || !coverImage.Any();
}
/// <summary>
/// Creates or updates Chapter entities on <paramref name="volume"/> from the parsed file infos,
/// then recomputes each chapter's page count and, when needed, its cover image.
/// NOTE(review): this span is a diff rendering — pre-refactor lines (the ICollection-returning
/// overload and its local "chapters" accumulator) are interleaved with the post-refactor
/// void method that mutates volume.Chapters in place.
/// </summary>
/// <param name="volume">Volume whose Chapters collection is mutated in place.</param>
/// <param name="infos">Parsed file infos that belong to this volume.</param>
private ICollection<Chapter> UpdateChapters(Volume volume, IEnumerable<ParserInfo> infos, bool forceUpdate)
{
var chapters = new List<Chapter>();
private void UpdateChapters(Volume volume, IEnumerable<ParserInfo> infos) // ICollection<Chapter>
{
volume.Chapters ??= new List<Chapter>();
foreach (var info in infos)
{
volume.Chapters ??= new List<Chapter>();
// NOTE(review): the lines from here down to the try block are pre-refactor remnants.
var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters) ??
chapters.SingleOrDefault(v => v.Range == info.Chapters) ??
new Chapter()
{
Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
Range = info.Chapters,
};
chapter.Files ??= new List<MangaFile>();
var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
if (existingFile != null)
// Post-refactor body: find-or-create the chapter by Range, delegate file handling,
// and skip the whole chapter on any parse exception rather than aborting the scan.
try
{
existingFile.Format = info.Format;
existingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
// NOTE(review): SingleOrDefault throws InvalidOperationException when two chapters
// share a Range — the catch below turns that into a skipped chapter.
var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters) ??
new Chapter()
{
Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
Range = info.Chapters,
};
AddOrUpdateFileForChapter(chapter, info);
chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
chapter.Range = info.Chapters;
// Only attach the chapter if an equivalent Range is not already tracked,
// preventing duplicate chapter entities.
if (volume.Chapters.All(c => c.Range != info.Chapters))
{
volume.Chapters.Add(chapter);
}
}
else
catch (Exception ex)
{
// NOTE(review): pre-refactor archive/non-archive branching below was moved into
// AddOrUpdateFileForChapter by this commit.
if (info.Format == MangaFormat.Archive)
{
chapter.Files.Add(CreateMangaFile(info));
}
else
{
_logger.LogDebug($"Ignoring {info.Filename} as it is not an archive.");
}
_logger.LogWarning(ex, $"There was an exception parsing {info.Series} - Volume {volume.Number}'s chapters. Skipping Chapter.");
}
chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
chapter.Range = info.Chapters;
chapters.Add(chapter);
}
foreach (var chapter in chapters)
// Second pass: aggregate page counts and regenerate cover images where required.
foreach (var chapter in volume.Chapters)
{
chapter.Pages = chapter.Files.Sum(f => f.NumberOfPages);
if (ShouldFindCoverImage(forceUpdate, chapter.CoverImage))
if (ShouldFindCoverImage(chapter.CoverImage))
{
chapter.Files ??= new List<MangaFile>();
// Cover comes from the first file when ordered by Chapter number.
var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile != null) chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
}
}
return chapters;
}
/// <summary>
/// Ensures the chapter tracks a MangaFile for the parsed info: refreshes format and page
/// count when the file path is already known, otherwise adds a new entry for archives.
/// Non-archive formats are logged and skipped.
/// </summary>
/// <param name="chapter">Chapter whose Files collection is mutated in place.</param>
/// <param name="info">Parsed file info; matched against existing files by FullFilePath.</param>
private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
{
    // Guarantee a collection to search and mutate.
    chapter.Files ??= new List<MangaFile>();

    // NOTE(review): SingleOrDefault throws if two files share a FilePath — assumes paths are unique per chapter.
    var matchingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
    if (matchingFile != null)
    {
        // Known file: refresh its metadata from the archive on disk.
        matchingFile.Format = info.Format;
        matchingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
        return;
    }

    if (info.Format != MangaFormat.Archive)
    {
        _logger.LogDebug($"Ignoring {info.Filename}. Non-archives are not supported yet.");
        return;
    }

    chapter.Files.Add(CreateMangaFile(info));
}
/// <summary>
/// Finds a volume by name, checking the in-progress scan collection first and falling back
/// to the already-persisted volumes. Returns null when neither contains a match.
/// NOTE(review): SingleOrDefault throws when a collection holds duplicate names — assumes
/// volume names are unique within each collection.
/// </summary>
public static Volume ExistingOrDefault(IList<Volume> existingVolumes, ICollection<Volume> volumes, string volumeName)
{
    var fromScan = volumes.SingleOrDefault(v => v.Name == volumeName);
    if (fromScan != null)
    {
        return fromScan;
    }
    return existingVolumes.SingleOrDefault(v => v.Name == volumeName);
}
/// <summary>
/// Finds a series by normalized name, preferring the library's own series over the wider
/// allSeries collection. Initializes library.Series when it is null. Returns null when no
/// normalized-name match exists in either collection.
/// NOTE(review): SingleOrDefault throws when duplicate normalized names exist — assumes
/// names are unique after normalization.
/// </summary>
public static Series ExistingOrDefault(Library library, IEnumerable<Series> allSeries, string seriesName)
{
    var normalized = Parser.Parser.Normalize(seriesName);
    library.Series ??= new List<Series>();

    // Shared predicate so both lookups compare the same normalized form.
    bool Matches(Series s) => Parser.Parser.Normalize(s.Name) == normalized;

    var fromLibrary = library.Series.SingleOrDefault(Matches);
    return fromLibrary ?? allSeries.SingleOrDefault(Matches);
}
/// <summary>
/// Creates or updates Volume entities on the series from parsed file infos, then fills each
/// volume's chapters, page totals, and (when needed) cover image.
/// NOTE(review): this span is a diff rendering — the pre-refactor UpdateVolumesWithChapters
/// (which returned a new collection) is interleaved with the post-refactor UpdateVolumes
/// (which mutates series.Volumes in place), and a hunk header below elides some lines.
/// </summary>
private ICollection<Volume> UpdateVolumesWithChapters(Series series, ParserInfo[] infos, bool forceUpdate)
private void UpdateVolumes(Series series, ParserInfo[] infos)
{
ICollection<Volume> volumes = new List<Volume>();
series.Volumes ??= new List<Volume>();
_logger.LogDebug($"Updating Volumes for {series.Name}. {infos.Length} related files.");
IList<Volume> existingVolumes = _unitOfWork.SeriesRepository.GetVolumes(series.Id).ToList();
// Pass 1: find-or-create a Volume per parsed info; exceptions skip only that volume.
foreach (var info in infos)
{
// NOTE(review): pre-refactor inline lookup below was replaced by the
// ExistingOrDefault helper inside the try block.
var volume = (existingVolumes.SingleOrDefault(v => v.Name == info.Volumes) ??
volumes.SingleOrDefault(v => v.Name == info.Volumes)) ?? new Volume
try
{
Name = info.Volumes,
Number = Parser.Parser.MinimumNumberFromRange(info.Volumes),
};
var volume = ExistingOrDefault(existingVolumes, series.Volumes, info.Volumes) ?? new Volume
{
Name = info.Volumes,
Number = (int) Parser.Parser.MinimumNumberFromRange(info.Volumes),
IsSpecial = false,
Chapters = new List<Chapter>()
};
// Skip attach when a volume with this name is already tracked (prevents duplicates).
if (series.Volumes.Any(v => v.Name == volume.Name)) continue;
series.Volumes.Add(volume);
}
catch (Exception ex)
{
_logger.LogError(ex, $"There was an exception when creating volume {info.Volumes}. Skipping volume.");
}
}
var chapters = UpdateChapters(volume, infos.Where(pi => pi.Volumes == volume.Name).ToArray(), forceUpdate);
volume.Chapters = chapters;
volume.Pages = chapters.Sum(c => c.Pages);
volumes.Add(volume);
// Pass 2: populate chapters and page totals for every tracked volume.
foreach (var volume in series.Volumes)
{
try
{
var justVolumeInfos = infos.Where(pi => pi.Volumes == volume.Name).ToArray();
UpdateChapters(volume, justVolumeInfos);
volume.Pages = volume.Chapters.Sum(c => c.Pages);
_logger.LogDebug($"Created {volume.Chapters.Count} chapters on {series.Name} - Volume {volume.Name}");
} catch (Exception ex)
{
_logger.LogError(ex, $"There was an exception when creating volume {volume.Name}. Skipping volume.");
}
}
foreach (var volume in volumes)
// Pass 3: regenerate volume cover images where required.
foreach (var volume in series.Volumes)
{
if (ShouldFindCoverImage(forceUpdate, volume.CoverImage))
if (ShouldFindCoverImage(volume.CoverImage))
{
// TODO: Create a custom sorter for Chapters so it's consistent across the application
// NOTE(review): Double.Parse(x.Number) will throw on a non-numeric chapter Number — TODO confirm Number is always numeric here.
var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
@ -341,17 +418,6 @@ namespace API.Services
if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
}
}
return volumes;
}
/// <summary>
/// Scans a single series within a library. Not yet implemented — always throws
/// <see cref="NotImplementedException"/>.
/// </summary>
public void ScanSeries(int libraryId, int seriesId)
{
throw new NotImplementedException();
}
}
}

View File

@ -12,7 +12,10 @@ namespace API.Services
private readonly ICacheService _cacheService;
private readonly ILogger<TaskScheduler> _logger;
private readonly IScannerService _scannerService;
// NOTE(review): diff rendering — the first line is the pre-refactor property; the version
// below pins the Hangfire server to a single worker (this commit's duplicate-entity fix).
public BackgroundJobServer Client => new BackgroundJobServer();
// NOTE(review): an expression-bodied property constructs a NEW BackgroundJobServer on every
// access — confirm callers read Client exactly once, or consider caching the instance.
public BackgroundJobServer Client => new BackgroundJobServer(new BackgroundJobServerOptions()
{
WorkerCount = 1
});
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService, IUnitOfWork unitOfWork)
{
@ -36,12 +39,6 @@ namespace API.Services
}
/// <summary>
/// Enqueues a Hangfire background job that scans one series within the given library.
/// Fire-and-forget: returns immediately after the job is queued.
/// </summary>
public void ScanSeries(int libraryId, int seriesId)
{
    var message = $"Enqueuing series scan for series: {seriesId}";
    _logger.LogInformation(message);
    BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId));
}
public void ScanLibrary(int libraryId, bool forceUpdate = false)
{
_logger.LogInformation($"Enqueuing library scan for: {libraryId}");