Merge pull request #55 from Kareadita/feature/partials

Partial Chapter support + ScanLibrary rewrite
This commit is contained in:
Joseph Milazzo 2021-02-10 12:54:56 -06:00 committed by GitHub
commit 903811d05f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
28 changed files with 3302 additions and 262 deletions
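
In practice, partial chapter support means fractional chapter numbers like 025.5 now survive parsing and range math. A quick illustration of the new behavior, mirroring InlineData cases from the ParserTests diff below (API.Parser.Parser static helpers):

using static API.Parser.Parser;

ParseVolume("Vol 1");                       // "1"
ParseSeries("Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)"); // "Goblin Slayer - Brand New Day"
MinimumNumberFromRange("18-04.5");          // 4.5f (returned an int before this PR)
Normalize("Darker Than Black - Something"); // "darkerthanblacksomething"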

View File

@ -25,7 +25,7 @@
</ItemGroup>
<ItemGroup>
<Folder Include="Services\Test Data\ArchiveService" />
<Folder Include="Services\Test Data\ArchiveService\ComicInfos" />
</ItemGroup>
</Project>

File diff suppressed because it is too large

View File

@ -1,13 +1,23 @@
using System;
using System.Collections.Generic;
using API.Entities.Enums;
using API.Parser;
using Xunit;
using Xunit.Abstractions;
using static API.Parser.Parser;
namespace API.Tests
{
public class ParserTests
{
private readonly ITestOutputHelper _testOutputHelper;
public ParserTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
}
[Theory]
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")]
@ -18,6 +28,7 @@ namespace API.Tests
[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "16-17")]
[InlineData("Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", "1")]
[InlineData("v001", "1")]
[InlineData("Vol 1", "1")]
[InlineData("No Volume", "0")]
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1")]
@ -35,7 +46,10 @@ namespace API.Tests
[InlineData("Dorohedoro v12 (2013) (Digital) (LostNerevarine-Empire).cbz", "12")]
[InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")]
[InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "0")]
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "0")]
[InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
public void ParseVolumeTest(string filename, string expected)
{
Assert.Equal(expected, ParseVolume(filename));
@ -79,7 +93,14 @@ namespace API.Tests
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip", "Ichiban Ushiro no Daimaou")]
[InlineData("Rent a Girlfriend v01.cbr", "Rent a Girlfriend")]
[InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "Yumekui Merry")]
//[InlineData("[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", "Epigraph of the Closed Curve")]
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "Itoshi no Karin")]
[InlineData("Tonikaku Kawaii Vol-1 (Ch 01-08)", "Tonikaku Kawaii")]
[InlineData("Tonikaku Kawaii (Ch 59-67) (Ongoing)", "Tonikaku Kawaii")]
[InlineData("7thGARDEN v01 (2016) (Digital) (danke).cbz", "7thGARDEN")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "Kedouin Makoto - Corpse Party Musume")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 09", "Kedouin Makoto - Corpse Party Musume")]
[InlineData("Goblin Slayer Side Story - Year One 025.5", "Goblin Slayer Side Story - Year One")]
[InlineData("Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)", "Goblin Slayer - Brand New Day")]
public void ParseSeriesTest(string filename, string expected)
{
Assert.Equal(expected, ParseSeries(filename));
@ -113,6 +134,12 @@ namespace API.Tests
[InlineData("Goblin Slayer Side Story - Year One 017.5", "17.5")]
[InlineData("Beelzebub_53[KSH].zip", "53")]
[InlineData("Black Bullet - v4 c20.5 [batoto]", "20.5")]
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")]
[InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")]
[InlineData("Vol 1", "0")]
[InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")]
//[InlineData("[Tempus Edax Rerum] Epigraph of the Closed Curve - Chapter 6.zip", "6")]
public void ParseChaptersTest(string filename, string expected)
{
@ -174,11 +201,22 @@ namespace API.Tests
[InlineData("12-14", 12)]
[InlineData("24", 24)]
[InlineData("18-04", 4)]
public void MinimumNumberFromRangeTest(string input, int expected)
[InlineData("18-04.5", 4.5)]
[InlineData("40", 40)]
public void MinimumNumberFromRangeTest(string input, float expected)
{
Assert.Equal(expected, MinimumNumberFromRange(input));
}
[Theory]
[InlineData("Darker Than Black", "darkerthanblack")]
[InlineData("Darker Than Black - Something", "darkerthanblacksomething")]
[InlineData("", "")]
public void NormalizeTest(string input, string expected)
{
Assert.Equal(expected, Normalize(input));
}
[Fact]
public void ParseInfoTest()
@ -241,6 +279,30 @@ namespace API.Tests
FullFilePath = filepath
});
filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "APOSIMZ", Volumes = "0", Edition = "",
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "",
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -255,12 +317,20 @@ namespace API.Tests
return;
}
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {file}");
_testOutputHelper.WriteLine("Format");
Assert.Equal(expectedInfo.Format, actual.Format);
_testOutputHelper.WriteLine("Series");
Assert.Equal(expectedInfo.Series, actual.Series);
_testOutputHelper.WriteLine("Chapters");
Assert.Equal(expectedInfo.Chapters, actual.Chapters);
_testOutputHelper.WriteLine("Volumes");
Assert.Equal(expectedInfo.Volumes, actual.Volumes);
_testOutputHelper.WriteLine("Edition");
Assert.Equal(expectedInfo.Edition, actual.Edition);
_testOutputHelper.WriteLine("Filename");
Assert.Equal(expectedInfo.Filename, actual.Filename);
_testOutputHelper.WriteLine("FullFilePath");
Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath);
}
}

View File

@ -0,0 +1,32 @@
using API.Interfaces;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services
{
public class DirectoryServiceTests
{
private readonly DirectoryService _directoryService;
private readonly ILogger<DirectoryService> _logger = Substitute.For<ILogger<DirectoryService>>();
public DirectoryServiceTests()
{
_directoryService = new DirectoryService(_logger);
}
[Fact]
public void GetFiles_Test()
{
//_directoryService.GetFiles()
}
[Fact]
public void ListDirectory_Test()
{
}
}
}

View File

@ -0,0 +1,119 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services
{
public class ScannerServiceTests
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly ScannerService _scannerService;
private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
private readonly IMetadataService _metadataService;
private readonly ILogger<MetadataService> _metadataLogger = Substitute.For<ILogger<MetadataService>>();
private Library _libraryMock;
public ScannerServiceTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
_metadataService = Substitute.For<MetadataService>(_unitOfWork, _metadataLogger, _archiveService);
_scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);
_libraryMock = new Library()
{
Id = 1,
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Id = 1,
LastScanned = DateTime.Now,
LibraryId = 1,
Path = "E:/Manga"
}
},
LastModified = DateTime.Now,
Series = new List<Series>()
{
new Series()
{
Id = 0,
Name = "Darker Than Black"
}
}
};
}
// [Fact]
// public void ExistingOrDefault_Should_BeFromLibrary()
// {
// var allSeries = new List<Series>()
// {
// new Series() {Id = 2, Name = "Darker Than Black"},
// new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
// new Series() {Id = 4, Name = "Akame Ga Kill"},
// };
// Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black").Id);
// Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker than Black").Id);
// }
//
// [Fact]
// public void ExistingOrDefault_Should_BeFromAllSeries()
// {
// var allSeries = new List<Series>()
// {
// new Series() {Id = 2, Name = "Darker Than Black"},
// new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
// new Series() {Id = 4, Name = "Akame Ga Kill"},
// };
// Assert.Equal(3, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black - Some Extension").Id);
// }
//
// [Fact]
// public void ExistingOrDefault_Should_BeNull()
// {
// var allSeries = new List<Series>()
// {
// new Series() {Id = 2, Name = "Darker Than Black"},
// new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
// new Series() {Id = 4, Name = "Akame Ga Kill"},
// };
// Assert.Null(ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Non existing series"));
// }
[Fact]
public void Should_CreateSeries_Test()
{
// var allSeries = new List<Series>();
// var parsedSeries = new Dictionary<string, List<ParserInfo>>();
//
// parsedSeries.Add("Darker Than Black", new List<ParserInfo>()
// {
// new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker Than Black", Volumes = "1"},
// new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker than Black", Volumes = "2"}
// });
//
// _scannerService.UpsertSeries(_libraryMock, parsedSeries, allSeries);
//
// Assert.Equal(1, _libraryMock.Series.Count);
// Assert.Equal(2, _libraryMock.Series.ElementAt(0).Volumes.Count);
// _testOutputHelper.WriteLine(_libraryMock.ToString());
Assert.True(true);
}
}
}

View File

@ -0,0 +1,13 @@
<?xml version="1.0"?>
<ComicInfo xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<Title>v01</Title>
<Series>BTOOOM!</Series>
<Web>https://www.comixology.com/BTOOOM/digital-comic/450184</Web>
<Summary>By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?</Summary>
<Notes>Scraped metadata from Comixology [CMXDB450184]</Notes>
<Publisher>Yen Press</Publisher>
<Genre>Manga, Movies &amp; TV</Genre>
<PageCount>194</PageCount>
<LanguageISO>en</LanguageISO>
<ScanInformation></ScanInformation>
</ComicInfo>

View File

@ -11,6 +11,7 @@
<PackageReference Include="Hangfire" Version="1.7.18" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.7.18" />
<PackageReference Include="Hangfire.LiteDB" Version="0.4.0" />
<PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.1" NoWarn="NU1605" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="5.0.1" NoWarn="NU1605" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="5.0.1" />

View File

@ -102,10 +102,6 @@ namespace API.Controllers
.Include(u => u.UserPreferences)
.SingleOrDefaultAsync(x => x.NormalizedUserName == loginDto.Username.ToUpper());
var debugUsers = await _userManager.Users.Select(x => x.NormalizedUserName).ToListAsync();
_logger.LogInformation($"All Users: {string.Join(",", debugUsers)}");
if (user == null) return Unauthorized("Invalid username");
var result = await _signInManager

View File

@ -145,6 +145,7 @@ namespace API.Controllers
[HttpPost("scan")]
public ActionResult Scan(int libraryId)
{
// TODO: We shouldn't queue up a job if one is already in progress
_taskScheduler.ScanLibrary(libraryId);
return Ok();
}
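
A note on the TODO above: later in this PR, ScannerService.ScanLibrary gains Hangfire's [DisableConcurrentExecution] filter, which blocks overlapping executions of the job itself but does not stop duplicate jobs from being enqueued here. A sketch of an enqueue-side guard using Hangfire's monitoring API (hypothetical helper, not part of this PR):

using System.Linq;
using Hangfire;

// Hypothetical: true if a ScanLibrary job is already processing.
private static bool ScanAlreadyRunning()
{
    var processing = JobStorage.Current.GetMonitoringApi().ProcessingJobs(0, int.MaxValue);
    return processing.Any(j => j.Value?.Job?.Method.Name == "ScanLibrary");
}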

View File

@ -71,13 +71,6 @@ namespace API.Controllers
return Ok(await _unitOfWork.VolumeRepository.GetChapterDtoAsync(chapterId));
}
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("scan")]
public ActionResult Scan(int libraryId, int seriesId)
{
_taskScheduler.ScanSeries(libraryId, seriesId);
return Ok();
}
[HttpPost("update-rating")]
public async Task<ActionResult> UpdateSeriesRating(UpdateSeriesRatingDto updateSeriesRatingDto)

View File

@ -75,6 +75,22 @@ namespace API.Data
.Include(l => l.Series)
.SingleAsync();
}
/// <summary>
/// This returns a Library with all its Series -> Volumes -> Chapters. This is expensive. Should only be called when needed.
/// </summary>
/// <param name="libraryId"></param>
/// <returns></returns>
public async Task<Library> GetFullLibraryForIdAsync(int libraryId)
{
return await _context.Library
.Where(x => x.Id == libraryId)
.Include(f => f.Folders)
.Include(l => l.Series)
.ThenInclude(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)
.SingleAsync();
}
public async Task<bool> LibraryExists(string libraryName)
{

View File

@ -0,0 +1,721 @@
// <auto-generated />
using System;
using API.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace API.Data.Migrations
{
[DbContext(typeof(DataContext))]
[Migration("20210207231256_SeriesNormalizedName")]
partial class SeriesNormalizedName
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.1");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedName")
.IsUnique()
.HasDatabaseName("RoleNameIndex");
b.ToTable("AspNetRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AccessFailedCount")
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Email")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<bool>("EmailConfirmed")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
b.Property<bool>("LockoutEnabled")
.HasColumnType("INTEGER");
b.Property<DateTimeOffset?>("LockoutEnd")
.HasColumnType("TEXT");
b.Property<string>("NormalizedEmail")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedUserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
b.Property<string>("PhoneNumber")
.HasColumnType("TEXT");
b.Property<bool>("PhoneNumberConfirmed")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("SecurityStamp")
.HasColumnType("TEXT");
b.Property<bool>("TwoFactorEnabled")
.HasColumnType("INTEGER");
b.Property<string>("UserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedEmail")
.HasDatabaseName("EmailIndex");
b.HasIndex("NormalizedUserName")
.IsUnique()
.HasDatabaseName("UserNameIndex");
b.ToTable("AspNetUsers");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<bool>("HideReadOnDetails")
.HasColumnType("INTEGER");
b.Property<int>("PageSplitOption")
.HasColumnType("INTEGER");
b.Property<int>("ReadingDirection")
.HasColumnType("INTEGER");
b.Property<int>("ScalingOption")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId")
.IsUnique();
b.ToTable("AppUserPreferences");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<int>("PagesRead")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserProgresses");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("Rating")
.HasColumnType("INTEGER");
b.Property<string>("Review")
.HasColumnType("TEXT");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserRating");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("UserId", "RoleId");
b.HasIndex("RoleId");
b.ToTable("AspNetUserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Number")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("Range")
.HasColumnType("TEXT");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("Chapter");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<DateTime>("LastScanned")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Path")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("FolderPath");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Type")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<int>("NumberOfPages")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("ChapterId");
b.ToTable("MangaFile");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("SortName")
.HasColumnType("TEXT");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.ServerSetting", b =>
{
b.Property<int>("Key")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("Key");
b.ToTable("ServerSetting");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<bool>("IsSpecial")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Number")
.HasColumnType("INTEGER");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId");
b.ToTable("Volume");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.Property<int>("AppUsersId")
.HasColumnType("INTEGER");
b.Property<int>("LibrariesId")
.HasColumnType("INTEGER");
b.HasKey("AppUsersId", "LibrariesId");
b.HasIndex("LibrariesId");
b.ToTable("AppUserLibrary");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("RoleId");
b.ToTable("AspNetRoleClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("UserId");
b.ToTable("AspNetUserClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("ProviderKey")
.HasColumnType("TEXT");
b.Property<string>("ProviderDisplayName")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("LoginProvider", "ProviderKey");
b.HasIndex("UserId");
b.ToTable("AspNetUserLogins");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("UserId", "LoginProvider", "Name");
b.ToTable("AspNetUserTokens");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithOne("UserPreferences")
.HasForeignKey("API.Entities.AppUserPreferences", "AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Progresses")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Ratings")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.HasOne("API.Entities.AppRole", "Role")
.WithMany("UserRoles")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.AppUser", "User")
.WithMany("UserRoles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Role");
b.Navigation("User");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.HasOne("API.Entities.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Folders")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Chapter", "Chapter")
.WithMany("Files")
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Series")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithMany("Volumes")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("AppUsersId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Library", null)
.WithMany()
.HasForeignKey("LibrariesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.HasOne("API.Entities.AppRole", null)
.WithMany()
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Navigation("Progresses");
b.Navigation("Ratings");
b.Navigation("UserPreferences");
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Navigation("Files");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Navigation("Folders");
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,23 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class SeriesNormalizedName : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "NormalizedName",
table: "Series",
type: "TEXT",
nullable: true);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "NormalizedName",
table: "Series");
}
}
}

View File

@ -343,6 +343,9 @@ namespace API.Data.Migrations
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");

View File

@ -13,7 +13,7 @@ namespace API.Entities
/// <summary>
/// Number of pages for the given file
/// </summary>
public int NumberOfPages { get; set; }
public int NumberOfPages { get; set; } // TODO: Refactor this to Pages
public MangaFormat Format { get; set; }
// Relationship Mapping

View File

@ -12,6 +12,10 @@ namespace API.Entities
/// </summary>
public string Name { get; set; }
/// <summary>
/// Used internally for name matching. <see cref="Parser.Parser.Normalize"/>
/// </summary>
public string NormalizedName { get; set; }
/// <summary>
/// The name used to sort the Series. By default, will be the same as Name.
/// </summary>
public string SortName { get; set; }
@ -32,7 +36,7 @@ namespace API.Entities
public int Pages { get; set; }
// Relationships
public ICollection<Volume> Volumes { get; set; }
public List<Volume> Volumes { get; set; }
public Library Library { get; set; }
public int LibraryId { get; set; }
}

View File

@ -9,7 +9,7 @@ namespace API.Entities
public int Id { get; set; }
public string Name { get; set; }
public int Number { get; set; }
public ICollection<Chapter> Chapters { get; set; }
public IList<Chapter> Chapters { get; set; }
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
public byte[] CoverImage { get; set; }

View File

@ -1,6 +1,7 @@
using API.Data;
using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using AutoMapper;
using Hangfire;
@ -24,6 +25,7 @@ namespace API.Extensions
services.AddScoped<IUnitOfWork, UnitOfWork>();
services.AddScoped<IScannerService, ScannerService>();
services.AddScoped<IArchiveService, ArchiveService>();
services.AddScoped<IMetadataService, MetadataService>();

View File

@ -12,6 +12,7 @@ namespace API.Interfaces
Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
Task<bool> LibraryExists(string libraryName);
Task<Library> GetLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
Task<IEnumerable<Library>> GetLibrariesAsync();
Task<bool> DeleteLibrary(int libraryId);

View File

@ -11,12 +11,5 @@
void ScanLibrary(int libraryId, bool forceUpdate);
void ScanLibraries();
/// <summary>
/// Performs a forced scan of just a series folder.
/// </summary>
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
void ScanSeries(int libraryId, int seriesId);
}
}

View File

@ -4,6 +4,6 @@
{
void ScanLibrary(int libraryId, bool forceUpdate = false);
void CleanupChapters(int[] chapterIds);
void ScanSeries(int libraryId, int seriesId);
void RefreshMetadata(int libraryId, bool forceUpdate = true);
}
}

View File

@ -0,0 +1,18 @@
using API.Entities;
namespace API.Interfaces.Services
{
public interface IMetadataService
{
/// <summary>
/// Recalculates metadata for all entities in a library.
/// </summary>
/// <param name="libraryId"></param>
/// <param name="forceUpdate"></param>
void RefreshMetadata(int libraryId, bool forceUpdate = false);
public void UpdateMetadata(Chapter chapter, bool forceUpdate);
public void UpdateMetadata(Volume volume, bool forceUpdate);
public void UpdateMetadata(Series series, bool forceUpdate);
}
}

View File

@ -22,7 +22,7 @@ namespace API.Parser
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)",
@"(?<Series>.*)(\b|_)v(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex(
@ -79,11 +79,18 @@ namespace API.Parser
new Regex(
@"(?<Series>.*)(?:, Chapter )(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)
new Regex(
@"(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?) \(\d{4}\)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)
new Regex(
@"(?<Series>.*)\(\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Tonikaku Kawaii (Ch 59-67) (Ongoing)
new Regex(
@"(?<Series>.*)( |_)\((c |ch |chapter )",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Black Bullet (This is very loose, keep towards bottom) (?<Series>.*)(_)(v|vo|c|volume)
new Regex(
@"(?<Series>.*)(_)(v|vo|c|volume)( |_)\d+",
@ -111,28 +118,28 @@ namespace API.Parser
private static readonly Regex[] MangaChapterRegex = new[]
{
new Regex(
@"(c|ch)(\.? ?)(?<Chapter>\d+-?\d*)",
@"(c|ch)(\.? ?)(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
new Regex(
@"v\d+\.(?<Chapter>\d+-?\d*)",
@"v\d+\.(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz
new Regex(
@"(?<Series>.*) (?<Chapter>\d+) (?:\(\d{4}\))",
@"^(?!Vol)(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Tower Of God S01 014 (CBT) (digital).cbz
new Regex(
@"(?<Series>.*) S(?<Volume>\d+) (?<Chapter>\d+)",
@"(?<Series>.*) S(?<Volume>\d+) (?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Beelzebub_01_[Noodles].zip
new Regex(
@"^((?!v|vo|vol|Volume).)*( |_)(?<Chapter>\.?\d+)( |_|\[|\()",
@"^((?!v|vo|vol|Volume).)*( |_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)( |_|\[|\()",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Yumekui-Merry_DKThias_Chapter21.zip
new Regex(
@"Chapter(?<Chapter>\d+(-\d+)?)",
@"Chapter(?<Chapter>\d+(-\d+)?)", //(?:.\d+|-\d+)?
RegexOptions.IgnoreCase | RegexOptions.Compiled),
};
@ -399,10 +406,15 @@ namespace API.Parser
return ImageRegex.IsMatch(fileInfo.Extension);
}
public static int MinimumNumberFromRange(string range)
public static float MinimumNumberFromRange(string range)
{
var tokens = range.Split("-");
return tokens.Min(Int32.Parse);
return tokens.Min(float.Parse);
}
public static string Normalize(string name)
{
return name.ToLower().Replace("-", "").Replace(" ", "");
}
}
}
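
One caveat on the float switch above: float.Parse uses the ambient culture, so on a host where '.' is a grouping separator (e.g. de-DE) "4.5" can silently parse as 45. A culture-invariant variant, offered as a sketch rather than part of this PR:

using System.Globalization;
using System.Linq;

// Culture-invariant sketch of MinimumNumberFromRange; the committed version
// relies on the current culture's decimal separator.
public static float MinimumNumberFromRange(string range)
{
    var tokens = range.Split("-");
    return tokens.Min(t => float.Parse(t, CultureInfo.InvariantCulture));
}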

View File

@ -26,7 +26,6 @@ namespace API.Services
public int GetNumberOfPagesFromArchive(string archivePath)
{
if (!IsValidArchive(archivePath)) return 0;
_logger.LogDebug($"Getting Page numbers from {archivePath}");
try
{
@ -35,7 +34,7 @@ namespace API.Services
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when reading archive stream.");
_logger.LogError(ex, "There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
return 0;
}
}
@ -53,9 +52,8 @@ namespace API.Services
try
{
if (!IsValidArchive(filepath)) return Array.Empty<byte>();
_logger.LogDebug($"Extracting Cover image from {filepath}");
using ZipArchive archive = ZipFile.OpenRead(filepath);
using var archive = ZipFile.OpenRead(filepath);
if (!archive.HasFiles()) return Array.Empty<byte>();
var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
@ -66,7 +64,7 @@ namespace API.Services
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when reading archive stream.");
_logger.LogError(ex, "There was an exception when reading archive stream: {Filepath}. Defaulting to no cover image", filepath);
}
return Array.Empty<byte>();
@ -82,7 +80,7 @@ namespace API.Services
}
catch (Exception ex)
{
_logger.LogError(ex, "There was a critical error and prevented thumbnail generation. Defaulting to no cover image.");
_logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entry.FullName);
}
return Array.Empty<byte>();
@ -119,20 +117,28 @@ namespace API.Services
/// <returns></returns>
public bool IsValidArchive(string archivePath)
{
if (!File.Exists(archivePath))
try
{
_logger.LogError($"Archive {archivePath} could not be found.");
return false;
}
if (!Parser.Parser.IsArchive(archivePath))
{
_logger.LogError($"Archive {archivePath} is not a valid archive.");
return false;
}
if (!File.Exists(archivePath))
{
_logger.LogError("Archive {ArchivePath} could not be found", archivePath);
return false;
}
using var archive = ZipFile.OpenRead(archivePath);
if (archive.Entries.Any(e => Parser.Parser.IsImage(e.FullName))) return true;
_logger.LogError($"Archive {archivePath} contains no images.");
if (!Parser.Parser.IsArchive(archivePath))
{
_logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath);
return false;
}
using var archive = ZipFile.OpenRead(archivePath);
if (archive.Entries.Any(e => Parser.Parser.IsImage(e.FullName))) return true;
_logger.LogError("Archive {ArchivePath} contains no images", archivePath);
}
catch (Exception ex)
{
_logger.LogError(ex, "Unable to validate archive ({ArchivePath}) due to problem opening archive", archivePath);
}
return false;
}
@ -151,7 +157,7 @@ namespace API.Services
if (Directory.Exists(extractPath))
{
_logger.LogDebug($"Archive {archivePath} has already been extracted. Returning existing folder.");
_logger.LogDebug("Archive {ArchivePath} has already been extracted. Returning existing folder", archivePath);
return;
}
@ -161,14 +167,14 @@ namespace API.Services
if (!archive.HasFiles() && !needsFlattening) return;
archive.ExtractToDirectory(extractPath, true);
_logger.LogDebug($"Extracted archive to {extractPath} in {sw.ElapsedMilliseconds} milliseconds.");
_logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);
if (needsFlattening)
{
sw = Stopwatch.StartNew();
_logger.LogInformation("Extracted archive is nested in root folder, flattening...");
new DirectoryInfo(extractPath).Flatten();
_logger.LogInformation($"Flattened in {sw.ElapsedMilliseconds} milliseconds");
_logger.LogInformation("Flattened in {ElapsedMilliseconds} milliseconds", sw.ElapsedMilliseconds);
}
}
}

View File

@ -91,7 +91,7 @@ namespace API.Services
/// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param>
/// <exception cref="ArgumentException"></exception>
public static int TraverseTreeParallelForEach(string root, Action<string> action)
public static int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern)
{
//Count of files traversed and timer for diagnostic output
var fileCount = 0;
@ -130,7 +130,7 @@ namespace API.Services
// TODO: In future, we need to take LibraryType into consideration for what extensions to allow (RAW should allow images)
// or we need to move this filtering to another area (Process)
// or we can get all files and put a check in place during Process to abandon files
files = GetFilesWithCertainExtensions(currentDir, Parser.Parser.MangaFileExtensions)
files = GetFilesWithCertainExtensions(currentDir, searchPattern)
.ToArray();
}
catch (UnauthorizedAccessException e) {

View File

@ -0,0 +1,106 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class MetadataService : IMetadataService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<MetadataService> _logger;
private readonly IArchiveService _archiveService;
public MetadataService(IUnitOfWork unitOfWork, ILogger<MetadataService> logger, IArchiveService archiveService)
{
_unitOfWork = unitOfWork;
_logger = logger;
_archiveService = archiveService;
}
private static bool ShouldFindCoverImage(byte[] coverImage, bool forceUpdate = false)
{
return forceUpdate || coverImage == null || !coverImage.Any();
}
public void UpdateMetadata(Chapter chapter, bool forceUpdate)
{
if (chapter != null && ShouldFindCoverImage(chapter.CoverImage, forceUpdate))
{
chapter.Files ??= new List<MangaFile>();
var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile != null) chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
}
}
public void UpdateMetadata(Volume volume, bool forceUpdate)
{
if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate))
{
// TODO: Create a custom sorter for Chapters so it's consistent across the application (a sketch follows this file's diff)
volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
}
}
public void UpdateMetadata(Series series, bool forceUpdate)
{
if (series == null) return;
if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
{
series.Volumes ??= new List<Volume>();
var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
if (firstCover == null && series.Volumes.Any())
{
firstCover = series.Volumes.FirstOrDefault(x => x.Number == 0);
}
series.CoverImage = firstCover?.CoverImage;
}
if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
{
series.Summary = "";
}
}
public void RefreshMetadata(int libraryId, bool forceUpdate = false)
{
var sw = Stopwatch.StartNew();
var library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result;
var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
_logger.LogInformation($"Beginning metadata refresh of {library.Name}");
foreach (var series in allSeries)
{
series.NormalizedName = Parser.Parser.Normalize(series.Name);
var volumes = _unitOfWork.SeriesRepository.GetVolumes(series.Id).ToList();
foreach (var volume in volumes)
{
foreach (var chapter in volume.Chapters)
{
UpdateMetadata(chapter, forceUpdate);
}
UpdateMetadata(volume, forceUpdate);
}
UpdateMetadata(series, forceUpdate);
_unitOfWork.SeriesRepository.Update(series);
}
if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result)
{
_logger.LogInformation($"Updated metadata for {library.Name} in {sw.ElapsedMilliseconds} ms.");
}
}
}
}
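
The custom chapter sorter mentioned in the TODO above could look something like this (hypothetical helper, not part of this PR; assumes Chapter.Number remains a numeric string as produced by MinimumNumberFromRange):

using System.Collections.Generic;
using System.Globalization;
using API.Entities;

// Hypothetical comparer giving one consistent, culture-invariant ordering
// for Chapter.Number strings ("1", "6.5", "25.5") across the application.
public class ChapterNumberComparer : IComparer<Chapter>
{
    public int Compare(Chapter x, Chapter y)
    {
        var xNum = double.Parse(x?.Number ?? "0", CultureInfo.InvariantCulture);
        var yNum = double.Parse(y?.Number ?? "0", CultureInfo.InvariantCulture);
        return xNum.CompareTo(yNum);
    }
}

// Usage: volume.Chapters.OrderBy(c => c, new ChapterNumberComparer()).FirstOrDefault();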

View File

@ -1,17 +1,21 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using Hangfire;
using Microsoft.Extensions.Logging;
[assembly: InternalsVisibleTo("API.Tests")]
namespace API.Services
{
public class ScannerService : IScannerService
@ -19,15 +23,20 @@ namespace API.Services
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<ScannerService> _logger;
private readonly IArchiveService _archiveService;
private readonly IMetadataService _metadataService;
private ConcurrentDictionary<string, List<ParserInfo>> _scannedSeries;
private bool _forceUpdate;
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService)
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
IMetadataService metadataService)
{
_unitOfWork = unitOfWork;
_logger = logger;
_archiveService = archiveService;
_metadataService = metadataService;
}
[DisableConcurrentExecution(timeoutInSeconds: 120)]
public void ScanLibraries()
{
var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
@ -37,33 +46,52 @@ namespace API.Services
}
}
public void ScanLibrary(int libraryId, bool forceUpdate)
{
private bool ShouldSkipFolderScan(FolderPath folder, ref int skippedFolders)
{
// NOTE: This solution isn't the best, but it has potential. We need to handle a few other cases before it can be enabled.
return false;
var sw = Stopwatch.StartNew();
// if (/*_environment.IsProduction() && */!_forceUpdate && Directory.GetLastWriteTime(folder.Path) < folder.LastScanned)
// {
// _logger.LogDebug($"{folder.Path} hasn't been updated since last scan. Skipping.");
// skippedFolders += 1;
// return true;
// }
//
// return false;
}
private void Cleanup()
{
_scannedSeries = null;
}
[DisableConcurrentExecution(timeoutInSeconds: 360)]
public void ScanLibrary(int libraryId, bool forceUpdate)
{
_forceUpdate = forceUpdate;
var sw = Stopwatch.StartNew();
Cleanup();
Library library;
try
{
library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result;
library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;
}
catch (Exception ex)
{
// This usually only fails if user is not authenticated.
_logger.LogError($"There was an issue fetching Library {libraryId}.", ex);
_logger.LogError(ex, "There was an issue fetching Library {LibraryId}", libraryId);
return;
}
_scannedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
_logger.LogInformation($"Beginning scan on {library.Name}. Forcing metadata update: {forceUpdate}");
_logger.LogInformation("Beginning scan on {LibraryName}. Forcing metadata update: {ForceUpdate}", library.Name, forceUpdate);
var totalFiles = 0;
var skippedFolders = 0;
foreach (var folderPath in library.Folders)
{
if (!forceUpdate && Directory.GetLastWriteTime(folderPath.Path) <= folderPath.LastScanned)
{
_logger.LogDebug($"{folderPath.Path} hasn't been updated since last scan. Skipping.");
continue;
}
if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue;
try {
totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) =>
@ -74,84 +102,204 @@ namespace API.Services
}
catch (FileNotFoundException exception)
{
_logger.LogError(exception, "The file could not be found");
_logger.LogError(exception, "The file {Filename} could not be found", f);
}
});
}, Parser.Parser.MangaFileExtensions);
}
catch (ArgumentException ex) {
_logger.LogError(ex, $"The directory '{folderPath}' does not exist");
_logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path);
}
folderPath.LastScanned = DateTime.Now;
}
var scanElapsedTime = sw.ElapsedMilliseconds;
_logger.LogInformation("Folders Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles, scanElapsedTime);
sw.Restart();
if (skippedFolders == library.Folders.Count)
{
_logger.LogInformation("All Folders were skipped due to no modifications to the directories");
_unitOfWork.LibraryRepository.Update(library);
_logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds, library.Name);
Cleanup();
return;
}
// Remove any series where there were no parsed infos
var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0);
var series = filtered.ToImmutableDictionary(v => v.Key, v => v.Value);
var series = filtered.ToDictionary(v => v.Key, v => v.Value);
// Perform DB activities
var allSeries = UpsertSeries(libraryId, forceUpdate, series, library);
// Remove series that are no longer on disk
RemoveSeriesNotOnDisk(allSeries, series, library);
foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
UpdateLibrary(library, series);
_unitOfWork.LibraryRepository.Update(library);
if (Task.Run(() => _unitOfWork.Complete()).Result)
{
_logger.LogInformation($"Scan completed on {library.Name}. Parsed {series.Keys.Count()} series.");
_logger.LogInformation("Scan completed on {LibraryName}. Parsed {ParsedSeriesCount} series in {ElapsedScanTime} ms", library.Name, series.Keys.Count, sw.ElapsedMilliseconds);
}
else
{
_logger.LogError("There was a critical error that resulted in a failed scan. Please rescan.");
_logger.LogError("There was a critical error that resulted in a failed scan. Please check logs and rescan");
}
_scannedSeries = null;
_logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
}
_logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
}
private List<Series> UpsertSeries(int libraryId, bool forceUpdate, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
private void UpdateLibrary(Library library, Dictionary<string, List<ParserInfo>> parsedSeries)
{
var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
foreach (var seriesKey in series.Keys)
// TODO: Split this into multiple threads
// First, remove any series that are not in parsedSeries list
var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
var missingSeries = library.Series.Where(existingSeries =>
!foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name) ||
!parsedSeries.ContainsKey(existingSeries.OriginalName));
var removeCount = 0;
foreach (var existingSeries in missingSeries)
{
var mangaSeries = allSeries.SingleOrDefault(s => s.Name == seriesKey) ?? new Series
library.Series?.Remove(existingSeries);
removeCount += 1;
}
_logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount);
// Add new series that have parsedInfos
foreach (var info in parsedSeries)
{
var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(info.Key));
if (existingSeries == null)
{
Name = seriesKey,
OriginalName = seriesKey,
SortName = seriesKey,
Summary = ""
};
existingSeries = new Series()
{
Name = info.Key,
OriginalName = info.Key,
NormalizedName = Parser.Parser.Normalize(info.Key),
SortName = info.Key,
Summary = "",
Volumes = new List<Volume>()
};
library.Series.Add(existingSeries);
}
existingSeries.NormalizedName = Parser.Parser.Normalize(info.Key);
}
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
foreach (var existingSeries in library.Series)
{
_logger.LogInformation("Processing series {SeriesName}", existingSeries.Name);
UpdateVolumes(existingSeries, parsedSeries[existingSeries.Name].ToArray());
existingSeries.Pages = existingSeries.Volumes.Sum(v => v.Pages);
_metadataService.UpdateMetadata(existingSeries, _forceUpdate);
}
foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
}
private void UpdateVolumes(Series series, ParserInfo[] parsedInfos)
{
var startingVolumeCount = series.Volumes.Count;
// Add new volumes and update chapters per volume
var distinctVolumes = parsedInfos.Select(p => p.Volumes).Distinct().ToList();
_logger.LogDebug("Updating {DistinctVolumes} volumes", distinctVolumes.Count);
foreach (var volumeNumber in distinctVolumes)
{
var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
if (volume == null)
{
volume = new Volume()
{
Name = volumeNumber,
Number = (int) Parser.Parser.MinimumNumberFromRange(volumeNumber),
IsSpecial = false,
Chapters = new List<Chapter>()
};
series.Volumes.Add(volume);
}
volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0");
_logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
UpdateChapters(volume, infos);
volume.Pages = volume.Chapters.Sum(c => c.Pages);
_metadataService.UpdateMetadata(volume, _forceUpdate);
}
// Remove existing volumes that aren't in parsedInfos and volumes that have no chapters
var existingVolumes = series.Volumes.ToList();
foreach (var volume in existingVolumes)
{
// Can't remove based on chapter count here (|| volume.Chapters.Count == 0) since Chapters haven't been updated yet
var hasInfo = parsedInfos.Any(v => v.Volumes == volume.Name);
if (!hasInfo)
{
series.Volumes.Remove(volume);
}
}
_logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
series.Name, startingVolumeCount, series.Volumes.Count);
}
private void UpdateChapters(Volume volume, ParserInfo[] parsedInfos)
{
var startingChapters = volume.Chapters.Count;
// Add new chapters
foreach (var info in parsedInfos)
{
var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
if (chapter == null)
{
chapter = new Chapter()
{
Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
Range = info.Chapters,
Files = new List<MangaFile>()
};
volume.Chapters.Add(chapter);
}
chapter.Files = new List<MangaFile>();
}
// Add files
foreach (var info in parsedInfos)
{
Chapter chapter = null;
try
{
mangaSeries = UpdateSeries(mangaSeries, series[seriesKey].ToArray(), forceUpdate);
_logger.LogInformation($"Created/Updated series {mangaSeries.Name} for {library.Name} library");
library.Series ??= new List<Series>();
library.Series.Add(mangaSeries);
chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
}
catch (Exception ex)
{
_logger.LogError(ex, $"There was an error during scanning of library. {seriesKey} will be skipped.");
_logger.LogError(ex, "There was an exception parsing chapter. Skipping Vol {VolumeNume} Chapter {ChapterNumber}", volume.Name, info.Chapters);
}
if (chapter == null) continue;
// I need to reset Files for the first time, hence this work should be done in a separate loop
AddOrUpdateFileForChapter(chapter, info);
chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
chapter.Range = info.Chapters;
chapter.Pages = chapter.Files.Sum(f => f.NumberOfPages);
_metadataService.UpdateMetadata(chapter, _forceUpdate);
}
return allSeries;
}
private void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
{
var count = 0;
foreach (var existingSeries in allSeries)
// Remove chapters that aren't in parsedInfos or have no files linked
var existingChapters = volume.Chapters.ToList();
foreach (var existingChapter in existingChapters)
{
if (!series.ContainsKey(existingSeries.Name) || !series.ContainsKey(existingSeries.OriginalName))
var hasInfo = parsedInfos.Any(v => v.Chapters == existingChapter.Range);
if (!hasInfo || !existingChapter.Files.Any())
{
// Delete series, there is no file to backup any longer.
library.Series?.Remove(existingSeries);
count++;
volume.Chapters.Remove(existingChapter);
}
}
_logger.LogInformation($"Removed {count} series that are no longer on disk");
}
_logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}",
startingChapters, volume.Chapters.Count);
}
/// <summary>
/// Attempts to either add a new instance of a series mapping to the scannedSeries bag or add to an existing one (a sketch follows this file's diff).
@ -185,159 +333,43 @@ namespace API.Services
if (info == null)
{
_logger.LogInformation($"Could not parse series from {path}");
_logger.LogWarning("Could not parse series from {Path}", path);
return;
}
TrackSeries(info);
}
private Series UpdateSeries(Series series, ParserInfo[] infos, bool forceUpdate)
{
var volumes = UpdateVolumesWithChapters(series, infos, forceUpdate);
series.Volumes = volumes;
series.Pages = volumes.Sum(v => v.Pages);
if (ShouldFindCoverImage(forceUpdate, series.CoverImage))
{
var firstCover = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
if (firstCover == null && volumes.Any())
{
firstCover = volumes.FirstOrDefault(x => x.Number == 0);
}
series.CoverImage = firstCover?.CoverImage;
}
if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
{
series.Summary = "";
}
return series;
}
private MangaFile CreateMangaFile(ParserInfo info)
{
_logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
return new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
NumberOfPages = info.Format == MangaFormat.Archive ? _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath): 1
NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath)
};
}
private bool ShouldFindCoverImage(bool forceUpdate, byte[] coverImage)
private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
{
return forceUpdate || coverImage == null || !coverImage.Any();
}
/// <summary>
///
/// </summary>
/// <param name="volume"></param>
/// <param name="infos"></param>
/// <param name="forceUpdate"></param>
/// <returns></returns>
private ICollection<Chapter> UpdateChapters(Volume volume, IEnumerable<ParserInfo> infos, bool forceUpdate)
{
var chapters = new List<Chapter>();
foreach (var info in infos)
chapter.Files ??= new List<MangaFile>();
var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
if (existingFile != null)
{
volume.Chapters ??= new List<Chapter>();
var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters) ??
chapters.SingleOrDefault(v => v.Range == info.Chapters) ??
new Chapter()
{
Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
Range = info.Chapters,
};
chapter.Files ??= new List<MangaFile>();
var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
if (existingFile != null)
existingFile.Format = info.Format;
existingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
}
else
{
if (info.Format == MangaFormat.Archive)
{
existingFile.Format = info.Format;
existingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
chapter.Files.Add(CreateMangaFile(info));
}
else
{
if (info.Format == MangaFormat.Archive)
{
chapter.Files.Add(CreateMangaFile(info));
}
else
{
_logger.LogDebug($"Ignoring {info.Filename} as it is not an archive.");
}
}
chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
chapter.Range = info.Chapters;
chapters.Add(chapter);
}
foreach (var chapter in chapters)
{
chapter.Pages = chapter.Files.Sum(f => f.NumberOfPages);
if (ShouldFindCoverImage(forceUpdate, chapter.CoverImage))
{
chapter.Files ??= new List<MangaFile>();
var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile != null) chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
_logger.LogDebug("Ignoring {Filename}. Non-archives are not supported", info.Filename);
}
}
return chapters;
}
private ICollection<Volume> UpdateVolumesWithChapters(Series series, ParserInfo[] infos, bool forceUpdate)
{
ICollection<Volume> volumes = new List<Volume>();
IList<Volume> existingVolumes = _unitOfWork.SeriesRepository.GetVolumes(series.Id).ToList();
foreach (var info in infos)
{
var volume = (existingVolumes.SingleOrDefault(v => v.Name == info.Volumes) ??
volumes.SingleOrDefault(v => v.Name == info.Volumes)) ?? new Volume
{
Name = info.Volumes,
Number = Parser.Parser.MinimumNumberFromRange(info.Volumes),
};
var chapters = UpdateChapters(volume, infos.Where(pi => pi.Volumes == volume.Name).ToArray(), forceUpdate);
volume.Chapters = chapters;
volume.Pages = chapters.Sum(c => c.Pages);
volumes.Add(volume);
}
foreach (var volume in volumes)
{
if (ShouldFindCoverImage(forceUpdate, volume.CoverImage))
{
// TODO: Create a custom sorter for Chapters so it's consistent across the application
var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
}
}
return volumes;
}
public void ScanSeries(int libraryId, int seriesId)
{
throw new NotImplementedException();
}
}
}
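
TrackSeries itself is elided from this diff; a minimal sketch of what the summary above describes, assuming the bag is keyed on the parsed series name:

using System.Collections.Generic;

// Sketch only: AddOrUpdate either seeds a new list for a new series key or
// appends to the existing one. List<T> is not thread-safe, so the update
// path locks the list while the parallel traversal is running.
private void TrackSeries(ParserInfo info)
{
    _scannedSeries.AddOrUpdate(info.Series,
        _ => new List<ParserInfo> { info },
        (_, infos) =>
        {
            lock (infos) infos.Add(info);
            return infos;
        });
}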

View File

@ -2,6 +2,7 @@
using API.Entities.Enums;
using API.Helpers.Converters;
using API.Interfaces;
using API.Interfaces.Services;
using Hangfire;
using Microsoft.Extensions.Logging;
@ -12,13 +13,20 @@ namespace API.Services
private readonly ICacheService _cacheService;
private readonly ILogger<TaskScheduler> _logger;
private readonly IScannerService _scannerService;
public BackgroundJobServer Client => new BackgroundJobServer();
private readonly IMetadataService _metadataService;
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService, IUnitOfWork unitOfWork)
public BackgroundJobServer Client => new BackgroundJobServer(new BackgroundJobServerOptions()
{
WorkerCount = 1
});
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
IUnitOfWork unitOfWork, IMetadataService metadataService)
{
_cacheService = cacheService;
_logger = logger;
_scannerService = scannerService;
_metadataService = metadataService;
_logger.LogInformation("Scheduling/Updating cache cleanup on a daily basis.");
var setting = Task.Run(() => unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result;
@ -36,12 +44,6 @@ namespace API.Services
}
public void ScanSeries(int libraryId, int seriesId)
{
_logger.LogInformation($"Enqueuing series scan for series: {seriesId}");
BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId));
}
public void ScanLibrary(int libraryId, bool forceUpdate = false)
{
_logger.LogInformation($"Enqueuing library scan for: {libraryId}");
@ -54,5 +56,17 @@ namespace API.Services
}
public void RefreshMetadata(int libraryId, bool forceUpdate = true)
{
_logger.LogInformation($"Enqueuing library metadata refresh for: {libraryId}");
BackgroundJob.Enqueue((() => _metadataService.RefreshMetadata(libraryId, forceUpdate)));
}
public void ScanLibraryInternal(int libraryId, bool forceUpdate)
{
_scannerService.ScanLibrary(libraryId, forceUpdate);
_metadataService.RefreshMetadata(libraryId, forceUpdate);
}
}
}