Merge pull request #90 from Kareadita/feature/compress-fallback

Compression Fallback
This commit is contained in:
Joseph Milazzo 2021-03-23 15:43:13 -05:00 committed by GitHub
commit f77b0ec552
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
44 changed files with 1241 additions and 443 deletions

View File

@ -50,6 +50,8 @@ namespace API.Tests
[InlineData("VanDread-v01-c001[MD].zip", "1")] [InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")] [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
[InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")] [InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
[InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
[InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
public void ParseVolumeTest(string filename, string expected) public void ParseVolumeTest(string filename, string expected)
{ {
Assert.Equal(expected, ParseVolume(filename)); Assert.Equal(expected, ParseVolume(filename));
@ -178,9 +180,9 @@ namespace API.Tests
[Theory] [Theory]
[InlineData("test.cbz", true)] [InlineData("test.cbz", true)]
[InlineData("test.cbr", false)] [InlineData("test.cbr", true)]
[InlineData("test.zip", true)] [InlineData("test.zip", true)]
[InlineData("test.rar", false)] [InlineData("test.rar", true)]
[InlineData("test.rar.!qb", false)] [InlineData("test.rar.!qb", false)]
[InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)] [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)]
public void IsArchiveTest(string input, bool expected) public void IsArchiveTest(string input, bool expected)

View File

@ -1,20 +1,25 @@
using System.IO; using System.Diagnostics;
using System.IO;
using System.IO.Compression; using System.IO.Compression;
using API.Archive;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services; using API.Services;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using NSubstitute; using NSubstitute;
using Xunit; using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services namespace API.Tests.Services
{ {
public class ArchiveServiceTests public class ArchiveServiceTests
{ {
private readonly ITestOutputHelper _testOutputHelper;
private readonly IArchiveService _archiveService; private readonly IArchiveService _archiveService;
private readonly ILogger<ArchiveService> _logger = Substitute.For<ILogger<ArchiveService>>(); private readonly ILogger<ArchiveService> _logger = Substitute.For<ILogger<ArchiveService>>();
public ArchiveServiceTests() public ArchiveServiceTests(ITestOutputHelper testOutputHelper)
{ {
_testOutputHelper = testOutputHelper;
_archiveService = new ArchiveService(_logger); _archiveService = new ArchiveService(_logger);
} }
@ -33,8 +38,8 @@ namespace API.Tests.Services
[Theory] [Theory]
[InlineData("non existent file.zip", false)] [InlineData("non existent file.zip", false)]
[InlineData("wrong extension.rar", false)] [InlineData("winrar.rar", true)]
[InlineData("empty.zip", false)] [InlineData("empty.zip", true)]
[InlineData("flat file.zip", true)] [InlineData("flat file.zip", true)]
[InlineData("file in folder in folder.zip", true)] [InlineData("file in folder in folder.zip", true)]
[InlineData("file in folder.zip", true)] [InlineData("file in folder.zip", true)]
@ -47,7 +52,7 @@ namespace API.Tests.Services
[Theory] [Theory]
[InlineData("non existent file.zip", 0)] [InlineData("non existent file.zip", 0)]
[InlineData("wrong extension.rar", 0)] [InlineData("winrar.rar", 0)]
[InlineData("empty.zip", 0)] [InlineData("empty.zip", 0)]
[InlineData("flat file.zip", 1)] [InlineData("flat file.zip", 1)]
[InlineData("file in folder in folder.zip", 1)] [InlineData("file in folder in folder.zip", 1)]
@ -56,18 +61,89 @@ namespace API.Tests.Services
public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected) public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected)
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var sw = Stopwatch.StartNew();
Assert.Equal(expected, _archiveService.GetNumberOfPagesFromArchive(Path.Join(testDirectory, archivePath))); Assert.Equal(expected, _archiveService.GetNumberOfPagesFromArchive(Path.Join(testDirectory, archivePath)));
_testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
} }
[Theory]
[InlineData("non existent file.zip", ArchiveLibrary.NotSupported)]
[InlineData("winrar.rar", ArchiveLibrary.SharpCompress)]
[InlineData("empty.zip", ArchiveLibrary.Default)]
[InlineData("flat file.zip", ArchiveLibrary.Default)]
[InlineData("file in folder in folder.zip", ArchiveLibrary.Default)]
[InlineData("file in folder.zip", ArchiveLibrary.Default)]
[InlineData("file in folder_alt.zip", ArchiveLibrary.Default)]
public void CanOpenArchive(string archivePath, ArchiveLibrary expected)
{
var sw = Stopwatch.StartNew();
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
Assert.Equal(expected, _archiveService.CanOpen(Path.Join(testDirectory, archivePath)));
_testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
}
[Theory]
[InlineData("non existent file.zip", 0)]
[InlineData("winrar.rar", 0)]
[InlineData("empty.zip", 0)]
[InlineData("flat file.zip", 1)]
[InlineData("file in folder in folder.zip", 1)]
[InlineData("file in folder.zip", 1)]
[InlineData("file in folder_alt.zip", 1)]
public void CanExtractArchive(string archivePath, int expectedFileCount)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction");
DirectoryService.ClearAndDeleteDirectory(extractDirectory);
Stopwatch sw = Stopwatch.StartNew();
_archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory);
var di1 = new DirectoryInfo(extractDirectory);
Assert.Equal(expectedFileCount, di1.Exists ? di1.GetFiles().Length : 0);
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
DirectoryService.ClearAndDeleteDirectory(extractDirectory);
}
[Theory] [Theory]
[InlineData("v10.cbz", "v10.expected.jpg")] [InlineData("v10.cbz", "v10.expected.jpg")]
[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")] [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")]
[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")] [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")]
//[InlineData("png.zip", "png.PNG")]
public void GetCoverImageTest(string inputFile, string expectedOutputFile) public void GetCoverImageTest(string inputFile, string expectedOutputFile)
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"); var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages");
var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile));
Stopwatch sw = Stopwatch.StartNew();
Assert.Equal(expectedBytes, _archiveService.GetCoverImage(Path.Join(testDirectory, inputFile))); Assert.Equal(expectedBytes, _archiveService.GetCoverImage(Path.Join(testDirectory, inputFile)));
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
}
[Theory]
[InlineData("06_v01[DMM].zip")]
public void CanParseCoverImage(string inputFile)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
Assert.NotEmpty(_archiveService.GetCoverImage(Path.Join(testDirectory, inputFile)));
}
[Fact]
public void ShouldHaveComicInfo()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "file in folder.zip");
var summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?";
Assert.Equal(summaryInfo, _archiveService.GetSummaryInfo(archive));
} }
} }
} }

View File

@ -1,15 +1,4 @@
using System.Collections.Generic; namespace API.Tests.Services
using System.IO;
using API.Data;
using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services
{ {
public class CacheServiceTests public class CacheServiceTests
{ {
@ -70,7 +59,7 @@ namespace API.Tests.Services
// // Chapter = 0, // // Chapter = 0,
// // FilePath = archivePath, // // FilePath = archivePath,
// // Format = MangaFormat.Archive, // // Format = MangaFormat.Archive,
// // NumberOfPages = 1, // // Pages = 1,
// // } // // }
// // }, // // },
// // Name = "1", // // Name = "1",

Binary file not shown.

After

Width:  |  Height:  |  Size: 53 KiB

View File

@ -28,6 +28,7 @@
<PackageReference Include="NetVips" Version="1.2.4" /> <PackageReference Include="NetVips" Version="1.2.4" />
<PackageReference Include="NetVips.Native" Version="8.10.5.1" /> <PackageReference Include="NetVips.Native" Version="8.10.5.1" />
<PackageReference Include="NReco.Logging.File" Version="1.1.1" /> <PackageReference Include="NReco.Logging.File" Version="1.1.1" />
<PackageReference Include="SharpCompress" Version="0.28.1" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.16.0.25740"> <PackageReference Include="SonarAnalyzer.CSharp" Version="8.16.0.25740">
<PrivateAssets>all</PrivateAssets> <PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets> <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

View File

@ -0,0 +1,12 @@
namespace API.Archive
{
/// <summary>
/// Identifies which compression library should be used to open a given archive file,
/// or that the file is not a supported archive at all.
/// (Original comment said "opening this library"; "this archive" is meant.)
/// </summary>
public enum ArchiveLibrary
{
/// <summary>The file is not an archive Kavita can open (missing, wrong extension, or corrupt).</summary>
NotSupported = 0,
/// <summary>Open via the SharpCompress library — used as the fallback for formats such as .rar (see CanOpenArchive test: "winrar.rar" maps here).</summary>
SharpCompress = 1,
/// <summary>Open via the default zip handling — presumably System.IO.Compression.ZipArchive; confirm in ArchiveService.CanOpen. All .zip test cases map here.</summary>
Default = 2
}
}

View File

@ -10,7 +10,6 @@ using API.Extensions;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services; using API.Interfaces.Services;
using AutoMapper; using AutoMapper;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;

View File

@ -6,6 +6,7 @@ namespace API.Controllers
{ {
public class FallbackController : Controller public class FallbackController : Controller
{ {
// ReSharper disable once S4487
private readonly ITaskScheduler _taskScheduler; private readonly ITaskScheduler _taskScheduler;
public FallbackController(ITaskScheduler taskScheduler) public FallbackController(ITaskScheduler taskScheduler)

View File

@ -1,28 +1,16 @@
using System.IO; using System.Threading.Tasks;
using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace API.Controllers namespace API.Controllers
{ {
public class ImageController : BaseApiController public class ImageController : BaseApiController
{ {
private readonly IDirectoryService _directoryService;
private readonly ICacheService _cacheService;
private readonly ILogger<ImageController> _logger;
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
public ImageController(IDirectoryService directoryService, ICacheService cacheService, public ImageController(IUnitOfWork unitOfWork)
ILogger<ImageController> logger, IUnitOfWork unitOfWork)
{ {
_directoryService = directoryService;
_cacheService = cacheService;
_logger = logger;
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
} }

View File

@ -147,7 +147,6 @@ namespace API.Controllers
[HttpPost("scan")] [HttpPost("scan")]
public ActionResult Scan(int libraryId) public ActionResult Scan(int libraryId)
{ {
// TODO: We shouldn't queue up a job if one is already in progress
_taskScheduler.ScanLibrary(libraryId); _taskScheduler.ScanLibrary(libraryId);
return Ok(); return Ok();
} }

View File

@ -35,7 +35,7 @@ namespace API.Controllers
var chapter = await _cacheService.Ensure(chapterId); var chapter = await _cacheService.Ensure(chapterId);
if (chapter == null) return BadRequest("There was an issue finding image file for reading"); if (chapter == null) return BadRequest("There was an issue finding image file for reading");
var (path, mangaFile) = await _cacheService.GetCachedPagePath(chapter, page); var (path, _) = await _cacheService.GetCachedPagePath(chapter, page);
if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}"); if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}");
var content = await _directoryService.ReadFileAsync(path); var content = await _directoryService.ReadFileAsync(path);
@ -53,7 +53,7 @@ namespace API.Controllers
var chapter = await _cacheService.Ensure(chapterId); var chapter = await _cacheService.Ensure(chapterId);
if (chapter == null) return BadRequest("There was an issue finding image file for reading"); if (chapter == null) return BadRequest("There was an issue finding image file for reading");
var (path, mangaFile) = await _cacheService.GetCachedPagePath(chapter, 0); var (_, mangaFile) = await _cacheService.GetCachedPagePath(chapter, 0);
return Ok(mangaFile.FilePath); return Ok(mangaFile.FilePath);
} }

View File

@ -105,9 +105,9 @@ namespace API.Controllers
if (series == null) return BadRequest("Series does not exist"); if (series == null) return BadRequest("Series does not exist");
// TODO: check if new name isn't an existing series // TODO: Ensure we check against Library for Series Name change
var existingSeries = await _unitOfWork.SeriesRepository.GetSeriesByNameAsync(updateSeries.Name); // NOTE: This isnt checking library var existingSeries = await _unitOfWork.SeriesRepository.GetSeriesByNameAsync(updateSeries.Name);
if (existingSeries != null && existingSeries.Id != series.Id) if (existingSeries != null && existingSeries.Id != series.Id )
{ {
return BadRequest("A series already exists with this name. Name must be unique."); return BadRequest("A series already exists with this name. Name must be unique.");
} }
@ -115,8 +115,7 @@ namespace API.Controllers
series.LocalizedName = updateSeries.LocalizedName; series.LocalizedName = updateSeries.LocalizedName;
series.SortName = updateSeries.SortName; series.SortName = updateSeries.SortName;
series.Summary = updateSeries.Summary; series.Summary = updateSeries.Summary;
//series.CoverImage = updateSeries.CoverImage;
_unitOfWork.SeriesRepository.Update(series); _unitOfWork.SeriesRepository.Update(series);
if (await _unitOfWork.Complete()) if (await _unitOfWork.Complete())
@ -139,16 +138,5 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
return Ok(await _unitOfWork.SeriesRepository.GetInProgress(user.Id, libraryId, limit)); return Ok(await _unitOfWork.SeriesRepository.GetInProgress(user.Id, libraryId, limit));
} }
[HttpGet("continue-reading")]
public async Task<ActionResult<IEnumerable<SeriesDto>>> GetContinueReading(int libraryId = 0, int limit = 20)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
return Ok(await _unitOfWork.VolumeRepository.GetContinueReading(user.Id, libraryId, limit));
}
} }
} }

View File

@ -5,6 +5,7 @@ using System.Threading.Tasks;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Configuration;
@ -21,17 +22,15 @@ namespace API.Controllers
private readonly IConfiguration _config; private readonly IConfiguration _config;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly IBackupService _backupService; private readonly IBackupService _backupService;
private readonly ITaskScheduler _taskScheduler;
public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config, public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config,
IDirectoryService directoryService, IBackupService backupService, ITaskScheduler taskScheduler) IDirectoryService directoryService, IBackupService backupService)
{ {
_applicationLifetime = applicationLifetime; _applicationLifetime = applicationLifetime;
_logger = logger; _logger = logger;
_config = config; _config = config;
_directoryService = directoryService; _directoryService = directoryService;
_backupService = backupService; _backupService = backupService;
_taskScheduler = taskScheduler;
} }
[HttpPost("restart")] [HttpPost("restart")]
@ -52,7 +51,7 @@ namespace API.Controllers
var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
var tempLocation = Path.Join(tempDirectory, "logs_" + dateString); var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
_directoryService.ExistOrCreate(tempLocation); DirectoryService.ExistOrCreate(tempLocation);
if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
{ {
return BadRequest("Unable to copy files to temp directory for log download."); return BadRequest("Unable to copy files to temp directory for log download.");
@ -70,7 +69,7 @@ namespace API.Controllers
} }
var fileBytes = await _directoryService.ReadFileAsync(zipPath); var fileBytes = await _directoryService.ReadFileAsync(zipPath);
_directoryService.ClearAndDeleteDirectory(tempLocation); DirectoryService.ClearAndDeleteDirectory(tempLocation);
(new FileInfo(zipPath)).Delete(); (new FileInfo(zipPath)).Delete();
return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); return File(fileBytes, "application/zip", Path.GetFileName(zipPath));

View File

@ -10,7 +10,6 @@ using API.Helpers.Converters;
using API.Interfaces; using API.Interfaces;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore.Internal;
using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
@ -110,7 +109,7 @@ namespace API.Controllers
[HttpGet("log-levels")] [HttpGet("log-levels")]
public ActionResult<IEnumerable<string>> GetLogLevels() public ActionResult<IEnumerable<string>> GetLogLevels()
{ {
return Ok(new string[] {"Trace", "Debug", "Information", "Warning", "Critical", "None"}); return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical", "None"});
} }
} }
} }

View File

@ -1,4 +1,4 @@
namespace API.Data namespace API.DTOs
{ {
public class BookmarkDto public class BookmarkDto
{ {

View File

@ -5,7 +5,7 @@ namespace API.DTOs
public class MangaFileDto public class MangaFileDto
{ {
public string FilePath { get; init; } public string FilePath { get; init; }
public int NumberOfPages { get; init; } public int Pages { get; init; }
public MangaFormat Format { get; init; } public MangaFormat Format { get; init; }
} }

View File

@ -1,5 +1,4 @@
using System; using System;
using System.Collections;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using System.Linq; using System.Linq;

View File

@ -0,0 +1,733 @@
// <auto-generated />
using System;
using API.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace API.Data.Migrations
{
[DbContext(typeof(DataContext))]
[Migration("20210322212724_MangaFileToPages")]
partial class MangaFileToPages
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.1");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedName")
.IsUnique()
.HasDatabaseName("RoleNameIndex");
b.ToTable("AspNetRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AccessFailedCount")
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Email")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<bool>("EmailConfirmed")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
b.Property<bool>("LockoutEnabled")
.HasColumnType("INTEGER");
b.Property<DateTimeOffset?>("LockoutEnd")
.HasColumnType("TEXT");
b.Property<string>("NormalizedEmail")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedUserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
b.Property<string>("PhoneNumber")
.HasColumnType("TEXT");
b.Property<bool>("PhoneNumberConfirmed")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("SecurityStamp")
.HasColumnType("TEXT");
b.Property<bool>("TwoFactorEnabled")
.HasColumnType("INTEGER");
b.Property<string>("UserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedEmail")
.HasDatabaseName("EmailIndex");
b.HasIndex("NormalizedUserName")
.IsUnique()
.HasDatabaseName("UserNameIndex");
b.ToTable("AspNetUsers");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<bool>("HideReadOnDetails")
.HasColumnType("INTEGER");
b.Property<int>("PageSplitOption")
.HasColumnType("INTEGER");
b.Property<int>("ReadingDirection")
.HasColumnType("INTEGER");
b.Property<int>("ScalingOption")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId")
.IsUnique();
b.ToTable("AppUserPreferences");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("PagesRead")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserProgresses");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("Rating")
.HasColumnType("INTEGER");
b.Property<string>("Review")
.HasColumnType("TEXT");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserRating");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("UserId", "RoleId");
b.HasIndex("RoleId");
b.ToTable("AspNetUserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Number")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("Range")
.HasColumnType("TEXT");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("Chapter");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<DateTime>("LastScanned")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Path")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("FolderPath");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Type")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("ChapterId");
b.ToTable("MangaFile");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("LocalizedName")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("SortName")
.HasColumnType("TEXT");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId")
.IsUnique();
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.ServerSetting", b =>
{
b.Property<int>("Key")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("Key");
b.ToTable("ServerSetting");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<bool>("IsSpecial")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Number")
.HasColumnType("INTEGER");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId");
b.ToTable("Volume");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.Property<int>("AppUsersId")
.HasColumnType("INTEGER");
b.Property<int>("LibrariesId")
.HasColumnType("INTEGER");
b.HasKey("AppUsersId", "LibrariesId");
b.HasIndex("LibrariesId");
b.ToTable("AppUserLibrary");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("RoleId");
b.ToTable("AspNetRoleClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("UserId");
b.ToTable("AspNetUserClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("ProviderKey")
.HasColumnType("TEXT");
b.Property<string>("ProviderDisplayName")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("LoginProvider", "ProviderKey");
b.HasIndex("UserId");
b.ToTable("AspNetUserLogins");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("UserId", "LoginProvider", "Name");
b.ToTable("AspNetUserTokens");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithOne("UserPreferences")
.HasForeignKey("API.Entities.AppUserPreferences", "AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Progresses")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Ratings")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.HasOne("API.Entities.AppRole", "Role")
.WithMany("UserRoles")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.AppUser", "User")
.WithMany("UserRoles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Role");
b.Navigation("User");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.HasOne("API.Entities.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Folders")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Chapter", "Chapter")
.WithMany("Files")
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Series")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithMany("Volumes")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("AppUsersId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Library", null)
.WithMany()
.HasForeignKey("LibrariesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.HasOne("API.Entities.AppRole", null)
.WithMany()
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Navigation("Progresses");
b.Navigation("Ratings");
b.Navigation("UserPreferences");
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Navigation("Files");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Navigation("Folders");
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,23 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class MangaFileToPages : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.RenameColumn(
name: "NumberOfPages",
table: "MangaFile",
newName: "Pages");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.RenameColumn(
name: "Pages",
table: "MangaFile",
newName: "NumberOfPages");
}
}
}

View File

@ -318,7 +318,7 @@ namespace API.Data.Migrations
b.Property<int>("Format") b.Property<int>("Format")
.HasColumnType("INTEGER"); .HasColumnType("INTEGER");
b.Property<int>("NumberOfPages") b.Property<int>("Pages")
.HasColumnType("INTEGER"); .HasColumnType("INTEGER");
b.HasKey("Id"); b.HasKey("Id");

View File

@ -1,6 +1,4 @@
using System; using System.Collections.Generic;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
@ -12,7 +10,6 @@ using API.Interfaces;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Internal;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Data namespace API.Data
@ -207,8 +204,7 @@ namespace API.Data
.Include(s => s.Volumes) .Include(s => s.Volumes)
.ThenInclude(v => v.Chapters) .ThenInclude(v => v.Chapters)
.ToListAsync(); .ToListAsync();
// TODO: refactor this
IList<int> chapterIds = new List<int>(); IList<int> chapterIds = new List<int>();
foreach (var s in series) foreach (var s in series)
{ {
@ -306,7 +302,6 @@ namespace API.Data
/// <returns></returns> /// <returns></returns>
public async Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, int limit) public async Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, int limit)
{ {
// TODO: Idea: Put Total PagesRead and as return so that we can show a progress bar for full series read progress
var series = await _context.Series var series = await _context.Series
.Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new .Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new
{ {

View File

@ -26,7 +26,7 @@ namespace API.Data
public IUserRepository UserRepository => new UserRepository(_context, _userManager); public IUserRepository UserRepository => new UserRepository(_context, _userManager);
public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper); public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper);
public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper, _logger); public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper);
public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper); public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper);

View File

@ -1,15 +1,12 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Extensions;
using API.Interfaces; using API.Interfaces;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data namespace API.Data
{ {
@ -17,13 +14,11 @@ namespace API.Data
{ {
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
private readonly ILogger _logger;
public VolumeRepository(DataContext context, IMapper mapper, ILogger logger) public VolumeRepository(DataContext context, IMapper mapper)
{ {
_context = context; _context = context;
_mapper = mapper; _mapper = mapper;
_logger = logger;
} }
public void Update(Volume volume) public void Update(Volume volume)
@ -89,123 +84,5 @@ namespace API.Data
.AsNoTracking() .AsNoTracking()
.ToListAsync(); .ToListAsync();
} }
/// <summary>
/// Gets the first (ordered) volume/chapter in a series where the user has progress on it. Only completed volumes/chapters, next entity shouldn't
/// have any read progress on it.
/// </summary>
/// <param name="userId"></param>
/// <param name="libraryId"></param>
/// <param name="limit"></param>
/// <returns></returns>
public async Task<IEnumerable<InProgressChapterDto>> GetContinueReading(int userId, int libraryId, int limit)
{
/** TODO: Fix this SQL
* SELECT * FROM
(
SELECT * FROM Chapter C WHERE C.VolumeId IN (SELECT Id from Volume where SeriesId = 1912)
) C INNER JOIN AppUserProgresses AUP ON AUP.ChapterId = C.Id
INNER JOIN Series S ON AUP.SeriesId = S.Id
WHERE AUP.AppUserId = 1 AND AUP.PagesRead < C.Pages
*/
_logger.LogInformation("Get Continue Reading");
var volumeQuery = _context.Volume
.Join(_context.AppUserProgresses, v => v.Id, aup => aup.VolumeId, (volume, progress) => new
{
volume,
progress
})
.Where(arg => arg.volume.SeriesId == arg.progress.SeriesId && arg.progress.AppUserId == userId)
.AsNoTracking()
.Select(arg => new
{
VolumeId = arg.volume.Id,
VolumeNumber = arg.volume.Number
}); // I think doing a join on this would be better
var volumeIds = (await volumeQuery.ToListAsync()).Select(s => s.VolumeId);
var chapters2 = await _context.Chapter.Where(c => volumeIds.Contains(c.VolumeId))
.Join(_context.AppUserProgresses, chapter => chapter.Id, aup => aup.ChapterId, (chapter, progress) =>
new
{
chapter,
progress
})
.Join(_context.Series, arg => arg.progress.SeriesId, s => s.Id, (arg, series) => new
{
Chapter = arg.chapter,
Progress = arg.progress,
Series = series
})
.Where(o => o.Progress.AppUserId == userId && o.Progress.PagesRead < o.Series.Pages)
.Select(arg => new
{
Chapter = arg.Chapter,
Progress = arg.Progress,
SeriesId = arg.Series.Id,
SeriesName = arg.Series.Name,
LibraryId = arg.Series.LibraryId,
TotalPages = arg.Series.Pages
})
.OrderByDescending(d => d.Progress.LastModified)
.Take(limit)
.ToListAsync();
return chapters2
.OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
.DistinctBy(p => p.SeriesId)
.Select(arg => new InProgressChapterDto()
{
Id = arg.Chapter.Id,
Number = arg.Chapter.Number,
Range = arg.Chapter.Range,
SeriesId = arg.Progress.SeriesId,
SeriesName = arg.SeriesName,
LibraryId = arg.LibraryId,
Pages = arg.Chapter.Pages,
VolumeId = arg.Chapter.VolumeId
});
// var chapters = await _context.Chapter
// .Join(_context.AppUserProgresses, c => c.Id, p => p.ChapterId,
// (chapter, progress) =>
// new
// {
// Chapter = chapter,
// Progress = progress
// })
// .Join(_context.Series, arg => arg.Progress.SeriesId, series => series.Id, (arg, series) =>
// new
// {
// arg.Chapter,
// arg.Progress,
// Series = series,
// VolumeIds = _context.Volume.Where(v => v.SeriesId == series.Id).Select(s => s.Id).ToList()
// })
// .AsNoTracking()
// .Where(arg => arg.Progress.AppUserId == userId
// && arg.Progress.PagesRead < arg.Chapter.Pages
// && arg.VolumeIds.Contains(arg.Progress.VolumeId))
// .OrderByDescending(d => d.Progress.LastModified)
// .Take(limit)
// .ToListAsync();
// return chapters
// .OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
// .DistinctBy(p => p.Series.Id)
// .Select(arg => new InProgressChapterDto()
// {
// Id = arg.Chapter.Id,
// Number = arg.Chapter.Number,
// Range = arg.Chapter.Range,
// SeriesId = arg.Progress.SeriesId,
// SeriesName = arg.Series.Name,
// LibraryId = arg.Series.LibraryId,
// Pages = arg.Chapter.Pages,
// });
}
} }
} }

View File

@ -13,7 +13,7 @@ namespace API.Entities
/// <summary> /// <summary>
/// Number of pages for the given file /// Number of pages for the given file
/// </summary> /// </summary>
public int NumberOfPages { get; set; } // TODO: Refactor this to Pages public int Pages { get; set; }
public MangaFormat Format { get; set; } public MangaFormat Format { get; set; }
// Relationship Mapping // Relationship Mapping

View File

@ -32,10 +32,7 @@ namespace API.Extensions
services.AddDbContext<DataContext>(options => services.AddDbContext<DataContext>(options =>
{ {
options.UseSqlite(config.GetConnectionString("DefaultConnection"), builder => options.UseSqlite(config.GetConnectionString("DefaultConnection"));
{
//builder.UseQuerySplittingBehavior(QuerySplittingBehavior.SplitQuery);
});
}); });
services.AddLogging(loggingBuilder => services.AddLogging(loggingBuilder =>

View File

@ -1,73 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
namespace API.Extensions
{
public static class LeftJoinExtensions
{
public static IQueryable<TResult> LeftJoin<TOuter, TInner, TKey, TResult>(
this IQueryable<TOuter> outer,
IQueryable<TInner> inner,
Expression<Func<TOuter, TKey>> outerKeySelector,
Expression<Func<TInner, TKey>> innerKeySelector,
Expression<Func<TOuter, TInner, TResult>> resultSelector)
{
MethodInfo groupJoin = typeof (Queryable).GetMethods()
.Single(m => m.ToString() == "System.Linq.IQueryable`1[TResult] GroupJoin[TOuter,TInner,TKey,TResult](System.Linq.IQueryable`1[TOuter], System.Collections.Generic.IEnumerable`1[TInner], System.Linq.Expressions.Expression`1[System.Func`2[TOuter,TKey]], System.Linq.Expressions.Expression`1[System.Func`2[TInner,TKey]], System.Linq.Expressions.Expression`1[System.Func`3[TOuter,System.Collections.Generic.IEnumerable`1[TInner],TResult]])")
.MakeGenericMethod(typeof (TOuter), typeof (TInner), typeof (TKey), typeof (LeftJoinIntermediate<TOuter, TInner>));
MethodInfo selectMany = typeof (Queryable).GetMethods()
.Single(m => m.ToString() == "System.Linq.IQueryable`1[TResult] SelectMany[TSource,TCollection,TResult](System.Linq.IQueryable`1[TSource], System.Linq.Expressions.Expression`1[System.Func`2[TSource,System.Collections.Generic.IEnumerable`1[TCollection]]], System.Linq.Expressions.Expression`1[System.Func`3[TSource,TCollection,TResult]])")
.MakeGenericMethod(typeof (LeftJoinIntermediate<TOuter, TInner>), typeof (TInner), typeof (TResult));
var groupJoinResultSelector = (Expression<Func<TOuter, IEnumerable<TInner>, LeftJoinIntermediate<TOuter, TInner>>>)
((oneOuter, manyInners) => new LeftJoinIntermediate<TOuter, TInner> {OneOuter = oneOuter, ManyInners = manyInners});
MethodCallExpression exprGroupJoin = Expression.Call(groupJoin, outer.Expression, inner.Expression, outerKeySelector, innerKeySelector, groupJoinResultSelector);
var selectManyCollectionSelector = (Expression<Func<LeftJoinIntermediate<TOuter, TInner>, IEnumerable<TInner>>>)
(t => t.ManyInners.DefaultIfEmpty());
ParameterExpression paramUser = resultSelector.Parameters.First();
ParameterExpression paramNew = Expression.Parameter(typeof (LeftJoinIntermediate<TOuter, TInner>), "t");
MemberExpression propExpr = Expression.Property(paramNew, "OneOuter");
LambdaExpression selectManyResultSelector = Expression.Lambda(new Replacer(paramUser, propExpr).Visit(resultSelector.Body), paramNew, resultSelector.Parameters.Skip(1).First());
MethodCallExpression exprSelectMany = Expression.Call(selectMany, exprGroupJoin, selectManyCollectionSelector, selectManyResultSelector);
return outer.Provider.CreateQuery<TResult>(exprSelectMany);
}
private class LeftJoinIntermediate<TOuter, TInner>
{
public TOuter OneOuter { get; set; }
public IEnumerable<TInner> ManyInners { get; set; }
}
private class Replacer : ExpressionVisitor
{
private readonly ParameterExpression _oldParam;
private readonly Expression _replacement;
public Replacer(ParameterExpression oldParam, Expression replacement)
{
_oldParam = oldParam;
_replacement = replacement;
}
public override Expression Visit(Expression exp)
{
if (exp == _oldParam)
{
return _replacement;
}
return base.Visit(exp);
}
}
}
}

View File

@ -18,6 +18,7 @@ namespace API.Interfaces
/// </summary> /// </summary>
/// <param name="libraryId"></param> /// <param name="libraryId"></param>
/// <param name="userId"></param> /// <param name="userId"></param>
/// <param name="userParams"></param>
/// <returns></returns> /// <returns></returns>
Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams); Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams);

View File

@ -13,6 +13,5 @@ namespace API.Interfaces
Task<IList<MangaFile>> GetFilesForChapter(int chapterId); Task<IList<MangaFile>> GetFilesForChapter(int chapterId);
Task<IList<Chapter>> GetChaptersAsync(int volumeId); Task<IList<Chapter>> GetChaptersAsync(int volumeId);
Task<byte[]> GetChapterCoverImageAsync(int chapterId); Task<byte[]> GetChapterCoverImageAsync(int chapterId);
Task<IEnumerable<InProgressChapterDto>> GetContinueReading(int userId, int libraryId, int limit);
} }
} }

View File

@ -1,16 +1,16 @@
using System.IO.Compression; using System.IO.Compression;
using API.Entities; using API.Archive;
namespace API.Interfaces.Services namespace API.Interfaces.Services
{ {
public interface IArchiveService public interface IArchiveService
{ {
bool ArchiveNeedsFlattening(ZipArchive archive);
void ExtractArchive(string archivePath, string extractPath); void ExtractArchive(string archivePath, string extractPath);
int GetNumberOfPagesFromArchive(string archivePath); int GetNumberOfPagesFromArchive(string archivePath);
byte[] GetCoverImage(string filepath, bool createThumbnail = false); byte[] GetCoverImage(string archivePath, bool createThumbnail = false);
bool IsValidArchive(string archivePath); bool IsValidArchive(string archivePath);
string GetSummaryInfo(string archivePath); string GetSummaryInfo(string archivePath);
ArchiveLibrary CanOpen(string archivePath);
bool ArchiveNeedsFlattening(ZipArchive archive);
} }
} }

View File

@ -1,7 +1,6 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs;
namespace API.Interfaces.Services namespace API.Interfaces.Services
{ {
@ -20,27 +19,7 @@ namespace API.Interfaces.Services
/// <param name="searchPatternExpression"></param> /// <param name="searchPatternExpression"></param>
/// <returns></returns> /// <returns></returns>
string[] GetFilesWithExtension(string path, string searchPatternExpression = ""); string[] GetFilesWithExtension(string path, string searchPatternExpression = "");
/// <summary>
/// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases.
/// </summary>
/// <param name="directoryPath"></param>
/// <returns></returns>
bool ExistOrCreate(string directoryPath);
Task<byte[]> ReadFileAsync(string path); Task<byte[]> ReadFileAsync(string path);
/// <summary>
/// Deletes all files within the directory, then the directory itself.
/// </summary>
/// <param name="directoryPath"></param>
void ClearAndDeleteDirectory(string directoryPath);
/// <summary>
/// Deletes all files within the directory.
/// </summary>
/// <param name="directoryPath"></param>
/// <returns></returns>
void ClearDirectory(string directoryPath);
bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath); bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath);
bool Exists(string directory); bool Exists(string directory);

View File

@ -25,14 +25,13 @@ namespace API.Middleware
public async Task InvokeAsync(HttpContext context) public async Task InvokeAsync(HttpContext context)
{ {
// BUG: I think Hangfire timeouts are triggering the middleware to hijack an API call
try try
{ {
await _next(context); // downstream middlewares or http call await _next(context); // downstream middlewares or http call
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, ex.Message); _logger.LogError(ex, "There was an exception");
context.Response.ContentType = "application/json"; context.Response.ContentType = "application/json";
context.Response.StatusCode = (int) HttpStatusCode.InternalServerError; context.Response.StatusCode = (int) HttpStatusCode.InternalServerError;

View File

@ -8,8 +8,8 @@ namespace API.Parser
{ {
public static class Parser public static class Parser
{ {
public static readonly string MangaFileExtensions = @"\.cbz|\.zip"; // |\.rar|\.cbr public static readonly string MangaFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip";
public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg|\.gif"; public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg";
private static readonly string XmlRegexExtensions = @"\.xml"; private static readonly string XmlRegexExtensions = @"\.xml";
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex MangaFileRegex = new Regex(MangaFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex MangaFileRegex = new Regex(MangaFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
@ -26,6 +26,10 @@ namespace API.Parser
new Regex( new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+(-\d+)?)", @"(?<Series>.*)(\b|_)v(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled), RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Kodomo no Jikan vol. 10
new Regex(
@"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex( new Regex(
@"(vol\.? ?)(?<Volume>0*[1-9]+)", @"(vol\.? ?)(?<Volume>0*[1-9]+)",
@ -130,7 +134,7 @@ namespace API.Parser
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz
new Regex( new Regex(
@"^(?!Vol)(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?", @"^(?!Vol)(?<Series>.*) (?<!vol\. )(?<Chapter>\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?",
RegexOptions.IgnoreCase | RegexOptions.Compiled), RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Tower Of God S01 014 (CBT) (digital).cbz // Tower Of God S01 014 (CBT) (digital).cbz
new Regex( new Regex(
@ -399,20 +403,17 @@ namespace API.Parser
public static bool IsArchive(string filePath) public static bool IsArchive(string filePath)
{ {
var fileInfo = new FileInfo(filePath); return MangaFileRegex.IsMatch(Path.GetExtension(filePath));
return MangaFileRegex.IsMatch(fileInfo.Extension);
} }
public static bool IsImage(string filePath) public static bool IsImage(string filePath)
{ {
var fileInfo = new FileInfo(filePath); return ImageRegex.IsMatch(Path.GetExtension(filePath));
return ImageRegex.IsMatch(fileInfo.Extension);
} }
public static bool IsXml(string filePath) public static bool IsXml(string filePath)
{ {
var fileInfo = new FileInfo(filePath); return XmlRegex.IsMatch(Path.GetExtension(filePath));
return XmlRegex.IsMatch(fileInfo.Extension);
} }
public static float MinimumNumberFromRange(string range) public static float MinimumNumberFromRange(string range)

View File

@ -2,8 +2,6 @@ using System;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data; using API.Data;
using API.Entities; using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Identity;

View File

@ -1,14 +1,18 @@
using System; using System;
using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using System.IO; using System.IO;
using System.IO.Compression; using System.IO.Compression;
using System.Linq; using System.Linq;
using System.Xml.Serialization; using System.Xml.Serialization;
using API.Archive;
using API.Extensions; using API.Extensions;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services.Tasks; using API.Services.Tasks;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using NetVips; using SharpCompress.Archives;
using SharpCompress.Common;
using Image = NetVips.Image;
namespace API.Services namespace API.Services
{ {
@ -25,18 +29,70 @@ namespace API.Services
_logger = logger; _logger = logger;
} }
/// <summary>
/// Checks if a File can be opened. Requires up to 2 opens of the filestream.
/// </summary>
/// <param name="archivePath"></param>
/// <returns></returns>
public ArchiveLibrary CanOpen(string archivePath)
{
if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath)) return ArchiveLibrary.NotSupported;
try
{
using var a2 = ZipFile.OpenRead(archivePath);
return ArchiveLibrary.Default;
}
catch (Exception)
{
try
{
using var a1 = ArchiveFactory.Open(archivePath);
return ArchiveLibrary.SharpCompress;
}
catch (Exception)
{
return ArchiveLibrary.NotSupported;
}
}
}
public int GetNumberOfPagesFromArchive(string archivePath) public int GetNumberOfPagesFromArchive(string archivePath)
{ {
if (!IsValidArchive(archivePath)) return 0; if (!IsValidArchive(archivePath))
{
_logger.LogError("Archive {ArchivePath} could not be found", archivePath);
return 0;
}
try try
{ {
using ZipArchive archive = ZipFile.OpenRead(archivePath); var libraryHandler = CanOpen(archivePath);
return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName)); switch (libraryHandler)
{
case ArchiveLibrary.Default:
{
_logger.LogDebug("Using default compression handling");
using ZipArchive archive = ZipFile.OpenRead(archivePath);
return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
return archive.Entries.Count(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key));
}
case ArchiveLibrary.NotSupported:
_logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath);
return 0;
default:
_logger.LogError("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
return 0;
}
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); _logger.LogError(ex, "[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
return 0; return 0;
} }
} }
@ -46,58 +102,87 @@ namespace API.Services
/// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless /// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless
/// a folder.extension exists in the root directory of the compressed file. /// a folder.extension exists in the root directory of the compressed file.
/// </summary> /// </summary>
/// <param name="filepath"></param> /// <param name="archivePath"></param>
/// <param name="createThumbnail">Create a smaller variant of file extracted from archive. Archive images are usually 1MB each.</param> /// <param name="createThumbnail">Create a smaller variant of file extracted from archive. Archive images are usually 1MB each.</param>
/// <returns></returns> /// <returns></returns>
public byte[] GetCoverImage(string filepath, bool createThumbnail = false) public byte[] GetCoverImage(string archivePath, bool createThumbnail = false)
{ {
if (archivePath == null || !IsValidArchive(archivePath)) return Array.Empty<byte>();
try try
{ {
if (!IsValidArchive(filepath)) return Array.Empty<byte>(); var libraryHandler = CanOpen(archivePath);
switch (libraryHandler)
{
case ArchiveLibrary.Default:
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
var entry = folder ?? entries[0];
using var archive = ZipFile.OpenRead(filepath); return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
if (!archive.HasFiles()) return Array.Empty<byte>(); }
case ArchiveLibrary.SharpCompress:
var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder"); {
var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList(); _logger.LogDebug("Using SharpCompress compression handling");
var entry = folder ?? entries[0]; using var archive = ArchiveFactory.Open(archivePath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry); }
case ArchiveLibrary.NotSupported:
_logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath);
return Array.Empty<byte>();
default:
_logger.LogError("[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath);
return Array.Empty<byte>();
}
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "There was an exception when reading archive stream: {Filepath}. Defaulting to no cover image", filepath); _logger.LogError(ex, "[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath);
} }
return Array.Empty<byte>(); return Array.Empty<byte>();
} }
private byte[] CreateThumbnail(ZipArchiveEntry entry) private byte[] FindCoverImage(IEnumerable<IArchiveEntry> entries, bool createThumbnail)
{ {
try var images = entries.ToList();
foreach (var entry in images)
{ {
using var stream = entry.Open(); if (Path.GetFileNameWithoutExtension(entry.Key).ToLower() == "folder")
using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth); {
return thumbnail.WriteToBuffer(".jpg"); // TODO: Validate this code works with .png files using var ms = new MemoryStream();
} entry.WriteTo(ms);
catch (Exception ex) ms.Position = 0;
{ return createThumbnail ? CreateThumbnail(ms.ToArray(), Path.GetExtension(entry.Key)) : ms.ToArray();
_logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entry.FullName); }
} }
if (images.Any())
{
var entry = images.OrderBy(e => e.Key).FirstOrDefault();
if (entry == null) return Array.Empty<byte>();
using var ms = new MemoryStream();
entry.WriteTo(ms);
ms.Position = 0;
var data = ms.ToArray();
return createThumbnail ? CreateThumbnail(data, Path.GetExtension(entry.Key)) : data;
}
return Array.Empty<byte>(); return Array.Empty<byte>();
} }
private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry) private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry)
{ {
using var stream = entry.Open(); using var stream = entry.Open();
using var ms = new MemoryStream(); using var ms = new MemoryStream();
stream.CopyTo(ms); // TODO: Check if we can use CopyToAsync here stream.CopyTo(ms);
var data = ms.ToArray(); var data = ms.ToArray();
return data; return data;
} }
/// <summary> /// <summary>
/// Given an archive stream, will assess whether directory needs to be flattened so that the extracted archive files are directly /// Given an archive stream, will assess whether directory needs to be flattened so that the extracted archive files are directly
/// under extract path and not nested in subfolders. See <see cref="DirectoryInfoExtensions"/> Flatten method. /// under extract path and not nested in subfolders. See <see cref="DirectoryInfoExtensions"/> Flatten method.
@ -108,75 +193,173 @@ namespace API.Services
{ {
// Sometimes ZipArchive will list the directory and others it will just keep it in the FullName // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName
return archive.Entries.Count > 0 && return archive.Entries.Count > 0 &&
!Path.HasExtension(archive.Entries.ElementAt(0).FullName) || !Path.HasExtension(archive.Entries.ElementAt(0).FullName) ||
archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar)); archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar));
}
private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg")
{
if (!formatExtension.StartsWith("."))
{
formatExtension = "." + formatExtension;
}
// TODO: Validate if jpeg is same as jpg
try
{
using var thumbnail = Image.ThumbnailBuffer(entry, ThumbnailWidth);
return thumbnail.WriteToBuffer(formatExtension);
}
catch (Exception ex)
{
_logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
}
return Array.Empty<byte>();
}
private byte[] CreateThumbnail(ZipArchiveEntry entry, string formatExtension = ".jpg")
{
if (!formatExtension.StartsWith("."))
{
formatExtension = $".{formatExtension}";
}
try
{
using var stream = entry.Open();
using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth);
return thumbnail.WriteToBuffer(formatExtension); // TODO: Validate this code works with .png files
}
catch (Exception ex)
{
_logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entry.FullName);
}
return Array.Empty<byte>();
} }
/// <summary> /// <summary>
/// Test if the archive path exists and there are images inside it. This will log as an error. /// Test if the archive path exists and an archive
/// </summary> /// </summary>
/// <param name="archivePath"></param> /// <param name="archivePath"></param>
/// <returns></returns> /// <returns></returns>
public bool IsValidArchive(string archivePath) public bool IsValidArchive(string archivePath)
{ {
try if (!File.Exists(archivePath))
{ {
if (!File.Exists(archivePath)) _logger.LogError("Archive {ArchivePath} could not be found", archivePath);
{ return false;
_logger.LogError("Archive {ArchivePath} could not be found", archivePath);
return false;
}
if (!Parser.Parser.IsArchive(archivePath))
{
_logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath);
return false;
}
using var archive = ZipFile.OpenRead(archivePath);
if (archive.Entries.Any(e => Parser.Parser.IsImage(e.FullName))) return true;
_logger.LogError("Archive {ArchivePath} contains no images", archivePath);
}
catch (Exception ex)
{
_logger.LogError(ex, "Unable to validate archive ({ArchivePath}) due to problem opening archive", archivePath);
} }
if (Parser.Parser.IsArchive(archivePath)) return true;
_logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath);
return false; return false;
}
private static ComicInfo FindComicInfoXml(IEnumerable<IArchiveEntry> entries)
{
foreach (var entry in entries)
{
if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key))
{
using var ms = new MemoryStream();
entry.WriteTo(ms);
ms.Position = 0;
var serializer = new XmlSerializer(typeof(ComicInfo));
var info = (ComicInfo) serializer.Deserialize(ms);
return info;
}
}
return null;
} }
public string GetSummaryInfo(string archivePath) public string GetSummaryInfo(string archivePath)
{ {
var summary = ""; var summary = string.Empty;
if (!IsValidArchive(archivePath)) return summary; if (!IsValidArchive(archivePath)) return summary;
using var archive = ZipFile.OpenRead(archivePath); ComicInfo info = null;
if (!archive.HasFiles()) return summary;
var info = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
if (info == null) return summary;
// Parse XML file
try try
{ {
using var stream = info.Open(); if (!File.Exists(archivePath)) return summary;
var serializer = new XmlSerializer(typeof(ComicInfo));
ComicInfo comicInfo =
(ComicInfo)serializer.Deserialize(stream);
if (comicInfo != null) var libraryHandler = CanOpen(archivePath);
switch (libraryHandler)
{ {
return comicInfo.Summary; case ArchiveLibrary.Default:
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var entry = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
if (entry != null)
{
using var stream = entry.Open();
var serializer = new XmlSerializer(typeof(ComicInfo));
info = (ComicInfo) serializer.Deserialize(stream);
}
break;
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
break;
}
case ArchiveLibrary.NotSupported:
_logger.LogError("[GetSummaryInfo] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath);
return summary;
default:
_logger.LogError("[GetSummaryInfo] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
return summary;
} }
}
catch (AggregateException ex)
{
_logger.LogError(ex, "There was an issue parsing ComicInfo.xml from {ArchivePath}", archivePath);
}
if (info != null)
{
return info.Summary;
}
_logger.LogError("[GetSummaryInfo] Could not parse archive file: {Filepath}", archivePath);
}
catch (Exception ex)
{
_logger.LogError(ex, "[GetSummaryInfo] There was an exception when reading archive stream: {Filepath}", archivePath);
}
return summary; return summary;
} }
private static void ExtractArchiveEntities(IEnumerable<IArchiveEntry> entries, string extractPath)
{
DirectoryService.ExistOrCreate(extractPath);
foreach (var entry in entries)
{
entry.WriteToDirectory(extractPath, new ExtractionOptions()
{
ExtractFullPath = false,
Overwrite = false
});
}
}
private void ExtractArchiveEntries(ZipArchive archive, string extractPath)
{
var needsFlattening = ArchiveNeedsFlattening(archive);
if (!archive.HasFiles() && !needsFlattening) return;
archive.ExtractToDirectory(extractPath, true);
if (needsFlattening)
{
_logger.LogDebug("Extracted archive is nested in root folder, flattening...");
new DirectoryInfo(extractPath).Flatten();
}
}
/// <summary> /// <summary>
/// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists, /// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists,
/// will return that without performing an extraction. Returns empty string if there are any invalidations which would /// will return that without performing an extraction. Returns empty string if there are any invalidations which would
@ -189,27 +372,44 @@ namespace API.Services
{ {
if (!IsValidArchive(archivePath)) return; if (!IsValidArchive(archivePath)) return;
if (Directory.Exists(extractPath)) if (Directory.Exists(extractPath)) return;
var sw = Stopwatch.StartNew();
try
{ {
_logger.LogDebug("Archive {ArchivePath} has already been extracted. Returning existing folder", archivePath); var libraryHandler = CanOpen(archivePath);
switch (libraryHandler)
{
case ArchiveLibrary.Default:
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
ExtractArchiveEntries(archive, extractPath);
break;
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
break;
}
case ArchiveLibrary.NotSupported:
_logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath);
return;
default:
_logger.LogError("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
return;
}
}
catch (Exception e)
{
_logger.LogError(e, "There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath);
return; return;
} }
Stopwatch sw = Stopwatch.StartNew();
using ZipArchive archive = ZipFile.OpenRead(archivePath);
var needsFlattening = ArchiveNeedsFlattening(archive);
if (!archive.HasFiles() && !needsFlattening) return;
archive.ExtractToDirectory(extractPath, true);
_logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds); _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);
if (needsFlattening)
{
sw = Stopwatch.StartNew();
_logger.LogInformation("Extracted archive is nested in root folder, flattening...");
new DirectoryInfo(extractPath).Flatten();
_logger.LogInformation("Flattened in {ElapsedMilliseconds} milliseconds", sw.ElapsedMilliseconds);
}
} }
} }
} }

View File

@ -32,7 +32,7 @@ namespace API.Services
public void EnsureCacheDirectory() public void EnsureCacheDirectory()
{ {
_logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory); _logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory);
if (!_directoryService.ExistOrCreate(CacheDirectory)) if (!DirectoryService.ExistOrCreate(CacheDirectory))
{ {
_logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory); _logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory);
} }
@ -106,7 +106,7 @@ namespace API.Services
var chapterFiles = chapter.Files ?? await _unitOfWork.VolumeRepository.GetFilesForChapter(chapter.Id); var chapterFiles = chapter.Files ?? await _unitOfWork.VolumeRepository.GetFilesForChapter(chapter.Id);
foreach (var mangaFile in chapterFiles) foreach (var mangaFile in chapterFiles)
{ {
if (page <= (mangaFile.NumberOfPages + pagesSoFar)) if (page <= (mangaFile.Pages + pagesSoFar))
{ {
var path = GetCachePath(chapter.Id); var path = GetCachePath(chapter.Id);
var files = _directoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions); var files = _directoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions);
@ -121,7 +121,7 @@ namespace API.Services
return (files.ElementAt(page - pagesSoFar), mangaFile); return (files.ElementAt(page - pagesSoFar), mangaFile);
} }
pagesSoFar += mangaFile.NumberOfPages; pagesSoFar += mangaFile.Pages;
} }
return ("", null); return ("", null);

View File

@ -71,7 +71,12 @@ namespace API.Services
return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path); return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path);
} }
public bool ExistOrCreate(string directoryPath) /// <summary>
/// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases.
/// </summary>
/// <param name="directoryPath"></param>
/// <returns></returns>
public static bool ExistOrCreate(string directoryPath)
{ {
var di = new DirectoryInfo(directoryPath); var di = new DirectoryInfo(directoryPath);
if (di.Exists) return true; if (di.Exists) return true;
@ -79,16 +84,21 @@ namespace API.Services
{ {
Directory.CreateDirectory(directoryPath); Directory.CreateDirectory(directoryPath);
} }
catch (Exception ex) catch (Exception)
{ {
_logger.LogError(ex, "There was an issue creating directory: {Directory}", directoryPath);
return false; return false;
} }
return true; return true;
} }
public void ClearAndDeleteDirectory(string directoryPath) /// <summary>
/// Deletes all files within the directory, then the directory itself.
/// </summary>
/// <param name="directoryPath"></param>
public static void ClearAndDeleteDirectory(string directoryPath)
{ {
if (!Directory.Exists(directoryPath)) return;
DirectoryInfo di = new DirectoryInfo(directoryPath); DirectoryInfo di = new DirectoryInfo(directoryPath);
ClearDirectory(directoryPath); ClearDirectory(directoryPath);
@ -96,7 +106,12 @@ namespace API.Services
di.Delete(true); di.Delete(true);
} }
public void ClearDirectory(string directoryPath) /// <summary>
/// Deletes all files within the directory.
/// </summary>
/// <param name="directoryPath"></param>
/// <returns></returns>
public static void ClearDirectory(string directoryPath)
{ {
var di = new DirectoryInfo(directoryPath); var di = new DirectoryInfo(directoryPath);
if (!di.Exists) return; if (!di.Exists) return;
@ -235,7 +250,8 @@ namespace API.Services
return ++localCount; return ++localCount;
}, },
(c) => { (c) => {
Interlocked.Add(ref fileCount, c); // ReSharper disable once AccessToModifiedClosure
Interlocked.Add(ref fileCount, c);
}); });
} }
} }

View File

@ -38,6 +38,7 @@ namespace API.Services
} }
} }
public void UpdateMetadata(Volume volume, bool forceUpdate) public void UpdateMetadata(Volume volume, bool forceUpdate)
{ {
if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate)) if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate))
@ -45,14 +46,23 @@ namespace API.Services
// TODO: Create a custom sorter for Chapters so it's consistent across the application // TODO: Create a custom sorter for Chapters so it's consistent across the application
volume.Chapters ??= new List<Chapter>(); volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault(); var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault(); var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); // Skip calculating Cover Image (I/O) if the chapter already has it set
if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage))
{
if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
}
else
{
volume.CoverImage = firstChapter.CoverImage;
}
} }
} }
public void UpdateMetadata(Series series, bool forceUpdate) public void UpdateMetadata(Series series, bool forceUpdate)
{ {
// TODO: this doesn't actually invoke finding a new cover. Also all these should be groupped ideally so we limit // NOTE: this doesn't actually invoke finding a new cover. Also all these should be grouped ideally so we limit
// disk I/O to one method. // disk I/O to one method.
if (series == null) return; if (series == null) return;
if (ShouldFindCoverImage(series.CoverImage, forceUpdate)) if (ShouldFindCoverImage(series.CoverImage, forceUpdate))

View File

@ -1,5 +1,4 @@
using System.IO; using System.IO;
using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities.Enums; using API.Entities.Enums;
using API.Helpers.Converters; using API.Helpers.Converters;
@ -21,17 +20,13 @@ namespace API.Services
private readonly IMetadataService _metadataService; private readonly IMetadataService _metadataService;
private readonly IBackupService _backupService; private readonly IBackupService _backupService;
private readonly ICleanupService _cleanupService; private readonly ICleanupService _cleanupService;
private readonly IDirectoryService _directoryService;
public static BackgroundJobServer Client => new BackgroundJobServer(new BackgroundJobServerOptions() public static BackgroundJobServer Client => new BackgroundJobServer();
{
WorkerCount = 1
});
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService, public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService, IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
IDirectoryService directoryService, IWebHostEnvironment env) IWebHostEnvironment env)
{ {
_cacheService = cacheService; _cacheService = cacheService;
_logger = logger; _logger = logger;
@ -40,7 +35,6 @@ namespace API.Services
_metadataService = metadataService; _metadataService = metadataService;
_backupService = backupService; _backupService = backupService;
_cleanupService = cleanupService; _cleanupService = cleanupService;
_directoryService = directoryService;
if (!env.IsDevelopment()) if (!env.IsDevelopment())
{ {
@ -58,9 +52,8 @@ namespace API.Services
public void ScheduleTasks() public void ScheduleTasks()
{ {
_logger.LogInformation("Scheduling reoccurring tasks"); _logger.LogInformation("Scheduling reoccurring tasks");
string setting = null; string setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value;
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value;
if (setting != null) if (setting != null)
{ {
_logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting); _logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting);
@ -87,7 +80,7 @@ namespace API.Services
public void ScanLibrary(int libraryId, bool forceUpdate = false) public void ScanLibrary(int libraryId, bool forceUpdate = false)
{ {
// TODO: We shouldn't queue up a job if one is already in progress
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate)); BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change
@ -107,7 +100,7 @@ namespace API.Services
public void CleanupTemp() public void CleanupTemp()
{ {
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
BackgroundJob.Enqueue((() => _directoryService.ClearDirectory(tempDirectory))); BackgroundJob.Enqueue((() => DirectoryService.ClearDirectory(tempDirectory)));
} }
public void BackupDatabase() public void BackupDatabase()

View File

@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO; using System.IO;
using System.IO.Compression; using System.IO.Compression;
using System.Linq; using System.Linq;
using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
@ -55,7 +54,7 @@ namespace API.Services.Tasks
var files = maxRollingFiles > 0 var files = maxRollingFiles > 0
? _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log") ? _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log")
: new string[] {"kavita.log"}; : new[] {"kavita.log"};
return files; return files;
} }
@ -66,7 +65,7 @@ namespace API.Services.Tasks
var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value; var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
_logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory); _logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory);
if (!_directoryService.ExistOrCreate(backupDirectory)) if (!DirectoryService.ExistOrCreate(backupDirectory))
{ {
_logger.LogError("Could not write to {BackupDirectory}; aborting backup", backupDirectory); _logger.LogError("Could not write to {BackupDirectory}; aborting backup", backupDirectory);
return; return;
@ -82,8 +81,8 @@ namespace API.Services.Tasks
} }
var tempDirectory = Path.Join(_tempDirectory, dateString); var tempDirectory = Path.Join(_tempDirectory, dateString);
_directoryService.ExistOrCreate(tempDirectory); DirectoryService.ExistOrCreate(tempDirectory);
_directoryService.ClearDirectory(tempDirectory); DirectoryService.ClearDirectory(tempDirectory);
_directoryService.CopyFilesToDirectory( _directoryService.CopyFilesToDirectory(
_backupFiles.Select(file => Path.Join(Directory.GetCurrentDirectory(), file)).ToList(), tempDirectory); _backupFiles.Select(file => Path.Join(Directory.GetCurrentDirectory(), file)).ToList(), tempDirectory);
@ -96,7 +95,7 @@ namespace API.Services.Tasks
_logger.LogError(ex, "There was an issue when archiving library backup"); _logger.LogError(ex, "There was an issue when archiving library backup");
} }
_directoryService.ClearAndDeleteDirectory(tempDirectory); DirectoryService.ClearAndDeleteDirectory(tempDirectory);
_logger.LogInformation("Database backup completed"); _logger.LogInformation("Database backup completed");
} }

View File

@ -11,14 +11,12 @@ namespace API.Services.Tasks
public class CleanupService : ICleanupService public class CleanupService : ICleanupService
{ {
private readonly ICacheService _cacheService; private readonly ICacheService _cacheService;
private readonly IDirectoryService _directoryService;
private readonly ILogger<CleanupService> _logger; private readonly ILogger<CleanupService> _logger;
private readonly IBackupService _backupService; private readonly IBackupService _backupService;
public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger, IBackupService backupService) public CleanupService(ICacheService cacheService, ILogger<CleanupService> logger, IBackupService backupService)
{ {
_cacheService = cacheService; _cacheService = cacheService;
_directoryService = directoryService;
_logger = logger; _logger = logger;
_backupService = backupService; _backupService = backupService;
} }
@ -28,7 +26,7 @@ namespace API.Services.Tasks
{ {
_logger.LogInformation("Cleaning temp directory"); _logger.LogInformation("Cleaning temp directory");
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
_directoryService.ClearDirectory(tempDirectory); DirectoryService.ClearDirectory(tempDirectory);
_logger.LogInformation("Cleaning cache directory"); _logger.LogInformation("Cleaning cache directory");
_cacheService.Cleanup(); _cacheService.Cleanup();
_logger.LogInformation("Cleaning old database backups"); _logger.LogInformation("Cleaning old database backups");

View File

@ -33,7 +33,7 @@ namespace API.Services.Tasks
_metadataService = metadataService; _metadataService = metadataService;
} }
//[DisableConcurrentExecution(timeoutInSeconds: 5)] [DisableConcurrentExecution(timeoutInSeconds: 5)]
[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibraries() public void ScanLibraries()
{ {
@ -64,7 +64,7 @@ namespace API.Services.Tasks
_scannedSeries = null; _scannedSeries = null;
} }
//[DisableConcurrentExecution(5)] [DisableConcurrentExecution(5)]
[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibrary(int libraryId, bool forceUpdate) public void ScanLibrary(int libraryId, bool forceUpdate)
{ {
@ -193,6 +193,7 @@ namespace API.Services.Tasks
series.Pages = series.Volumes.Sum(v => v.Pages); series.Pages = series.Volumes.Sum(v => v.Pages);
_metadataService.UpdateMetadata(series, _forceUpdate); _metadataService.UpdateMetadata(series, _forceUpdate);
}); });
foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now; foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
} }
@ -224,7 +225,7 @@ namespace API.Services.Tasks
_logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
UpdateChapters(volume, infos); UpdateChapters(volume, infos);
volume.Pages = volume.Chapters.Sum(c => c.Pages); volume.Pages = volume.Chapters.Sum(c => c.Pages);
_metadataService.UpdateMetadata(volume, _forceUpdate); _metadataService.UpdateMetadata(volume, _forceUpdate);
} }
@ -284,7 +285,7 @@ namespace API.Services.Tasks
AddOrUpdateFileForChapter(chapter, info); AddOrUpdateFileForChapter(chapter, info);
chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + ""; chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
chapter.Range = info.Chapters; chapter.Range = info.Chapters;
chapter.Pages = chapter.Files.Sum(f => f.NumberOfPages); chapter.Pages = chapter.Files.Sum(f => f.Pages);
_metadataService.UpdateMetadata(chapter, _forceUpdate); _metadataService.UpdateMetadata(chapter, _forceUpdate);
} }
@ -350,7 +351,7 @@ namespace API.Services.Tasks
{ {
FilePath = info.FullFilePath, FilePath = info.FullFilePath,
Format = info.Format, Format = info.Format,
NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath)
}; };
} }
@ -361,7 +362,7 @@ namespace API.Services.Tasks
if (existingFile != null) if (existingFile != null)
{ {
existingFile.Format = info.Format; existingFile.Format = info.Format;
existingFile.NumberOfPages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
} }
else else
{ {