Merge pull request #26 from Kareadita/feature/manga-reader

Manga Reader
Joseph Milazzo 2021-01-14 11:40:45 -06:00 committed by GitHub
commit e0d70d16f9
39 changed files with 2037 additions and 96 deletions

.gitignore

@@ -447,4 +447,5 @@ appsettings.json
/API/kavita.db-shm
/API/kavita.db-wal
/API/Hangfire.db
/API/Hangfire-log.db
+cache/


@@ -15,11 +15,11 @@ namespace API.Tests
//[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "16-17")]
[InlineData("Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", "1")]
[InlineData("v001", "1")]
+[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1")]
public void ParseVolumeTest(string filename, string expected)
{
-var result = ParseVolume(filename);
-Assert.Equal(expected, result);
+Assert.Equal(expected, ParseVolume(filename));
}
[Theory]
@@ -31,27 +31,27 @@ namespace API.Tests
[InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "Gokukoku no Brynhildr")]
[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "Dance in the Vampire Bund")]
[InlineData("v001", "")]
+[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "U12 (Under 12)")]
[InlineData("Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)", "Akame ga KILL! ZERO")]
public void ParseSeriesTest(string filename, string expected)
{
-var result = ParseSeries(filename);
-Assert.Equal(expected, result);
+Assert.Equal(expected, ParseSeries(filename));
}
[Theory]
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")]
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")]
-[InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "")]
+[InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "0")]
-[InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "")]
+[InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "0")]
[InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")]
-[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "")]
+[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "0")]
[InlineData("c001", "1")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "12")]
+[InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")]
public void ParseChaptersTest(string filename, string expected)
{
-var result = ParseChapter(filename);
-Assert.Equal(expected, result);
+Assert.Equal(expected, ParseChapter(filename));
}
@@ -88,6 +88,7 @@ namespace API.Tests
[InlineData("test.cbr", true)]
[InlineData("test.zip", true)]
[InlineData("test.rar", true)]
+[InlineData("test.rar.!qb", false)]
public void IsArchiveTest(string input, bool expected)
{
Assert.Equal(expected, IsArchive(input));


@@ -1,9 +1,6 @@
-using System;
using System.IO;
using API.IO;
-using NetVips;
using Xunit;
-using Xunit.Abstractions;
namespace API.Tests.Services
{
@@ -12,7 +9,7 @@ namespace API.Tests.Services
[Theory]
[InlineData("v10.cbz", "v10.expected.jpg")]
[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")]
-//[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")]
+[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")]
public void GetCoverImageTest(string inputFile, string expectedOutputFile)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageProvider");


@@ -0,0 +1,28 @@
using System;
using API.Comparators;
using Xunit;
namespace API.Tests.Services
{
public class StringLogicalComparerTest
{
[Theory]
[InlineData(
new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"},
new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"}
)]
public void TestLogicalComparer(string[] input, string[] expected)
{
NumericComparer nc = new NumericComparer();
Array.Sort(input, nc);
var i = 0;
foreach (var s in input)
{
Assert.Equal(s, expected[i]);
i++;
}
}
}
}
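The test above pins down why a natural-order comparer is needed for page filenames: the default ordinal string sort interleaves multi-digit numbers. A minimal sketch (not part of the commit) of the difference:

var pages = new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"};
Array.Sort(pages);                        // ordinal: x1.jpg, x10.jpg, x11.jpg, x3.jpg, x4.jpg
Array.Sort(pages, new NumericComparer()); // natural: x1.jpg, x3.jpg, x4.jpg, x10.jpg, x11.jpg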


@@ -0,0 +1,17 @@
using System.Collections;
namespace API.Comparators
{
public class NumericComparer : IComparer
{
public int Compare(object x, object y)
{
if((x is string xs) && (y is string ys))
{
return StringLogicalComparer.Compare(xs, ys);
}
return -1;
}
}
}


@@ -0,0 +1,130 @@
//(c) Vasian Cepa 2005
// Version 2
// Taken from: https://www.codeproject.com/Articles/11016/Numeric-String-Sort-in-C
using System;
namespace API.Comparators
{
public static class StringLogicalComparer
{
public static int Compare(string s1, string s2)
{
//get rid of special cases
if((s1 == null) && (s2 == null)) return 0;
if(s1 == null) return -1;
if(s2 == null) return 1;
if (string.IsNullOrEmpty(s1) && string.IsNullOrEmpty(s2)) return 0;
if (string.IsNullOrEmpty(s1)) return -1;
if (string.IsNullOrEmpty(s2)) return -1;
//WE style, special case
bool sp1 = Char.IsLetterOrDigit(s1, 0);
bool sp2 = Char.IsLetterOrDigit(s2, 0);
if(sp1 && !sp2) return 1;
if(!sp1 && sp2) return -1;
int i1 = 0, i2 = 0; //current index
while(true)
{
bool c1 = Char.IsDigit(s1, i1);
bool c2 = Char.IsDigit(s2, i2);
int r; // temp result
if(!c1 && !c2)
{
bool letter1 = Char.IsLetter(s1, i1);
bool letter2 = Char.IsLetter(s2, i2);
if((letter1 && letter2) || (!letter1 && !letter2))
{
if(letter1 && letter2)
{
r = Char.ToLower(s1[i1]).CompareTo(Char.ToLower(s2[i2]));
}
else
{
r = s1[i1].CompareTo(s2[i2]);
}
if(r != 0) return r;
}
else if(!letter1 && letter2) return -1;
else if(letter1 && !letter2) return 1;
}
else if(c1 && c2)
{
r = CompareNum(s1, ref i1, s2, ref i2);
if(r != 0) return r;
}
else if(c1)
{
return -1;
}
else if(c2)
{
return 1;
}
i1++;
i2++;
if((i1 >= s1.Length) && (i2 >= s2.Length))
{
return 0;
}
if(i1 >= s1.Length)
{
return -1;
}
if(i2 >= s2.Length)
{
return -1;
}
}
}
private static int CompareNum(string s1, ref int i1, string s2, ref int i2)
{
int nzStart1 = i1, nzStart2 = i2; // nz = non zero
int end1 = i1, end2 = i2;
ScanNumEnd(s1, i1, ref end1, ref nzStart1);
ScanNumEnd(s2, i2, ref end2, ref nzStart2);
var start1 = i1; i1 = end1 - 1;
var start2 = i2; i2 = end2 - 1;
var nzLength1 = end1 - nzStart1;
var nzLength2 = end2 - nzStart2;
if(nzLength1 < nzLength2) return -1;
if(nzLength1 > nzLength2) return 1;
for(int j1 = nzStart1,j2 = nzStart2; j1 <= i1; j1++,j2++)
{
var r = s1[j1].CompareTo(s2[j2]);
if(r != 0) return r;
}
// the nz parts are equal
var length1 = end1 - start1;
var length2 = end2 - start2;
if(length1 == length2) return 0;
if(length1 > length2) return -1;
return 1;
}
//lookahead
private static void ScanNumEnd(string s, int start, ref int end, ref int nzStart)
{
nzStart = start;
end = start;
bool countZeros = true;
while(Char.IsDigit(s, end))
{
if(countZeros && s[end].Equals('0'))
{
nzStart++;
}
else countZeros = false;
end++;
if(end >= s.Length) break;
}
}
}
}
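For a single pair of names, the comparer can also be called directly. A small sketch (not part of the commit), using page-style filenames:

StringLogicalComparer.Compare("page2.jpg", "page10.jpg");            // negative: 2 sorts before 10
string.Compare("page2.jpg", "page10.jpg", StringComparison.Ordinal); // positive: ordinal puts "10" first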


@@ -4,6 +4,7 @@ using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
using API.Entities;
+using API.Extensions;
using API.Interfaces;
using AutoMapper;
using Hangfire;
@@ -23,10 +24,11 @@ namespace API.Controllers
private readonly IMapper _mapper;
private readonly ITaskScheduler _taskScheduler;
private readonly ISeriesRepository _seriesRepository;
+private readonly ICacheService _cacheService;
public LibraryController(IDirectoryService directoryService,
ILibraryRepository libraryRepository, ILogger<LibraryController> logger, IUserRepository userRepository,
-IMapper mapper, ITaskScheduler taskScheduler, ISeriesRepository seriesRepository)
+IMapper mapper, ITaskScheduler taskScheduler, ISeriesRepository seriesRepository, ICacheService cacheService)
{
_directoryService = directoryService;
_libraryRepository = libraryRepository;
@@ -35,6 +37,7 @@ namespace API.Controllers
_mapper = mapper;
_taskScheduler = taskScheduler;
_seriesRepository = seriesRepository;
+_cacheService = cacheService;
}
/// <summary>
@@ -71,6 +74,7 @@ namespace API.Controllers
if (await _userRepository.SaveAllAsync())
{
+_logger.LogInformation($"Created a new library: {library.Name}");
var createdLibrary = await _libraryRepository.GetLibraryForNameAsync(library.Name);
BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(createdLibrary.Id, false));
return Ok();
@@ -121,6 +125,7 @@ namespace API.Controllers
if (await _userRepository.SaveAllAsync())
{
+_logger.LogInformation($"Added: {updateLibraryForUserDto.SelectedLibraries} to {updateLibraryForUserDto.Username}");
return Ok(user);
}
@@ -151,7 +156,19 @@ namespace API.Controllers
[HttpDelete("delete")]
public async Task<ActionResult<bool>> DeleteLibrary(int libraryId)
{
-return Ok(await _libraryRepository.DeleteLibrary(libraryId));
+var username = User.GetUsername();
+_logger.LogInformation($"Library {libraryId} is being deleted by {username}.");
+var series = await _seriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId);
+var volumes = (await _seriesRepository.GetVolumesForSeriesAsync(series.Select(x => x.Id).ToArray()))
+.Select(x => x.Id).ToArray();
+var result = await _libraryRepository.DeleteLibrary(libraryId);
+if (result && volumes.Any())
+{
+BackgroundJob.Enqueue(() => _cacheService.CleanupVolumes(volumes));
+}
+return Ok(result);
}
[Authorize(Policy = "RequireAdminRole")]
@@ -174,7 +191,7 @@ namespace API.Controllers
{
if (differenceBetweenFolders.Any())
{
-BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(library.Id));
+BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(library.Id, true));
}
return Ok();


@@ -0,0 +1,32 @@
using System.Threading.Tasks;
using API.DTOs;
using API.Interfaces;
using Microsoft.AspNetCore.Mvc;
namespace API.Controllers
{
public class ReaderController : BaseApiController
{
private readonly IDirectoryService _directoryService;
private readonly ICacheService _cacheService;
public ReaderController(IDirectoryService directoryService, ICacheService cacheService)
{
_directoryService = directoryService;
_cacheService = cacheService;
}
[HttpGet("image")]
public async Task<ActionResult<ImageDto>> GetImage(int volumeId, int page)
{
// Temp let's iterate the directory each call to get next image
var volume = await _cacheService.Ensure(volumeId);
var path = _cacheService.GetCachedPagePath(volume, page);
var file = await _directoryService.ReadImageAsync(path);
file.Page = page;
return Ok(file);
}
}
}
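Assuming BaseApiController applies the conventional "api/[controller]" route (not shown in this diff), a client would fetch pages one at a time. A hypothetical call:

// GET /api/reader/image?volumeId=5&page=0  (route assumed from BaseApiController conventions)
var image = await httpClient.GetFromJsonAsync<ImageDto>("api/reader/image?volumeId=5&page=0");
// image.Content holds the page bytes; image.Page echoes the requested page number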


@@ -1,8 +1,12 @@
using System.Collections.Generic;
+using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
+using API.Extensions;
using API.Interfaces;
using AutoMapper;
+using Hangfire;
+using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
@@ -14,14 +18,17 @@ namespace API.Controllers
private readonly IMapper _mapper;
private readonly ITaskScheduler _taskScheduler;
private readonly ISeriesRepository _seriesRepository;
+private readonly ICacheService _cacheService;
public SeriesController(ILogger<SeriesController> logger, IMapper mapper,
-ITaskScheduler taskScheduler, ISeriesRepository seriesRepository)
+ITaskScheduler taskScheduler, ISeriesRepository seriesRepository,
+ICacheService cacheService)
{
_logger = logger;
_mapper = mapper;
_taskScheduler = taskScheduler;
_seriesRepository = seriesRepository;
+_cacheService = cacheService;
}
[HttpGet("{seriesId}")]
@@ -30,10 +37,32 @@ namespace API.Controllers
return Ok(await _seriesRepository.GetSeriesDtoByIdAsync(seriesId));
}
+[Authorize(Policy = "RequireAdminRole")]
+[HttpDelete("{seriesId}")]
+public async Task<ActionResult<bool>> DeleteSeries(int seriesId)
+{
+var username = User.GetUsername();
+var volumes = (await _seriesRepository.GetVolumesForSeriesAsync(new []{seriesId})).Select(x => x.Id).ToArray();
+_logger.LogInformation($"Series {seriesId} is being deleted by {username}.");
+var result = await _seriesRepository.DeleteSeriesAsync(seriesId);
+if (result)
+{
+BackgroundJob.Enqueue(() => _cacheService.CleanupVolumes(volumes));
+}
+return Ok(result);
+}
[HttpGet("volumes")]
public async Task<ActionResult<IEnumerable<VolumeDto>>> GetVolumes(int seriesId)
{
return Ok(await _seriesRepository.GetVolumesDtoAsync(seriesId));
}
+[HttpGet("volume")]
+public async Task<ActionResult<VolumeDto>> GetVolume(int volumeId)
+{
+return Ok(await _seriesRepository.GetVolumeDtoAsync(volumeId));
+}
}
}

API/DTOs/ImageDto.cs (new file)

@@ -0,0 +1,13 @@
namespace API.DTOs
{
public class ImageDto
{
public int Page { get; set; }
public string Filename { get; init; }
public string FullPath { get; init; }
public int Width { get; init; }
public int Height { get; init; }
public string Format { get; init; }
public byte[] Content { get; init; }
}
}
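The DTO mirrors what DirectoryService.ReadImageAsync (declared later in this diff) is expected to return. A rough sketch of how it might be populated (hypothetical helper, not shown in this commit; NetVips is already a project dependency):

public static async Task<ImageDto> ReadImage(string imagePath)
{
    // Illustrative only: open the image to read its dimensions, then return the raw bytes.
    using var image = NetVips.Image.NewFromFile(imagePath);
    return new ImageDto
    {
        Content = await File.ReadAllBytesAsync(imagePath),
        Filename = Path.GetFileNameWithoutExtension(imagePath),
        FullPath = Path.GetFullPath(imagePath),
        Width = image.Width,
        Height = image.Height,
        Format = Path.GetExtension(imagePath)
    };
}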


@@ -1,5 +1,4 @@
-using System.Collections.Generic;
namespace API.DTOs
{
public class VolumeDto
@@ -8,5 +7,6 @@ namespace API.DTOs
public int Number { get; set; }
public string Name { get; set; }
public byte[] CoverImage { get; set; }
+public int Pages { get; set; }
}
}


@@ -38,10 +38,12 @@ namespace API.Data
public async Task<IEnumerable<LibraryDto>> GetLibrariesDtoForUsernameAsync(string userName)
{
+// TODO: Speed this query up
return await _context.Library
.Include(l => l.AppUsers)
.Where(library => library.AppUsers.Any(x => x.UserName == userName))
-.ProjectTo<LibraryDto>(_mapper.ConfigurationProvider).ToListAsync();
+.ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
+.ToListAsync();
}
public async Task<Library> GetLibraryForNameAsync(string libraryName)


@@ -0,0 +1,521 @@
// <auto-generated />
using System;
using API.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace API.Data.Migrations
{
[DbContext(typeof(DataContext))]
[Migration("20210109205034_CacheMetadata")]
partial class CacheMetadata
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.1");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedName")
.IsUnique()
.HasDatabaseName("RoleNameIndex");
b.ToTable("AspNetRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AccessFailedCount")
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Email")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<bool>("EmailConfirmed")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
b.Property<bool>("LockoutEnabled")
.HasColumnType("INTEGER");
b.Property<DateTimeOffset?>("LockoutEnd")
.HasColumnType("TEXT");
b.Property<string>("NormalizedEmail")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedUserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
b.Property<string>("PhoneNumber")
.HasColumnType("TEXT");
b.Property<bool>("PhoneNumberConfirmed")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("SecurityStamp")
.HasColumnType("TEXT");
b.Property<bool>("TwoFactorEnabled")
.HasColumnType("INTEGER");
b.Property<string>("UserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedEmail")
.HasDatabaseName("EmailIndex");
b.HasIndex("NormalizedUserName")
.IsUnique()
.HasDatabaseName("UserNameIndex");
b.ToTable("AspNetUsers");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("UserId", "RoleId");
b.HasIndex("RoleId");
b.ToTable("AspNetUserRoles");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Path")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("FolderPath");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Type")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("Chapter")
.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<int>("NumberOfPages")
.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("MangaFile");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");
b.Property<string>("SortName")
.HasColumnType("TEXT");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Number")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId");
b.ToTable("Volume");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.Property<int>("AppUsersId")
.HasColumnType("INTEGER");
b.Property<int>("LibrariesId")
.HasColumnType("INTEGER");
b.HasKey("AppUsersId", "LibrariesId");
b.HasIndex("LibrariesId");
b.ToTable("AppUserLibrary");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("RoleId");
b.ToTable("AspNetRoleClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("UserId");
b.ToTable("AspNetUserClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("ProviderKey")
.HasColumnType("TEXT");
b.Property<string>("ProviderDisplayName")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("LoginProvider", "ProviderKey");
b.HasIndex("UserId");
b.ToTable("AspNetUserLogins");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("UserId", "LoginProvider", "Name");
b.ToTable("AspNetUserTokens");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.HasOne("API.Entities.AppRole", "Role")
.WithMany("UserRoles")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.AppUser", "User")
.WithMany("UserRoles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Role");
b.Navigation("User");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Folders")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Volume", "Volume")
.WithMany("Files")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Series")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithMany("Volumes")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("AppUsersId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Library", null)
.WithMany()
.HasForeignKey("LibrariesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.HasOne("API.Entities.AppRole", null)
.WithMany()
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Navigation("Folders");
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Navigation("Files");
});
#pragma warning restore 612, 618
}
}
}


@@ -0,0 +1,46 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class CacheMetadata : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<int>(
name: "Chapter",
table: "MangaFile",
type: "INTEGER",
nullable: false,
defaultValue: 0);
migrationBuilder.AddColumn<int>(
name: "Format",
table: "MangaFile",
type: "INTEGER",
nullable: false,
defaultValue: 0);
migrationBuilder.AddColumn<int>(
name: "NumberOfPages",
table: "MangaFile",
type: "INTEGER",
nullable: false,
defaultValue: 0);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "Chapter",
table: "MangaFile");
migrationBuilder.DropColumn(
name: "Format",
table: "MangaFile");
migrationBuilder.DropColumn(
name: "NumberOfPages",
table: "MangaFile");
}
}
}


@@ -0,0 +1,524 @@
// <auto-generated />
using System;
using API.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace API.Data.Migrations
{
[DbContext(typeof(DataContext))]
[Migration("20210111231840_VolumePages")]
partial class VolumePages
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.1");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedName")
.IsUnique()
.HasDatabaseName("RoleNameIndex");
b.ToTable("AspNetRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AccessFailedCount")
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Email")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<bool>("EmailConfirmed")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
b.Property<bool>("LockoutEnabled")
.HasColumnType("INTEGER");
b.Property<DateTimeOffset?>("LockoutEnd")
.HasColumnType("TEXT");
b.Property<string>("NormalizedEmail")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedUserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
b.Property<string>("PhoneNumber")
.HasColumnType("TEXT");
b.Property<bool>("PhoneNumberConfirmed")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("SecurityStamp")
.HasColumnType("TEXT");
b.Property<bool>("TwoFactorEnabled")
.HasColumnType("INTEGER");
b.Property<string>("UserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedEmail")
.HasDatabaseName("EmailIndex");
b.HasIndex("NormalizedUserName")
.IsUnique()
.HasDatabaseName("UserNameIndex");
b.ToTable("AspNetUsers");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("UserId", "RoleId");
b.HasIndex("RoleId");
b.ToTable("AspNetUserRoles");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Path")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("FolderPath");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Type")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("Chapter")
.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<int>("NumberOfPages")
.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("MangaFile");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");
b.Property<string>("SortName")
.HasColumnType("TEXT");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Number")
.HasColumnType("INTEGER");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId");
b.ToTable("Volume");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.Property<int>("AppUsersId")
.HasColumnType("INTEGER");
b.Property<int>("LibrariesId")
.HasColumnType("INTEGER");
b.HasKey("AppUsersId", "LibrariesId");
b.HasIndex("LibrariesId");
b.ToTable("AppUserLibrary");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("RoleId");
b.ToTable("AspNetRoleClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("UserId");
b.ToTable("AspNetUserClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("ProviderKey")
.HasColumnType("TEXT");
b.Property<string>("ProviderDisplayName")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("LoginProvider", "ProviderKey");
b.HasIndex("UserId");
b.ToTable("AspNetUserLogins");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("UserId", "LoginProvider", "Name");
b.ToTable("AspNetUserTokens");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.HasOne("API.Entities.AppRole", "Role")
.WithMany("UserRoles")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.AppUser", "User")
.WithMany("UserRoles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Role");
b.Navigation("User");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Folders")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Volume", "Volume")
.WithMany("Files")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Series")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithMany("Volumes")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("AppUsersId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Library", null)
.WithMany()
.HasForeignKey("LibrariesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.HasOne("API.Entities.AppRole", null)
.WithMany()
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Navigation("Folders");
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Navigation("Files");
});
#pragma warning restore 612, 618
}
}
}


@@ -0,0 +1,24 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class VolumePages : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<int>(
name: "Pages",
table: "Volume",
type: "INTEGER",
nullable: false,
defaultValue: 0);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "Pages",
table: "Volume");
}
}
}


@@ -184,9 +184,18 @@ namespace API.Data.Migrations
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
+b.Property<int>("Chapter")
+.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
+b.Property<int>("Format")
+.HasColumnType("INTEGER");
+b.Property<int>("NumberOfPages")
+.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
@@ -255,6 +264,9 @@ namespace API.Data.Migrations
b.Property<int>("Number")
.HasColumnType("INTEGER");
+b.Property<int>("Pages")
+.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");


@@ -18,7 +18,11 @@ namespace API.Data
foreach (var role in roles)
{
-await roleManager.CreateAsync(role);
+var exists = await roleManager.RoleExistsAsync(role.Name);
+if (!exists)
+{
+await roleManager.CreateAsync(role);
+}
}
}
}


@@ -76,5 +76,41 @@ namespace API.Data
return await _context.Series.Where(x => x.Id == seriesId)
.ProjectTo<SeriesDto>(_mapper.ConfigurationProvider).SingleAsync();
}
+public async Task<Volume> GetVolumeAsync(int volumeId)
+{
+return await _context.Volume
+.Include(vol => vol.Files)
+.SingleOrDefaultAsync(vol => vol.Id == volumeId);
+}
+public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId)
+{
+return await _context.Volume
+.Where(vol => vol.Id == volumeId)
+.Include(vol => vol.Files)
+.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
+.SingleAsync(vol => vol.Id == volumeId);
+}
+/// <summary>
+/// Returns all volumes that contain a seriesId in passed array.
+/// </summary>
+/// <param name="seriesIds"></param>
+/// <returns></returns>
+public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(int[] seriesIds)
+{
+return await _context.Volume
+.Where(v => seriesIds.Contains(v.SeriesId))
+.ToListAsync();
+}
+public async Task<bool> DeleteSeriesAsync(int seriesId)
+{
+var series = await _context.Series.Where(s => s.Id == seriesId).SingleOrDefaultAsync();
+_context.Series.Remove(series);
+return await _context.SaveChangesAsync() > 0;
+}
}
}


@@ -16,6 +16,8 @@ namespace API.Entities
public uint RowVersion { get; set; }
public ICollection<AppUserRole> UserRoles { get; set; }
+//public ICollection<SeriesProgress> SeriesProgresses { get; set; }
public void OnSavingChanges()
{


@@ -0,0 +1,10 @@
namespace API.Entities
{
/// <summary>
/// Represents the progress a single user has on a given Volume.
/// </summary>
public class AppUserProgress
{
}
}


@@ -5,6 +5,15 @@ namespace API.Entities
{
public int Id { get; set; }
public string FilePath { get; set; }
+/// <summary>
+/// Do not expect this to be set. If this MangaFile represents a volume file, this will be null.
+/// </summary>
+public int Chapter { get; set; }
+/// <summary>
+/// Number of pages for the given file
+/// </summary>
+public int NumberOfPages { get; set; }
+public MangaFormat Format { get; set; }
// Relationship Mapping
public Volume Volume { get; set; }


@@ -0,0 +1,14 @@
using System.ComponentModel;
namespace API.Entities
{
public enum MangaFormat
{
[Description("Image")]
Image = 0,
[Description("Archive")]
Archive = 1,
[Description("Unknown")]
Unknown = 2
}
}


@@ -13,6 +13,10 @@ namespace API.Entities
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
public byte[] CoverImage { get; set; }
+public int Pages { get; set; }
+// public string CachePath {get; set;} // Path where cache is located. Default null, resets to null on deletion.
+//public ICollection<AppUserProgress> AppUserProgress { get; set; }
// Many-to-One relationships
public Series Series { get; set; }


@@ -19,9 +19,12 @@ namespace API.Extensions
services.AddScoped<ITaskScheduler, TaskScheduler>();
services.AddScoped<IUserRepository, UserRepository>();
services.AddScoped<ITokenService, TokenService>();
+services.AddScoped<ICacheService, CacheService>();
services.AddScoped<ISeriesRepository, SeriesRepository>();
services.AddScoped<IDirectoryService, DirectoryService>();
services.AddScoped<ILibraryRepository, LibraryRepository>();
services.AddDbContext<DataContext>(options =>
{
options.UseSqlite(config.GetConnectionString("DefaultConnection"));


@@ -0,0 +1,65 @@
using System;
using System.IO;
namespace API.Extensions
{
public static class DirectoryInfoExtensions
{
public static void Empty(this DirectoryInfo directory)
{
foreach(FileInfo file in directory.EnumerateFiles()) file.Delete();
foreach(DirectoryInfo subDirectory in directory.EnumerateDirectories()) subDirectory.Delete(true);
}
/// <summary>
/// Flattens all files in subfolders to the passed directory recursively.
///
///
/// foo<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// ├── 3.txt<para />
/// ├── 4.txt<para />
/// └── bar<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// └── 5.txt<para />
///
/// becomes:<para />
/// foo<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// ├── 3.txt<para />
/// ├── 4.txt<para />
/// ├── bar_1.txt<para />
/// ├── bar_2.txt<para />
/// └── bar_5.txt<para />
/// </summary>
/// <param name="directory"></param>
public static void Flatten(this DirectoryInfo directory)
{
FlattenDirectory(directory, directory);
}
private static void FlattenDirectory(DirectoryInfo root, DirectoryInfo directory)
{
if (!root.FullName.Equals(directory.FullName)) // I might be able to replace this with root === directory
{
foreach (var file in directory.EnumerateFiles())
{
if (file.Directory == null) continue;
var newName = $"{file.Directory.Name}_{file.Name}";
var newPath = Path.Join(root.FullName, newName);
Console.WriteLine($"Renaming/Moving file to: {newPath}");
file.MoveTo(newPath);
}
}
foreach (var subDirectory in directory.EnumerateDirectories())
{
FlattenDirectory(root, subDirectory);
}
}
}
}
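A short usage sketch (not part of the commit), assuming an already-extracted volume folder, showing the effect Flatten is documented to have:

var extracted = new DirectoryInfo("cache/12"); // hypothetical extract directory
extracted.Flatten(); // files in nested folders are moved up and renamed "<folderName>_<fileName>"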


@@ -0,0 +1,19 @@
using System.IO;
using System.IO.Compression;
using System.Linq;
namespace API.Extensions
{
public static class ZipArchiveExtensions
{
/// <summary>
/// Checks if archive has one or more files. Excludes directory entries.
/// </summary>
/// <param name="archive"></param>
/// <returns></returns>
public static bool HasFiles(this ZipArchive archive)
{
return archive.Entries.Any(x => Path.HasExtension(x.FullName));
}
}
}
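A usage sketch (not part of the commit) mirroring how the ImageProvider change below consumes it:

using var archive = ZipFile.OpenRead("volume.cbz"); // hypothetical archive path
if (!archive.HasFiles()) return Array.Empty<byte>(); // directory-only archives yield no cover image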


@@ -2,6 +2,7 @@
using System.IO;
using System.IO.Compression;
using System.Linq;
+using API.Extensions;
using NetVips;
namespace API.IO
@@ -18,26 +19,21 @@ namespace API.IO
/// <returns></returns>
public static byte[] GetCoverImage(string filepath, bool createThumbnail = false)
{
-if (!File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
+if (string.IsNullOrEmpty(filepath) || !File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
using ZipArchive archive = ZipFile.OpenRead(filepath);
-if (archive.Entries.Count <= 0) return Array.Empty<byte>();
+if (!archive.HasFiles()) return Array.Empty<byte>();
var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
-var entry = archive.Entries.OrderBy(x => x.FullName).ToList()[0];
+var entry = archive.Entries.Where(x => Path.HasExtension(x.FullName)).OrderBy(x => x.FullName).ToList()[0];
if (folder != null)
{
entry = folder;
}
-if (entry.FullName.EndsWith(Path.PathSeparator))
-{
-// TODO: Implement nested directory support
-}
if (createThumbnail)
{
try
@@ -50,10 +46,11 @@ namespace API.IO
catch (Exception ex)
{
Console.WriteLine("There was a critical error and prevented thumbnail generation.");
+Console.WriteLine(ex.Message);
}
}
return ExtractEntryToImage(entry);
}
private static byte[] ExtractEntryToImage(ZipArchiveEntry entry)


@@ -0,0 +1,36 @@
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces
{
public interface ICacheService
{
/// <summary>
/// Ensures the cache is created for the given volume and if not, will create it. Should be called before any other
/// cache operations (except cleanup).
/// </summary>
/// <param name="volumeId"></param>
/// <returns>Volume for the passed volumeId. Side-effect from ensuring cache.</returns>
Task<Volume> Ensure(int volumeId);
/// <summary>
/// Clears cache directory of all folders and files.
/// </summary>
void Cleanup();
/// <summary>
/// Clears cache directory of all volumes. This can be invoked from deleting a library or a series.
/// </summary>
/// <param name="volumeIds">Volumes that belong to that library. Assume the library might have been deleted before this invocation.</param>
void CleanupVolumes(int[] volumeIds);
/// <summary>
/// Returns the absolute path of a cached page.
/// </summary>
/// <param name="volume"></param>
/// <param name="page">Page number to look for</param>
/// <returns></returns>
string GetCachedPagePath(Volume volume, int page);
}
}
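The intended call order, as exercised by ReaderController earlier in this diff; a minimal sketch:

var volume = await _cacheService.Ensure(volumeId);            // extracts the volume's archives into the cache if needed
var pagePath = _cacheService.GetCachedPagePath(volume, page); // absolute path of the requested page image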


@@ -1,11 +1,53 @@
using System.Collections.Generic;
+using System.Threading.Tasks;
+using API.DTOs;
namespace API.Interfaces
{
public interface IDirectoryService
{
+/// <summary>
+/// Lists out top-level folders for a given directory. Filters out System and Hidden folders.
+/// </summary>
+/// <param name="rootPath">Absolute path of directory to scan.</param>
+/// <returns>List of folder names</returns>
IEnumerable<string> ListDirectory(string rootPath);
+/// <summary>
+/// Lists out top-level files for a given directory.
+/// TODO: Implement ability to provide a filter for file types (done in another implementation on DirectoryService)
+/// </summary>
+/// <param name="rootPath">Absolute path </param>
+/// <returns>List of folder names</returns>
+IList<string> ListFiles(string rootPath);
+/// <summary>
+/// Given a library id, scans folders for said library. Parses files and generates DB updates. Will overwrite
+/// cover images if forceUpdate is true.
+/// </summary>
+/// <param name="libraryId">Library to scan against</param>
+/// <param name="forceUpdate">Force overwriting for cover images</param>
void ScanLibrary(int libraryId, bool forceUpdate);
+/// <summary>
+/// Returns the path a volume would be extracted to.
+/// Deprecated.
+/// </summary>
+/// <param name="volumeId"></param>
+/// <returns></returns>
+string GetExtractPath(int volumeId);
+Task<ImageDto> ReadImageAsync(string imagePath);
+/// <summary>
+/// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists,
+/// will return that without performing an extraction. Returns empty string if there are any invalidations which would
+/// prevent operations to perform correctly (missing archivePath file, empty archive, etc).
+/// </summary>
+/// <param name="archivePath">A valid file to an archive file.</param>
+/// <param name="extractPath">Path to extract to</param>
+/// <returns></returns>
+string ExtractArchive(string archivePath, string extractPath);
}
}


@@ -16,6 +16,11 @@ namespace API.Interfaces
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId);
IEnumerable<Volume> GetVolumes(int seriesId);
Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId);
+Task<Volume> GetVolumeAsync(int volumeId);
+Task<VolumeDto> GetVolumeDtoAsync(int volumeId);
+Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(int[] seriesIds);
+Task<bool> DeleteSeriesAsync(int seriesId);
}
}


@@ -1,12 +1,14 @@
using System;
using System.IO;
using System.Text.RegularExpressions;
+using API.Entities;
namespace API.Parser
{
public static class Parser
{
public static readonly string MangaFileExtensions = @"\.cbz|\.cbr|\.png|\.jpeg|\.jpg|\.zip|\.rar";
+public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg|\.gif";
//?: is a non-capturing group in C#, else anything in () will be a group
private static readonly Regex[] MangaVolumeRegex = new[]
@@ -100,9 +102,17 @@ namespace API.Parser
Chapters = ParseChapter(filePath),
Series = ParseSeries(filePath),
Volumes = ParseVolume(filePath),
-File = filePath
+Filename = filePath,
+Format = ParseFormat(filePath)
};
}
+public static MangaFormat ParseFormat(string filePath)
+{
+if (IsArchive(filePath)) return MangaFormat.Archive;
+if (IsImage(filePath)) return MangaFormat.Image;
+return MangaFormat.Unknown;
+}
public static string ParseSeries(string filename)
{
@@ -168,7 +178,7 @@ namespace API.Parser
}
}
-return "";
+return "0";
}
/// <summary>
@@ -231,8 +241,13 @@ namespace API.Parser
public static bool IsArchive(string filePath)
{
var fileInfo = new FileInfo(filePath);
return MangaFileExtensions.Contains(fileInfo.Extension);
}
+public static bool IsImage(string filePath)
+{
+var fileInfo = new FileInfo(filePath);
+return ImageFileExtensions.Contains(fileInfo.Extension);
+}
}
}


@@ -1,4 +1,6 @@
+using API.Entities;
namespace API.Parser
{
/// <summary>
@@ -11,7 +13,11 @@ namespace API.Parser
public string Series { get; set; }
// This can be multiple
public string Volumes { get; set; }
-public string File { get; init; }
+public string Filename { get; init; }
public string FullFilePath { get; set; }
+/// <summary>
+/// Raw (image), Archive
+/// </summary>
+public MangaFormat Format { get; set; }
}
}


@@ -0,0 +1,125 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class CacheService : ICacheService
{
private readonly IDirectoryService _directoryService;
private readonly ISeriesRepository _seriesRepository;
private readonly ILogger<CacheService> _logger;
private readonly NumericComparer _numericComparer;
private readonly string _cacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../cache/"));
public CacheService(IDirectoryService directoryService, ISeriesRepository seriesRepository, ILogger<CacheService> logger)
{
_directoryService = directoryService;
_seriesRepository = seriesRepository;
_logger = logger;
_numericComparer = new NumericComparer();
}
private bool CacheDirectoryIsAccessible()
{
var di = new DirectoryInfo(_cacheDirectory);
return di.Exists;
}
public async Task<Volume> Ensure(int volumeId)
{
if (!CacheDirectoryIsAccessible())
{
return null;
}
Volume volume = await _seriesRepository.GetVolumeAsync(volumeId);
foreach (var file in volume.Files)
{
var extractPath = GetVolumeCachePath(volumeId, file);
_directoryService.ExtractArchive(file.FilePath, extractPath);
}
return volume;
}
public void Cleanup()
{
_logger.LogInformation("Performing cleanup of Cache directory");
if (!CacheDirectoryIsAccessible())
{
_logger.LogError($"Cache directory {_cacheDirectory} is not accessible or does not exist.");
return;
}
DirectoryInfo di = new DirectoryInfo(_cacheDirectory);
try
{
di.Empty();
}
catch (Exception ex)
{
_logger.LogError("There was an issue deleting one or more folders/files during cleanup.", ex);
}
_logger.LogInformation("Cache directory purged.");
}
public void CleanupVolumes(int[] volumeIds)
{
_logger.LogInformation($"Running Cache cleanup on Volumes");
foreach (var volume in volumeIds)
{
var di = new DirectoryInfo(Path.Join(_cacheDirectory, volume + ""));
if (di.Exists)
{
di.Delete(true);
}
}
_logger.LogInformation("Cache directory purged");
}
private string GetVolumeCachePath(int volumeId, MangaFile file)
{
var extractPath = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), $"../cache/{volumeId}/"));
if (file.Chapter > 0)
{
extractPath = Path.Join(extractPath, file.Chapter + "");
}
return extractPath;
}
public string GetCachedPagePath(Volume volume, int page)
{
// Calculate what chapter the page belongs to
var pagesSoFar = 0;
foreach (var mangaFile in volume.Files.OrderBy(f => f.Chapter))
{
if (page + 1 < (mangaFile.NumberOfPages + pagesSoFar))
{
var path = GetVolumeCachePath(volume.Id, mangaFile);
var files = _directoryService.ListFiles(path);
var array = files.ToArray();
Array.Sort(array, _numericComparer); // TODO: Find a way to apply numericComparer to IList.
return array.ElementAt((page + 1) - pagesSoFar);
}
pagesSoFar += mangaFile.NumberOfPages;
}
return "";
}
}
}
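Note: the cache service is the glue for the reader — Ensure extracts a volume's archives into cache/&lt;volumeId&gt;/ (per-chapter sub-folders when a chapter number is known), and GetCachedPagePath walks the files in chapter order, accumulating NumberOfPages until it finds the file that contains the requested page, then indexes into that folder's numerically sorted image list. A hypothetical consumer is sketched below; the real reader endpoint added by this PR lives in one of the other changed files and may differ, and it is an assumption that Ensure, GetCachedPagePath and ReadImageAsync are exposed on the ICacheService/IDirectoryService interfaces.

using System.Threading.Tasks;
using API.DTOs;
using API.Interfaces;
using Microsoft.AspNetCore.Mvc;

public class ReaderController : ControllerBase
{
    private readonly ICacheService _cacheService;
    private readonly IDirectoryService _directoryService;

    public ReaderController(ICacheService cacheService, IDirectoryService directoryService)
    {
        _cacheService = cacheService;
        _directoryService = directoryService;
    }

    [HttpGet("image")]
    public async Task<ActionResult<ImageDto>> GetImage(int volumeId, int page)
    {
        // Extract the volume's archives into the cache (no-op if already extracted).
        var volume = await _cacheService.Ensure(volumeId);
        if (volume == null) return BadRequest("Cache directory is not accessible.");

        // Map the volume-level page index onto the correct chapter folder and image file.
        var path = _cacheService.GetCachedPagePath(volume, page);
        if (string.IsNullOrEmpty(path)) return BadRequest("No such page for this volume.");

        return Ok(await _directoryService.ReadImageAsync(path));
    }
}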

View File

@ -4,16 +4,19 @@ using System.Collections.Generic;
using System.Collections.Immutable; using System.Collections.Immutable;
using System.Diagnostics; using System.Diagnostics;
using System.IO; using System.IO;
using System.IO.Compression;
using System.Linq; using System.Linq;
using System.Text.RegularExpressions; using System.Text.RegularExpressions;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs;
using API.Entities; using API.Entities;
using API.Extensions;
using API.Interfaces; using API.Interfaces;
using API.IO; using API.IO;
using API.Parser; using API.Parser;
using Hangfire;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using NetVips;
namespace API.Services namespace API.Services
{ {
@ -41,21 +44,16 @@ namespace API.Services
/// <param name="searchPatternExpression">Regex version of search pattern (ie \.mp3|\.mp4)</param> /// <param name="searchPatternExpression">Regex version of search pattern (ie \.mp3|\.mp4)</param>
/// <param name="searchOption">SearchOption to use, defaults to TopDirectoryOnly</param> /// <param name="searchOption">SearchOption to use, defaults to TopDirectoryOnly</param>
/// <returns>List of file paths</returns> /// <returns>List of file paths</returns>
public static IEnumerable<string> GetFiles(string path, private static IEnumerable<string> GetFiles(string path,
string searchPatternExpression = "", string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly) SearchOption searchOption = SearchOption.TopDirectoryOnly)
{ {
Regex reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase); var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase);
return Directory.EnumerateFiles(path, "*", searchOption) return Directory.EnumerateFiles(path, "*", searchOption)
.Where(file => .Where(file =>
reSearchPattern.IsMatch(Path.GetExtension(file))); reSearchPattern.IsMatch(Path.GetExtension(file)));
} }
/// <summary>
/// Lists out top-level folders for a given directory. Filters out System and Hidden folders.
/// </summary>
/// <param name="rootPath">Absolute path </param>
/// <returns>List of folder names</returns>
public IEnumerable<string> ListDirectory(string rootPath) public IEnumerable<string> ListDirectory(string rootPath)
{ {
if (!Directory.Exists(rootPath)) return ImmutableList<string>.Empty; if (!Directory.Exists(rootPath)) return ImmutableList<string>.Empty;
@ -69,6 +67,12 @@ namespace API.Services
return dirs; return dirs;
} }
public IList<string> ListFiles(string rootPath)
{
if (!Directory.Exists(rootPath)) return ImmutableList<string>.Empty;
return Directory.GetFiles(rootPath);
}
/// <summary> /// <summary>
/// Processes files found during a library scan. Generates a collection of series->volume->files for DB processing later. /// Processes files found during a library scan. Generates a collection of series->volume->files for DB processing later.
@ -86,9 +90,8 @@ namespace API.Services
return; return;
} }
ConcurrentBag<ParserInfo> tempBag;
ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>(); ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
if (_scannedSeries.TryGetValue(info.Series, out tempBag)) if (_scannedSeries.TryGetValue(info.Series, out var tempBag))
{ {
var existingInfos = tempBag.ToArray(); var existingInfos = tempBag.ToArray();
foreach (var existingInfo in existingInfos) foreach (var existingInfo in existingInfos)
@ -111,26 +114,34 @@ namespace API.Services
private Series UpdateSeries(string seriesName, ParserInfo[] infos, bool forceUpdate) private Series UpdateSeries(string seriesName, ParserInfo[] infos, bool forceUpdate)
{ {
var series = _seriesRepository.GetSeriesByName(seriesName); var series = _seriesRepository.GetSeriesByName(seriesName) ?? new Series
if (series == null)
{ {
series = new Series() Name = seriesName,
{ OriginalName = seriesName,
Name = seriesName, SortName = seriesName,
OriginalName = seriesName, Summary = "" // TODO: Check if comicInfo.xml in file and parse metadata out.
SortName = seriesName, };
Summary = ""
};
}
var volumes = UpdateVolumes(series, infos, forceUpdate); var volumes = UpdateVolumes(series, infos, forceUpdate);
series.Volumes = volumes; series.Volumes = volumes;
// TODO: Instead of taking first entry, re-calculate without compression
series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault()?.CoverImage; series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault()?.CoverImage;
return series; return series;
} }
private MangaFile CreateMangaFile(ParserInfo info)
{
_logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
int.TryParse(info.Chapters, out var chapter);
_logger.LogDebug($"Found Chapter: {chapter}");
return new MangaFile()
{
FilePath = info.FullFilePath,
Chapter = chapter,
Format = info.Format,
NumberOfPages = GetNumberOfPagesFromArchive(info.FullFilePath)
};
}
/// <summary> /// <summary>
/// Creates or Updates volumes for a given series /// Creates or Updates volumes for a given series
/// </summary> /// </summary>
@ -142,46 +153,60 @@ namespace API.Services
{ {
ICollection<Volume> volumes = new List<Volume>(); ICollection<Volume> volumes = new List<Volume>();
IList<Volume> existingVolumes = _seriesRepository.GetVolumes(series.Id).ToList(); IList<Volume> existingVolumes = _seriesRepository.GetVolumes(series.Id).ToList();
foreach (var info in infos) foreach (var info in infos)
{ {
var existingVolume = existingVolumes.SingleOrDefault(v => v.Name == info.Volumes); var existingVolume = existingVolumes.SingleOrDefault(v => v.Name == info.Volumes);
if (existingVolume != null) if (existingVolume != null)
{ {
// Temp let's overwrite all files (we need to enhance to update files) var existingFile = existingVolume.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
existingVolume.Files = new List<MangaFile>() if (existingFile != null)
{ {
new MangaFile() existingFile.Chapter = Int32.Parse(info.Chapters);
{ existingFile.Format = info.Format;
FilePath = info.File existingFile.NumberOfPages = GetNumberOfPagesFromArchive(info.FullFilePath);
}
};
if (forceUpdate || existingVolume.CoverImage == null || existingVolumes.Count == 0)
{
existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
} }
else
{
existingVolume.Files.Add(CreateMangaFile(info));
}
volumes.Add(existingVolume); volumes.Add(existingVolume);
} }
else else
{ {
var vol = new Volume() existingVolume = volumes.SingleOrDefault(v => v.Name == info.Volumes);
if (existingVolume != null)
{ {
Name = info.Volumes, existingVolume.Files.Add(CreateMangaFile(info));
Number = Int32.Parse(info.Volumes), }
CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true), else
Files = new List<MangaFile>() {
var vol = new Volume()
{ {
new MangaFile() Name = info.Volumes,
Number = Int32.Parse(info.Volumes),
Files = new List<MangaFile>()
{ {
FilePath = info.File CreateMangaFile(info)
} }
} };
}; volumes.Add(vol);
volumes.Add(vol); }
} }
Console.WriteLine($"Adding volume {volumes.Last().Number} with File: {info.File}"); Console.WriteLine($"Adding volume {volumes.Last().Number} with File: {info.Filename}");
}
foreach (var volume in volumes)
{
if (forceUpdate || volume.CoverImage == null || !volume.Files.Any())
{
var firstFile = volume.Files.OrderBy(x => x.Chapter).FirstOrDefault()?.FilePath;
volume.CoverImage = ImageProvider.GetCoverImage(firstFile, true);
}
volume.Pages = volume.Files.Sum(x => x.NumberOfPages);
} }
return volumes; return volumes;
@ -189,14 +214,27 @@ namespace API.Services
public void ScanLibrary(int libraryId, bool forceUpdate) public void ScanLibrary(int libraryId, bool forceUpdate)
{ {
var library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result; var sw = Stopwatch.StartNew();
Library library;
try
{
library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
}
catch (Exception ex)
{
// This usually only fails if user is not authenticated.
_logger.LogError($"There was an issue fetching Library {libraryId}.", ex);
return;
}
_scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>(); _scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
_logger.LogInformation($"Beginning scan on {library.Name}"); _logger.LogInformation($"Beginning scan on {library.Name}");
var totalFiles = 0;
foreach (var folderPath in library.Folders) foreach (var folderPath in library.Folders)
{ {
try { try {
TraverseTreeParallelForEach(folderPath.Path, (f) => totalFiles = TraverseTreeParallelForEach(folderPath.Path, (f) =>
{ {
try try
{ {
@ -220,9 +258,9 @@ namespace API.Services
library.Series = new List<Series>(); // Temp delete everything until we can mark items Unavailable library.Series = new List<Series>(); // Temp delete everything until we can mark items Unavailable
foreach (var seriesKey in series.Keys) foreach (var seriesKey in series.Keys)
{ {
var s = UpdateSeries(seriesKey, series[seriesKey].ToArray(), forceUpdate); var mangaSeries = UpdateSeries(seriesKey, series[seriesKey].ToArray(), forceUpdate);
_logger.LogInformation($"Created/Updated series {s.Name}"); _logger.LogInformation($"Created/Updated series {mangaSeries.Name}");
library.Series.Add(s); library.Series.Add(mangaSeries);
} }
@ -239,13 +277,123 @@ namespace API.Services
} }
_scannedSeries = null; _scannedSeries = null;
_logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
} }
private static void TraverseTreeParallelForEach(string root, Action<string> action) public string GetExtractPath(int volumeId)
{
return Path.Join(Directory.GetCurrentDirectory(), $"../cache/{volumeId}/");
}
/// <summary>
/// TODO: Delete this method
/// </summary>
/// <param name="archivePath"></param>
/// <param name="volumeId"></param>
/// <returns></returns>
private string ExtractArchive(string archivePath, int volumeId)
{
if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath))
{
_logger.LogError($"Archive {archivePath} could not be found.");
return "";
}
var extractPath = GetExtractPath(volumeId);
if (Directory.Exists(extractPath))
{
_logger.LogInformation($"Archive {archivePath} has already been extracted. Returning existing folder.");
return extractPath;
}
using ZipArchive archive = ZipFile.OpenRead(archivePath);
// TODO: Throw error if we couldn't extract
var needsFlattening = archive.Entries.Count > 0 && !Path.HasExtension(archive.Entries.ElementAt(0).FullName);
if (!archive.HasFiles() && !needsFlattening) return "";
archive.ExtractToDirectory(extractPath);
_logger.LogInformation($"Extracting archive to {extractPath}");
if (needsFlattening)
{
_logger.LogInformation("Extracted archive is nested in root folder, flattening...");
new DirectoryInfo(extractPath).Flatten();
}
return extractPath;
}
public string ExtractArchive(string archivePath, string extractPath)
{
if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath))
{
_logger.LogError($"Archive {archivePath} could not be found.");
return "";
}
if (Directory.Exists(extractPath))
{
_logger.LogDebug($"Archive {archivePath} has already been extracted. Returning existing folder.");
return extractPath;
}
using ZipArchive archive = ZipFile.OpenRead(archivePath);
// TODO: Throw error if we couldn't extract
var needsFlattening = archive.Entries.Count > 0 && !Path.HasExtension(archive.Entries.ElementAt(0).FullName);
if (!archive.HasFiles() && !needsFlattening) return "";
archive.ExtractToDirectory(extractPath);
_logger.LogDebug($"Extracting archive to {extractPath}");
if (!needsFlattening) return extractPath;
_logger.LogInformation("Extracted archive is nested in root folder, flattening...");
new DirectoryInfo(extractPath).Flatten();
return extractPath;
}
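Note: both extraction paths use the same heuristic — if the first zip entry's name has no extension, the archive is assumed to wrap its pages in a nested folder, so the extracted tree is flattened afterwards. The DirectoryInfo.Flatten() extension comes from API.Extensions and is not shown in this diff; a minimal sketch of the idea (naming scheme and counter prefix assumed) would be to move every nested file up into the extraction root:

using System.IO;
using System.Linq;

public static class DirectoryInfoExtensions
{
    public static void Flatten(this DirectoryInfo directory)
    {
        var index = 0;
        // Materialise the list first so moving files does not disturb the enumeration.
        foreach (var file in directory.EnumerateFiles("*", SearchOption.AllDirectories).ToList())
        {
            // Skip files that already sit at the root of the extraction folder.
            if (file.Directory?.FullName == directory.FullName) continue;

            // Prefix with a counter so pages pulled out of nested folders keep a stable, sortable order.
            var target = Path.Combine(directory.FullName, $"{index++:D3}_{file.Name}");
            file.MoveTo(target);
        }
    }
}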
private int GetNumberOfPagesFromArchive(string archivePath)
{
if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath))
{
_logger.LogError($"Archive {archivePath} could not be found.");
return 0;
}
using ZipArchive archive = ZipFile.OpenRead(archivePath);
return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
}
public async Task<ImageDto> ReadImageAsync(string imagePath)
{
using var image = Image.NewFromFile(imagePath);
return new ImageDto
{
Content = await File.ReadAllBytesAsync(imagePath),
Filename = Path.GetFileNameWithoutExtension(imagePath),
FullPath = Path.GetFullPath(imagePath),
Width = image.Width,
Height = image.Height,
Format = image.Format
};
}
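Note: ReadImageAsync bundles the raw bytes with the dimensions NetVips reports, so the client can lay out a page before the image is decoded. ImageDto lives in API.DTOs and is not part of the lines shown here; from the way it is populated it presumably looks roughly like this (property types are assumed):

namespace API.DTOs
{
    public class ImageDto
    {
        public byte[] Content { get; init; }   // raw file bytes read from the cache
        public string Filename { get; init; }  // file name without extension
        public string FullPath { get; init; }
        public int Width { get; init; }        // pixel dimensions reported by NetVips
        public int Height { get; init; }
        public string Format { get; init; }    // band format reported by NetVips (string assumed)
    }
}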
/// <summary>
/// Recursively scans files and applies an action on them. This uses as many cores as the underlying PC has to speed
/// up processing.
/// </summary>
/// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param>
/// <exception cref="ArgumentException"></exception>
private static int TraverseTreeParallelForEach(string root, Action<string> action)
{ {
//Count of files traversed and timer for diagnostic output //Count of files traversed and timer for diagnostic output
int fileCount = 0; int fileCount = 0;
var sw = Stopwatch.StartNew();
// Determine whether to parallelize file processing on each folder based on processor count. // Determine whether to parallelize file processing on each folder based on processor count.
int procCount = Environment.ProcessorCount; int procCount = Environment.ProcessorCount;
@ -333,8 +481,7 @@ namespace API.Services
dirs.Push(str); dirs.Push(str);
} }
// For diagnostic purposes. return fileCount;
Console.WriteLine("Processed {0} files in {1} milliseconds", fileCount, sw.ElapsedMilliseconds);
} }
} }

View File

@ -1,15 +1,21 @@
using API.Interfaces; using API.Interfaces;
using Hangfire; using Hangfire;
using Microsoft.Extensions.Logging;
namespace API.Services namespace API.Services
{ {
public class TaskScheduler : ITaskScheduler public class TaskScheduler : ITaskScheduler
{ {
private readonly ILogger<TaskScheduler> _logger;
private readonly BackgroundJobServer _client; private readonly BackgroundJobServer _client;
public TaskScheduler() public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger)
{ {
_logger = logger;
_client = new BackgroundJobServer(); _client = new BackgroundJobServer();
_logger.LogInformation("Scheduling/Updating cache cleanup on a daily basis.");
RecurringJob.AddOrUpdate(() => cacheService.Cleanup(), Cron.Daily);
} }
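Note: the scheduler now owns a daily recurring job that purges the cache via CacheService.Cleanup(). A hypothetical extension of the same pattern (not part of this diff) would be queueing one-off, per-volume cleanups with Hangfire's fire-and-forget API, assuming CleanupVolumes is exposed on ICacheService:

using API.Interfaces;
using Hangfire;

public class CleanupExample
{
    // Queue a background job that removes the cache/<volumeId>/ folders once a reading session ends.
    public void QueueVolumeCleanup(int[] volumeIds)
    {
        BackgroundJob.Enqueue<ICacheService>(service => service.CleanupVolumes(volumeIds));
    }
}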

View File

@ -1,4 +1,3 @@
using System;
using API.Extensions; using API.Extensions;
using API.Middleware; using API.Middleware;
using Hangfire; using Hangfire;
@ -47,7 +46,6 @@ namespace API
app.UseHangfireDashboard(); app.UseHangfireDashboard();
//backgroundJobs.Enqueue(() => Console.WriteLine("Hello world from Hangfire!"));
app.UseHttpsRedirection(); app.UseHttpsRedirection();

4
Kavita.sln.DotSettings Normal file
View File

@ -0,0 +1,4 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=1BC0273F_002DFEBE_002D4DA1_002DBC04_002D3A3167E4C86C_002Fd_003AData_002Fd_003AMigrations/@EntryIndexedValue">ExplicitlyExcluded</s:String>
<s:Boolean x:Key="/Default/CodeInspection/Highlighting/RunLongAnalysisInSwa/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeInspection/Highlighting/RunValueAnalysisInNullableWarningsEnabledContext2/@EntryValue">True</s:Boolean></wpf:ResourceDictionary>

Binary file not shown (before: 1.1 MiB).